#!/usr/bin/python3

import json
import os
from collections import defaultdict

base = os.path.abspath(os.path.dirname(__file__))
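
# keywords.json is assumed to map "parent: child" keyword strings to counts
# (used as node sizes below); ontology.json holds the nested category tree.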
keywords = json.load(open(os.path.join(base, 'keywords.json')))
ontology = json.load(open(os.path.join(base, 'ontology.json')))
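
# find_path() searches the nested ontology dict depth-first and returns the
# list of keys leading from the root down to `parent`, or None if not found.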
def find_path(parent, root=None, path=None):
    if root is None:
        root = ontology
    if path is None:
        path = []
    # The path for 'location' is hard-coded rather than looked up in the ontology.
    if parent == 'location':
        return ['film', 'architecture', 'rhythmanalysis', 'the everyday', 'environment', 'location']
    for key in root:
        if key == parent:
            return path + [key]
        elif root[key]:
            r = find_path(parent, root[key], path + [key])
            if r:
                return r
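
# get_node() recursively turns a nested dict into a node of the form
# {"name", "size", "children"}; size falls back to child count + 100 unless
# the "parent: name" keyword has an explicit count in keywords.json.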
def get_node(name, children, parent=None):
    children_ = [get_node(child, children[child], name) for child in children]
    children_.sort(key=lambda c: c['name'])
    node = {
        "size": len(children) + 100,
        "name": name,
        "children": children_
    }
    if not node['children']:
        del node['children']
    key = '%s: %s' % (parent, name)
    if key in keywords:
        node['size'] = keywords[key]
    return node
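
# render_children() renders the node tree as a tab-indented plain-text outline.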
def render_children(root, indent=0):
    txt = ('\t' * indent) + root['name']
    if 'children' in root:
        parts = ''
        for child in root['children']:
            parts += '\n' + render_children(child, indent + 1)
        txt += '\n'.join([('\t' * indent) + p for p in parts.split('\n')])
    return '\n'.join([l.rstrip() for l in txt.split('\n')])
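
# Build the sized ontology from the keywords and write it out both as JSON and
# as a plain-text outline.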
if __name__ == '__main__':
    os.chdir(base)

    tree = defaultdict(dict)

    ontology_txt = ''
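
    # Group every "parent: child" keyword under its parent's path in the
    # ontology; keywords whose parent has no known path go under 'missing'.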
    for keyword in keywords:
        if ': ' not in keyword:
            parent = 'other'
            child = keyword
        else:
            parent, child = keyword.split(': ', 1)
        path = find_path(parent)
        if path:
            p = tree
            for part in path:
                if part not in p:
                    p[part] = {}
                p = p[part]
            p[child] = {}
        else:
            if parent not in tree['missing']:
                tree['missing'][parent] = {}
            tree['missing'][parent][child] = {}
            #print('missing root - %s: %s' % (parent, child))

    #print(json.dumps(tree, indent=4, sort_keys=True))
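
    # Wrap everything in a single root node ("CineUrban") and size each branch
    # via get_node().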
    sized_ontology = {
        "size": len(tree),
        "name": "CineUrban",
        "children": []
    }
    for name in tree:
        children = tree[name]
        child = get_node(name, tree[name], name)
        sized_ontology['children'].append(child)
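
    # Write the sized tree as JSON and as a tab-indented text outline under
    # ../static/ontology/.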
    ontology_txt = render_children(sized_ontology)

    with open('../static/ontology/sized_ontology.json', 'w') as fd:
        json.dump(sized_ontology, fd, indent=4, sort_keys=True)
    with open('../static/ontology/ontology.txt', 'w') as fd:
        fd.write(ontology_txt)