#!/usr/bin/python3
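# Export a 0xdb.org (pan.do/ra) edit as a flat cut list: writes <edit>.json
# with one entry per video segment, <edit>.srt with the combined subtitles,
# and files.json, a local cache of oshash lookups.
#
# Usage (the script name below is a placeholder for whatever this file is called):
#     ./export_edit.py EDIT_ID [SORT]
# SORT defaults to 'year'; prefix it with '-' to reverse the order.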
import os
import json
import math

import ox
import ox.web.auth

# site to query, its stored credentials, and the prefix prepended to instance paths
base_url = 'https://0xdb.org'
credentials = ox.web.auth.get('0xdb.org')
prefix = '/Cinema'

# files.json caches oshash -> {path, resolution} lookups between runs
if os.path.exists('files.json'):
    with open('files.json') as fd:
        files = json.load(fd)
else:
    files = {}


def get_info(api, oshash):
    # resolve an oshash to the local path and resolution of one of its
    # instances, caching the result in files to skip repeated API lookups
    if oshash not in files:
        r = api.findMedia({
            'query': {
                'conditions': [{'key': 'oshash', 'value': oshash}]
            },
            'keys': ['id', 'instances', 'resolution']
        })['data']
        if not r['items'][0]['instances']:
            print(r)
            raise Exception('item without instance')
        files[oshash] = {
            # the instance path already gets the local prefix prepended here
            'path': os.path.join(prefix, r['items'][0]['instances'][0]['path']),
            'resolution': r['items'][0]['resolution']
        }
    return files[oshash]

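# For illustration only (hypothetical values), a cached entry looks like:
#     files['0123456789abcdef'] = {
#         'path': '/Cinema/E/Example Film (1968)/Example Film.avi',
#         'resolution': [720, 576]
#     }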

def normalize(name):
    # make the edit id safe to use as a filename
    return name.replace(':', '_').replace('/', '_')


def sort_clips(edit, sort):
    # order the edit's clips by the given key; a leading '-' reverses the order
    clips = edit['clips']
    reverse = sort.startswith('-')
    # sentinel so clips missing the sort key end up last
    last = '' if reverse else 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
    sort = sort.lstrip('-')
    if sort == 'manual':
        s = clips
        if reverse:
            s = reversed(s)
    elif sort in [
        'id', 'index', 'in', 'out', 'duration',
        'title', 'director', 'year', 'videoRatio'
    ]:
        s = sorted(clips, key=lambda c: (str(c.get(sort, last)), c['title'], c['in'], c['out']))
        if reverse:
            s = reversed(s)
    else:
        # any other key is sorted server-side; this relies on the module-level
        # api set up under __main__
        ids = api.sortClips({
            'edit': edit['id'],
            'sort': [{'key': sort, 'operator': '-' if reverse else '+'}]
        })['data']['clips']
        # log clip ids that differ between the local edit and the server response
        print(set(c['id'] for c in clips) - set(ids))
        print(set(ids) - set(c['id'] for c in clips))
        s = sorted(clips, key=lambda c: ids.index(c['id']) if c['id'] in ids else -1)
    return s

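# For example, sort_clips(edit, 'year') orders clips oldest first,
# sort_clips(edit, '-year') newest first, and 'manual' keeps the order
# stored in the edit itself.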

if __name__ == '__main__':
    import sys
    edit_id = sys.argv[1]
    if len(sys.argv) > 2:
        sort_by = sys.argv[2]
    else:
        sort_by = 'year'

    api = ox.API(base_url + '/api/')
    api.signin(**credentials)
    edit = api.getEdit(id=edit_id)['data']

    videos = []
    subtitles = []
    # running start position of the current clip within the assembled edit
    position = 0
    for clip in sort_clips(edit, sort_by):
        # collect the clip's subtitles, both for the combined srt and per clip
        clip_subtitles = []
        for sub in clip['layers']['subtitles']:
            value = sub['value'].replace('<br/>', '\n').replace('<br>', '\n').replace('\n\n', '\n')
            subtitles.append({
                'in': position,
                'out': position + (sub['out'] - sub['in']),
                'value': value,
            })
            clip_subtitles.append(value)

        # map the clip's in/out points onto the item's stream parts
        part_pos = 0
        for i, duration in enumerate(clip['durations']):
            if part_pos + duration < clip['in']:
                # the clip starts in a later part
                part_pos += duration
            elif part_pos <= clip['in']:
                # the part that contains the clip's in point
                stream_in = clip['in'] - part_pos
                stream_out = min(clip['out'] - part_pos, duration)

                # snap both points up to the next 1/25s frame boundary
                stream_in = math.ceil(stream_in / (1/25)) * 1/25
                stream_out = math.ceil(stream_out / (1/25)) * 1/25
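                # e.g. an in point of 3.01s snaps up to 3.04s:
                #     math.ceil(3.01 / (1/25)) * 1/25  ->  3.04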

                part_pos += duration
                info = get_info(api, clip['streams'][i])
                videos.append({
                    'oshash': clip['streams'][i],
                    # info['path'] already includes prefix (see get_info)
                    'path': info['path'],
                    'resolution': info['resolution'],
                    'in': stream_in,
                    'out': stream_out,
                    'subtitles': '\n'.join(clip_subtitles)
                })
            elif clip['out'] > part_pos:
                # a later part the clip continues into: start at the top of the part
                stream_in = 0
                stream_out = min(clip['out'] - part_pos, duration)
                stream_in = math.ceil(stream_in / (1/25)) * 1/25
                stream_out = math.ceil(stream_out / (1/25)) * 1/25
                part_pos += duration
                info = get_info(api, clip['streams'][i])
                videos.append({
                    'oshash': clip['streams'][i],
                    'path': info['path'],
                    'resolution': info['resolution'],
                    'in': stream_in,
                    'out': stream_out,
                    'subtitles': '\n'.join(clip_subtitles)
                })

        # advance to the next clip's start and keep it on a frame boundary too
        position += clip['duration']
        position = math.ceil(position / (1/25)) * 1/25

    # write the combined subtitles and the cut list; non-default sorts get a filename suffix
    name = normalize(edit_id)
    if sort_by != 'year':
        name += '_' + sort_by
    with open('%s.srt' % name, 'wb') as fd:
        fd.write(ox.srt.encode(subtitles))
    with open('%s.json' % name, 'w') as fd:
        json.dump(videos, fd, indent=4, ensure_ascii=False)

    # persist the oshash cache for the next run
    with open('files.json', 'w') as fd:
        json.dump(files, fd, indent=4, ensure_ascii=False)
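
# Each entry of the generated <name>.json looks roughly like this
# (hypothetical values):
#     {
#         "oshash": "0123456789abcdef",
#         "path": "/Cinema/E/Example Film (1968)/Example Film.avi",
#         "resolution": [720, 576],
#         "in": 12.4,
#         "out": 15.08,
#         "subtitles": "..."
#     }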