2018-02-09 11:28:01 +00:00
|
|
|
#!/usr/bin/env python3
|
2018-02-09 11:20:28 +00:00
|
|
|
from argparse import ArgumentParser
|
|
|
|
import getpass
|
2017-08-09 17:02:37 +00:00
|
|
|
import json
|
2017-08-13 17:24:50 +00:00
|
|
|
import math
|
2018-02-09 11:20:28 +00:00
|
|
|
import os
|
|
|
|
import sys
|
|
|
|
import urllib.parse
|
|
|
|
import urllib.request
|
|
|
|
|
2018-03-14 11:53:07 +00:00
|
|
|
import ox
|
|
|
|
import ox.web.auth
|
|
|
|
|
|
|
|
|
2018-02-09 11:20:28 +00:00
|
|
|
# Module-level configuration; these defaults are overwritten from the
# command-line options / pandora_client config when run as a script
# (see the __main__ block below).

# Base URL of the pan.do/ra site, e.g. 'https://example.com'; derived
# from the edit URL in __main__.
base_url = None

# Directory prefix under which pandora_client volumes are mounted
# (-p/--prefix option).
prefix = '/mnt'

# Local cache directory that downloaded clip files are written to.
render = './cache'

# Parsed contents of the pandora_client config (~/.ox/client.json)
# when its 'url' matches the target site; {} otherwise.
pandora_client_config = {}

# True when invoked with --source local: use existing local files
# instead of downloading streams.
use_local = False
|
|
|
|
|
2018-03-14 11:53:07 +00:00
|
|
|
|
2018-02-09 11:20:28 +00:00
|
|
|
class API(ox.API):
    """pan.do/ra API client extended with the ability to download a URL
    to a local file through the authenticated opener."""

    def save_url(self, url, filename, overwrite=False):
        """Download ``url`` to ``filename`` using the signed-in opener.

        Creates missing parent directories. Does nothing if ``filename``
        already exists, unless ``overwrite`` is True. The download is
        streamed in 16 KiB chunks so large files are not held in memory.
        """
        if not os.path.exists(filename) or overwrite:
            dirname = os.path.dirname(filename)
            if dirname and not os.path.exists(dirname):
                os.makedirs(dirname)
            chunk_size = 16 * 1024
            request = urllib.request.Request(url)
            remote = self._opener.open(request)
            try:
                with open(filename, 'wb') as f:
                    for chunk in iter(lambda: remote.read(chunk_size), b''):
                        f.write(chunk)
            finally:
                # the original leaked the response object if writing
                # failed; always close it
                remote.close()
|
2017-08-09 17:02:37 +00:00
|
|
|
|
|
|
|
|
|
|
|
# Metadata cache mapping stream oshash -> {'resolution': ..., 'path': ...};
# persisted across runs in files.json (written back at the end of __main__).
if os.path.exists('files.json'):
    # use a context manager instead of json.load(open(...)),
    # which leaked the file handle
    with open('files.json') as _fd:
        files = json.load(_fd)
else:
    files = {}
|
|
|
|
|
2018-02-09 11:20:28 +00:00
|
|
|
def get_volume_prefix(name):
    """Map a pandora_client volume name to its local mount path.

    Prefers the path configured under 'volumes' in the pandora_client
    config; falls back to joining the global prefix with the volume name.
    """
    configured = pandora_client_config.get('volumes', {}).get(name)
    if not configured:
        return os.path.join(prefix, name)
    return configured
|
|
|
|
|
|
|
|
def get_info(api, oshash, item, part):
    """Return cached metadata for the stream file `oshash`.

    On a cache miss, queries the site for the file's resolution and
    instances and stores the result in the module-level `files` dict.
    `item` and `part` are only used for the diagnostic print below.
    """
    if oshash not in files:
        r = api.findMedia({
            'query': {
                'conditions': [{'key': 'oshash', 'value': oshash}]
            },
            'keys': ['id', 'instances', 'resolution']
        })['data']
        # NOTE(review): r['items'][0] raises IndexError if the site does
        # not know this oshash at all — presumably cannot happen for
        # streams returned by getEdit; confirm.
        if not r['items'][0]['instances']:
            # only fatal when we need local files (--source local)
            if use_local:
                print(r, item, part)
                raise Exception('item without instance')
        files[oshash] = {
            'resolution': r['items'][0]['resolution']
        }
        if r['items'][0]['instances']:
            # resolve the first instance to a local path via its volume
            volume_prefix = get_volume_prefix(r['items'][0]['instances'][0]['volume'])
            files[oshash]['path'] = os.path.join(volume_prefix, r['items'][0]['instances'][0]['path'])
    return files[oshash]
|
|
|
|
|
2017-08-10 10:04:54 +00:00
|
|
|
def normalize(name):
    """Return `name` with ':' and '/' replaced by '_' (filesystem-safe)."""
    return name.translate(str.maketrans({':': '_', '/': '_'}))
|
|
|
|
|
2017-08-11 10:08:34 +00:00
|
|
|
def sort_clips(edit, sort):
    """Return the clips of `edit` ordered by the `sort` key.

    A leading '-' on `sort` reverses the order. 'manual' keeps the
    stored clip order; a known clip/item key sorts locally; any other
    key is delegated to the server's sortClips endpoint.

    Returns either a list or a `reversed` iterator — callers iterate it
    once. NOTE(review): the server-sort branch uses the module-global
    `api` (bound in __main__); confirm this is only called after signin.
    """
    clips = edit['clips']
    # remember each clip's original (manual) position
    for idx, clip in enumerate(clips):
        clip['index'] = idx
    reverse = sort.startswith('-')
    # sentinel for missing sort values: sorts first when reversed,
    # otherwise after any real value
    last = '' if reverse else 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
    sort = sort.lstrip('-')
    if sort == 'manual':
        s = clips
        if reverse:
            s = reversed(s)
    elif sort in [
        'id', 'index', 'in', 'out', 'duration',
        'title', 'director', 'year', 'videoRatio',
    ]:
        # local sort; ties broken by title, then clip in/out points
        s = sorted(clips, key=lambda c: (ox.sort_string(str(c.get(sort, last))), c['title'], c['in'], c['out']))
        if reverse:
            s = reversed(s)
    else:
        # let the server sort (e.g. annotation-based keys)
        ids = api.sortClips({
            'edit': edit['id'],
            'sort': [{'key': sort, 'operator': '-' if reverse else '+'}]
        })['data']['clips']
        # precompute id -> position instead of calling ids.index per key
        # (the original was O(n^2)); clips missing from the server
        # response sort first (position -1), as before
        order = {clip_id: i for i, clip_id in enumerate(ids)}
        s = sorted(clips, key=lambda c: order.get(c['id'], -1))
    return s
|
|
|
|
|
2018-11-12 14:41:02 +00:00
|
|
|
def cache_clips(api, videos, use_source=False):
    """Download each clip in `videos` into the render cache directory.

    Cache files are named <render>/<oshash>.mp4. A clip that already has
    a local 'path' keeps it as 'src' while 'path' is repointed at the
    cache file. With `use_source`, the stream URL is rewritten to the
    item's original source download URL for the same part.
    """
    for clip in videos:
        cached = '%s/%s.mp4' % (render, clip['oshash'])
        if 'path' in clip:
            # preserve the original local path, serve from the cache
            clip['src'] = clip['path']
            clip['path'] = cached
        if os.path.exists(cached):
            continue
        url = clip['url']
        if use_source:
            # stream URLs end in e.g. .../480p2.mp4 — recover the part
            # number after the 'p' (default to part 1)
            stem = url.split('/')[-1].split('.')[0]
            resolution, part = stem.split('p')
            part = int(part) if part and part.isdigit() else 1
            url = '/'.join(url.split('/')[:-1] + ['download', 'source', str(part)])
        print(url, cached)
        api.save_url(url, cached)
|
2017-08-09 17:02:37 +00:00
|
|
|
|
|
|
|
|
2018-02-09 11:20:28 +00:00
|
|
|
if __name__ == '__main__':
    usage = "usage: %(prog)s [options] edit-url"
    parser = ArgumentParser(usage=usage)
    parser.add_argument('-p', '--prefix', dest='prefix', type=str,
        help="prefix to use instead of pandora_client config", default='')
    parser.add_argument('-s', '--source', dest='source', type=str,
        help="source, local or site", default='site')
    parser.add_argument('-r', '--resolution', dest='stream_resolution', type=int,
        help="resolution of streams to download i.e. 480, 240, 96 default 480", default=480)
    parser.add_argument('-c', '--config', dest='config',
        help='config.json containing config',
        default='~/.ox/client.json')
    parser.add_argument('url', metavar='url', type=str,
        help='edit url')
    opts = parser.parse_args()
    edit_url = opts.url
    use_local = opts.source == 'local'
    use_source = opts.source == 'source'
    prefix = opts.prefix

    # The edit URL looks like https://host/edits/<edit-id>[/.../<sort>];
    # parts[2] is the host, parts[4] the urlencoded edit id.
    parts = edit_url.split('/')
    site = parts[2]
    base_url = '/'.join(parts[:3])
    # decode the edit id: '%xx' escapes first, then '_' -> ' ' and
    # '\t' -> '_' (the site's encoding of spaces/underscores in ids)
    edit_id = urllib.parse.unquote(parts[4]).replace('_', ' ').replace('\t', '_')
    sort_by = parts[6] if len(parts) >= 7 else 'year'
    stream_resolution = opts.stream_resolution

    # Prefer credentials from the pandora_client config when it points
    # at the same site.
    credentials = None
    config = os.path.expanduser(opts.config)
    if config and os.path.exists(config):
        with open(config) as fd:
            data = json.load(fd)
        if data['url'].startswith(base_url):
            pandora_client_config = data
            credentials = {
                'username': data['username'],
                'password': data['password']
            }

    update = False
    if not credentials:
        try:
            credentials = ox.web.auth.get(site)
        except:  # no stored credentials for this site -> ask interactively
            credentials = {}
            print('Please provide your username and password for %s:' % site)
            credentials['username'] = input('Username: ')
            credentials['password'] = getpass.getpass('Password: ')
            update = True

    api = API(base_url + '/api/')
    r = api.signin(**credentials)
    if 'errors' in r.get('data', {}):
        for kv in r['data']['errors'].items():
            print('%s: %s' % kv)
        sys.exit(1)
    if update:
        # remember interactively entered credentials for the next run
        ox.web.auth.update(site, credentials)
    print('Edit:', edit_id)
    print('Sort:', sort_by)
    r = api.getEdit(id=edit_id)
    if 'data' not in r:
        print(r)
        sys.exit(1)
    if r.get('status', {}).get('code') == 404:
        print(r.get('status', {}).get('text'))
        sys.exit(1)
    edit = r['data']

    # Flatten the edit into a list of video segments plus one global
    # subtitle track; `position` is the running offset on the edit
    # timeline, used to shift subtitles.
    videos = []
    subtitles = []
    position = 0
    for clip in sort_clips(edit, sort_by):

        # collect the clip's subtitles, both into the global track
        # (shifted to the edit timeline) and per clip
        clip_subtitles = []
        for sub in clip['layers'].get('subtitles', []):
            subtitles.append({
                'in': position,
                'out': position + (sub['out'] - sub['in']),
                'value': sub['value'].replace('<br/>', '\n').replace('<br>', '\n').replace('\n\n', '\n'),
            })
            clip_subtitles.append(sub['value'].replace('<br/>', '\n').replace('<br>', '\n').replace('\n\n', '\n'))

        # An item's video can be split into several stream parts; walk
        # them and map the clip's in/out points onto per-part offsets.
        part_pos = 0
        for i, duration in enumerate(clip['durations']):
            stream_out = stream_in = None
            if part_pos + duration < clip['in']:
                # clip starts in a later part
                part_pos += duration
            elif part_pos <= clip['in']:
                # clip starts inside this part
                stream_in = clip['in'] - part_pos
                stream_out = min(clip['out'] - part_pos, duration)
            elif clip['out'] > part_pos:
                # clip started earlier and continues into this part
                stream_in = 0
                stream_out = min(clip['out'] - part_pos, duration)

            if stream_in is not None and stream_out is not None:
                # snap cut points up to the 25fps frame grid (1/25 s)
                stream_in = math.ceil(stream_in / (1/25)) * 1/25
                stream_out = math.ceil(stream_out / (1/25)) * 1/25
                info = get_info(api, clip['streams'][i], clip['item'], i+1)
                clip_duration = stream_out - stream_in
                if clip_duration > 0:
                    videos.append({
                        'oshash': clip['streams'][i],
                        'url': '%s/%s/%sp%s.mp4' % (base_url, clip['item'], stream_resolution, i + 1),
                        'item': clip['item'],
                        'annotation': clip.get('annotation'),
                        'resolution': info['resolution'],
                        'in': stream_in,
                        'out': stream_out,
                    })
                    if 'path' in info:
                        videos[-1]['path'] = os.path.join(prefix, info['path'])
                    if clip_subtitles:
                        videos[-1]['subtitles'] = '\n'.join(clip_subtitles)
                # advance past this part for the next iteration
                part_pos += duration

        position += clip['duration']
        # keep the edit timeline on the 25fps frame grid as well
        position = math.ceil(position / (1/25)) * 1/25

    # unless working purely from local files, download all clips into
    # the local render cache
    if not use_local:
        cache_clips(api, videos, use_source)

    # write <edit>[_<sort>].srt / .json plus the files.json metadata cache
    name = normalize(edit_id)
    if sort_by != 'year':
        name += '_' + sort_by
    if subtitles:
        with open('%s.srt' % name, 'wb') as fd:
            fd.write(ox.srt.encode(subtitles))
    with open('%s.json' % name, 'w') as fd:
        json.dump(videos, fd, indent=4, ensure_ascii=False)

    with open('files.json', 'w') as fd:
        json.dump(files, fd, indent=4, ensure_ascii=False)
    print('created: %s' % '%s.json' % name)
|