pandora_client
commit 176b059ade
4 changed files with 578 additions and 0 deletions
pandora_client/__init__.py (new file, 322 lines)
@@ -0,0 +1,322 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
# GPL 2010
from __future__ import division, with_statement
import os
import urllib2
import cookielib
import json
import sqlite3
import time
import shutil

from firefogg import Firefogg
import ox

import extract
import utils


__version__ = '0.1'
DEBUG = True
prefix = os.environ.get('oxMEDIA', os.path.expanduser('~/.ox/media'))

def encode(filename, profile):
    info = utils.avinfo(filename)
    oshash = info['oshash']
    frames = []
    for pos in utils.video_frame_positions(info['duration']):
        frame_name = '%s.png' % pos
        cache = os.path.join(prefix, os.path.join(*utils.hash_prefix(oshash)))
        frame_f = os.path.join(cache, frame_name)
        if not os.path.exists(frame_f):
            print frame_f
            extract.frame(filename, frame_f, pos)
        frames.append(frame_f)
    video_f = os.path.join(cache, '%s.webm' % profile)
    if not os.path.exists(video_f):
        print video_f
        extract.video(filename, video_f, profile, info)
    return {
        'info': info,
        'oshash': oshash,
        'frames': frames,
        'video': video_f
    }

class Client(object):
    def __init__(self, config):
        with open(config) as f:
            self._config = json.loads(f.read())
        self.api = API(self._config['url'])
        r = self.api.login({'username': self._config['username'], 'password': self._config['password']})
        if r['status']['code'] == 200:
            self.user = r['data']['user']
        else:
            print 'login failed'

        conn, c = self._conn()

        c.execute('''CREATE TABLE IF NOT EXISTS setting (key varchar(1024) unique, value text)''')

        if int(self.get('version', 0)) < 1:
            self.set('version', 1)
            db = [
                '''CREATE TABLE IF NOT EXISTS file (
                    path varchar(1024) unique,
                    oshash varchar(16),
                    atime FLOAT,
                    ctime FLOAT,
                    mtime FLOAT,
                    size INT,
                    info TEXT,
                    created INT,
                    modified INT,
                    deleted INT)''',
                '''CREATE INDEX IF NOT EXISTS path_idx ON file (path)''',
                '''CREATE INDEX IF NOT EXISTS oshash_idx ON file (oshash)''',
            ]
            for i in db:
                c.execute(i)
            conn.commit()

    def _conn(self):
        db_conn = os.path.expanduser(self._config['cache'])
        conn = sqlite3.connect(db_conn, timeout=10)
        conn.text_factory = sqlite3.OptimizedUnicode
        return conn, conn.cursor()

    def get(self, key, default=None):
        conn, c = self._conn()
        c.execute('SELECT value FROM setting WHERE key = ?', (key, ))
        for row in c:
            return row[0]
        return default

    def set(self, key, value):
        conn, c = self._conn()
        c.execute(u'INSERT OR REPLACE INTO setting values (?, ?)', (key, str(value)))
        conn.commit()

    def scan_file(self, path):
        conn, c = self._conn()

        update = True
        modified = time.mktime(time.localtime())
        created = modified

        sql = 'SELECT atime, ctime, mtime, size, created FROM file WHERE deleted < 0 AND path=?'
        c.execute(sql, [path])
        stat = os.stat(path)
        for row in c:
            if stat.st_atime == row[0] and stat.st_ctime == row[1] and stat.st_mtime == row[2] and stat.st_size == row[3]:
                created = row[4]
                update = False
                break
        if update:
            info = utils.avinfo(path)
            oshash = info['oshash']
            deleted = -1
            t = (path, oshash, stat.st_atime, stat.st_ctime, stat.st_mtime,
                 stat.st_size, json.dumps(info), created, modified, deleted)
            c.execute(u'INSERT OR REPLACE INTO file values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', t)
            conn.commit()

    def scan(self):
        print "check for new files"
        for name in self._config['volumes']:
            path = self._config['volumes'][name]
            path = os.path.normpath(path)
            files = []
            for dirpath, dirnames, filenames in os.walk(path):
                if isinstance(dirpath, str):
                    dirpath = dirpath.decode('utf-8')
                if filenames:
                    for filename in sorted(filenames):
                        if isinstance(filename, str):
                            filename = filename.decode('utf-8')
                        if not filename.startswith('._') and not filename in ('.DS_Store', ):
                            file_path = os.path.join(dirpath, filename)
                            files.append(file_path)
                            self.scan_file(file_path)

            conn, c = self._conn()
            c.execute('SELECT path FROM file WHERE path LIKE ? AND deleted < 0', ["%s%%" % path])
            known_files = [r[0] for r in c.fetchall()]
            deleted_files = filter(lambda f: f not in files, known_files)
            if deleted_files:
                deleted = time.mktime(time.localtime())
                for f in deleted_files:
                    c.execute('UPDATE file SET deleted=? WHERE path=?', (deleted, f))
                conn.commit()

    def sync(self):
        conn, c = self._conn()

        volumes = {}
        for name in self._config['volumes']:
            path = self._config['volumes'][name]
            path = os.path.normpath(path)

            volumes[name] = {}
            volumes[name]['path'] = path
            if os.path.exists(path):
                volumes[name]['available'] = True
            else:
                volumes[name]['available'] = False

        profile = self.api.encodingProfile()['data']['profile']
        for name in volumes:
            if volumes[name]['available']:
                prefix = volumes[name]['path']
                files = self.files(prefix)
                files['volume'] = name
                r = self.api.update(files)
                if r['status']['code'] == 200:

                    if r['data']['info']:
                        post = {'info': {}}
                        for oshash in r['data']['info']:
                            post['info'][oshash] = files['info'][oshash]
                        r2 = self.api.update(post)
                        #FIXME: should r2 be merged with r?

                    filenames = {}
                    for f in files['files']:
                        filenames[f['oshash']] = f['path']

                    if r['data']['data']:
                        for oshash in r['data']['data']:
                            data = {}
                            filename = filenames[oshash]
                            self.api.uploadVideo(os.path.join(prefix, filename), data, profile)

                    if r['data']['file']:
                        for oshash in r['data']['file']:
                            filename = filenames[oshash]
                            self.api.uploadData(os.path.join(prefix, filename), oshash)
                else:
                    print "updating volume", name, "failed"

    def files(self, prefix):
        conn, c = self._conn()
        files = {}
        files['info'] = {}
        files['files'] = []
        sql = 'SELECT path, oshash, info, atime, ctime, mtime FROM file WHERE deleted < 0 AND path LIKE ? ORDER BY path'
        t = [u"%s%%" % prefix]
        c.execute(sql, t)
        for row in c:
            path = row[0]
            oshash = row[1]
            info = json.loads(row[2])
            for key in ('atime', 'ctime', 'mtime', 'path'):
                if key in info:
                    del info[key]
            files['info'][oshash] = info
            files['files'].append({
                'oshash': oshash,
                'path': path[len(prefix)+1:],
                'atime': row[3],
                'ctime': row[4],
                'mtime': row[5],
            })
        return files

    def clean(self):
        print "remove temp videos and stills"
        if os.path.exists(prefix):
            shutil.rmtree(prefix)

class API(object):
    def __init__(self, url, cj=None):
        if cj:
            self._cj = cj
        else:
            self._cj = cookielib.CookieJar()
        self._opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self._cj))
        urllib2.install_opener(self._opener)

        self.url = url
        r = self._request('api', {})
        self._actions = r['data']['actions']
        for a in r['data']['actions']:
            setattr(self.__class__, a, self._action(a))

    def _action(self, action):
        def f(self, data=None):
            return self._request(action, data)
        return f
        #return lambda self, data:

    def _json_request(self, url, form):
        try:
            request = urllib2.Request(url)
            request.add_header('User-agent', 'pandora_client/%s' % __version__)
            body = str(form)
            request.add_header('Content-type', form.get_content_type())
            request.add_header('Content-length', len(body))
            request.add_data(body)
            result = urllib2.urlopen(request).read().strip()
            return json.loads(result)
        except urllib2.HTTPError, e:
            if DEBUG:
                if e.code >= 500:
                    with open('/tmp/error.html', 'w') as f:
                        f.write(e.read())
                    os.system('firefox /tmp/error.html')

            result = e.read()
            try:
                result = json.loads(result)
            except:
                result = {'status': {}}
            result['status']['code'] = e.code
            result['status']['text'] = str(e)
            return result
        except:
            if DEBUG:
                import traceback
                traceback.print_exc()
                with open('/tmp/error.html', 'w') as f:
                    f.write(result)
                os.system('firefox /tmp/error.html')
            raise

    def _request(self, action, data=None):
        form = ox.MultiPartForm()
        form.add_field('action', action)
        if data:
            form.add_field('data', json.dumps(data))
        return self._json_request(self.url, form)

    def uploadVideo(self, filename, data, profile):
        i = encode(filename, profile)

        #upload frames
        form = ox.MultiPartForm()
        form.add_field('action', 'upload')
        form.add_field('oshash', str(i['oshash']))
        for key in data:
            form.add_field(str(key), data[key].encode('utf-8'))
        for frame in i['frames']:
            form.add_file('frame', os.path.basename(frame), open(frame, 'rb'))
        r = self._json_request(self.url, form)

        #upload video in chunks
        url = self.url + 'upload/' + '?profile=' + str(profile) + '&oshash=' + i['oshash']
        ogg = Firefogg(cj=self._cj, debug=True)
        ogg.upload(url, i['video'], data)
        print "done"

    def uploadData(self, filename, oshash):
        form = ox.MultiPartForm()
        form.add_field('action', 'upload')
        form.add_field('oshash', str(oshash))
        fname = os.path.basename(filename)
        if isinstance(fname, unicode): fname = fname.encode('utf-8')
        form.add_file('file', fname, open(filename, 'rb'))
        r = self._json_request(self.url, form)
        return r
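For orientation (a sketch, not part of this commit): Client reads a JSON config file, and the keys it accesses above are url, username, password, cache and volumes. A minimal example of such a config and a driver script, with placeholder URL, credentials and paths:

# sketch only: the config keys come from Client.__init__/scan/sync above;
# the URL, credentials and paths below are placeholders
import json

config = {
    'url': 'http://pandora.example.com/api/',  # placeholder pan.do/ra endpoint
    'username': 'user',
    'password': 'secret',
    'cache': '~/.ox/client.sqlite',            # sqlite file opened by Client._conn()
    'volumes': {
        'Movies': '/media/Movies',             # volume name -> local path
    },
}
with open('client.json', 'w') as f:
    json.dump(config, f, indent=4)

from pandora_client import Client
client = Client('client.json')
client.scan()   # walk the volumes and update the local sqlite cache
client.sync()   # send file info to the server, then upload requested videos and data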
pandora_client/extract.py (new file, 147 lines)
@@ -0,0 +1,147 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
# GPL 2010
from __future__ import division, with_statement

import fractions
from glob import glob
import json
import os
import re
import sqlite3
import subprocess
import sys
import shutil
import tempfile
import time

import ox

from utils import avinfo, AspectRatio, run_command


def frame(video, target, position):
    fdir = os.path.dirname(target)
    if fdir and not os.path.exists(fdir):
        os.makedirs(fdir)

    #mplayer
    cwd = os.getcwd()
    target = os.path.abspath(target)
    framedir = tempfile.mkdtemp()
    os.chdir(framedir)
    cmd = ['mplayer', '-noautosub', video, '-ss', str(position), '-frames', '2', '-vo', 'png:z=9', '-ao', 'null']
    print cmd
    r = run_command(cmd)
    images = glob('%s/*.png' % framedir)
    if images:
        shutil.move(images[-1], target)
        r = 0
    else:
        r = 1
    os.chdir(cwd)
    shutil.rmtree(framedir)
    return r == 0

def video(video, target, profile, info):
    if not os.path.exists(target):
        fdir = os.path.dirname(target)
        if not os.path.exists(fdir):
            os.makedirs(fdir)

    dar = AspectRatio(info['video'][0]['display_aspect_ratio'])
    profile_cmd = []
    '''
    look into
        lag
        mb_static_threshold
        qmax/qmin
        rc_buf_aggressivity=0.95
        token_partitions=4
        level / speedlevel
        bt?
    '''
    if profile == '720p':
        height = 720

        audiorate = 48000
        audioquality = 5
        audiobitrate = None
        audiochannels = None
    if profile == '480p':
        height = 480

        audiorate = 44100
        audioquality = 2
        audiobitrate = None
        audiochannels = 2
    elif profile == '360p':
        height = 360

        audiorate = 44100
        audioquality = 1
        audiobitrate = None
        audiochannels = 1
    elif profile == '270p':
        height = 270

        audiorate = 44100
        audioquality = 0
        audiobitrate = None
        audiochannels = 1
    else:
        height = 96

        audiorate = 22050
        audioquality = -1
        audiobitrate = '22k'
        audiochannels = 1

    bpp = 0.17
    fps = AspectRatio(info['video'][0]['framerate'])

    width = int(dar * height)
    width += width % 2

    bitrate = height*width*fps*bpp/1000
    aspect = dar.ratio
    #use 1:1 pixel aspect ratio if dar is close to that
    if abs(width/height - dar) < 0.02:
        aspect = '%s:%s' % (width, height)

    if info['audio']:
        audio_settings = ['-ar', str(audiorate), '-aq', str(audioquality)]
        if audiochannels and 'channels' in info['audio'][0] and info['audio'][0]['channels'] > audiochannels:
            audio_settings += ['-ac', str(audiochannels)]
        if audiobitrate:
            audio_settings += ['-ab', audiobitrate]
        audio_settings += ['-acodec', 'libvorbis']
    else:
        audio_settings = ['-an']

    video_settings = [
        '-vb', '%dk' % bitrate, '-g', '%d' % int(fps*2),
        '-s', '%dx%d' % (width, height),
        '-aspect', aspect,
    ]
    cmd = ['ffmpeg', '-y', '-threads', '2', '-i', video] \
        + audio_settings \
        + video_settings \
        + ['-f', 'webm', target]
    print cmd

    #r = run_command(cmd, -1)
    p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    line = p.stderr.readline()
    while line:
        if line.startswith('frame='):
            frames = line.split('=')[1].strip().split(' ')[0]
        line = p.stderr.readline()

    p.wait()
    r = p.returncode
    print "done"
    return r == 0
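A quick worked example of the bitrate formula above (a sketch, not part of the commit), assuming a 16:9 source at 25 fps encoded with the '480p' profile:

# bitrate = height*width*fps*bpp/1000 with bpp = 0.17, see video() above
height = 480
width = int(16 / 9. * height)    # 853
width += width % 2               # rounded up to an even 854
bitrate = height * width * 25 * 0.17 / 1000
print bitrate                    # ~1742, passed to ffmpeg as '-vb 1742k'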
pandora_client/utils.py (new file, 77 lines)
@@ -0,0 +1,77 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
# GPL 2010
from __future__ import division, with_statement

import fractions
from glob import glob
import json
import os
import re
import sqlite3
import subprocess
import sys
import shutil
import tempfile
import time

import ox


class AspectRatio(fractions.Fraction):
    def __new__(cls, numerator, denominator=None):
        if not denominator:
            ratio = map(int, numerator.split(':'))
            if len(ratio) == 1: ratio.append(1)
            numerator = ratio[0]
            denominator = ratio[1]
            #if its close enough to the common aspect ratios rather use that
            if abs(numerator/denominator - 4/3) < 0.03:
                numerator = 4
                denominator = 3
            elif abs(numerator/denominator - 16/9) < 0.02:
                numerator = 16
                denominator = 9
        return super(AspectRatio, cls).__new__(cls, numerator, denominator)

    @property
    def ratio(self):
        return "%d:%d" % (self.numerator, self.denominator)

def avinfo(filename):
    if os.path.getsize(filename):
        info = ox.avinfo(filename)
        if 'video' in info and info['video']:
            if not 'display_aspect_ratio' in info['video'][0]:
                dar = AspectRatio(info['video'][0]['width'], info['video'][0]['height'])
                info['video'][0]['display_aspect_ratio'] = dar.ratio
        del info['path']
        if os.path.splitext(filename)[-1] in ('.srt', '.sub', '.idx', '.rar') and 'error' in info:
            del info['error']
            if 'code' in info and info['code'] == 'badfile':
                del info['code']
        return info
    return {'path': filename, 'size': 0}

def hash_prefix(h):
    return [h[:2], h[2:4], h[4:6], h[6:]]

def run_command(cmd, timeout=25):
    #print cmd
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
    while timeout > 0:
        time.sleep(0.2)
        timeout -= 0.2
        if p.poll() != None:
            return p.returncode
    if p.poll() == None:
        os.kill(p.pid, 9)
        killedpid, stat = os.waitpid(p.pid, os.WNOHANG)
    return p.returncode

def video_frame_positions(duration):
    pos = duration / 2
    #return [pos/4, pos/2, pos/2+pos/4, pos, pos+pos/2, pos+pos/2+pos/4]
    return map(int, [pos/2, pos, pos+pos/2])
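The helpers above are easiest to read from their output; a short sketch (not part of the commit), using the same in-package import style as extract.py:

from utils import AspectRatio, hash_prefix, video_frame_positions

print AspectRatio('16:9').ratio        # '16:9'
print AspectRatio(1280, 720).ratio     # '16:9' (Fraction reduces 1280/720)
print hash_prefix('0123456789abcdef')  # ['01', '23', '45', '6789abcdef']
print video_frame_positions(3600)      # [900, 1800, 2700]: 1/4, 1/2 and 3/4 of the duration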
setup.py (new file, 32 lines)
@@ -0,0 +1,32 @@
#!/usr/bin/env python
# vi:si:et:sw=4:sts=4:ts=4
# encoding: utf-8
from distutils.core import setup

setup(
    name="pandora_client",
    version="1.0",
    description='''pandora_client - headless archive client for pan.do/ra

    can be used instead of OxFF to keep archive and pan.do/ra instance in sync.
    ''',
    author="j",
    author_email="j@mailb.org",
    url="http://code.0x2620.org/pandora_client",
    download_url="http://code.0x2620.org/pandora_client/download",
    license="GPLv3",
    scripts=[
        'bin/pandora_client',
    ],
    packages=[
        'pandora_client'
    ],
    keywords=[
    ],
    classifiers=[
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'License :: OSI Approved :: GNU General Public License (GPL)',
    ],
)