some python client
commit 5f3b638a33
2 changed files with 25824 additions and 0 deletions
client.py (new normal file, 120 additions)
@@ -0,0 +1,120 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
# GPL 2009
from __future__ import division
import os
import hashlib
import sys
import urllib
import urllib2
import cookielib
import time
import subprocess
import tempfile
import shutil

try:
    import json
except ImportError:
    import simplejson as json

import oxlib
from oxlib import MultiPartForm
from firefogg import Firefogg

class Backend:
    url = 'http://127.0.0.1:8000/api/'
    files = {}

    def __init__(self, name):
        # keep cookies in a shared jar so later Firefogg uploads reuse the login session
        self.cj = cookielib.CookieJar()
        self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cj))
        urllib2.install_opener(self.opener)

        self.name = name

    def load_files(self, files_json='files.json'):
        with open(files_json) as f:
            self.files = json.loads(f.read())

    def json_request(self, url, form):
        # POST the multipart form and decode the JSON response
        try:
            request = urllib2.Request(url)
            request.add_header('User-agent', 'oxclient')
            body = str(form)
            request.add_header('Content-type', form.get_content_type())
            request.add_header('Content-length', str(len(body)))
            request.add_data(body)
            result = urllib2.urlopen(request).read().strip()
            return json.loads(result)
        except urllib2.HTTPError, e:
            # the error body can only be read once, so keep it in a variable
            error = e.read()
            if e.code >= 500:
                with open('/tmp/error.html', 'w') as f:
                    f.write(error)
                os.system('firefox /tmp/error.html')
            try:
                result = json.loads(error)
            except ValueError:
                result = {'status': {}}
            result['status']['code'] = e.code
            result['status']['text'] = str(e)
            return result

    def api(self, function, data, files={}):
        form = MultiPartForm()
        form.add_field('action', function)
        form.add_field('data', json.dumps(data))
        if files:
            for key in files:
                value = files[key]
                form.add_file(key, os.path.basename(value.name), value)
        return self.json_request(self.url, form)

    def login(self, username, password):
        r = self.api('login', {'username': username, 'password': password})
        return r['status']['code'] == 200

    def update(self):
        data = {
            'files': self.files,
            'archive': self.name,
        }
        result = self.api('update', data)
        print result

    def upload(self, oshash, filename):
        url = "%supload/" % self.url
        ogg = Firefogg(cj=self.cj, debug=True)
        ogg.upload(url, filename, {"oshash": oshash})

    def upload_frames(self, oshash, filename):
        info = oxlib.avinfo(filename)
        pos = info['duration'] / 2

        def get_frame(filename, output, pos):
            cmd = ['oxframe', '-i', filename, '-o', output, '-p', str(pos)]
            p = subprocess.Popen(cmd)
            p.wait()
            return p.returncode == 0

        tmp = tempfile.mkdtemp()
        # grab frames at 1/4, 1/2 and 3/4 of the duration
        for p in [pos/2, pos, pos+pos/2]:
            output = os.path.join(tmp, '%s.png' % oxlib.formatDuration(p*1000).replace(':', '.'))
            if get_frame(filename, output, p):
                print self.api('upload', {
                        'item': 'frame',
                        'oshash': oshash,
                        'position': p},
                    files={'file': open(output, 'rb')})
        shutil.rmtree(tmp)

if __name__ == "__main__":
    c = Backend('TextArchive')
    #c.url = 'http://oxdev.local/api/'
    c.login('test', 'test2')
    c.api('addArchive', {'name': 'TextArchive'})
    c.load_files()
    c.update()

    c.upload('6000aeaa8c90c092', '/home/j/Downloads/Elgincinema_03.ogv')
    c.upload_frames('6000aeaa8c90c092', '/home/j/Downloads/Elgincinema_03.ogv')
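client.py builds its request bodies with MultiPartForm, imported from oxlib, which is not part of this commit. As a rough, hypothetical sketch of the interface the client relies on — add_field(), add_file(), get_content_type(), and str(form) yielding a multipart/form-data body — a minimal stand-in could look like the following; the real oxlib implementation may differ:

# Hypothetical minimal stand-in for oxlib.MultiPartForm; only the methods
# client.py calls are sketched, the real oxlib code may differ.
import mimetypes
import uuid

class MultiPartForm(object):
    def __init__(self):
        self.fields = []   # (name, value) pairs
        self.files = []    # (fieldname, filename, mimetype, body) tuples
        self.boundary = uuid.uuid4().hex

    def get_content_type(self):
        return 'multipart/form-data; boundary=%s' % self.boundary

    def add_field(self, name, value):
        self.fields.append((name, value))

    def add_file(self, fieldname, filename, fileobj):
        mimetype = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
        self.files.append((fieldname, filename, mimetype, fileobj.read()))

    def __str__(self):
        # serialize fields and files into a single multipart/form-data body
        parts = []
        for name, value in self.fields:
            parts += ['--' + self.boundary,
                      'Content-Disposition: form-data; name="%s"' % name,
                      '', value]
        for fieldname, filename, mimetype, body in self.files:
            parts += ['--' + self.boundary,
                      'Content-Disposition: form-data; name="%s"; filename="%s"' % (fieldname, filename),
                      'Content-Type: %s' % mimetype,
                      '', body]
        parts += ['--' + self.boundary + '--', '']
        return '\r\n'.join(parts)

Under that assumption, api() amounts to posting the 'action' and JSON-encoded 'data' fields (plus any attached files) as one multipart request, which json_request() then sends with urllib2.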
files.json (new normal file, 25704 additions)
File diff suppressed because it is too large