# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
# GPL 2011
from __future__ import print_function

from types import MethodType

import gzip
import mimetypes
import os
import shutil
import sys
import time

from http import cookiejar as cookielib
from io import BytesIO
import urllib.error  # imported explicitly so the HTTPError handler in _json_request resolves
from urllib.parse import urlparse

import requests

from . import __version__
from .utils import json


__all__ = ['getAPI', 'API']
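
# A minimal usage sketch (the endpoint URL is hypothetical; the methods that
# exist on an API instance depend on the actions the server advertises):
#
#   api = getAPI('https://example.com/api/')
#   api.find(...)   # generated at runtime from the server's API docs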

# files are uploaded in chunks of this size (5 MB)
CHUNK_SIZE = 1024*1024*5


def getAPI(url, cj=None):
    return API(url, cj)


class API(object):
    __version__ = __version__
    __name__ = 'ox'
    DEBUG = False
    debuglevel = 0

    def __init__(self, url, cj=None):
        if cj:
            self._cj = cj
        else:
            self._cj = cookielib.CookieJar()

        self._requests_session = requests.Session()
        self._requests_session.cookies = self._cj
        self._requests_session.headers = {
            'User-Agent': '%s/%s' % (self.__name__, self.__version__),
            'Accept-Encoding': 'gzip, deflate',
        }
        self.url = url
        # fetch the server's API documentation and turn every advertised
        # action into a method on this instance
        r = self._request('api', {'docs': True})
        self._properties = r['data']['actions']
        self._actions = r['data']['actions'].keys()
        for a in r['data']['actions']:
            self._add_action(a)

    def _add_method(self, method, name):
        if name is None:
            name = method.__name__
        setattr(self, name, MethodType(method, self))

    def _add_action(self, action):
        def method(self, *args, **kw):
            if args and kw:
                raise ValueError('pass either a dictionary or kwargs, not both')
            if not kw:
                kw = args[0] if args else None
            return self._request(action, kw)
        if 'doc' in self._properties[action]:
            method.__doc__ = self._properties[action]['doc']
        method.__name__ = str(action)
        self._add_method(method, action)
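
    # Generated methods accept either a single dict or keyword arguments,
    # which become the JSON 'data' payload of the request. For example,
    # assuming the server advertises a 'find' action (hypothetical):
    #   api.find({'query': {'conditions': []}, 'range': [0, 10]})
    #   api.find(query={'conditions': []}, range=[0, 10])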

    def _json_request(self, url, data, files=None):
        result = {}
        try:
            request = self._requests_session.post(url, data=data, files=files)
            result = request.json()
            return result
        except urllib.error.HTTPError as e:
            # legacy error path from the urllib-based implementation;
            # requests itself does not raise urllib.error.HTTPError
            if self.DEBUG:
                import webbrowser
                if e.code >= 500:
                    with open('/tmp/error.html', 'wb') as f:
                        f.write(e.read())
                    webbrowser.open_new_tab('/tmp/error.html')
            result = e.read()
            try:
                result = result.decode('utf-8')
                result = json.loads(result)
            except Exception:
                result = {'status': {}}
            result['status']['code'] = e.code
            result['status']['text'] = str(e)
            return result
        except Exception:
            if self.DEBUG:
                import webbrowser
                import traceback
                traceback.print_exc()
                if result:
                    with open('/tmp/error.html', 'w') as f:
                        f.write(str(result))
                    webbrowser.open_new_tab('/tmp/error.html')
            raise
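
    # _json_request assumes responses use a JSON envelope of the form
    #   {"status": {"code": 200, "text": "ok"}, "data": {...}}
    # which is also what the error path above reconstructs on failure.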

    def _request(self, action, data=None):
        form = {
            'action': action
        }
        if data:
            form['data'] = json.dumps(data)
        return self._json_request(self.url, form)
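
    # For example, _request('find', {'query': q}) posts the form fields
    #   action=find
    #   data={"query": ...}
    # to self.url and returns the decoded JSON response.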

    def get_url(self, url):
        # plain GET via the shared session, so session cookies are reused
        return self._requests_session.get(url).content

    def save_url(self, url, filename, overwrite=False):
        chunk_size = 16 * 1024
        if not os.path.exists(filename) or overwrite:
            dirname = os.path.dirname(filename)
            if dirname and not os.path.exists(dirname):
                os.makedirs(dirname)
            # download to a temporary file and move it into place afterwards,
            # so an interrupted download never leaves a truncated target
            tmpname = filename + '.tmp'
            with open(tmpname, 'wb') as fd:
                # stream=True avoids reading the whole body into memory at once
                r = self._requests_session.get(url, stream=True)
                for chunk in r.iter_content(chunk_size=chunk_size):
                    fd.write(chunk)
            shutil.move(tmpname, filename)
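
    # e.g. (hypothetical URL and path):
    #   api.save_url('https://example.com/media/poster.jpg', 'posters/poster.jpg')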

    def upload_chunks(self, url, filename, data=None, silent=False):
        # the first request registers the upload; the response contains the
        # chunk upload URL and, when resuming, the offset already on the server
        data = self._json_request(url, data)

        def full_url(path):
            if path.startswith('/'):
                u = urlparse(url)
                path = '%s://%s%s' % (u.scheme, u.netloc, path)
            return path

        if 'uploadUrl' in data:
            uploadUrl = full_url(data['uploadUrl'])
            with open(filename, 'rb') as f:
                fsize = os.stat(filename).st_size
                done = 0
                if 'offset' in data and data['offset'] < fsize:
                    # resume a partial upload at the offset the server reports
                    done = data['offset']
                    f.seek(done)
                chunk = f.read(CHUNK_SIZE)
                fname = os.path.basename(filename)
                mime_type = mimetypes.guess_type(fname)[0] or 'application/octet-stream'
                if not isinstance(fname, bytes):
                    fname = fname.encode('utf-8')
                while chunk:
                    meta = {
                        'offset': str(done)
                    }
                    if len(chunk) < CHUNK_SIZE or f.tell() == fsize:
                        meta['done'] = '1'
                    files = [
                        ('chunk', (fname, chunk, mime_type))
                    ]
                    try:
                        data = self._json_request(uploadUrl, meta, files=files)
                    except KeyboardInterrupt:
                        if not silent:
                            print("\ninterrupted by user.")
                        sys.exit(1)
                    except Exception:
                        if not silent:
                            print("uploading chunk failed, will try again in 5 seconds\r", end='')
                            sys.stdout.flush()
                        data = {'result': -1}
                        time.sleep(5)
                    if data and 'status' in data:
                        if data['status']['code'] == 403:
                            if not silent:
                                print("login required")
                            return False
                        if data['status']['code'] != 200:
                            if not silent:
                                print("request returned error, will try again in 5 seconds")
                                if self.DEBUG:
                                    print(data)
                            time.sleep(5)
                    if data and data.get('result') == 1:
                        done += len(chunk)
                        # if the server's offset disagrees, trust the server
                        if data.get('offset') not in (None, done):
                            if not silent:
                                print('server offset out of sync, continue from', data['offset'])
                            done = data['offset']
                            f.seek(done)
                        chunk = f.read(CHUNK_SIZE)
            if data and data.get('result') == 1:
                return data.get('id', True)
            return False
        return False
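
    # A sketch of a chunked upload; the upload endpoint and the contents of
    # 'data' are server-specific assumptions, not part of this module:
    #
    #   api = API('https://example.com/api/')
    #   api.signin(username='user', password='...')
    #   ok = api.upload_chunks('https://example.com/api/upload/', 'movie.webm',
    #                          data={'id': 'abc123'})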


def signin(url):
    from getpass import getpass
    from .web import auth

    if not url.startswith('http'):
        site = url
        url = 'https://%s/api/' % url
    else:
        site = url.split('/')[2]
    if not url.endswith('/'):
        url += '/'
    api = API(url)
    update = False
    try:
        credentials = auth.get(site)
    except Exception:
        credentials = {}
        print('Please provide your username and password for %s:' % site)
        credentials['username'] = input('Username: ')
        credentials['password'] = getpass('Password: ')
        update = True
    # 'signin' is one of the methods generated from the server's API docs
    r = api.signin(**credentials)
    if 'errors' in r.get('data', {}):
        for kv in r['data']['errors'].items():
            print('%s: %s' % kv)
        sys.exit(1)
    if update:
        auth.update(site, credentials)
    return api
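
# Interactive example (hypothetical host; credentials are read from, and
# cached via, ox.web.auth):
#
#   from ox.api import signin
#   api = signin('example.com')   # expands to https://example.com/api/
#   print(sorted(api._actions))   # actions the server advertises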