import/lists/autocompleteFolder

commit d6f350e5a1, parent 94443ee667
42 changed files with 955 additions and 436 deletions
oml/api.py (75)

@@ -2,5 +2,80 @@
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division

import subprocess
import json
import os

import ox
from oxflask.api import actions
from oxflask.shortcuts import returns_json

import item.api
import user.api

@returns_json
def selectFolder(request):
    '''
    returns {
        path
    }
    '''
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    cmd = ['./ctl', 'ui', 'folder']
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    stdout, stderr = p.communicate()
    path = stdout.decode('utf-8').strip()
    return {
        'path': path
    }
actions.register(selectFolder, cache=False)

@returns_json
def selectFile(request):
    '''
    returns {
        path
    }
    '''
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    cmd = ['./ctl', 'ui', 'file']
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    stdout, stderr = p.communicate()
    path = stdout.decode('utf-8').strip()
    return {
        'path': path
    }
actions.register(selectFile, cache=False)


@returns_json
def autocompleteFolder(request):
    '''
    takes {
        path
    }
    returns {
        items
    }
    '''
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    path = data['path']
    path = os.path.expanduser(path)
    if os.path.isdir(path):
        if path.endswith('/') and path != '/':
            path = path[:-1]
        folder = path
        name = ''
    else:
        folder, name = os.path.split(path)
    if os.path.exists(folder):
        prefix, folders, files = os.walk(folder).next()
        folders = [os.path.join(prefix, f) for f in folders if not name or f.startswith(name)]
        if prefix == path:
            folders = [path] + folders
    else:
        folders = []
    return {
        'items': ox.sorted_strings(folders)
    }
actions.register(autocompleteFolder, cache=False)
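For reference, a standalone sketch (not part of the commit) of the completion idea used by autocompleteFolder above: take a partially typed path, split it into an existing folder and a typed prefix, and list the matching sub-folders. It uses next(os.walk(...)) and sorted() instead of the Python 2 .next() call and ox.sorted_strings; paths in the comment are illustrative.

import os

def complete_folder(path):
    # Follows the handler above: expand ~, then split into existing folder + typed prefix.
    path = os.path.expanduser(path)
    if os.path.isdir(path):
        if path.endswith('/') and path != '/':
            path = path[:-1]
        folder, name = path, ''
    else:
        folder, name = os.path.split(path)
    if not os.path.exists(folder):
        return []
    # next(os.walk(folder)) yields only the top level: (folder, subfolders, files)
    prefix, folders, files = next(os.walk(folder))
    return sorted(os.path.join(prefix, f) for f in folders if not name or f.startswith(name))

print(complete_folder('~/Doc'))   # e.g. ['/home/user/Documents'] on a typical home directory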
@@ -18,8 +18,7 @@ import item.models
import user.models
import item.person

import item.api
import user.api
import api

import item.views
import commands
@@ -23,7 +23,7 @@ class Downloads(Thread):
        import item.models
        for i in item.models.Item.query.filter(
            item.models.Item.transferadded!=None).filter(
            item.models.Item.transferprogress<1):
            item.models.Item.transferprogress<1).order_by(item.models.Item.transferadded):
            logger.debug('DOWNLOAD %s %s', i, i.users)
            for p in i.users:
                if state.nodes.check_online(p.id):
oml/item/api.py (115)

@@ -2,11 +2,12 @@
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division

import logging

import os
import json

from oxflask.api import actions
from oxflask.shortcuts import returns_json
from sqlalchemy.orm import load_only

import query

@@ -18,12 +19,22 @@ import meta

import utils

import logging
logger = logging.getLogger('oml.item.api')

@returns_json
def find(request):
    '''
    find items
    takes {
        query {
            conditions [{}]
            operator string
        }
        group string
        keys [string]
        sort [{}]
        range [int, int]
    }
    '''
    response = {}
    data = json.loads(request.form['data']) if 'data' in request.form else {}

@@ -31,7 +42,7 @@ def find(request):
    if 'group' in q:
        names = {}
        groups = {}
        items = [i.id for i in q['qs']]
        items = [i.id for i in q['qs'].options(load_only('id'))]
        qs = models.Find.query.filter_by(key=q['group'])
        if items:
            qs = qs.filter(models.Find.item_id.in_(items))

@@ -58,16 +69,12 @@ def find(request):
        else:
            response['items'] = len(g)
    elif 'position' in data:
        ids = [i.id for i in q['qs']]
        ids = [i.id for i in q['qs'].options(load_only('id'))]
        response['position'] = utils.get_positions(ids, [data['qs'][0].id])[0]
    elif 'positions' in data:
        ids = [i.id for i in q['qs']]
        ids = [i.id for i in q['qs'].options(load_only('id'))]
        response['positions'] = utils.get_positions(ids, data['positions'])
    elif 'keys' in data:
        '''
        qs = qs[q['range'][0]:q['range'][1]]
        response['items'] = [p.json(data['keys']) for p in qs]
        '''
        response['items'] = []
        for i in q['qs'][q['range'][0]:q['range'][1]]:
            j = i.json()

@@ -77,12 +84,18 @@ def find(request):
        #from sqlalchemy.sql import func
        #models.db.session.query(func.sum(models.Item.sort_size).label("size"))
        #response['size'] = x.scalar()
        response['size'] = sum([i.sort_size or 0 for i in q['qs']])
        response['size'] = sum([i.sort_size or 0 for i in q['qs'].options(load_only('id', 'sort_size'))])
    return response
actions.register(find)

@returns_json
def get(request):
    '''
    takes {
        id
        keys
    }
    '''
    response = {}
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    item = models.Item.get(data['id'])

@@ -93,29 +106,48 @@ actions.register(get)

@returns_json
def edit(request):
    '''
    takes {
        id
        ...
    }
    setting identifier or base metadata is possible not both at the same time
    '''
    response = {}
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    logger.debug('edit', data)
    logger.debug('edit %s', data)
    item = models.Item.get(data['id'])
    keys = filter(lambda k: k in models.Item.id_keys, data.keys())
    logger.debug(item, keys)
    if item and keys and item.json()['mediastate'] == 'available':
        key = keys[0]
        logger.debug('update mainid %s %s', key, data[key])
        if key in ('isbn10', 'isbn13'):
            data[key] = utils.normalize_isbn(data[key])
        item.update_mainid(key, data[key])
        response = item.json()
    logger.debug('edit of %s id keys: %s', item, keys)
    if item and item.json()['mediastate'] == 'available':
        if keys:
            key = keys[0]
            logger.debug('update mainid %s %s', key, data[key])
            if key in ('isbn10', 'isbn13'):
                data[key] = utils.normalize_isbn(data[key])
            item.update_mainid(key, data[key])
            response = item.json()
        elif not item.meta.get('mainid'):
            logger.debug('chustom data %s', data)
            for key in ('title', 'author', 'date', 'publisher', 'edition'):
                if key in data:
                    item.meta[key] = data[key]
            item.update()
            logger.debug('FIXME: custom metadata not published to changelog!!!')
    else:
        logger.info('can only edit available items')
    response = item.json()
    return response
actions.register(edit, cache=False)

@returns_json
def remove(request):
    '''
    takes {
        id
    }
    '''
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    logger.debug('remove files', data)
    logger.debug('remove files %s', data)
    if 'ids' in data and data['ids']:
        for i in models.Item.query.filter(models.Item.id.in_(data['ids'])):
            i.remove_file()

@@ -132,10 +164,11 @@ def findMetadata(request):
        date: string
    }
    returns {
        title: string,
        autor: [string],
        date: string,
        items: [{
            key: value
        }]
    }
    key is one of the supported identifiers: isbn10, isbn13...
    '''
    response = {}
    data = json.loads(request.form['data']) if 'data' in request.form else {}

@@ -146,18 +179,30 @@ actions.register(findMetadata)

@returns_json
def getMetadata(request):
    '''
    takes {
        key: value
    }
    key can be one of the supported identifiers: isbn10, isbn13, oclc, olid,...
    '''
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    logger.debug('getMetadata %s', data)
    key, value = data.iteritems().next()
    if key in ('isbn10', 'isbn13'):
        value = utils.normalize_isbn(value)
    response = meta.lookup(key, value)
    response['mainid'] = key
    if response:
        response['mainid'] = key
    return response
actions.register(getMetadata)

@returns_json
def download(request):
    '''
    takes {
        id
    }
    '''
    response = {}
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    item = models.Item.get(data['id'])

@@ -170,6 +215,11 @@ actions.register(download, cache=False)

@returns_json
def cancelDownloads(request):
    '''
    takes {
        ids
    }
    '''
    response = {}
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    ids = data['ids']

@@ -195,8 +245,21 @@ actions.register(scan, cache=False)

@returns_json
def _import(request):
    '''
    takes {
        path    absolute path to import
        list    listename (add new items to this list)
        mode    copy|move
    }
    '''
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    logger.debug('api.import %s', data)
    state.main.add_callback(state.websockets[0].put, json.dumps(['import', data]))
    return {}
actions.register(_import, 'import', cache=False)

@returns_json
def cancelImport(request):
    state.activity['cancel'] = True
    return {}
actions.register(cancelImport, cache=False)
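The repeated switch to q['qs'].options(load_only('id')) above keeps SQLAlchemy from hydrating every column when only ids (or sizes) are needed. A minimal sketch of that pattern, using a made-up Item model and recent SQLAlchemy (newer than what this tree targets), not the project's actual models:

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, load_only, sessionmaker

Base = declarative_base()

class Item(Base):
    __tablename__ = 'item'
    id = Column(Integer, primary_key=True)
    title = Column(String)
    sort_size = Column(Integer)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

qs = session.query(Item)
# Only the id column is fetched; title and sort_size stay deferred.
ids = [i.id for i in qs.options(load_only(Item.id))]
# Same idea for the size sum: fetch just the columns that get summed.
size = sum(i.sort_size or 0 for i in qs.options(load_only(Item.id, Item.sort_size)))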
@@ -33,6 +33,7 @@ from oxflask.db import MutableDict
from covers import covers
from changelog import Changelog
from websocket import trigger_event
from utils import remove_empty_folders

logger = logging.getLogger('oml.item.model')

@@ -296,8 +297,8 @@ class Item(db.Model):

    def extract_cover(self):
        path = self.get_path()
        if not path:
            return getattr(media, self.meta['extensions']).cover(path)
        if path:
            return getattr(media, self.info['extension']).cover(path)

    def update_cover(self):
        cover = None

@@ -327,8 +328,6 @@ class Item(db.Model):
        if mainid:
            m = meta.lookup(mainid, self.meta[mainid])
            self.meta.update(m)
        else:
            logger.debug('FIX UPDATE %s', mainid)
        self.update()

    def queue_download(self):

@@ -360,6 +359,7 @@ class Item(db.Model):
            Changelog.record(u, 'additem', self.id, self.info)
        self.update()
        f.move()
        self.update_cover()
        trigger_event('transfer', {
            'id': self.id, 'progress': 1
        })

@@ -376,6 +376,7 @@ class Item(db.Model):
            logger.debug('remove file %s', path)
            if os.path.exists(path):
                os.unlink(path)
            remove_empty_folders(os.path.dirname(path))
            db.session.delete(f)
        user = state.user()
        self.users.remove(user)

@@ -399,7 +400,7 @@ for key in config['itemKeys']:
        col = db.Column(db.String(1000), index=True)
    setattr(Item, 'sort_%s' % key['id'], col)

Item.id_keys = ['isbn10', 'isbn13', 'lccn', 'olid', 'oclc']
Item.id_keys = ['isbn10', 'isbn13', 'lccn', 'olid', 'oclc', 'asin']
Item.item_keys = config['itemKeys']
Item.filter_keys = [k['id'] for k in config['itemKeys'] if k.get('filter')]
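The extract_cover fix above dispatches on the file's extension via getattr(media, ...): each format lives in its own module exposing the same entry points. A tiny standalone sketch of that dispatch pattern, with stand-in handler classes instead of the real media package:

# Hypothetical stand-in for the media package: one handler per extension,
# each exposing the same cover() entry point.
class _Pdf(object):
    @staticmethod
    def cover(path):
        return 'pdf cover for %s' % path

class _Epub(object):
    @staticmethod
    def cover(path):
        return 'epub cover for %s' % path

class media(object):
    pdf = _Pdf
    epub = _Epub

def extract_cover(extension, path):
    # Same shape as Item.extract_cover: pick the handler by name, then call it.
    return getattr(media, extension).cover(path)

print(extract_cover('pdf', '/books/example.pdf'))   # -> 'pdf cover for /books/example.pdf'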
oml/item/scan.py (112)

@@ -19,6 +19,10 @@ from changelog import Changelog
import media
from websocket import trigger_event
import state
from utils import remove_empty_folders

import logging
logger = logging.getLogger('oml.item.scan')

extensions = ['epub', 'pdf', 'txt']

@@ -35,6 +39,29 @@ def remove_missing():
    if dirty:
        db.session.commit()

def add_file(id, f, prefix):
    user = state.user()
    path = f[len(prefix):]
    data = media.metadata(f)
    file = File.get_or_create(id, data, path)
    item = file.item
    if 'mainid' in file.info:
        del file.info['mainid']
        db.session.add(file)
    if 'mainid' in item.info:
        item.meta['mainid'] = item.info.pop('mainid')
        item.meta[item.meta['mainid']] = item.info[item.meta['mainid']]
        db.session.add(item)
    item.users.append(user)
    Changelog.record(user, 'additem', item.id, item.info)
    if item.meta.get('mainid'):
        Changelog.record(user, 'edititem', item.id, {
            item.meta['mainid']: item.meta[item.meta['mainid']]
        })
    item.added = datetime.now()
    item.scrape()
    return file

def run_scan():
    remove_missing()
    with app.app_context():

@@ -42,7 +69,6 @@ def run_scan():
        prefix = os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books/')
        if not prefix[-1] == '/':
            prefix += '/'
        user = User.get_or_create(settings.USER_ID)
        assert isinstance(prefix, unicode)
        books = []
        for root, folders, files in os.walk(prefix):

@@ -61,29 +87,8 @@ def run_scan():
            position += 1
            id = media.get_id(f)
            file = File.get(id)
            path = f[len(prefix):]
            if not file:
                data = media.metadata(f)
                ext = f.split('.')[-1]
                data['extension'] = ext
                data['size'] = os.stat(f).st_size
                file = File.get_or_create(id, data, path)
                item = file.item
                if 'mainid' in file.info:
                    del file.info['mainid']
                    db.session.add(file)
                if 'mainid' in item.info:
                    item.meta['mainid'] = item.info.pop('mainid')
                    item.meta[item.meta['mainid']] = item.info[item.meta['mainid']]
                    db.session.add(item)
                item.users.append(user)
                Changelog.record(user, 'additem', item.id, item.info)
                if item.meta.get('mainid'):
                    Changelog.record(user, 'edititem', item.id, {
                        item.meta['mainid']: item.meta[item.meta['mainid']]
                    })
                item.added = datetime.now()
                item.scrape()
                file = add_file(id, f, prefix)
                added += 1
                trigger_event('change', {})

@@ -93,18 +98,28 @@ def run_import(options=None):
    with app.app_context():
        prefs = settings.preferences
        prefix = os.path.expanduser(options.get('path', prefs['importPath']))
        if os.path.islink(prefix):
            prefix = os.path.realpath(prefix)
        if not prefix[-1] == '/':
            prefix += '/'
        prefix_books = os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books/')
        prefix_imported = os.path.join(prefix_books, 'Imported/')
        if not os.path.exists(prefix):
        if prefix_books.startswith(prefix) or prefix.startswith(prefix_books):
            error = 'invalid path'
        elif not os.path.exists(prefix):
            error = 'path not found'
        elif not os.path.isdir(prefix):
            error = 'path must be a folder'
        else:
            error = None
        if error:
            trigger_event('activity', {
                'activity': 'import',
                'progress': [0, 0],
                'status': {'code': 404, 'text': 'path not found'}
                'status': {'code': 404, 'text': error}
            })
            state.activity = {}
            user = User.get_or_create(settings.USER_ID)
            return
        listname = options.get('list')
        if listname:
            listitems = []

@@ -122,6 +137,7 @@ def run_import(options=None):

        state.activity = {
            'activity': 'import',
            'path': prefix,
            'progress': [0, len(books)],
        }
        trigger_event('activity', state.activity)

@@ -133,7 +149,6 @@ def run_import(options=None):
                continue
            id = media.get_id(f)
            file = File.get(id)
            path = f[len(prefix):]
            if not file:
                f_import = f
                f = f.replace(prefix, prefix_imported)

@@ -142,45 +157,38 @@ def run_import(options=None):
                    shutil.move(f_import, f)
                else:
                    shutil.copy(f_import, f)
                path = f[len(prefix_books):]
                data = media.metadata(f)
                ext = f.split('.')[-1]
                data['extension'] = ext
                data['size'] = os.stat(f).st_size
                file = File.get_or_create(id, data, path)
                item = file.item
                if 'mainid' in file.info:
                    del file.info['mainid']
                    db.session.add(file)
                if 'mainid' in item.info:
                    item.meta['mainid'] = item.info.pop('mainid')
                    item.meta[item.meta['mainid']] = item.info[item.meta['mainid']]
                    db.session.add(item)
                item.users.append(user)
                Changelog.record(user, 'additem', item.id, item.info)
                if item.meta.get('mainid'):
                    Changelog.record(user, 'edititem', item.id, {
                        item.meta['mainid']: item.meta[item.meta['mainid']]
                    })
                item.scrape()
                file = add_file(id, f, prefix_books)
                file.move()
                item = file.item
                if listname:
                    listitems.append(item.id)
                added += 1
            if state.activity.get('cancel'):
                state.activity = {}
                trigger_event('activity', {
                    'activity': 'import',
                    'status': {'code': 200, 'text': 'canceled'}
                })
                return
            state.activity = {
                'activity': 'import',
                'progress': [position, len(books)],
                'path': path,
                'path': prefix,
                'added': added,
            }
            trigger_event('activity', state.activity)
        if listname:
            l = List.get_or_create(settings.USER_ID, listname)
            l.add_items(listitems)
        if listname and listitems:
            l = List.get(settings.USER_ID, listname)
            if l:
                l.add_items(listitems)
        trigger_event('activity', {
            'activity': 'import',
            'progress': [position, len(books)],
            'path': prefix,
            'status': {'code': 200, 'text': ''},
            'added': added,
        })
        state.activity = {}
        remove_empty_folders(prefix_books)
        if options.get('mode') == 'move':
            remove_empty_folders(prefix)
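The new validation in run_import above refuses an import path that overlaps the library's own Books folder (in either direction) before checking existence. A standalone sketch of just that check, with hypothetical paths:

import os

def check_import_path(prefix, prefix_books):
    # Trailing slashes make startswith() compare whole path segments.
    if not prefix.endswith('/'):
        prefix += '/'
    if not prefix_books.endswith('/'):
        prefix_books += '/'
    if prefix_books.startswith(prefix) or prefix.startswith(prefix_books):
        return 'invalid path'            # importing from inside (or above) the library
    elif not os.path.exists(prefix):
        return 'path not found'
    elif not os.path.isdir(prefix):
        return 'path must be a folder'
    return None

# Importing the library into itself is refused:
print(check_import_path('/home/user/Library/Books/New/', '/home/user/Library/Books/'))   # -> 'invalid path'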
@@ -22,6 +22,8 @@ def get_id(f=None, data=None):
def metadata(f):
    ext = f.split('.')[-1]
    data = {}
    data['extension'] = ext
    data['size'] = os.stat(f).st_size
    if ext == 'pdf':
        info = pdf.info(f)
    elif ext == 'epub':
@@ -71,9 +71,11 @@ def info(pdf):
    with open(pdf, 'rb') as fd:
        try:
            pdfreader = PdfFileReader(fd)
            data['pages'] = pdfreader.numPages
            info = pdfreader.getDocumentInfo()
            if info:
                for key in info:
                    print key, info
                    if info[key]:
                        data[key[1:].lower()] = info[key]
            xmp = pdfreader.getXmpMetadata()
@@ -2,8 +2,7 @@
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division

import logging
logger = logging.getLogger('meta')
import stdnum.isbn

import abebooks
import loc

@@ -13,6 +12,10 @@ import worldcat
import google
import duckduckgo

import logging
logger = logging.getLogger('meta')


providers = [
    ('openlibrary', 'olid'),
    ('loc', 'lccn'),

@@ -32,6 +35,8 @@ def find(title, author=None, publisher=None, date=None):
    return results

def lookup(key, value):
    if not isvalid_id(key, value):
        return {}
    data = {key: value}
    ids = [(key, value)]
    provider_data = {}

@@ -59,4 +64,13 @@ def lookup(key, value):
            data[k_] = v_
    return data

def isvalid_id(key, value):
    if key in ('isbn10', 'isbn13'):
        if 'isbn%d'%len(value) != key or not stdnum.isbn.is_valid(value):
            return False
    if key == 'asin' and len(value) != 10:
        return False
    if key == 'olid' and not (value.startswith('OL') and value.endswith('M')):
        return False
    return True
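To make the new isvalid_id checks concrete, a few example calls (a sketch assuming the function above is importable as meta.isvalid_id; the values are made up but follow the rules as committed):

from meta import isvalid_id   # hypothetical import path for the function above

assert isvalid_id('isbn13', '9780262533058')        # 13 digits, valid ISBN-13 checksum
assert not isvalid_id('isbn10', '9780262533058')    # key says isbn10, value has 13 digits
assert isvalid_id('asin', 'B00EXAMPLE')             # any 10-character value passes the length check
assert not isvalid_id('asin', 'TOOSHORT')           # not 10 characters
assert isvalid_id('olid', 'OL7440033M')             # edition ids start with 'OL' and end with 'M'
assert not isvalid_id('olid', 'OL52163A')           # author ids end in 'A', so they are rejected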
@@ -9,10 +9,11 @@ import lxml.html
import logging
logger = logging.getLogger('meta.abebooks')

base = 'http://www.abebooks.com'

def get_ids(key, value):
    ids = []
    if key in ('isbn10', 'isbn13'):
        base = 'http://www.abebooks.com'
        url = '%s/servlet/SearchResults?isbn=%s&sts=t' % (base, id)
        data = read_url(url)
        urls = re.compile('href="(/servlet/BookDetailsPL[^"]+)"').findall(data)

@@ -24,21 +25,20 @@ def get_ids(key, value):

def lookup(id):
    logger.debug('lookup %s', id)
    return {}

def get_data(id):
    info = {}
    base = 'http://www.abebooks.com'
    data = {}
    url = '%s/servlet/SearchResults?isbn=%s&sts=t' % (base, id)
    data = read_url(url)
    urls = re.compile('href="(/servlet/BookDetailsPL[^"]+)"').findall(data)
    html = read_url(url)
    urls = re.compile('href="(/servlet/BookDetailsPL[^"]+)"').findall(html)
    keys = {
        'pubdate': 'date'
    }
    if urls:
        details = '%s%s' % (base, urls[0])
        data = read_url(details)
        doc = lxml.html.document_fromstring(data)
        html = read_url(details)
        doc = lxml.html.document_fromstring(html)
        for e in doc.xpath("//*[contains(@id, 'biblio')]"):
            key = e.attrib['id'].replace('biblio-', '')
            value = e.text_content()
            if value and key not in ('bookcondition', 'binding'):
                info[key] = value
    return info
            if value and key not in ('bookcondition', 'binding', 'edition-amz'):
                data[keys.get(key, key)] = value
    return data
@@ -37,6 +37,6 @@ def find(title, author=None, publisher=None, date=None):
            done.add(isbn)
            if len(isbn) == 10:
                done.add(stdnum.isbn.to_isbn13(isbn))
            if len(isbn) == 13:
            if len(isbn) == 13 and isbn.startswith('978'):
                done.add(stdnum.isbn.to_isbn10(isbn))
    return results
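The added startswith('978') guard matters because only 978-prefixed ISBN-13s have an ISBN-10 equivalent; a 979-prefixed ISBN cannot be converted, so the code now skips it. A short sketch using python-stdnum (the value is illustrative):

import stdnum.isbn

isbn13 = '9780262533058'
if len(isbn13) == 13 and isbn13.startswith('978'):
    # Safe: 978-prefixed ISBN-13s map one-to-one onto ISBN-10.
    print(stdnum.isbn.to_isbn10(isbn13))   # -> '0262533057'
# A 979-prefixed ISBN-13 has no ISBN-10 form at all, which is why the guard skips the conversion.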
@@ -45,9 +45,9 @@ def lookup(id):
    }
    for key in keys:
        r[key] = find_re(data, '<span class="title">%s:</span>(.*?)</li>'% re.escape(keys[key]))
        if r[key] == '--':
            r[key] = ''
        if key == 'pages' and r[key]:
        if r[key] == '--' or not r[key]:
            del r[key]
        if key == 'pages' and key in r:
            r[key] = int(r[key])
    desc = find_re(data, '<h2>Description:<\/h2>(.*?)<div ')
    desc = desc.replace('<br /><br />', ' ').replace('<br /> ', ' ').replace('<br />', ' ')
@@ -62,6 +62,8 @@ def api_requestPeering(app, user_id, username, message):
def api_acceptPeering(app, user_id, username, message):
    user = User.get(user_id)
    logger.debug('incoming acceptPeering event: pending: %s', user.pending)
    if user and user.peered:
        return True
    if user and user.pending == 'sent':
        if not user.info:
            user.info = {}
@@ -120,7 +120,7 @@ def publish_node(app):
    for u in user.models.User.query.filter_by(queued=True):
        logger.debug('adding queued node... %s', u.id)
        state.nodes.queue('add', u.id)
    state.check_nodes = PeriodicCallback(lambda: check_nodes(app), 60000)
    state.check_nodes = PeriodicCallback(lambda: check_nodes(app), 120000)
    state.check_nodes.start()

def check_nodes(app):

@@ -135,7 +135,7 @@ def start(app):
    application = Application([
        (r"/get/(.*)", ShareHandler, dict(app=app)),
        (r".*", NodeHandler, dict(app=app)),
    ])
    ], gzip=True)
    if not os.path.exists(settings.ssl_cert_path):
        settings.server['cert'] = cert.generate_ssl()
oml/nodes.py (40)

@@ -6,13 +6,15 @@ from Queue import Queue
from threading import Thread
import json
import socket

from StringIO import StringIO
import gzip
import urllib2
from datetime import datetime
import os

import ox
import ed25519
import urllib2
from tornado.ioloop import PeriodicCallback

import settings
import user.models

@@ -42,6 +44,8 @@ class Node(object):
        self.vk = ed25519.VerifyingKey(key, encoding=ENCODING)
        self.go_online()
        logger.debug('new Node %s online=%s', self.user_id, self.online)
        self._ping = PeriodicCallback(self.ping, 120000)
        self._ping.start()

    @property
    def url(self):

@@ -120,6 +124,8 @@ class Node(object):
            self.online = False
            return None
        data = r.read()
        if r.headers.get('content-encoding', None) == 'gzip':
            data = gzip.GzipFile(fileobj=StringIO(data)).read()
        sig = r.headers.get('X-Ed25519-Signature')
        if sig and self._valid(data, sig):
            response = json.loads(data)

@@ -151,6 +157,13 @@ class Node(object):
            pass
        return False

    def ping(self):
        with self._app.app_context():
            if self.online:
                self.online = self.can_connect()
            else:
                self.go_online()

    def go_online(self):
        self.resolve()
        u = self.user

@@ -179,7 +192,7 @@ class Node(object):
            self.online = False
        trigger_event('status', {
            'id': self.user_id,
            'status': 'online' if self.online else 'offline'
            'online': self.online
        })

    def pullChanges(self):

@@ -199,7 +212,7 @@ class Node(object):
            self.online = False
            trigger_event('status', {
                'id': self.user_id,
                'status': 'offline'
                'online': self.online
            })
            r = False
        logger.debug('pushedChanges %s %s', r, self.user_id)

@@ -210,7 +223,7 @@ class Node(object):
            r = self.request(action, settings.preferences['username'], u.info.get('message'))
        else:
            r = self.request(action, u.info.get('message'))
        if r:
        if r != None:
            u.queued = False
            if 'message' in u.info:
                del u.info['message']

@@ -237,7 +250,21 @@ class Node(object):
        self._opener.addheaders = zip(headers.keys(), headers.values())
        r = self._opener.open(url, timeout=self.TIMEOUT)
        if r.getcode() == 200:
            content = r.read()
            if r.headers.get('content-encoding', None) == 'gzip':
                content = gzip.GzipFile(fileobj=r).read()
            else:
                '''
                content = ''
                for chunk in iter(lambda: r.read(1024*1024), ''):
                    content += chunk
                    item.transferprogress = len(content) / item.info['size']
                    item.save()
                    trigger_event('transfer', {
                        'id': item.id, 'progress': item.transferprogress
                    })
                '''
                content = r.read()

            t2 = datetime.now()
            duration = (t2-t1).total_seconds()
            if duration:

@@ -308,7 +335,6 @@ class Nodes(Thread):
            from user.models import User
            self._nodes[user_id] = Node(self, User.get_or_create(user_id))
        else:
            logger.debug('bring existing node online %s', user_id)
            if not self._nodes[user_id].online:
                self._nodes[user_id].go_online()
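The node requests above now handle gzip-encoded responses by wrapping the raw body in StringIO and reading it back through GzipFile. A minimal Python 2-style sketch of that round trip (not the actual request code; the payload is made up):

import gzip
from StringIO import StringIO   # Python 2, matching the imports added above

def compress(data):
    # What a gzip-encoding server does to the response body.
    buf = StringIO()
    f = gzip.GzipFile(fileobj=buf, mode='wb')
    f.write(data)
    f.close()
    return buf.getvalue()

def decompress(data):
    # Same pattern as the node code: wrap the raw body and read it back out.
    return gzip.GzipFile(fileobj=StringIO(data)).read()

body = compress('{"status": "ok"}')
assert decompress(body) == '{"status": "ok"}'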
@@ -5,10 +5,14 @@ from sqlalchemy.sql.expression import and_, not_, or_, ClauseElement
from datetime import datetime
import unicodedata
from sqlalchemy.sql import operators, extract
from sqlalchemy.orm import load_only

import utils
import settings

import logging
logger = logging.getLogger('oxflask.query')

def get_operator(op, type='str'):
    return {
        'str': {

@@ -56,6 +60,9 @@ class Parser(object):
        }
        ...
        '''
        logger.debug('parse_condition %s', condition)
        if not 'value' in condition:
            return None
        k = condition.get('key', '*')
        if not k:
            k = '*'

@@ -105,34 +112,7 @@ class Parser(object):
                q = ~q
            return q
        elif k == 'list':
            '''
            q = Q(id=0)
            l = v.split(":")
            if len(l) == 1:
                vqs = Volume.objects.filter(name=v, user=user)
                if vqs.count() == 1:
                    v = vqs[0]
                    q = Q(files__instances__volume__id=v.id)
            elif len(l) >= 2:
                l = (l[0], ":".join(l[1:]))
                lqs = list(List.objects.filter(name=l[1], user__username=l[0]))
                if len(lqs) == 1 and lqs[0].accessible(user):
                    l = lqs[0]
                    if l.query.get('static', False) == False:
                        data = l.query
                        q = self.parse_conditions(data.get('conditions', []),
                                                  data.get('operator', '&'),
                                                  user, l.user)
                    else:
                        q = Q(id__in=l.items.all())
                    if exclude:
                        q = ~q
                else:
                    q = Q(id=0)
            '''
            l = v.split(":")
            nickname = l[0]
            name = ':'.join(l[1:])
            nickname, name = v.split(':', 1)
            if nickname:
                p = self._user.query.filter_by(nickname=nickname).first()
                v = '%s:%s' % (p.id, name)

@@ -151,7 +131,17 @@ class Parser(object):
                q = self.parse_conditions(data.get('conditions', []),
                                          data.get('operator', '&'))
            else:
                q = (self._find.key == 'list') & (self._find.value == v)
            if exclude:
                q = (self._find.key == 'list') & (self._find.value == v)
                ids = [i.id
                    for i in self._model.query.join(self._find).filter(q).group_by(self._model.id).options(load_only('id'))]
                if ids:
                    q = ~self._model.id.in_(ids)
                else:
                    q = (self._model.id != 0)

            else:
                q = (self._find.key == 'list') & (self._find.value == v)
            return q
        elif key_type == 'date':
            def parse_date(d):
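The list: condition above now uses split(':', 1), so only the first colon separates the nickname from the list name and list names can themselves contain colons; an empty nickname appears to mean the local user's own list, matching the id prefix used in lists_json. Illustrative values:

# 'value' as it arrives in a list: query condition (made-up names)
nickname, name = 'erika:Reading: 2014'.split(':', 1)
# nickname == 'erika', name == 'Reading: 2014'

nickname, name = ':Favorites'.split(':', 1)
# nickname == '', name == 'Favorites' (no lookup by nickname is done in that case)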
oml/user/api.py (195)

@@ -4,7 +4,6 @@ from __future__ import division

import os
from copy import deepcopy
import subprocess
import json

from oxflask.api import actions

@@ -12,7 +11,7 @@ from oxflask.shortcuts import returns_json

import models

from utils import get_position_by_id
from utils import update_dict

import settings
import state

@@ -24,7 +23,14 @@ logger = logging.getLogger('oml.user.api')
@returns_json
def init(request):
    '''
    this is an init request to test stuff
    takes {
    }
    returns {
        config
        user
        preferences
        ui
    }
    '''
    response = {}
    if os.path.exists(settings.oml_config_path):

@@ -43,26 +49,14 @@ def init(request):
    return response
actions.register(init)

def update_dict(root, data):
    for key in data:
        keys = map(lambda p: p.replace('\0', '\\.'), key.replace('\\.', '\0').split('.'))
        value = data[key]
        p = root
        while len(keys)>1:
            key = keys.pop(0)
            if isinstance(p, list):
                p = p[get_position_by_id(p, key)]
            else:
                if key not in p:
                    p[key] = {}
                p = p[key]
        if value == None and keys[0] in p:
            del p[keys[0]]
        else:
            p[keys[0]] = value

@returns_json
def setPreferences(request):
    '''
    takes {
        key: value,
        'sub.key': value
    }
    '''
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    update_dict(settings.preferences, data)
    return settings.preferences

@@ -70,6 +64,12 @@ actions.register(setPreferences)

@returns_json
def setUI(request):
    '''
    takes {
        key: value,
        'sub.key': value
    }
    '''
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    update_dict(settings.ui, data)
    return settings.ui

@@ -77,6 +77,11 @@ actions.register(setUI)

@returns_json
def getUsers(request):
    '''
    returns {
        users: []
    }
    '''
    users = []
    for u in models.User.query.filter(models.User.id!=settings.USER_ID).all():
        users.append(u.json())

@@ -87,7 +92,20 @@ actions.register(getUsers)

@returns_json
def getLists(request):
    '''
    returns {
        lists: []
    }
    '''
    from item.models import Item
    lists = []
    lists.append({
        'id': '',
        'items': Item.query.count(),
        'name': 'Libraries',
        'type': 'libraries',
        'user': '',
    })
    for u in models.User.query.filter((models.User.peered==True)|(models.User.id==settings.USER_ID)):
        lists += u.lists_json()
    return {

@@ -95,30 +113,47 @@ def getLists(request):
    }
actions.register(getLists)

def validate_query(query):
    for condition in query['conditions']:
        if not list(sorted(condition.keys())) in (
            ['conditions', 'operator'],
            ['key', 'operator', 'value']
        ):
            raise Exception('invalid query condition', condition)

@returns_json
def addList(request):
    '''
    takes {
        name
        items
        query
    }
    '''
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    logger.debug('addList %s', data)
    user_id = settings.USER_ID
    l = models.List.get(user_id, data['name'])
    if not l:
    if 'query' in data:
        validate_query(data['query'])
    if data['name']:
        l = models.List.create(user_id, data['name'], data.get('query'))
        if 'items' in data:
            l.add_items(data['items'])
        return l.json()
    else:
        raise Exception('name not set')
    return {}
actions.register(addList, cache=False)

@returns_json
def removeList(request):
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    l = models.List.get(data['id'])
    if l:
        l.remove()
    return {}
actions.register(removeList, cache=False)

@returns_json
def editList(request):
    '''
    takes {
        id
        name
        query
    }
    '''
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    logger.debug('editList %s', data)
    l = models.List.get_or_create(data['id'])

@@ -126,6 +161,7 @@ def editList(request):
    if 'name' in data:
        l.name = data['name']
    if 'query' in data:
        validate_query(data['query'])
        l._query = data['query']
    if l.type == 'static' and name != l.name:
        Changelog.record(state.user(), 'editlist', name, {'name': l.name})

@@ -133,8 +169,29 @@ def editList(request):
    return l.json()
actions.register(editList, cache=False)

@returns_json
def removeList(request):
    '''
    takes {
        id
    }
    '''
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    l = models.List.get(data['id'])
    if l:
        l.remove()
    return {}
actions.register(removeList, cache=False)


@returns_json
def addListItems(request):
    '''
    takes {
        list
        items
    }
    '''
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    if data['list'] == ':':
        from item.models import Item

@@ -153,6 +210,12 @@ actions.register(addListItems, cache=False)

@returns_json
def removeListItems(request):
    '''
    takes {
        list
        items
    }
    '''
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    l = models.List.get(data['list'])
    if l:

@@ -163,6 +226,11 @@ actions.register(removeListItems, cache=False)

@returns_json
def sortLists(request):
    '''
    takes {
        ids
    }
    '''
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    n = 0
    logger.debug('sortLists %s', data)

@@ -177,6 +245,12 @@ actions.register(sortLists, cache=False)

@returns_json
def editUser(request):
    '''
    takes {
        id
        nickname
    }
    '''
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    if 'nickname' in data:
        p = models.User.get_or_create(data['id'])

@@ -187,6 +261,12 @@ actions.register(editUser, cache=False)

@returns_json
def requestPeering(request):
    '''
    takes {
        id
        message
    }
    '''
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    if len(data.get('id', '')) != 43:
        logger.debug('invalid user id')

@@ -203,6 +283,12 @@ actions.register(requestPeering, cache=False)

@returns_json
def acceptPeering(request):
    '''
    takes {
        id
        message
    }
    '''
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    if len(data.get('id', '')) != 43:
        logger.debug('invalid user id')

@@ -218,6 +304,12 @@ actions.register(acceptPeering, cache=False)

@returns_json
def rejectPeering(request):
    '''
    takes {
        id
        message
    }
    '''
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    if len(data.get('id', '')) != 43:
        logger.debug('invalid user id')

@@ -232,6 +324,12 @@ actions.register(rejectPeering, cache=False)

@returns_json
def removePeering(request):
    '''
    takes {
        id
        message
    }
    '''
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    if len(data.get('id', '')) != 43:
        logger.debug('invalid user id')

@@ -246,6 +344,10 @@ actions.register(removePeering, cache=False)

@returns_json
def cancelPeering(request):
    '''
    takes {
    }
    '''
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    if len(data.get('id', '')) != 43:
        logger.debug('invalid user id')

@@ -260,29 +362,12 @@ actions.register(cancelPeering, cache=False)

@returns_json
def getActivity(request):
    '''
    return {
        activity
        progress
    }
    '''
    return state.activity
actions.register(getActivity, cache=False)

@returns_json
def selectFolder(request):
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    cmd = ['./ctl', 'ui', 'folder']
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    stdout, stderr = p.communicate()
    path = stdout.decode('utf-8').strip()
    return {
        'path': path
    }
actions.register(selectFolder, cache=False)

@returns_json
def selectFile(request):
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    cmd = ['./ctl', 'ui', 'file']
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    stdout, stderr = p.communicate()
    path = stdout.decode('utf-8').strip()
    return {
        'path': path
    }
actions.register(selectFile, cache=False)
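validate_query above only accepts conditions whose keys are exactly {key, operator, value} or a nested {conditions, operator} group; anything else raises. Illustrative inputs, assuming validate_query is in scope:

ok = {
    'conditions': [
        {'key': 'title', 'operator': '=', 'value': 'ulysses'},
        {'conditions': [
            {'key': 'author', 'operator': '=', 'value': 'joyce'}
        ], 'operator': '&'},
    ],
    'operator': '&'
}
validate_query(ok)    # passes silently

bad = {'conditions': [{'key': 'title', 'value': 'ulysses'}], 'operator': '&'}
validate_query(bad)   # raises Exception('invalid query condition', ...) because 'operator' is missing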
@@ -66,7 +66,13 @@ class User(db.Model):
        return state.nodes and state.nodes.check_online(self.id)

    def lists_json(self):
        return [l.json() for l in self.lists.order_by('position')]
        return [{
            'id': '%s:' % ('' if self.id == settings.USER_ID else self.nickname),
            'name': 'Library',
            'type': 'library',
            'items': self.items.count(),
            'user': self.nickname if self.id != settings.USER_ID else settings.preferences['username'],
        }] + [l.json() for l in self.lists.order_by('position')]

    def update_peering(self, peered, username=None):
        was_peering = self.peered

@@ -128,19 +134,17 @@ class List(db.Model):
    user = db.relationship('User', backref=db.backref('lists', lazy='dynamic'))

    items = db.relationship('Item', secondary=list_items,
        backref=db.backref('lists', lazy='dynamic'))
        backref=db.backref('lists', lazy='dynamic'))

    @classmethod
    def get(cls, user_id, name=None):
        if not name:
        if name is None:
            user_id, name = cls.get_user_name(user_id)
        return cls.query.filter_by(user_id=user_id, name=name).first()

    @classmethod
    def get_user_name(cls, user_id):
        l = user_id.split(':')
        nickname = l[0]
        name = ':'.join(l[1:])
        nickname, name = user_id.split(':', 1)
        if nickname:
            user = User.query.filter_by(nickname=nickname).first()
            user_id = user.id

@@ -149,19 +153,22 @@
        return user_id, name

    @classmethod
    def get_or_create(cls, user_id, name=None):
        if not name:
    def get_or_create(cls, user_id, name=None, query=None):
        if name is None:
            user_id, name = cls.get_user_name(user_id)
        l = cls.get(user_id, name)
        if not l:
            l = cls(name=name, user_id=user_id)
            db.session.add(l)
            db.session.commit()
            l = cls.create(user_id, name, query)
        return l

    @classmethod
    def create(cls, user_id, name, query=None):
        l = cls(name=name, user_id=user_id)
        prefix = name
        n = 2
        while cls.get(user_id, name):
            name = '%s [%s]' % (prefix, n)
            n += 1
        l = cls(user_id=user_id, name=name)
        l._query = query
        l.type = 'smart' if l._query else 'static'
        l.position = cls.query.filter_by(user_id=user_id).count()
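List.create above makes the new list name unique by appending a counter, so adding 'Reading' three times yields 'Reading', 'Reading [2]', 'Reading [3]'. A standalone sketch of just that naming loop, with a hypothetical exists() check in place of the database query:

def unique_name(name, exists):
    # exists: callable returning True if a list with that name is already taken
    prefix = name
    n = 2
    while exists(name):
        name = '%s [%s]' % (prefix, n)
        n += 1
    return name

taken = {'Reading', 'Reading [2]'}
print(unique_name('Reading', taken.__contains__))   # -> 'Reading [3]'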
oml/utils.py (40)

@@ -2,17 +2,22 @@
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division

import os
import Image
from StringIO import StringIO
import re
import stdnum.isbn
import socket
import cStringIO
import gzip

import ox
import ed25519

from meta.utils import normalize_isbn, find_isbns

import logging
logger = logging.getLogger('oml.utils')

ENCODING='base64'

@@ -108,3 +113,38 @@ def get_public_ipv6():
    s.close()
    return ip

def update_dict(root, data):
    for key in data:
        keys = map(lambda part: part.replace('\0', '\\.'), key.replace('\\.', '\0').split('.'))
        value = data[key]
        p = root
        while len(keys)>1:
            key = keys.pop(0)
            if isinstance(p, list):
                p = p[get_position_by_id(p, key)]
            else:
                if key not in p:
                    p[key] = {}
                p = p[key]
        if value == None and keys[0] in p:
            del p[keys[0]]
        else:
            p[keys[0]] = value

def remove_empty_folders(prefix):
    empty = []
    for root, folders, files in os.walk(prefix):
        if not folders and not files:
            empty.append(root)
    for folder in empty:
        remove_empty_tree(folder)

def remove_empty_tree(leaf):
    while leaf:
        if not os.path.exists(leaf):
            leaf = os.path.dirname(leaf)
        elif os.path.isdir(leaf) and not os.listdir(leaf):
            logger.debug('rmdir %s', leaf)
            os.rmdir(leaf)
        else:
            break
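update_dict, now shared from oml/utils.py, interprets dotted keys as nested paths, deletes the leaf when the value is None, and does not treat a backslash-escaped dot as a separator. A quick usage sketch, assuming update_dict from the module above is in scope; the preference keys are made up:

prefs = {'ui': {'theme': 'classic', 'size': 12}}

update_dict(prefs, {'ui.theme': 'modern'})      # walks to prefs['ui'] and sets 'theme'
update_dict(prefs, {'ui.size': None})           # a None value deletes the leaf key
update_dict(prefs, {'downloads': {'rate': 0}})  # keys without dots work at the top level

# prefs is now {'ui': {'theme': 'modern'}, 'downloads': {'rate': 0}}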