Open Media Library

commit 2ee2bc178a
228 changed files with 85988 additions and 0 deletions

oml/item/__init__.py (new file, empty)

oml/item/add.py (new file, 19 lines added)
@@ -0,0 +1,19 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4

import base64
import models

import ox

import scan

def add(path):
    info = scan.get_metadata(path)
    id = info.pop('id')
    item = models.Item.get_or_create(id)
    item.path = path
    item.info = info
    models.db.session.add(item)
    models.db.session.commit()

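A note on usage: add() glues the scanner's metadata extraction to the database layer; scan.get_metadata() is expected to return a dict whose 'id' entry becomes the Item primary key, with the rest stored as item.info. A minimal sketch of a call, assuming the oml package and its application/database context are set up (the path below is only an illustration):

    # hypothetical usage sketch; the path is made up and an app/db context is assumed
    from item.add import add

    add(u'/home/user/Books/Example.pdf')
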

oml/item/api.py (new file, 210 lines added)
@@ -0,0 +1,210 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from datetime import datetime

from flask import json
from oxflask.api import actions
from oxflask.shortcuts import returns_json

from oml import utils
import query

import models
import settings
from changelog import Changelog
import re
import state

import utils

@returns_json
def find(request):
    '''
    find items
    '''
    response = {}
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    q = query.parse(data)
    if 'group' in q:
        response['items'] = []
        '''
        items = 'items'
        item_qs = q['qs']
        order_by = query.order_by_group(q)
        qs = models.Facet.objects.filter(key=q['group']).filter(item__id__in=item_qs)
        qs = qs.values('value').annotate(items=Count('id')).order_by(*order_by)

        if 'positions' in q:
            response['positions'] = {}
            ids = [j['value'] for j in qs]
            response['positions'] = utils.get_positions(ids, q['positions'])
        elif 'range' in data:
            qs = qs[q['range'][0]:q['range'][1]]
            response['items'] = [{'name': i['value'], 'items': i[items]} for i in qs]
        else:
            response['items'] = qs.count()
        '''
        _g = {}
        key = utils.get_by_id(settings.config['itemKeys'], q['group'])
        for item in q['qs']:
            i = item.json()
            if q['group'] in i:
                values = i[q['group']]
                if isinstance(values, basestring):
                    values = [values]
                for value in values:
                    if key.get('filterMap') and value:
                        value = re.compile(key.get('filterMap')).findall(value)
                        if value:
                            value = value[0]
                        else:
                            continue
                    if value not in _g:
                        _g[value] = 0
                    _g[value] += 1
        g = [{'name': k, 'items': _g[k]} for k in _g]
        if 'sort' in data: # parse adds default sort to q!
            g.sort(key=lambda k: k[q['sort'][0]['key']])
            if q['sort'][0]['operator'] == '-':
                g.reverse()
        if 'positions' in data:
            response['positions'] = {}
            ids = [k['name'] for k in g]
            response['positions'] = utils.get_positions(ids, data['positions'])
        elif 'range' in data:
            response['items'] = g[q['range'][0]:q['range'][1]]
        else:
            response['items'] = len(g)
    elif 'position' in data:
        ids = [i.id for i in q['qs']]
        response['position'] = utils.get_positions(ids, [data['qs'][0].id])[0]
    elif 'positions' in data:
        ids = [i.id for i in q['qs']]
        response['positions'] = utils.get_positions(ids, data['positions'])
    elif 'keys' in data:
        '''
        qs = qs[q['range'][0]:q['range'][1]]
        response['items'] = [p.json(data['keys']) for p in qs]
        '''
        response['items'] = []
        for i in q['qs'][q['range'][0]:q['range'][1]]:
            j = i.json()
            response['items'].append({k:j[k] for k in j if not data['keys'] or k in data['keys']})
    else:
        items = [i.json() for i in q['qs']]
        response['items'] = len(items)
        response['size'] = sum([i.get('size',0) for i in items])
    return response
actions.register(find)

@returns_json
def get(request):
    response = {}
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    item = models.Item.get(data['id'])
    if item:
        response = item.json(data['keys'] if 'keys' in data else None)
    return response
actions.register(get)

@returns_json
def edit(request):
    response = {}
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    print 'edit', data
    item = models.Item.get(data['id'])
    keys = filter(lambda k: k in models.Item.id_keys, data.keys())
    print item, keys
    if item and keys and item.json()['mediastate'] == 'available':
        key = keys[0]
        print 'update mainid', key, data[key]
        item.update_mainid(key, data[key])
        response = item.json()
    else:
        print 'can only edit available items'
        response = item.json()
    return response
actions.register(edit, cache=False)


@returns_json
def identify(request):
    '''
    takes {
        title: string,
        author: [string],
        publisher: string,
        date: string
    }
    returns {
        title: string,
        author: [string],
        date: string,
    }
    '''
    response = {}
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    response = {
        'items': [
            {
                u'title': u'Cinema',
                u'author': [u'Gilles Deleuze'],
                u'date': u'1986-10',
                u'publisher': u'University of Minnesota Press',
                u'isbn10': u'0816613990',
            },
            {
                u'title': u'How to Change the World: Reflections on Marx and Marxism',
                u'author': [u'Eric Hobsbawm'],
                u'date': u'2011-09-06',
                u'publisher': u'Yale University Press',
                u'isbn13': u'9780300176162',
            }
        ]
    }
    return response
actions.register(identify)

@returns_json
def download(request):
    response = {}
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    item = models.Item.get(data['id'])
    if item:
        item.transferprogress = 0
        item.transferadded = datetime.now()
        p = models.User.get(settings.USER_ID)
        if p not in item.users:
            item.users.append(p)
        item.update()
        response = {'status': 'queued'}
    return response
actions.register(download, cache=False)

@returns_json
def cancelDownload(request):
    response = {}
    data = json.loads(request.form['data']) if 'data' in request.form else {}
    item = models.Item.get(data['id'])
    if item:
        item.transferprogress = None
        item.transferadded = None
        p = models.User.get(settings.USER_ID)
        if p in item.users:
            item.users.remove(p)
        item.update()
        response = {'status': 'cancelled'}
    return response
actions.register(cancelDownload, cache=False)

@returns_json
def scan(request):
    state.main.add_callback(state.websockets[0].put, json.dumps(['scan', {}]))
    return {}
actions.register(scan, cache=False)

@returns_json
def _import(request):
    state.main.add_callback(state.websockets[0].put, json.dumps(['import', {}]))
    return {}
actions.register(_import, 'import', cache=False)

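Each of these handlers reads a single form field named data containing a JSON object and is registered with oxflask's actions registry. As a rough client-side sketch of calling find, assuming the actions are served from an /api/ endpoint in the pan.do/ra style that oxflask mirrors (the URL, port, action field name and response envelope are assumptions, not taken from this commit):

    # hypothetical client call; endpoint, port and response envelope are assumptions
    import json
    import requests

    payload = {
        'query': {'conditions': [{'key': '*', 'value': 'marx', 'operator': '='}], 'operator': '&'},
        'keys': ['title', 'author', 'mediastate'],
        'range': [0, 100],
        'sort': [{'key': 'title', 'operator': '+'}],
    }
    r = requests.post('http://127.0.0.1:9842/api/',  # address assumed
                      data={'action': 'find', 'data': json.dumps(payload)})
    print(r.json())
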

oml/item/covers.py (new file, 74 lines added)
@@ -0,0 +1,74 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4

import sqlite3
import Image
from StringIO import StringIO

from settings import covers_db_path

class Covers(dict):
    def __init__(self, db):
        self._db = db

    def connect(self):
        self.conn = sqlite3.connect(self._db, timeout=10)
        self.create()

    def create(self):
        c = self.conn.cursor()
        c.execute(u'CREATE TABLE IF NOT EXISTS cover (id varchar(64) unique, data blob)')
        c.execute(u'CREATE TABLE IF NOT EXISTS setting (key varchar(256) unique, value text)')
        if int(self.get_setting(c, 'version', 0)) < 1:
            self.set_setting(c, 'version', 1)

    def get_setting(self, c, key, default=None):
        c.execute(u'SELECT value FROM setting WHERE key = ?', (key, ))
        for row in c:
            return row[0]
        return default

    def set_setting(self, c, key, value):
        c.execute(u'INSERT OR REPLACE INTO setting values (?, ?)', (key, str(value)))

    def black(self):
        img = Image.new('RGB', (80, 128))
        o = StringIO()
        img.save(o, format='jpeg')
        data = o.getvalue()
        o.close()
        return data

    def __getitem__(self, id, default=None):
        sql = u'SELECT data FROM cover WHERE id=?'
        self.connect()
        c = self.conn.cursor()
        c.execute(sql, (id, ))
        data = default
        for row in c:
            data = row[0]
            break
        c.close()
        self.conn.close()
        return data

    def __setitem__(self, id, data):
        sql = u'INSERT OR REPLACE INTO cover values (?, ?)'
        self.connect()
        c = self.conn.cursor()
        data = sqlite3.Binary(data)
        c.execute(sql, (id, data))
        self.conn.commit()
        c.close()
        self.conn.close()

    def __delitem__(self, id):
        sql = u'DELETE FROM cover WHERE id = ?'
        self.connect()
        c = self.conn.cursor()
        c.execute(sql, (id, ))
        self.conn.commit()
        c.close()
        self.conn.close()

covers = Covers(covers_db_path)

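Covers subclasses dict but persists the blobs in a small standalone SQLite table, opening and closing a connection on every access, so other modules can treat it as a plain mapping from item id to JPEG bytes. A minimal sketch of that interface, using a throwaway database path in place of covers_db_path (path and id are made up):

    # minimal sketch of the dict-style interface; the path and id are illustrative
    from item.covers import Covers

    covers = Covers('/tmp/covers-example.db')
    covers['abc123'] = covers.black()   # store the 80x128 black placeholder JPEG
    data = covers['abc123']             # bytes, or None if the id is unknown
    del covers['abc123']                # drop the row again
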

oml/item/migrate.py (new file, 13 lines added)
@@ -0,0 +1,13 @@
import models
from copy import deepcopy

def import_all():
    for i in models.items:
        item = models.Item.get_or_create(i['id'])
        item.path = i['path']
        item.info = deepcopy(i)
        del item.info['path']
        del item.info['id']
        item.meta = item.info.pop('meta', {})
        models.db.session.add(item)
    models.db.session.commit()


oml/item/models.py (new file, 427 lines added)
@@ -0,0 +1,427 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4

import os
import re
import base64
import json
import hashlib
from datetime import datetime
from StringIO import StringIO

import Image
import ox

import settings
from settings import db, config

from user.models import User

from person import get_sort_name

import media
from meta import scraper

import utils

from oxflask.db import MutableDict

from covers import covers
from changelog import Changelog
from websocket import trigger_event

class Work(db.Model):

    created = db.Column(db.DateTime())
    modified = db.Column(db.DateTime())

    id = db.Column(db.String(32), primary_key=True)

    meta = db.Column(MutableDict.as_mutable(db.PickleType(pickler=json)))

    def __repr__(self):
        return self.id

    def __init__(self, id):
        self.id = id
        self.created = datetime.now()
        self.modified = datetime.now()

class Edition(db.Model):

    created = db.Column(db.DateTime())
    modified = db.Column(db.DateTime())

    id = db.Column(db.String(32), primary_key=True)

    meta = db.Column(MutableDict.as_mutable(db.PickleType(pickler=json)))

    work_id = db.Column(db.String(32), db.ForeignKey('work.id'))
    work = db.relationship('Work', backref=db.backref('editions', lazy='dynamic'))

    def __repr__(self):
        return self.id

    def __init__(self, id):
        self.id = id
        self.created = datetime.now()
        self.modified = datetime.now()

user_items = db.Table('useritem',
    db.Column('user_id', db.String(43), db.ForeignKey('user.id')),
    db.Column('item_id', db.String(32), db.ForeignKey('item.id'))
)

class Item(db.Model):

    created = db.Column(db.DateTime())
    modified = db.Column(db.DateTime())

    id = db.Column(db.String(32), primary_key=True)

    info = db.Column(MutableDict.as_mutable(db.PickleType(pickler=json)))
    meta = db.Column(MutableDict.as_mutable(db.PickleType(pickler=json)))

    added = db.Column(db.DateTime()) # added to local library
    accessed = db.Column(db.DateTime())
    timesaccessed = db.Column(db.Integer())

    transferadded = db.Column(db.DateTime())
    transferprogress = db.Column(db.Float())

    users = db.relationship('User', secondary=user_items,
        backref=db.backref('items', lazy='dynamic'))

    edition_id = db.Column(db.String(32), db.ForeignKey('edition.id'))
    edition = db.relationship('Edition', backref=db.backref('items', lazy='dynamic'))

    work_id = db.Column(db.String(32), db.ForeignKey('work.id'))
    work = db.relationship('Work', backref=db.backref('items', lazy='dynamic'))

    @property
    def timestamp(self):
        return self.modified.strftime('%s')

    def __repr__(self):
        return self.id

    def __init__(self, id):
        if isinstance(id, list):
            id = base64.b32encode(hashlib.sha1(''.join(id)).digest())
        self.id = id
        self.created = datetime.now()
        self.modified = datetime.now()
        self.info = {}
        self.meta = {}

    @classmethod
    def get(cls, id):
        if isinstance(id, list):
            id = base64.b32encode(hashlib.sha1(''.join(id)).digest())
        return cls.query.filter_by(id=id).first()

    @classmethod
    def get_or_create(cls, id, info=None):
        if isinstance(id, list):
            id = base64.b32encode(hashlib.sha1(''.join(id)).digest())
        item = cls.query.filter_by(id=id).first()
        if not item:
            item = cls(id=id)
            if info:
                item.info = info
            db.session.add(item)
            db.session.commit()
        return item

    def json(self, keys=None):
        j = {}
        j['id'] = self.id
        j['created'] = self.created
        j['modified'] = self.modified
        j['timesaccessed'] = self.timesaccessed
        j['accessed'] = self.accessed
        j['added'] = self.added
        j['transferadded'] = self.transferadded
        j['transferprogress'] = self.transferprogress
        j['users'] = map(str, list(self.users))

        if self.info:
            j.update(self.info)
        if self.meta:
            j.update(self.meta)

        for key in self.id_keys + ['mainid']:
            if key not in self.meta and key in j:
                del j[key]
        '''
        if self.work_id:
            j['work'] = {
                'olid': self.work_id
            }
            j['work'].update(self.work.meta)
        '''
        if keys:
            for k in j.keys():
                if k not in keys:
                    del j[k]
        return j

    def get_path(self):
        f = self.files.first()
        prefs = settings.preferences
        prefix = os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books/')
        return os.path.join(prefix, f.path) if f else None

    def update_sort(self):
        for key in config['itemKeys']:
            if key.get('sort'):
                value = self.json().get(key['id'], None)
                sort_type = key.get('sortType', key['type'])
                if value:
                    if sort_type == 'integer':
                        value = int(value)
                    elif sort_type == 'float':
                        value = float(value)
                    elif sort_type == 'date':
                        pass
                    elif sort_type == 'name':
                        if not isinstance(value, list):
                            value = [value]
                        value = map(get_sort_name, value)
                        value = ox.sort_string(u'\n'.join(value))
                    elif sort_type == 'title':
                        value = utils.sort_title(value).lower()
                    else:
                        if isinstance(value, list):
                            value = u'\n'.join(value)
                        if value:
                            value = unicode(value)
                            value = ox.sort_string(value).lower()
                setattr(self, 'sort_%s' % key['id'], value)

    def update_find(self):
        for key in config['itemKeys']:
            if key.get('find') or key.get('filter'):
                value = self.json().get(key['id'], None)
                if key.get('filterMap') and value:
                    value = re.compile(key.get('filterMap')).findall(value)[0]
                print key['id'], value
                if value:
                    if isinstance(value, list):
                        Find.query.filter_by(item_id=self.id, key=key['id']).delete()
                        for v in value:
                            f = Find(item_id=self.id, key=key['id'])
                            f.value = v.lower()
                            db.session.add(f)
                    else:
                        f = Find.get_or_create(self.id, key['id'])
                        f.value = value.lower()
                        db.session.add(f)
                else:
                    f = Find.get(self.id, key['id'])
                    if f:
                        db.session.delete(f)

    def update_lists(self):
        Find.query.filter_by(item_id=self.id, key='list').delete()
        for p in self.users:
            f = Find()
            f.item_id = self.id
            f.key = 'list'
            if p.id == settings.USER_ID:
                f.value = ':'
            else:
                f.value = '%s:' % p.id
            db.session.add(f)

    def update(self):
        users = map(str, list(self.users))
        self.meta['mediastate'] = 'available' # available, unavailable, transferring
        if self.transferadded and self.transferprogress < 1:
            self.meta['mediastate'] = 'transferring'
        else:
            self.meta['mediastate'] = 'available' if settings.USER_ID in users else 'unavailable'
        self.update_sort()
        self.update_find()
        self.update_lists()
        self.modified = datetime.now()
        self.save()

    def save(self):
        db.session.add(self)
        db.session.commit()

    def update_mainid(self, key, id):
        record = {}
        if id:
            self.meta[key] = id
            self.meta['mainid'] = key
            record[key] = id
        else:
            if key in self.meta:
                del self.meta[key]
            if 'mainid' in self.meta:
                del self.meta['mainid']
            record[key] = ''
        for k in self.id_keys:
            if k != key:
                if k in self.meta:
                    del self.meta[k]
        print 'mainid', 'mainid' in self.meta, self.meta.get('mainid')
        print 'key', key, self.meta.get(key)
        # get metadata from external resources
        self.scrape()
        self.update()
        self.update_cover()
        db.session.add(self)
        db.session.commit()
        user = User.get_or_create(settings.USER_ID)
        if user in self.users:
            Changelog.record(user, 'edititem', self.id, record)

    def extract_cover(self):
        path = self.get_path()
        if not path:
            return getattr(media, self.meta['extensions']).cover(path)

    def update_cover(self):
        cover = None
        if 'cover' in self.meta:
            cover = ox.cache.read_url(self.meta['cover'])
            #covers[self.id] = requests.get(self.meta['cover']).content
            if cover:
                covers[self.id] = cover
        path = self.get_path()
        if not cover and path:
            cover = self.extract_cover()
            if cover:
                covers[self.id] = cover
        if cover:
            img = Image.open(StringIO(cover))
            self.meta['coverRatio'] = img.size[0]/img.size[1]
            for p in (':128', ':256'):
                del covers['%s%s' % (self.id, p)]
        return cover

    def scrape(self):
        mainid = self.meta.get('mainid')
        print 'scrape', mainid, self.meta.get(mainid)
        if mainid == 'olid':
            scraper.update_ol(self)
            scraper.add_lookupbyisbn(self)
        elif mainid in ('isbn10', 'isbn13'):
            scraper.add_lookupbyisbn(self)
        elif mainid == 'lccn':
            import meta.lccn
            info = meta.lccn.info(self.meta[mainid])
            for key in info:
                self.meta[key] = info[key]
        else:
            print 'FIX UPDATE', mainid
        self.update()

    def save_file(self, content):
        p = User.get(settings.USER_ID)
        f = File.get(self.id)
        if not f:
            path = 'Downloads/%s.%s' % (self.id, self.info['extension'])
            f = File.get_or_create(self.id, self.info, path=path)
            path = self.get_path()
            if not os.path.exists(path):
                ox.makedirs(os.path.dirname(path))
                with open(path, 'wb') as fd:
                    fd.write(content)
                if p not in self.users:
                    self.users.append(p)
                self.transferprogress = 1
                self.added = datetime.now()
                Changelog.record(p, 'additem', self.id, self.info)
            self.update()
            trigger_event('transfer', {
                'id': self.id, 'progress': 1
            })
            return True
        else:
            print 'TRIED TO SAVE EXISTING FILE!!!'
            self.transferprogress = 1
            self.update()
            return False

for key in config['itemKeys']:
    if key.get('sort'):
        sort_type = key.get('sortType', key['type'])
        if sort_type == 'integer':
            col = db.Column(db.BigInteger(), index=True)
        elif sort_type == 'float':
            col = db.Column(db.Float(), index=True)
        elif sort_type == 'date':
            col = db.Column(db.DateTime(), index=True)
        else:
            col = db.Column(db.String(1000), index=True)
        setattr(Item, 'sort_%s' % key['id'], col)

Item.id_keys = ['isbn10', 'isbn13', 'lccn', 'olid', 'oclc']
Item.item_keys = config['itemKeys']
Item.filter_keys = []

class Find(db.Model):
    id = db.Column(db.Integer(), primary_key=True)
    item_id = db.Column(db.String(32), db.ForeignKey('item.id'))
    item = db.relationship('Item', backref=db.backref('find', lazy='dynamic'))
    key = db.Column(db.String(200), index=True)
    value = db.Column(db.Text())

    def __repr__(self):
        return (u'%s=%s' % (self.key, self.value)).encode('utf-8')

    @classmethod
    def get(cls, item, key):
        return cls.query.filter_by(item_id=item, key=key).first()

    @classmethod
    def get_or_create(cls, item, key):
        f = cls.get(item, key)
        if not f:
            f = cls(item_id=item, key=key)
            db.session.add(f)
            db.session.commit()
        return f

class File(db.Model):

    created = db.Column(db.DateTime())
    modified = db.Column(db.DateTime())

    sha1 = db.Column(db.String(32), primary_key=True)
    path = db.Column(db.String(2048))

    info = db.Column(MutableDict.as_mutable(db.PickleType(pickler=json)))

    item_id = db.Column(db.String(32), db.ForeignKey('item.id'))
    item = db.relationship('Item', backref=db.backref('files', lazy='dynamic'))

    @classmethod
    def get(cls, sha1):
        return cls.query.filter_by(sha1=sha1).first()

    @classmethod
    def get_or_create(cls, sha1, info=None, path=None):
        f = cls.get(sha1)
        if not f:
            f = cls(sha1=sha1)
            if info:
                f.info = info
            if path:
                f.path = path
            f.item_id = Item.get_or_create(id=sha1, info=info).id
            db.session.add(f)
            db.session.commit()
        return f

    def __repr__(self):
        return self.sha1

    def __init__(self, sha1):
        self.sha1 = sha1
        self.created = datetime.now()
        self.modified = datetime.now()

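One detail worth noting: Item.__init__, Item.get and Item.get_or_create all accept the id either as a string or as a list of strings, and the list form is hashed the same way in all three places, SHA-1 over the concatenated parts, base32-encoded. The computation in isolation, with a made-up input:

    # how a list id collapses into the 32-character primary key; the input is illustrative
    import base64
    import hashlib

    parts = ['isbn13', '9780300176162']
    id = base64.b32encode(hashlib.sha1(''.join(parts)).digest())
    print(id)   # 32 base32 characters, which is why the column is db.String(32)
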

oml/item/person.py (new file, 42 lines added)
@@ -0,0 +1,42 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4

import unicodedata

import ox

from settings import db

def get_sort_name(name, sortname=None):
    name = unicodedata.normalize('NFKD', name).strip()
    if name:
        person = Person.get(name)
        if not person:
            person = Person(name=name, sortname=sortname)
            person.save()
        sortname = unicodedata.normalize('NFKD', person.sortname)
    else:
        sortname = u''
    return sortname

class Person(db.Model):
    name = db.Column(db.String(1024), primary_key=True)
    sortname = db.Column(db.String())
    numberofnames = db.Column(db.Integer())

    def __repr__(self):
        return self.name

    @classmethod
    def get(cls, name):
        return cls.query.filter_by(name=name).first()

    def save(self):
        if not self.sortname:
            self.sortname = ox.get_sort_name(self.name)
        self.sortname = unicodedata.normalize('NFKD', self.sortname)
        self.sortsortname = ox.sort_string(self.sortname)
        self.numberofnames = len(self.name.split(' '))
        db.session.add(self)
        db.session.commit()

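get_sort_name caches one Person row per distinct name and falls back to ox.get_sort_name when no sort name is supplied, so repeated calls reuse whatever was stored first. A rough sketch of the intended behaviour (the output shown is what ox.get_sort_name typically produces, not something asserted by this commit, and the app/db context behind settings.db is assumed):

    # rough usage sketch; assumes the application/database context is set up
    from item.person import get_sort_name

    print(get_sort_name(u'Gilles Deleuze'))                    # e.g. u'Deleuze, Gilles'
    print(get_sort_name(u'Eric Hobsbawm', u'Hobsbawm, Eric'))  # explicit sortname stored on first use
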

oml/item/query.py (new file, 83 lines added)
@@ -0,0 +1,83 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4

import settings
import models
import utils
import oxflask.query

from sqlalchemy.sql.expression import nullslast

def parse(data):
    query = {}
    query['range'] = [0, 100]
    query['sort'] = [{'key':'title', 'operator':'+'}]
    for key in ('keys', 'group', 'list', 'range', 'sort', 'query'):
        if key in data:
            query[key] = data[key]
    print data
    query['qs'] = oxflask.query.Parser(models.Item).find(data)
    if 'query' in query and 'conditions' in query['query'] and query['query']['conditions']:
        conditions = query['query']['conditions']
        condition = conditions[0]
        if condition['key'] == '*':
            value = condition['value'].lower()
            query['qs'] = models.Item.query.join(
                models.Find, models.Find.item_id==models.Item.id).filter(
                models.Find.value.contains(value))
    if 'group' in query:
        query['qs'] = order_by_group(query['qs'], query['sort'])
    else:
        query['qs'] = order(query['qs'], query['sort'])
    return query

def order(qs, sort, prefix='sort_'):
    order_by = []
    if len(sort) == 1:
        additional_sort = settings.config['user']['ui']['listSort']
        key = utils.get_by_id(models.Item.item_keys, sort[0]['key'])
        for s in key.get('additionalSort', additional_sort):
            if s['key'] not in [e['key'] for e in sort]:
                sort.append(s)
    for e in sort:
        operator = e['operator']
        if operator != '-':
            operator = ''
        else:
            operator = ' DESC'
        key = {}.get(e['key'], e['key'])
        if key not in ('fixme', ):
            key = "%s%s" % (prefix, key)
        order = '%s%s' % (key, operator)
        order_by.append(order)
    if order_by:
        #nullslast not supported in sqlite, use IS NULL hack instead
        #order_by = map(nullslast, order_by)
        _order_by = []
        for order in order_by:
            nulls = "%s IS NULL" % order.split(' ')[0]
            _order_by.append(nulls)
            _order_by.append(order)
        order_by = _order_by
        qs = qs.order_by(*order_by)
    return qs

def order_by_group(qs, sort):
    return qs
    if 'sort' in query:
        if len(query['sort']) == 1 and query['sort'][0]['key'] == 'items':
            order_by = query['sort'][0]['operator'] == '-' and '-items' or 'items'
            if query['group'] == "year":
                secondary = query['sort'][0]['operator'] == '-' and '-sortvalue' or 'sortvalue'
                order_by = (order_by, secondary)
            elif query['group'] != "keyword":
                order_by = (order_by, 'sortvalue')
            else:
                order_by = (order_by, 'value')
        else:
            order_by = query['sort'][0]['operator'] == '-' and '-sortvalue' or 'sortvalue'
            order_by = (order_by, 'items')
    else:
        order_by = ('-sortvalue', 'items')
    return order_by

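parse() takes the same request dict that api.find receives, fills in default range and sort values, and special-cases a single '*' condition as a full-text join against the Find table. The keys it looks at are the ones listed in the code above; a sketch of a typical input, with illustrative values:

    # illustrative input for query.parse(); key names come from the code above
    data = {
        'query': {
            'conditions': [{'key': '*', 'value': 'cinema', 'operator': '='}],
            'operator': '&',
        },
        'sort': [{'key': 'author', 'operator': '+'}],  # default is title ascending
        'range': [0, 24],                              # default is [0, 100]
        'group': 'author',                             # optional, routes through order_by_group()
    }
    # q = parse(data); q['qs'] is a SQLAlchemy query ordered on the sort_* columns
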

oml/item/scan.py (new file, 182 lines added)
@@ -0,0 +1,182 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division

import os
import shutil
from datetime import datetime

import ox

from app import app
import settings
from settings import db
from item.models import File
from user.models import User

from changelog import Changelog

import media
from websocket import trigger_event

def remove_missing():
    dirty = False
    with app.app_context():
        user = User.get_or_create(settings.USER_ID)
        prefs = settings.preferences
        prefix = os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books/')
        for f in File.query:
            if not os.path.exists(f.item.get_path()):
                dirty = True
                print 'file gone', f, f.item.get_path()
                f.item.users.remove(user)
                if not f.item.users:
                    print 'last user, remove'
                    db.session.delete(f.item)
                else:
                    f.item.update_lists()
                Changelog.record(user, 'removeitem', f.item.id)
                db.session.delete(f)
        if dirty:
            db.session.commit()

def run_scan():
    remove_missing()
    with app.app_context():
        prefs = settings.preferences
        prefix = os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books/')
        if not prefix[-1] == '/':
            prefix += '/'
        user = User.get_or_create(settings.USER_ID)
        assert isinstance(prefix, unicode)
        extensions = ['pdf', 'epub', 'txt']
        books = []
        for root, folders, files in os.walk(prefix):
            for f in files:
                #if f.startswith('._') or f == '.DS_Store':
                if f.startswith('.'):
                    continue
                f = os.path.join(root, f)
                ext = f.split('.')[-1]
                if ext in extensions:
                    books.append(f)

        trigger_event('scan', {
            'path': prefix,
            'files': len(books)
        })
        position = 0
        added = 0
        for f in ox.sorted_strings(books):
            position += 1
            id = media.get_id(f)
            file = File.get(id)
            path = f[len(prefix):]
            if not file:
                data = media.metadata(f)
                ext = f.split('.')[-1]
                data['extension'] = ext
                data['size'] = os.stat(f).st_size
                file = File.get_or_create(id, data, path)
                item = file.item
                if 'mainid' in file.info:
                    del file.info['mainid']
                    db.session.add(file)
                if 'mainid' in item.info:
                    item.meta['mainid'] = item.info.pop('mainid')
                    item.meta[item.meta['mainid']] = item.info[item.meta['mainid']]
                    db.session.add(item)
                item.users.append(user)
                Changelog.record(user, 'additem', item.id, item.info)
                if item.meta.get('mainid'):
                    Changelog.record(user, 'edititem', item.id, {
                        item.meta['mainid']: item.meta[item.meta['mainid']]
                    })
                item.added = datetime.now()
                item.scrape()
                added += 1
            trigger_event('scan', {
                'position': position,
                'length': len(books),
                'path': path,
                'progress': position/len(books),
                'added': added,
            })
        trigger_event('scan', {
            'progress': 1,
            'added': added,
            'done': True
        })

def run_import():
    with app.app_context():
        prefs = settings.preferences
        prefix = os.path.expanduser(prefs['importPath'])
        prefix_books = os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books/')
        prefix_imported = os.path.join(prefix_books, 'Imported/')
        if not prefix[-1] == '/':
            prefix += '/'
        user = User.get_or_create(settings.USER_ID)
        assert isinstance(prefix, unicode)
        extensions = ['pdf', 'epub', 'txt']
        books = []
        for root, folders, files in os.walk(prefix):
            for f in files:
                #if f.startswith('._') or f == '.DS_Store':
                if f.startswith('.'):
                    continue
                f = os.path.join(root, f)
                ext = f.split('.')[-1]
                if ext in extensions:
                    books.append(f)

        trigger_event('import', {
            'path': prefix,
            'files': len(books)
        })
        position = 0
        added = 0
        for f in ox.sorted_strings(books):
            position += 1
            id = media.get_id(f)
            file = File.get(id)
            path = f[len(prefix):]
            if not file:
                f_import = f
                f = f.replace(prefix, prefix_imported)
                ox.makedirs(os.path.dirname(f))
                shutil.move(f_import, f)
                path = f[len(prefix_books):]
                data = media.metadata(f)
                ext = f.split('.')[-1]
                data['extension'] = ext
                data['size'] = os.stat(f).st_size
                file = File.get_or_create(id, data, path)
                item = file.item
                if 'mainid' in file.info:
                    del file.info['mainid']
                    db.session.add(file)
                if 'mainid' in item.info:
                    item.meta['mainid'] = item.info.pop('mainid')
                    item.meta[item.meta['mainid']] = item.info[item.meta['mainid']]
                    db.session.add(item)
                item.users.append(user)
                Changelog.record(user, 'additem', item.id, item.info)
                if item.meta.get('mainid'):
                    Changelog.record(user, 'edititem', item.id, {
                        item.meta['mainid']: item.meta[item.meta['mainid']]
                    })
                item.scrape()
                added += 1
            trigger_event('import', {
                'position': position,
                'length': len(books),
                'path': path,
                'progress': position/len(books),
                'added': added,
            })
        trigger_event('import', {
            'progress': 1,
            'added': added,
            'done': True
        })


oml/item/views.py (new file, 101 lines added)
@@ -0,0 +1,101 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4

import os
from datetime import datetime
import zipfile
import mimetypes
from StringIO import StringIO
import Image

from flask import Blueprint
from flask import json, request, make_response, abort, send_file
from covers import covers

import settings

from models import Item, db

from utils import resize_image

app = Blueprint('item', __name__, static_folder=settings.static_path)

@app.route('/<string:id>/epub/')
@app.route('/<string:id>/epub/<path:filename>')
def epub(id, filename=''):
    item = Item.get(id)
    if not item or item.info['extension'] != 'epub':
        abort(404)

    path = item.get_path()
    z = zipfile.ZipFile(path)
    if filename == '':
        return '<br>\n'.join([f.filename for f in z.filelist])
    if filename not in [f.filename for f in z.filelist]:
        abort(404)
    resp = make_response(z.read(filename))
    resp.content_type = {
        'xpgt': 'application/vnd.adobe-page-template+xml'
    }.get(filename.split('.')[0], mimetypes.guess_type(filename)[0]) or 'text/plain'
    return resp

@app.route('/<string:id>/get')
@app.route('/<string:id>/txt/')
@app.route('/<string:id>/pdf')
def get(id):
    item = Item.get(id)
    if not item:
        abort(404)
    path = item.get_path()
    mimetype = {
        'epub': 'application/epub+zip',
        'pdf': 'application/pdf',
    }.get(path.split('.')[-1], None)
    return send_file(path, mimetype=mimetype)

@app.route('/<string:id>/cover.jpg')
@app.route('/<string:id>/cover<int:size>.jpg')
def cover(id, size=None):
    item = Item.get(id)
    if not item:
        abort(404)
    data = None
    if size:
        data = covers['%s:%s' % (id, size)]
        if data:
            size = None
    if not data:
        data = covers[id]
    if not data:
        print 'check for cover', id
        data = item.update_cover()
    if not data:
        data = covers.black()
    if size:
        data = covers['%s:%s' % (id, size)] = resize_image(data, size=size)
    data = str(data)
    if not 'coverRatio' in item.meta:
        #img = Image.open(StringIO(str(covers[id])))
        img = Image.open(StringIO(data))
        item.meta['coverRatio'] = float(img.size[0])/img.size[1]
        db.session.add(item)
        db.session.commit()
    resp = make_response(data)
    resp.content_type = "image/jpeg"
    return resp

@app.route('/<string:id>/reader/')
def reader(id, filename=''):
    item = Item.get(id)
    if item.info['extension'] == 'epub':
        html = 'html/epub.html'
    elif item.info['extension'] == 'pdf':
        html = 'html/pdf.html'
    elif item.info['extension'] == 'txt':
        html = 'html/txt.html'
    else:
        abort(404)
    item.sort_accessed = item.accessed = datetime.now()
    item.sort_timesaccessed = item.timesaccessed = (item.timesaccessed or 0) + 1
    item.save()
    return app.send_static_file(html)

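The blueprint gives every item id its own small URL space: the raw file, cover images at arbitrary sizes, the unpacked epub tree and a reader page. A hedged client-side sketch, assuming the blueprint is registered at the application root and the server listens on a local port (both configured elsewhere in the commit, not in this file), with a made-up id:

    # hypothetical client sketch; host, port and item id are assumptions
    import requests

    base = 'http://127.0.0.1:9842'
    item_id = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567'   # made-up 32-character id

    cover = requests.get('%s/%s/cover128.jpg' % (base, item_id))  # resized, cached as '<id>:128'
    book = requests.get('%s/%s/get' % (base, item_id))            # the pdf/epub/txt itself
    print(cover.status_code, book.headers.get('Content-Type'))
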