# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4

from datetime import datetime
import base64
import hashlib
import os
import re
import shutil
import stat
import unicodedata

import ox
from sqlalchemy.schema import CreateTable
import sqlalchemy as sa

from changelog import Changelog
from db import MutableDict
import json_pickler
from .icons import icons
from .person import get_sort_name, Person
from queryparser import Parser
from settings import config
from utils import remove_empty_folders, get_ratio
from websocket import trigger_event
import db
import media
#import metaremote as meta
import meta
import settings
import state
import utils

import logging
logger = logging.getLogger(__name__)
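
# Many-to-many association between users and the items in their libraries.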
user_items = sa.Table('useritem', db.metadata,
    sa.Column('user_id', sa.String(43), sa.ForeignKey('user.id')),
    sa.Column('item_id', sa.String(32), sa.ForeignKey('item.id'))
)

class Item(db.Model):
    __tablename__ = 'item'

    created = sa.Column(sa.DateTime())
    modified = sa.Column(sa.DateTime())

    id = sa.Column(sa.String(32), primary_key=True)

    info = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))
    meta = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))

    # why is this in db and not in i.e. info?
    added = sa.Column(sa.DateTime())  # added to local library
    accessed = sa.Column(sa.DateTime())
    timesaccessed = sa.Column(sa.Integer())

    users = sa.orm.relationship('User', secondary=user_items,
                                backref=sa.orm.backref('items', lazy='dynamic'))

    @property
    def timestamp(self):
        return utils.datetime2ts(self.modified)

    def __repr__(self):
        return self.id

    def __init__(self, id):
        if isinstance(id, list):
            id = base64.b32encode(hashlib.sha1(''.join(id)).digest())
        self.id = id
        self.created = datetime.utcnow()
        self.modified = datetime.utcnow()
        self.info = {}
        self.meta = {}

    @classmethod
    def get(cls, id):
        if isinstance(id, list):
            id = base64.b32encode(hashlib.sha1(''.join(id)).digest())
        return cls.query.filter_by(id=id).first()

    @classmethod
    def get_or_create(cls, id, info=None):
        if isinstance(id, list):
            id = base64.b32encode(hashlib.sha1(''.join(id)).digest())
        item = cls.query.filter_by(id=id).first()
        if not item:
            item = cls(id=id)
            if info:
                item.info = info
            state.db.session.add(item)
            state.db.session.commit()
        return item

    @classmethod
    def find(cls, data):
        return Parser(cls, user_items, Find, Sort).find(data)

    def add_user(self, user):
        if user not in self.users:
            self.users.append(user)
        l = user.library
        if self not in l.items:
            l.items.append(self)
            state.db.session.add(l)

    def json(self, keys=None):
        j = {}
        j['id'] = self.id
        j['created'] = self.created
        j['modified'] = self.modified
        j['timesaccessed'] = self.timesaccessed
        j['accessed'] = self.accessed
        j['added'] = self.added
        t = Transfer.get(self.id)
        if t:
            j['transferadded'] = t.added
            j['transferprogress'] = t.progress

        # unused and slow
        #j['users'] = list(map(str, list(self.users)))

        if self.info:
            meta_keys = [k for k in self.meta_keys if k != 'pages']
            for key in self.info:
                if (not keys or key in keys) and key not in meta_keys:
                    j[key] = self.info[key]

        if self.meta:
            j.update(self.meta)

        for key in self.id_keys:
            if key not in self.meta and key in j:
                del j[key]
        if keys:
            for k in list(j):
                if k not in keys:
                    del j[k]
        for key in [k['id'] for k in settings.config['itemKeys'] if isinstance(k['type'], list)]:
            if key in j and not isinstance(j[key], list):
                j[key] = [j[key]]
        return j

    def get_path(self):
        f = self.files.first()
        return f.fullpath() if f else None
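
    # update_sort() below denormalizes the sortable keys from config['itemKeys']
    # into the typed columns of this item's Sort row, so queries can ORDER BY them.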
    def update_sort(self):
        update = False
        s = Sort.get_or_create(self.id)
        for key in config['itemKeys']:
            if key.get('sort'):
                value = self.json().get(key['id'], None)
                sort_type = key.get('sortType', key['type'])
                if value:
                    if sort_type == 'integer':
                        if isinstance(value, str):
                            value = int(re.sub('[^0-9]', '', value))
                        else:
                            value = int(value)
                    elif sort_type == 'float':
                        value = float(value)
                    elif sort_type == 'date':
                        pass
                    elif sort_type == 'person':
                        if not isinstance(value, list):
                            value = [value]
                        value = list(map(get_sort_name, value))
                        value = ox.sort_string('\n'.join(value)).lower()
                    elif sort_type == 'title':
                        if isinstance(value, dict):
                            value = list(value.values())
                        if isinstance(value, list):
                            value = ''.join(value)
                        value = ox.get_sort_title(value)
                        value = utils.sort_title(value).lower()
                    else:
                        if isinstance(value, list):
                            value = '\n'.join(value)
                        if value:
                            value = str(value)
                            value = ox.sort_string(value).lower()
                elif isinstance(value, list):  # empty list
                    value = None
                if getattr(s, key['id']) != value:
                    setattr(s, key['id'], value)
                    update = True
        if update:
            state.db.session.add(s)
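
    # update_find() below rebuilds the Find rows for this item: one row per
    # key/value, with findvalue holding an NFKD-normalized, lower-cased copy
    # used for matching; rows for keys or values that no longer apply are removed.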
    def update_find(self):

        def add(k, v):
            f = Find.query.filter_by(item_id=self.id, key=k, value=v).first()
            if not f:
                f = Find(item_id=self.id, key=k)
            if f.value != v:
                f.findvalue = unicodedata.normalize('NFKD', v).lower()
                f.value = v
                state.db.session.add(f)

        keys = []
        for key in config['itemKeys']:
            if key.get('find') or key.get('filter') or key.get('type') in [['string'], 'string']:
                value = self.json().get(key['id'], None)
                if key.get('filterMap') and value:
                    value = re.compile(key.get('filterMap')).findall(value)
                    if value:
                        value = value[0]
                if value:
                    keys.append(key['id'])
                    if isinstance(value, dict):
                        value = ' '.join(list(value.values()))
                    if not isinstance(value, list):
                        value = [value]
                    value = [
                        v.decode('utf-8') if isinstance(v, bytes) else v
                        for v in value
                    ]
                    for v in value:
                        add(key['id'], v)
                    for f in Find.query.filter_by(item_id=self.id,
                                                  key=key['id']).filter(Find.value.notin_(value)):
                        state.db.session.delete(f)
        for f in Find.query.filter_by(item_id=self.id).filter(Find.key.notin_(keys)):
            state.db.session.delete(f)

    def update(self, modified=None):
        for key in ('mediastate', 'coverRatio', 'previewRatio'):
            if key in self.meta:
                if key not in self.info:
                    self.info[key] = self.meta[key]
                del self.meta[key]
        users = list(map(str, list(self.users)))
        self.info['mediastate'] = 'available'  # available, unavailable, transferring
        t = Transfer.get(self.id)
        if t and t.added and t.progress < 1:
            self.info['mediastate'] = 'transferring'
        else:
            self.info['mediastate'] = 'available' if settings.USER_ID in users else 'unavailable'
        if modified:
            self.modified = modified
        else:
            self.modified = datetime.utcnow()
        self.update_sort()
        self.update_find()
        self.save()

    def save(self):
        state.db.session.add(self)
        state.db.session.commit()

    def delete(self, commit=True):
        Sort.query.filter_by(item_id=self.id).delete()
        Transfer.query.filter_by(item_id=self.id).delete()
        Scrape.query.filter_by(item_id=self.id).delete()
        state.db.session.delete(self)
        icons.clear('cover:%s' % self.id)
        icons.clear('preview:%s' % self.id)
        if commit:
            state.db.session.commit()
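
    # Metadata keys a user can edit; update_meta() drops any other key from self.meta.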
    meta_keys = (
        'author',
        'categories',
        'cover',
        'date',
        'description',
        'edition',
        'isbn',
        'language',
        'pages',
        'place',
        'publisher',
        'series',
        'tableofcontents',
        'title'
    )

    def update_meta(self, data, modified=None, reset_from=False):
        update = False
        record = {}
        for key in self.meta_keys:
            if key in data:
                if self.meta.get(key) != data[key]:
                    record[key] = data[key]
                    self.meta[key] = data[key]
                    update = True
        for key in list(self.meta):
            if key not in self.meta_keys:
                del self.meta[key]
                update = True
        if reset_from and '_from' in self.info:
            del self.info['_from']
            update = True
        if update:
            self.update(modified)
            self.save()
            if 'cover' in record:
                self.update_icons()
        user = state.user()
        if record and user in self.users:
            Changelog.record_ts(user, modified, 'edititem', self.id, record)

    def edit(self, data, modified=None, reset_from=False):
        Scrape.query.filter_by(item_id=self.id).delete()
        self.update_meta(data, modified, reset_from=reset_from)
        for f in self.files.all():
            f.move()

    def extract_preview(self):
        path = self.get_path()
        if path:
            return getattr(media, self.info['extension']).cover(path)

    def update_cover(self):
        key = 'cover:%s' % self.id
        cover = None
        if 'cover' in self.meta and self.meta['cover']:
            try:
                cover = ox.cache.read_url(self.meta['cover'])
            except:
                logger.debug('unable to read cover url %s', self.meta['cover'])
                cover = None
        if cover:
            icons[key] = cover
            self.info['coverRatio'] = get_ratio(cover)
        else:
            del icons[key]
        if not cover:
            if 'previewRatio' in self.info:
                self.info['coverRatio'] = self.info['previewRatio']
            elif 'coverRatio' in self.info:
                del self.info['coverRatio']
        icons.clear('cover:%s:' % self.id)

    def update_preview(self):
        key = 'preview:%s' % self.id
        preview = self.extract_preview()
        if preview:
            icons[key] = preview
            self.info['previewRatio'] = get_ratio(preview)
            if 'coverRatio' not in self.info:
                self.info['coverRatio'] = self.info['previewRatio']
        else:
            del icons[key]
            if 'previewRatio' in self.info:
                del self.info['previewRatio']
        if not preview:
            if 'coverRatio' in self.info:
                self.info['previewRatio'] = self.info['coverRatio']
            elif 'previewRatio' in self.info:
                del self.info['previewRatio']
        icons.clear('preview:%s:' % self.id)

    def update_icons(self):
        self.update_cover()
        self.update_preview()

    def load_metadata(self):
        '''
        load metadata from user_metadata or get via isbn?
        '''
        for key in self.meta_keys:
            if key in self.info:
                if key not in self.meta:
                    self.meta[key] = self.info[key]
                if key != 'pages':
                    del self.info[key]

        #FIXME get from user_meta
        if state.online:
            if 'isbn' in self.meta:
                data = meta.lookup('isbn', self.meta['isbn'])
                if data:
                    self.meta.update(data)

    def queue_download(self):
        u = state.user()
        if u not in self.users:
            t = Transfer.get_or_create(self.id)
            logger.debug('queue %s for download', self.id)
            self.add_user(u)
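
    # save_file() stores downloaded content: it checks that the content hashes to
    # this item's id, writes it below Downloads/, registers a File row, records
    # additem/edititem in the changelog and marks the transfer as complete.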
    def save_file(self, content):
        u = state.user()
        f = File.get(self.id)
        content_id = media.get_id(data=content)
        if content_id != self.id:
            logger.debug('INVALID CONTENT %s vs %s', self.id, content_id)
            return False
        if not f:
            path = 'Downloads/%s.%s' % (self.id, self.info['extension'])
            info = self.info.copy()
            for key in ('mediastate', 'coverRatio', 'previewRatio'):
                if key in info:
                    del info[key]
            f = File.get_or_create(self.id, info, path=path)
            path = self.get_path()
            if not os.path.exists(path):
                ox.makedirs(os.path.dirname(path))
                with open(path, 'wb') as fd:
                    fd.write(content)
                f.info = media.metadata(path)
                f.save()
                for key in ('tableofcontents', ):
                    if key not in self.meta and key in f.info:
                        self.meta[key] = f.info[key]
            if u not in self.users:
                self.add_user(u)
            t = Transfer.get_or_create(self.id)
            t.progress = 1
            t.save()
            self.added = datetime.utcnow()
            Changelog.record(u, 'additem', self.id, f.info)
            Changelog.record(u, 'edititem', self.id, self.meta)
            self.update()
            f.move()
            self.update_icons()
            self.save()
            trigger_event('transfer', {
                'id': self.id, 'progress': 1
            })
            return True
        else:
            logger.debug('TRIED TO SAVE EXISTING FILE!!!')
            t = Transfer.get_or_create(self.id)
            t.progress = 1
            t.save()
            self.update()
            return False

    def remove_file(self):
        for f in self.files.all():
            path = f.fullpath()
            if os.path.exists(path):
                os.unlink(path)
                remove_empty_folders(os.path.dirname(path))
            state.db.session.delete(f)
        user = state.user()
        if user in self.users:
            self.users.remove(user)
        for l in self.lists.filter_by(user_id=user.id):
            l.items.remove(self)
        if not self.users:
            self.delete()
        else:
            self.update()
        Transfer.query.filter_by(item_id=self.id).delete()
        Changelog.record(user, 'removeitem', self.id)
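
# Sort mirrors an item's sortable metadata as typed, indexed columns; one
# column per sortable key in config['itemKeys'] is attached dynamically right
# after the class definition below.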
class Sort(db.Model):
    __tablename__ = 'sort'

    item_id = sa.Column(sa.String(32), sa.ForeignKey('item.id'), primary_key=True)
    item = sa.orm.relationship('Item', backref=sa.orm.backref('sort', lazy='dynamic'))

    def __repr__(self):
        return '%s_sort' % self.item_id

    @classmethod
    def get(cls, item_id):
        return cls.query.filter_by(item_id=item_id).first()

    @classmethod
    def get_or_create(cls, item_id):
        f = cls.get(item_id)
        if not f:
            f = cls(item_id=item_id)
            state.db.session.add(f)
            state.db.session.commit()
        return f

Item.sort_keys = []
for key in config['itemKeys']:
    if key.get('sort'):
        sort_type = key.get('sortType', key['type'])
        if sort_type == 'integer':
            col = sa.Column(sa.BigInteger(), index=True)
        elif sort_type == 'float':
            col = sa.Column(sa.Float(), index=True)
        elif sort_type == 'date':
            col = sa.Column(sa.DateTime(), index=True)
        else:
            col = sa.Column(sa.String(1000), index=True)
        setattr(Sort, '%s' % key['id'], col)
        Item.sort_keys.append(key['id'])

Item.id_keys = ['isbn', 'lccn', 'olid', 'oclc', 'asin']
Item.item_keys = config['itemKeys']
Item.filter_keys = [k['id'] for k in config['itemKeys'] if k.get('filter')]

class Find(db.Model):
    __tablename__ = 'find'

    id = sa.Column(sa.Integer(), primary_key=True)
    item_id = sa.Column(sa.String(32), sa.ForeignKey('item.id'))
    item = sa.orm.relationship('Item', backref=sa.orm.backref('find_', lazy='dynamic'))
    key = sa.Column(sa.String(200), index=True)
    value = sa.Column(sa.Text())
    findvalue = sa.Column(sa.Text(), index=True)

    def __repr__(self):
        return '%s=%s' % (self.key, self.findvalue)

    @classmethod
    def get(cls, item, key):
        return cls.query.filter_by(item_id=item, key=key).first()

    @classmethod
    def get_or_create(cls, item, key):
        f = cls.get(item, key)
        if not f:
            f = cls(item_id=item, key=key)
            state.db.session.add(f)
            state.db.session.commit()
        return f

class File(db.Model):
    __tablename__ = 'file'

    created = sa.Column(sa.DateTime())
    modified = sa.Column(sa.DateTime())

    sha1 = sa.Column(sa.String(32), primary_key=True)
    path = sa.Column(sa.String(2048))

    info = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))

    item_id = sa.Column(sa.String(32), sa.ForeignKey('item.id'))
    item = sa.orm.relationship('Item', backref=sa.orm.backref('files', lazy='dynamic'))

    @classmethod
    def get(cls, sha1):
        return cls.query.filter_by(sha1=sha1).first()

    @classmethod
    def get_or_create(cls, sha1, info=None, path=None):
        f = cls.get(sha1)
        if not f:
            f = cls(sha1=sha1)
            if info:
                f.info = info
            if path:
                f.path = path
            f.item_id = Item.get_or_create(id=sha1, info=info).id
            state.db.session.add(f)
            state.db.session.commit()
        return f

    def __repr__(self):
        return self.sha1

    def __init__(self, sha1):
        self.sha1 = sha1
        self.created = datetime.utcnow()
        self.modified = datetime.utcnow()

    def fullpath(self):
        prefs = settings.preferences
        prefix = os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books/')
        return os.path.join(prefix, self.path)

    def make_readonly(self):
        current_path = self.fullpath()
        if os.path.exists(current_path):
            mode = os.stat(current_path)[stat.ST_MODE]
            readonly = mode & ~stat.S_IWUSR & ~stat.S_IWGRP & ~stat.S_IWOTH
            if mode != readonly:
                os.chmod(current_path, readonly)
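
    # move() renames the file to its canonical location below <libraryPath>/Books/:
    # <first letter>/<author sort name>/<title> (<publisher> <year>).<extension>,
    # appending a growing prefix of the sha1 if that path is already taken.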
    def move(self):
        def format_underscores(string):
            return re.sub(r'^\.|\.$|:|/|\?|<|>', '_', string)
        prefs = settings.preferences
        prefix = os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books/')
        j = self.item.json()

        current_path = self.fullpath()
        if not os.path.exists(current_path):
            logger.debug('file is missing. %s', current_path)
            return

        self.make_readonly()

        author = '; '.join([get_sort_name(a) for a in j.get('author', [])])
        if not author:
            author = 'Unknown Author'
        if ' (Ed.)' in author:
            author = author.replace(' (Ed.)', '') + ' (Ed.)'
        title = j.get('title', 'Untitled')
        extension = j['extension']

        if len(title) > 100:
            title = title[:100]

        title = format_underscores(title)
        author = format_underscores(author)
        publisher = j.get('publisher')
        if publisher:
            extra = ', '.join(publisher)
        else:
            extra = ''
        date = j.get('date')
        if date and len(date) >= 4:
            extra += ' ' + date[:4]
        if extra:
            extra = format_underscores(extra)
            title = '%s (%s)' % (title, extra.strip())
        filename = '%s.%s' % (title, extension)
        first = unicodedata.normalize('NFD', author[0].upper())[0].upper()
        new_path = os.path.join(first, author, filename)
        new_path = new_path.replace('\x00', '')
        new_path = ox.decode_html(new_path)
        if self.path == new_path:
            return
        h = ''
        while os.path.exists(os.path.join(prefix, new_path)):
            h = self.sha1[:len(h)+1]
            filename = '%s.%s.%s' % (title, h, extension)
            first = unicodedata.normalize('NFD', author[0].upper())[0].upper()
            new_path = os.path.join(first, author, filename)
            if current_path == os.path.join(prefix, new_path):
                break
        if self.path != new_path:
            path = os.path.join(prefix, new_path)
            ox.makedirs(os.path.dirname(path))
            shutil.move(current_path, path)
            self.path = new_path
            self.save()
            for folder in set(os.path.dirname(p) for p in [current_path, path]):
                remove_empty_folders(folder)

    def save(self):
        state.db.session.add(self)
        state.db.session.commit()
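
# Scrape appears to queue items whose metadata still has to be fetched
# remotely; 'added' records when the item was queued (rows are cleared again
# by Item.edit and Item.delete).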
class Scrape(db.Model):
    __tablename__ = 'scrape'

    item_id = sa.Column(sa.String(32), sa.ForeignKey('item.id'), primary_key=True)
    item = sa.orm.relationship('Item', backref=sa.orm.backref('scraping', lazy='dynamic'))

    added = sa.Column(sa.DateTime())

    def __repr__(self):
        return '='.join(map(str, [self.item_id, self.added]))

    @classmethod
    def get(cls, item_id):
        return cls.query.filter_by(item_id=item_id).first()

    @classmethod
    def get_or_create(cls, item_id):
        t = cls.get(item_id)
        if not t:
            t = cls(item_id=item_id)
            t.added = datetime.utcnow()
            t.save()
        return t

    def save(self):
        state.db.session.add(self)
        state.db.session.commit()

    def remove(self):
        state.db.session.delete(self)
        state.db.session.commit()

class Transfer(db.Model):
    __tablename__ = 'transfer'

    item_id = sa.Column(sa.String(32), sa.ForeignKey('item.id'), primary_key=True)
    item = sa.orm.relationship('Item', backref=sa.orm.backref('transfer', lazy='dynamic'))

    added = sa.Column(sa.DateTime())
    progress = sa.Column(sa.Float())

    def __repr__(self):
        return '='.join(map(str, [self.item_id, self.progress]))

    @classmethod
    def get(cls, item_id):
        return cls.query.filter_by(item_id=item_id).first()

    @classmethod
    def get_or_create(cls, item_id):
        t = cls.get(item_id)
        if not t:
            t = cls(item_id=item_id)
            t.added = datetime.utcnow()
            t.progress = 0
            t.save()
        return t

    def save(self):
        state.db.session.add(self)
        state.db.session.commit()
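
# Metadata appears to hold edits keyed by an identifier pair (e.g. key='isbn',
# value='...') rather than by item; update_items() triggers an update of every
# item whose Find rows match that pair.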
class Metadata(db.Model):
    __tablename__ = 'metadata'

    created = sa.Column(sa.DateTime())
    modified = sa.Column(sa.DateTime())

    id = sa.Column(sa.Integer(), primary_key=True)

    key = sa.Column(sa.String(256))
    value = sa.Column(sa.String(256))

    data = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))

    def __repr__(self):
        return '='.join([self.key, self.value])

    @property
    def timestamp(self):
        return utils.datetime2ts(self.modified)

    @classmethod
    def get(cls, key, value):
        return cls.query.filter_by(key=key, value=value).first()

    @classmethod
    def get_or_create(cls, key, value):
        m = cls.get(key, value)
        if not m:
            m = cls(key=key, value=value)
            m.created = datetime.utcnow()
            m.data = {}
            m.save()
        return m

    def save(self):
        self.modified = datetime.utcnow()
        state.db.session.add(self)
        state.db.session.commit()

    def reset(self):
        user = state.user()
        Changelog.record(user, 'resetmeta', self.key, self.value)
        state.db.session.delete(self)
        state.db.session.commit()
        self.update_items()

    def edit(self, data, record=True):
        changed = {}
        for key in data:
            if key == 'id':
                continue
            if data[key] != self.data.get(key):
                self.data[key] = data[key]
                changed[key] = data[key]
        if changed:
            self.save()
            if record:
                user = state.user()
                Changelog.record(user, 'editmeta', self.key, self.value, changed)
        return changed

    def update_items(self):
        for f in Find.query.filter_by(key=self.key, value=self.value):
            if f.item:
                f.item.update()

    @classmethod
    def load(cls, key, value):
        m = cls.get(key, value)
        if m:
            if 'id' in m.data:
                del m.data['id']
            return m.data
        return {}
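
# Remove Person rows whose sort name is no longer used as an author by any item.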
def remove_unused_names():
    used = list(set(
        get_sort_name(a)
        for i in Item.query
        for a in i.meta.get('author', [])
    ))
    for p in Person.query.filter(Person.sortname.notin_(used)):
        state.db.session.delete(p)
    state.db.session.commit()
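
# update_sort_table() reconciles the 'sort' table with the columns generated
# from config['itemKeys']: obsolete columns are dropped, new ones are added via
# ALTER TABLE (reusing the column definitions emitted by CreateTable), and any
# missing ix_sort_* indexes are created.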
def update_sort_table():
    current = db.get_table_columns('sort')
    drop_columns = list(set(current) - set(Item.sort_keys + ['item_id']))
    if drop_columns:
        db.drop_columns('sort', drop_columns)
    add_columns = list(set(Item.sort_keys) - set(current + ['item_id']))
    if add_columns:
        create_table = str(CreateTable(Sort.__table__).compile(db.engine)).split('\n')
        sql = []
        for col in add_columns:
            add = [r for r in create_table if '\t%s ' % col in r][0].strip()[:-1]
            sql.append('ALTER TABLE sort ADD ' + add)
            sql.append('CREATE INDEX ix_sort_{col} ON sort ({col})'.format(col=col))
        with db.session() as s:
            for q in sql:
                s.connection().execute(q)
            s.commit()
    layout = db.get_layout()
    sort_indexes = [i[len('ix_sort_'):] for i in layout['indexes'] if i.startswith('ix_sort_')]
    sql = []
    for col in set(Item.sort_keys) - set(sort_indexes):
        sql.append('CREATE INDEX ix_sort_{col} ON sort ({col})'.format(col=col))
    if sql:
        with db.session() as s:
            for q in sql:
                s.connection().execute(q)
            s.commit()