2014-05-04 17:26:43 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
from datetime import datetime
|
2014-08-12 08:16:57 +00:00
|
|
|
import base64
|
|
|
|
import hashlib
|
|
|
|
import os
|
|
|
|
import re
|
2014-05-17 11:45:57 +00:00
|
|
|
import shutil
|
2016-01-14 06:59:55 +00:00
|
|
|
import stat
|
2014-05-27 11:06:39 +00:00
|
|
|
import unicodedata
|
2016-02-24 06:52:36 +00:00
|
|
|
import time
|
2017-06-12 15:00:32 +00:00
|
|
|
import string
|
2014-05-04 17:26:43 +00:00
|
|
|
|
|
|
|
import ox
|
2016-01-19 10:05:16 +00:00
|
|
|
from sqlalchemy.orm import load_only
|
2016-01-16 15:57:15 +00:00
|
|
|
from sqlalchemy.schema import CreateTable
|
2014-08-09 15:03:16 +00:00
|
|
|
import sqlalchemy as sa
|
2014-05-21 22:41:29 +00:00
|
|
|
|
2017-06-03 20:50:14 +00:00
|
|
|
from changelog import add_record
|
2014-08-12 08:16:57 +00:00
|
|
|
from db import MutableDict
|
2014-09-02 22:32:44 +00:00
|
|
|
import json_pickler
|
|
|
|
from .icons import icons
|
2016-01-14 13:09:56 +00:00
|
|
|
from .person import get_sort_name, Person
|
2016-01-10 09:10:38 +00:00
|
|
|
from queryparser import Parser
|
2014-08-12 08:16:57 +00:00
|
|
|
from settings import config
|
2019-01-14 15:02:34 +00:00
|
|
|
from utils import remove_empty_folders, get_ratio, same_path
|
2014-08-12 08:16:57 +00:00
|
|
|
from websocket import trigger_event
|
|
|
|
import db
|
2014-05-04 17:26:43 +00:00
|
|
|
import media
|
2016-01-07 10:12:48 +00:00
|
|
|
import meta
|
2014-08-12 08:16:57 +00:00
|
|
|
import settings
|
2014-05-12 23:43:27 +00:00
|
|
|
import state
|
2014-05-04 17:26:43 +00:00
|
|
|
import utils
|
|
|
|
|
2014-08-12 08:16:57 +00:00
|
|
|
import logging
|
2015-11-29 14:56:38 +00:00
|
|
|
logger = logging.getLogger(__name__)
|
2014-05-17 14:26:59 +00:00
|
|
|
|
2014-08-09 16:14:14 +00:00
|
|
|
user_items = sa.Table('useritem', db.metadata,
|
2016-06-24 10:27:05 +00:00
|
|
|
sa.Column('user_id', sa.String(43), sa.ForeignKey('user.id')),
|
|
|
|
sa.Column('item_id', sa.String(32), sa.ForeignKey('item.id')))
|
2014-05-04 17:26:43 +00:00
|
|
|
|
|
|
|
class Item(db.Model):
    """A library item: persisted extracted info, editable metadata and owners."""
    __tablename__ = 'item'

    created = sa.Column(sa.DateTime())   # row creation time
    modified = sa.Column(sa.DateTime())  # last change; drives sync timestamps

    # content-hash based id (base32 string), shared with File.sha1's item
    id = sa.Column(sa.String(32), primary_key=True)

    # info: data extracted from the file; meta: user-editable metadata
    info = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))
    meta = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))

    # why is this in db and not in i.e. info?
    added = sa.Column(sa.DateTime())  # added to local library
    accessed = sa.Column(sa.DateTime())      # last access time
    timesaccessed = sa.Column(sa.Integer())  # access counter

    # users (local and peers) that have this item in their library
    users = sa.orm.relationship('User', secondary=user_items,
        backref=sa.orm.backref('items', lazy='dynamic'))
|
2014-05-04 17:26:43 +00:00
|
|
|
|
|
|
|
    @property
    def timestamp(self):
        # `modified` as a unix timestamp, used by the sync protocol
        return utils.datetime2ts(self.modified)
|
2014-05-04 17:26:43 +00:00
|
|
|
|
|
|
|
    def __repr__(self):
        # items are represented by their string id
        return self.id
|
|
|
|
|
|
|
|
def __init__(self, id):
|
|
|
|
if isinstance(id, list):
|
|
|
|
id = base64.b32encode(hashlib.sha1(''.join(id)).digest())
|
|
|
|
self.id = id
|
2014-05-20 00:43:54 +00:00
|
|
|
self.created = datetime.utcnow()
|
|
|
|
self.modified = datetime.utcnow()
|
2014-05-04 17:26:43 +00:00
|
|
|
self.info = {}
|
|
|
|
self.meta = {}
|
|
|
|
|
|
|
|
@classmethod
|
2019-01-15 07:50:11 +00:00
|
|
|
def get(cls, id, for_update=False):
|
2014-05-04 17:26:43 +00:00
|
|
|
if isinstance(id, list):
|
|
|
|
id = base64.b32encode(hashlib.sha1(''.join(id)).digest())
|
2019-01-15 07:50:11 +00:00
|
|
|
qs = cls.query.filter_by(id=id)
|
|
|
|
if for_update:
|
|
|
|
qs = qs.with_for_update()
|
|
|
|
return qs.first()
|
2014-05-04 17:26:43 +00:00
|
|
|
|
|
|
|
@classmethod
|
|
|
|
def get_or_create(cls, id, info=None):
|
|
|
|
if isinstance(id, list):
|
|
|
|
id = base64.b32encode(hashlib.sha1(''.join(id)).digest())
|
|
|
|
item = cls.query.filter_by(id=id).first()
|
|
|
|
if not item:
|
|
|
|
item = cls(id=id)
|
|
|
|
if info:
|
|
|
|
item.info = info
|
2014-08-09 16:14:14 +00:00
|
|
|
state.db.session.add(item)
|
|
|
|
state.db.session.commit()
|
2014-05-04 17:26:43 +00:00
|
|
|
return item
|
|
|
|
|
2016-01-10 09:10:38 +00:00
|
|
|
@classmethod
|
|
|
|
def find(cls, data):
|
2016-02-11 18:23:32 +00:00
|
|
|
from user.models import list_items
|
|
|
|
return Parser(cls, user_items, list_items, Find, Sort).find(data)
|
2016-01-10 09:10:38 +00:00
|
|
|
|
2016-02-10 14:02:32 +00:00
|
|
|
@classmethod
|
|
|
|
def remove_many(cls, ids):
|
|
|
|
Find.query.filter(Find.item_id.in_(ids)).delete(synchronize_session=False)
|
|
|
|
Sort.query.filter(Sort.item_id.in_(ids)).delete(synchronize_session=False)
|
|
|
|
cls.query.filter(cls.id.in_(ids)).delete(synchronize_session=False)
|
|
|
|
state.db.session.expire_all()
|
2016-02-27 05:51:12 +00:00
|
|
|
Sort.query.filter_by(item_id=None).delete()
|
|
|
|
Find.query.filter_by(item_id=None).delete()
|
2016-02-10 14:02:32 +00:00
|
|
|
|
2016-02-11 17:55:46 +00:00
|
|
|
@classmethod
|
|
|
|
def remove_without_user(cls):
|
|
|
|
q = user_items.select()
|
|
|
|
owned_ids = {i['item_id'] for i in state.db.session.execute(q)}
|
|
|
|
ids = {i.id for i in cls.query.options(load_only('id'))}
|
|
|
|
remove = ids - owned_ids
|
|
|
|
if remove:
|
|
|
|
cls.remove_many(remove)
|
|
|
|
|
2016-01-06 18:06:48 +00:00
|
|
|
def add_user(self, user):
|
2016-02-15 11:30:17 +00:00
|
|
|
from user.models import list_items
|
2016-06-24 10:27:05 +00:00
|
|
|
if user not in self.users:
|
2016-01-10 06:13:03 +00:00
|
|
|
self.users.append(user)
|
2016-01-06 18:06:48 +00:00
|
|
|
l = user.library
|
2016-06-24 10:27:05 +00:00
|
|
|
if self not in l.items:
|
2016-02-15 11:30:17 +00:00
|
|
|
q = list_items.insert({'item_id': self.id, 'list_id': l.id})
|
|
|
|
state.db.session.execute(q)
|
2016-01-06 18:06:48 +00:00
|
|
|
|
2014-05-04 17:26:43 +00:00
|
|
|
    def json(self, keys=None):
        """Serialize the item into a plain dict.

        If `keys` is given only those keys are returned, otherwise all
        db columns, info, meta and live transfer state are included.
        """
        j = {}
        j['id'] = self.id
        j['created'] = self.created
        j['modified'] = self.modified
        j['timesaccessed'] = self.timesaccessed
        j['accessed'] = self.accessed
        j['added'] = self.added
        # include live download state if a transfer for this item exists
        if (not keys or 'transferadded' in keys or 'transferprogress' in keys) \
                and state.downloads and not state.shutdown:
            t = state.downloads.transfers.get(self.id)
            if t:
                j['transferadded'] = t['added']
                j['transferprogress'] = t['progress']
        # unused and slow
        # j['users'] = list(map(str, list(self.users)))

        if self.info:
            # meta wins over info for shared keys; 'pages' stays readable
            # from info even though it is also a meta key
            meta_keys = [k for k in self.meta_keys if k != 'pages']
            for key in self.info:
                if (not keys or key in keys) and key not in meta_keys:
                    j[key] = self.info[key]

        if self.meta:
            for key in self.meta:
                if not keys or key in keys:
                    j[key] = self.meta[key]

        # drop external ids (isbn etc.) that came from info but were
        # removed from meta
        for key in self.id_keys:
            if key not in self.meta and key in j:
                del j[key]
        if keys:
            for k in list(j):
                if k not in keys:
                    del j[k]
        # keys typed as a list in the config are always returned as lists
        for key in self.array_keys:
            if key in j and not isinstance(j[key], list):
                j[key] = [j[key]]
        # sharemetadata is always an explicit boolean in the output
        if keys is None or 'sharemetadata' in keys:
            j['sharemetadata'] = j.get('sharemetadata', False)
            if not j['sharemetadata']:
                j['sharemetadata'] = False
        return j
|
|
|
|
|
|
|
|
def get_path(self):
|
|
|
|
f = self.files.first()
|
2014-05-17 11:45:57 +00:00
|
|
|
return f.fullpath() if f else None
|
2014-05-04 17:26:43 +00:00
|
|
|
|
2016-02-10 14:01:33 +00:00
|
|
|
    def update_sort(self, commit=True):
        """Recompute this item's row in the Sort table.

        For every sortable key in the config, the item's json value is
        normalized according to the key's sort type and written to the
        matching Sort column — but only if it actually changed.
        """
        update = False
        s = Sort.get_or_create(self.id, commit=commit)
        for key in config['itemKeys']:
            if key.get('sort'):
                value = self.json().get(key['id'], None)
                sort_type = key.get('sortType', key['type'])
                if value:
                    if sort_type == 'integer':
                        if isinstance(value, str):
                            # strip non-digits so e.g. "ca. 1984" sorts as 1984
                            value = int(re.sub('[^0-9]', '', value))
                        else:
                            value = int(value)
                    elif sort_type == 'float':
                        value = float(value)
                    elif sort_type == 'date':
                        pass
                    elif sort_type == 'person':
                        if not isinstance(value, list):
                            value = [value]
                        # people sort by their "Lastname, Firstname" form
                        value = [get_sort_name(v, commit=commit) for v in value]
                        value = ox.sort_string('\n'.join(value)).lower()
                    elif sort_type == 'title':
                        value = self.get_sorttitle().lower()
                        value = utils.sort_title(value)
                    elif sort_type == 'boolean':
                        pass
                    else:
                        # generic string sort
                        if isinstance(value, list):
                            value = '\n'.join(value)
                        if value:
                            value = str(value)
                            value = ox.sort_string(value).lower()
                        elif isinstance(value, list):  # empty list
                            value = None
                if not value and sort_type != 'boolean':
                    # empty values sort as NULL (booleans keep False)
                    value = None
                if getattr(s, key['id']) != value:
                    setattr(s, key['id'], value)
                    update = True
        if update:
            state.db.session.add(s)
|
2014-05-04 17:26:43 +00:00
|
|
|
|
2016-02-10 14:01:33 +00:00
|
|
|
    def update_find(self, commit=True):
        """Rebuild the Find (search/filter) rows for this item.

        Existing rows are updated in place; rows whose value or key no
        longer exists on the item are deleted.
        """
        # key -> set of values currently stored in the find table
        current_values = {}
        for f in Find.query.filter_by(item_id=self.id):
            if f.key not in current_values:
                current_values[f.key] = set()
            current_values[f.key].add(f.value)

        def add(k, v):
            # reuse the existing row for (k, v) if there is one
            if k in current_values and v in current_values[k]:
                f = Find.query.filter_by(item_id=self.id, key=k, value=v).first()
            else:
                f = Find(item_id=self.id, key=k)
            if f.value != v:
                # findvalue is the normalized form used for matching
                f.findvalue = unicodedata.normalize('NFKD', v).lower()
                f.value = v
            if k in self.filter_keys:
                sort_type = utils.get_by_id(settings.config['itemKeys'], k).get('sortType')
                if sort_type == 'person':
                    f.sortvalue = get_sort_name(f.value, commit=commit)
                else:
                    f.sortvalue = f.value
                if f.sortvalue:
                    f.sortvalue = ox.sort_string(unicodedata.normalize('NFKD', f.sortvalue)).lower()
                else:
                    f.sortvalue = None
            state.db.session.add(f)

        keys = []
        for key in config['itemKeys']:
            # a key is findable if flagged find/filter, typed as string,
            # or a sortable boolean
            if key.get('find') or \
                    key.get('filter') or key.get('type') in [['string'], 'string'] or \
                    (key.get('type') == 'boolean' and key.get('sort')):
                value = self.json().get(key['id'], None)
                if key.get('filterMap') and value:
                    # reduce the value to the portion matched by filterMap
                    value = re.compile(key.get('filterMap')).findall(value)
                    if value:
                        value = value[0]
                if key.get('type') == 'boolean':
                    value = True if value else False
                    value = str(value).lower()
                if value:
                    keys.append(key['id'])
                    if isinstance(value, dict):
                        value = ' '.join(list(value.values()))
                    if not isinstance(value, list):
                        value = [value]
                    value = [
                        v.decode('utf-8') if isinstance(v, bytes) else v
                        for v in value
                    ]
                    for v in value:
                        add(key['id'], v)
                    # delete rows whose value is gone from this key
                    if key['id'] in current_values:
                        removed_values = current_values[key['id']] - set(value)
                        if removed_values:
                            for f in Find.query.filter_by(item_id=self.id,
                                    key=key['id']).filter(Find.value.in_(removed_values)):
                                state.db.session.delete(f)
        # delete rows for keys that no longer have any value at all
        removed_keys = set(current_values) - set(keys)
        if removed_keys:
            for f in Find.query.filter_by(item_id=self.id).filter(Find.key.in_(removed_keys)):
                state.db.session.delete(f)
|
2014-05-04 17:26:43 +00:00
|
|
|
|
2016-02-11 06:10:09 +00:00
|
|
|
def update_mediastate(self):
|
|
|
|
# available, unavailable, transferring
|
2016-02-11 15:55:41 +00:00
|
|
|
if state.downloads:
|
|
|
|
t = state.downloads.transfers.get(self.id)
|
|
|
|
if t and t.get('added') and t.get('progress', 0) < 1:
|
|
|
|
self.info['mediastate'] = 'transferring'
|
|
|
|
else:
|
|
|
|
self.info['mediastate'] = 'available' if self.files.count() else 'unavailable'
|
2014-05-04 17:26:43 +00:00
|
|
|
else:
|
2016-02-10 14:01:33 +00:00
|
|
|
self.info['mediastate'] = 'available' if self.files.count() else 'unavailable'
|
2016-02-11 06:10:09 +00:00
|
|
|
|
|
|
|
def update(self, modified=None, commit=True):
|
|
|
|
self.update_mediastate()
|
2016-01-11 13:43:54 +00:00
|
|
|
if modified:
|
|
|
|
self.modified = modified
|
|
|
|
else:
|
|
|
|
self.modified = datetime.utcnow()
|
2016-02-10 14:01:33 +00:00
|
|
|
self.update_sort(commit=commit)
|
|
|
|
self.update_find(commit=commit)
|
|
|
|
if commit:
|
|
|
|
self.save()
|
|
|
|
else:
|
|
|
|
state.db.session.add(self)
|
2014-05-04 17:26:43 +00:00
|
|
|
|
|
|
|
def save(self):
|
2014-08-09 16:14:14 +00:00
|
|
|
state.db.session.add(self)
|
|
|
|
state.db.session.commit()
|
2014-05-04 17:26:43 +00:00
|
|
|
|
2014-05-22 14:20:40 +00:00
|
|
|
def delete(self, commit=True):
|
|
|
|
Sort.query.filter_by(item_id=self.id).delete()
|
2016-02-11 15:55:41 +00:00
|
|
|
if state.downloads and self.id in state.downloads.transfers:
|
|
|
|
del state.downloads.transfers[self.id]
|
2014-08-11 18:10:07 +00:00
|
|
|
state.db.session.delete(self)
|
2016-01-16 10:57:52 +00:00
|
|
|
icons.clear('cover:%s' % self.id)
|
|
|
|
icons.clear('preview:%s' % self.id)
|
2014-05-22 14:20:40 +00:00
|
|
|
if commit:
|
2014-08-09 16:14:14 +00:00
|
|
|
state.db.session.commit()
|
2014-05-22 14:20:40 +00:00
|
|
|
|
2016-01-03 18:03:19 +00:00
|
|
|
    # user-editable metadata keys; anything else found in meta is
    # dropped by update_metadata()
    meta_keys = (
        'author',
        'categories',
        'cover',
        'date',
        'description',
        'edition',
        'isbn',
        'language',
        'pages',
        'place',
        'publisher',
        'series',
        'sharemetadata',
        'tableofcontents',
        'title',
        'sorttitle'
    )
|
2014-05-21 00:02:21 +00:00
|
|
|
|
2016-01-19 10:05:16 +00:00
|
|
|
    def update_metadata(self, data, modified=None):
        """Apply user edits from `data` to meta and record the change.

        Only keys listed in meta_keys are accepted; stale keys already
        in meta are removed. Emits an 'edititem' changelog record for
        changes made by the local user.
        """
        update = False
        record = {}
        for key in self.meta_keys:
            if key in data:
                if self.meta.get(key) != data[key]:
                    record[key] = data[key]
                    self.meta[key] = data[key]
                    update = True
        # purge keys that are no longer editable metadata
        for key in list(self.meta):
            if key not in self.meta_keys:
                del self.meta[key]
                update = True
        if update:
            self.update(modified)
            self.save()
            if 'cover' in record:
                self.update_cover()
        user = state.user()
        if record and user in self.users:
            add_record('edititem', self.id, record, _ts=modified)
        if 'sharemetadata' in record and not record['sharemetadata']:
            # sharing was just turned off: pull metadata from peers again
            self.sync_metadata()
|
2014-05-19 20:58:00 +00:00
|
|
|
|
2016-01-19 10:05:16 +00:00
|
|
|
def edit(self, data, modified=None):
|
|
|
|
self.update_metadata(data, modified)
|
2015-12-01 11:09:50 +00:00
|
|
|
for f in self.files.all():
|
|
|
|
f.move()
|
2014-05-21 00:02:21 +00:00
|
|
|
|
2016-01-19 10:05:16 +00:00
|
|
|
    def get_hash(self):
        # hash over the item's metadata, used to detect changes on peers
        return utils.get_meta_hash(self.meta)
|
2016-01-19 10:05:16 +00:00
|
|
|
|
2016-01-25 17:32:04 +00:00
|
|
|
def get_sorttitle(self):
|
|
|
|
title = self.meta.get('sorttitle')
|
|
|
|
if title is None:
|
|
|
|
title = self.meta.get('title', 'Untitled')
|
|
|
|
title = ox.get_sort_title(title)
|
|
|
|
return title
|
|
|
|
|
2016-01-19 10:05:16 +00:00
|
|
|
    def sync_metadata(self):
        """Pull metadata from a peer if this item does not share its own.

        Peers are ranked with user_sort_key; the first one with
        sharemetadata set wins. Otherwise, when the item is not
        available locally, the first peer that has it at all is used.
        """
        if self.meta.get('sharemetadata'):
            # local metadata is authoritative, nothing to do
            return
        peers = [u for u in self.users if u.id != settings.USER_ID]
        peers.sort(key=lambda u: utils.user_sort_key(u.json()))
        sync_from = None
        first_peer = None
        # get first peer with sharemetadata set
        for u in peers:
            peer = utils.get_peer(u.id)
            if self.id in peer.library:
                m = peer.library[self.id].get('meta')
            else:
                m = None
            if m:
                if m.get('sharemetadata'):
                    sync_from = u.id
                    break
                if not first_peer:
                    first_peer = u.id
        # or fall back to first peer that has this item
        # in case it's not available locally
        if not sync_from and self.info.get('mediastate') != 'available' and first_peer:
            # logger.debug('syncing from first peer that has item %s', first_peer)
            sync_from = first_peer
        if sync_from:
            peer = utils.get_peer(sync_from)
            data_hash = peer.get_metahash(self.id)
            item = peer.library[self.id]
            sync_meta = item['meta']
            sync_modified = item.get('modified')
            if self.get_hash() != data_hash:
                logger.debug('update %s with metadata from %s', self, sync_from)
                record = {}
                for key in sync_meta:
                    if key != 'sharemetadata' and self.meta.get(key) != sync_meta[key]:
                        record[key] = self.meta[key] = sync_meta[key]
                # keys the peer does not have are reset to empty
                for key in set(self.meta)-set(sync_meta):
                    record[key] = self.meta[key] = [] if key in self.array_keys else ''
                self.update(sync_modified)
                self.save()
                for f in self.files.all():
                    f.move()
                user = state.user()
                if record and user in self.users:
                    add_record('edititem', self.id, record, _ts=self.modified)
                if 'cover' in record:
                    if state.tasks:
                        state.tasks.queue('getcover', self.id)
|
2016-01-19 10:05:16 +00:00
|
|
|
|
2014-05-21 00:02:21 +00:00
|
|
|
    def extract_preview(self):
        """Render a preview image from the local file.

        Without a local file, falls back to requesting a preview from
        the first peer that can deliver one (no return value then).
        """
        path = self.get_path()
        if path:
            # dispatch to the media module matching the file extension
            return getattr(media, self.info['extension']).cover(path)
        else:
            for u in self.users:
                if u.id != settings.USER_ID:
                    if state.nodes.download_preview(u.id, self.id):
                        break
|
2014-05-04 17:26:43 +00:00
|
|
|
|
2016-01-21 07:05:49 +00:00
|
|
|
def get_preview(self):
|
2016-06-24 10:27:05 +00:00
|
|
|
key = 'preview:%s' % self.id
|
2016-01-21 07:05:49 +00:00
|
|
|
data = icons[key]
|
|
|
|
if not data:
|
|
|
|
preview = self.extract_preview()
|
|
|
|
if preview:
|
|
|
|
icons[key] = preview
|
|
|
|
|
2016-01-16 05:17:52 +00:00
|
|
|
    def update_preview(self):
        """(Re)extract the preview icon and keep ratio info in sync."""
        logger.debug('%s update_preview', self.id)
        key = 'preview:%s' % self.id
        preview = self.extract_preview()
        if preview:
            icons[key] = preview
            self.info['previewRatio'] = get_ratio(preview)
            if 'coverRatio' not in self.info:
                # no cover yet — use the preview's ratio as a stand-in
                self.info['coverRatio'] = self.info['previewRatio']
        else:
            del icons[key]
            if 'previewRatio' in self.info:
                del self.info['previewRatio']
        if not preview:
            # fall back to the cover's ratio, or drop the key entirely
            if 'coverRatio' in self.info:
                self.info['previewRatio'] = self.info['coverRatio']
            elif 'previewRatio' in self.info:
                del self.info['previewRatio']
        # invalidate scaled/cached variants of the old preview
        icons.clear('preview:%s:' % self.id)
        logger.debug('%s update_preview done', self.id)
|
2016-01-16 05:17:52 +00:00
|
|
|
|
2019-01-31 09:33:54 +00:00
|
|
|
    def update_cover(self):
        # NOTE(review): download_cover is not defined or imported in this
        # part of the file — presumably a module-level helper; verify.
        if state.online:
            download_cover(self.id)
        elif state.tasks:
            # offline: defer the fetch to the task queue
            state.tasks.queue('getcover', self.id)
|
2019-01-31 09:33:54 +00:00
|
|
|
|
|
|
|
    def update_icons(self):
        # refresh both cover and preview icons
        self.update_cover()
        self.update_preview()
|
2014-05-04 17:26:43 +00:00
|
|
|
|
2016-01-11 13:43:54 +00:00
|
|
|
    def load_metadata(self):
        '''
        load metadata from user_metadata or get via isbn?
        '''
        # move extracted metadata keys from info into the editable meta;
        # 'pages' is the only key also kept in info
        for key in self.meta_keys:
            if key in self.info:
                if key not in self.meta:
                    self.meta[key] = self.info[key]
                if key != 'pages':
                    del self.info[key]

        # FIXME get from user_meta
        if state.online:
            if 'isbn' in self.meta:
                # enrich metadata via an online isbn lookup
                data = meta.lookup_isbn(self.meta['isbn'])
                if data:
                    for key in data:
                        self.meta[key] = data[key]
|
2014-05-04 17:26:43 +00:00
|
|
|
|
2014-05-12 23:43:27 +00:00
|
|
|
def queue_download(self):
|
|
|
|
u = state.user()
|
2016-06-24 10:27:05 +00:00
|
|
|
if self.id not in state.downloads.transfers:
|
2016-02-24 06:52:36 +00:00
|
|
|
state.downloads.transfers[self.id] = {
|
|
|
|
'added': datetime.utcnow(),
|
|
|
|
'progress': 0
|
|
|
|
}
|
|
|
|
logger.debug('queue %s for download', self.id)
|
2016-06-24 10:27:05 +00:00
|
|
|
if u not in self.users:
|
2016-01-06 18:06:48 +00:00
|
|
|
self.add_user(u)
|
2014-05-12 23:43:27 +00:00
|
|
|
|
2014-05-04 17:26:43 +00:00
|
|
|
    def save_file(self, content):
        """Store downloaded `content` as this item's file.

        Verifies the content hash, writes the file to the library,
        imports its metadata, records changelog entries and finishes
        the transfer. Returns True on success.
        """
        u = state.user()
        f = File.get(self.id)
        content_id = media.get_id(data=content)
        if content_id != self.id:
            # content does not hash to the expected item id — reject
            logger.debug('INVALID CONTENT %s vs %s', self.id, content_id)
            return False
        if not f:
            path = '.import/%s.%s' % (self.id, self.info['extension'])
            info = self.info.copy()
            # purely local state must not end up in the file record
            for key in ('mediastate', 'coverRatio', 'previewRatio'):
                if key in info:
                    del info[key]
            f = File.get_or_create(self.id, info, path=path)
            path = self.get_path()
            if not os.path.exists(path):
                ox.makedirs(os.path.dirname(path))
                with open(path, 'wb') as fd:
                    fd.write(content)
                f.info = media.metadata(path)
                f.save()
            # adopt extracted keys that the user has not set
            for key in ('tableofcontents', ):
                if key not in self.meta and key in f.info:
                    self.meta[key] = f.info[key]
            if u not in self.users:
                self.add_user(u)
            if state.downloads and self.id in state.downloads.transfers:
                del state.downloads.transfers[self.id]
            self.added = datetime.utcnow()
            add_record('additem', self.id, f.info)
            add_record('edititem', self.id, self.meta)
            # record list membership for the user's named lists
            for l in self.lists.filter_by(user_id=settings.USER_ID):
                if l.name != '' and l.name != 'Inbox':
                    add_record('addlistitems', l.name, [self.id])
            self.update()
            f.move()
            self.update_icons()
            self.save()
            trigger_event('transfer', {
                'id': self.id, 'progress': 1
            })
            return True
        else:
            logger.debug('TRIED TO SAVE EXISTING FILE!!!')
            if state.downloads and self.id in state.downloads.transfers:
                del state.downloads.transfers[self.id]
                self.update()
                return True
        return False
|
|
|
|
|
2019-02-13 07:06:25 +00:00
|
|
|
    def missing_file(self):
        # flag an item whose file vanished from disk; persisted on next commit
        logger.debug('file is missing! %s: %s', self.id, self.get_path())
        self.info['missing'] = True
        state.db.session.add(self)
|
|
|
|
|
2014-05-17 11:45:57 +00:00
|
|
|
def remove_file(self):
|
|
|
|
for f in self.files.all():
|
|
|
|
path = f.fullpath()
|
|
|
|
if os.path.exists(path):
|
2017-05-30 19:02:58 +00:00
|
|
|
mode = 0o644
|
2016-02-23 11:57:15 +00:00
|
|
|
try:
|
2017-05-30 19:02:58 +00:00
|
|
|
os.chmod(path, mode)
|
2016-02-23 11:57:15 +00:00
|
|
|
os.unlink(path)
|
|
|
|
remove_empty_folders(os.path.dirname(path))
|
|
|
|
except:
|
|
|
|
pass
|
2014-08-09 16:14:14 +00:00
|
|
|
state.db.session.delete(f)
|
2014-05-17 11:45:57 +00:00
|
|
|
user = state.user()
|
2014-05-19 01:36:37 +00:00
|
|
|
if user in self.users:
|
|
|
|
self.users.remove(user)
|
2014-05-19 18:12:02 +00:00
|
|
|
for l in self.lists.filter_by(user_id=user.id):
|
|
|
|
l.items.remove(self)
|
2016-01-22 05:47:32 +00:00
|
|
|
if self.meta.get('sharemetadata'):
|
|
|
|
self.meta['sharemetadata'] = False
|
2014-05-17 11:45:57 +00:00
|
|
|
if not self.users:
|
2014-05-22 14:20:40 +00:00
|
|
|
self.delete()
|
2014-05-17 11:45:57 +00:00
|
|
|
else:
|
2016-01-19 05:21:27 +00:00
|
|
|
self.added = None
|
2014-05-17 11:45:57 +00:00
|
|
|
self.update()
|
2016-02-18 06:45:15 +00:00
|
|
|
if state.downloads:
|
|
|
|
if self.id in state.downloads.transfers:
|
|
|
|
del state.downloads.transfers[self.id]
|
2017-06-03 20:50:14 +00:00
|
|
|
add_record('removeitem', self.id)
|
2014-05-17 11:45:57 +00:00
|
|
|
|
2014-05-21 22:41:29 +00:00
|
|
|
class Sort(db.Model):
    # one row per item holding pre-computed sort values; the actual sort
    # columns are attached dynamically from config['itemKeys'] below
    __tablename__ = 'sort'

    item_id = sa.Column(sa.String(32), sa.ForeignKey('item.id'), primary_key=True)
    item = sa.orm.relationship('Item', backref=sa.orm.backref('sort', lazy='dynamic'))

    def __repr__(self):
        return '%s_sort' % self.item_id

    @classmethod
    def get(cls, item_id):
        # the sort row for an item, or None
        return cls.query.filter_by(item_id=item_id).first()

    @classmethod
    def get_or_create(cls, item_id, commit=True):
        # fetch or create the sort row; commit can be deferred by
        # callers that batch updates
        f = cls.get(item_id)
        if not f:
            f = cls(item_id=item_id)
            state.db.session.add(f)
            if commit:
                state.db.session.commit()
        return f
|
|
|
|
|
2017-05-05 09:42:39 +00:00
|
|
|
|
2016-01-16 15:57:15 +00:00
|
|
|
# attach one sort column per sortable config key to the Sort model and
# remember the key ids on Item.sort_keys
Item.sort_keys = []
for key in config['itemKeys']:
    if key.get('sort'):
        sort_type = key.get('sortType', key['type'])
        if sort_type == 'integer':
            col = sa.Column(sa.BigInteger(), index=True)
        elif sort_type == 'float':
            col = sa.Column(sa.Float(), index=True)
        elif sort_type == 'date':
            col = sa.Column(sa.DateTime(), index=True)
        elif sort_type == 'boolean':
            col = sa.Column(sa.Boolean(), index=True)
        else:
            col = sa.Column(sa.String(1000), index=True)
        setattr(Sort, '%s' % key['id'], col)
        Item.sort_keys.append(key['id'])

# derived key lists used by Item.json / update_find
Item.id_keys = ['isbn', 'lccn', 'olid', 'oclc', 'asin']
Item.item_keys = config['itemKeys']
Item.filter_keys = [k['id'] for k in config['itemKeys'] if k.get('filter')]
Item.array_keys = [k['id'] for k in config['itemKeys'] if isinstance(k['type'], list)]
|
2014-05-04 17:26:43 +00:00
|
|
|
|
|
|
|
class Find(db.Model):
    # inverted index used by queries: one row per (item, key, value)
    __tablename__ = 'find'

    id = sa.Column(sa.Integer(), primary_key=True)
    item_id = sa.Column(sa.String(32), sa.ForeignKey('item.id'))
    item = sa.orm.relationship('Item', backref=sa.orm.backref('find_', lazy='dynamic'))
    key = sa.Column(sa.String(200), index=True)
    value = sa.Column(sa.Text())                  # original value
    findvalue = sa.Column(sa.Text(), index=True)  # normalized for matching
    sortvalue = sa.Column(sa.Text())              # normalized for filter sorting

    def __repr__(self):
        return '%s=%s' % (self.key, self.findvalue)

    @classmethod
    def get(cls, item, key):
        # first row for (item, key), or None
        return cls.query.filter_by(item_id=item, key=key).first()

    @classmethod
    def get_or_create(cls, item, key, commit=True):
        # fetch or create a row for (item, key); commit can be deferred
        # by callers that batch updates
        f = cls.get(item, key)
        if not f:
            f = cls(item_id=item, key=key)
            state.db.session.add(f)
            if commit:
                state.db.session.commit()
        return f
|
|
|
|
|
|
|
|
class File(db.Model):
    """
    A file on disk belonging to the library, identified by its content
    hash (`sha1`, shared with the id of the owning Item).
    """
    __tablename__ = 'file'

    created = sa.Column(sa.DateTime())
    modified = sa.Column(sa.DateTime())

    # content id; 32 chars — NOTE(review): looks like an encoded digest
    # (matching Item.id) rather than a 40-char hex sha1 — confirm
    sha1 = sa.Column(sa.String(32), primary_key=True)
    # location relative to the '<libraryPath>/Books' prefix, see fullpath()
    path = sa.Column(sa.String(2048))

    # extracted file metadata, persisted as JSON via json_pickler
    info = sa.Column(MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))

    item_id = sa.Column(sa.String(32), sa.ForeignKey('item.id'))
    item = sa.orm.relationship('Item', backref=sa.orm.backref('files', lazy='dynamic'))

    @classmethod
    def get(cls, sha1):
        """Return the File with the given sha1, or None."""
        return cls.query.filter_by(sha1=sha1).first()

    @classmethod
    def get_or_create(cls, sha1, info=None, path=None):
        """
        Return the File for `sha1`, creating it (together with its Item)
        if missing. `info` and `path` are only applied on creation.
        """
        f = cls.get(sha1)
        if not f:
            f = cls(sha1=sha1)
            if info:
                f.info = info
            if path:
                f.path = path
            # the owning Item shares the file's id
            f.item_id = Item.get_or_create(id=sha1, info=info).id
            state.db.session.add(f)
            state.db.session.commit()
        return f

    def __repr__(self):
        return self.sha1

    def __init__(self, sha1):
        self.sha1 = sha1
        self.created = datetime.utcnow()
        self.modified = datetime.utcnow()

    def fullpath(self):
        """Absolute path of this file below '<libraryPath>/Books'."""
        prefs = settings.preferences
        # the configured prefix uses '/' separators; convert to os.sep
        prefix = os.sep.join(os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books/').split('/'))
        return os.path.normpath(os.path.join(prefix, self.path))

    def make_readonly(self):
        """chmod the file to 0444 to protect it from accidental edits."""
        current_path = self.fullpath()
        if os.path.exists(current_path):
            mode = os.stat(current_path)[stat.ST_MODE]
            readonly = 0o444
            if mode != readonly:
                os.chmod(current_path, readonly)

    def move(self):
        """
        Move the file to its canonical library location
        '<initial>/<author>/<title> (<publisher> <year>).<ext>' derived
        from the item metadata, update self.path and remove now-empty
        source folders. No-op when the file is missing on disk or is
        already in place.
        """
        def format_underscores(text):
            # replace filename-hostile characters; parameter renamed from
            # 'string' to stop shadowing the string module used below
            return re.sub(r'^\.|\.$|:|/|\?|<|>|\\|\*|\||"', '_', text)
        prefs = settings.preferences
        prefix = os.sep.join(os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books/').split('/'))
        if not self.item:
            return
        j = self.item.json(keys=['title', 'author', 'publisher', 'date', 'extension'])

        current_path = self.fullpath()
        if not os.path.exists(current_path):
            logger.debug('file is missing. %s', current_path)
            return

        author = '; '.join([get_sort_name(a) for a in j.get('author', [])])
        if not author:
            author = 'Unknown Author'
        if ' (Ed.)' in author:
            # collect the editor marker once at the end of the joined names
            author = author.replace(' (Ed.)', '') + ' (Ed.)'
        if len(author) > 200:
            author = 'Various Authors'
        title = j.get('title', 'Untitled')
        extension = j['extension']

        if len(title) > 100:
            title = title[:100]

        title = format_underscores(title)
        author = format_underscores(author)
        publisher = j.get('publisher')
        if publisher:
            extra = ', '.join(publisher)
        else:
            extra = ''
        date = j.get('date')
        if date and len(date) >= 4:
            extra += ' ' + date[:4]
        if extra:
            extra = format_underscores(extra)
            title = '%s (%s)' % (title, extra.strip())
        filename = '%s.%s' % (title, extension)
        # shelf folder is the author's initial with accents stripped via
        # NFD decomposition; anything outside A-Z files under '_'
        first = unicodedata.normalize('NFD', author[0].upper())[0].upper()
        if first not in string.ascii_uppercase:
            first = '_'
        new_path = os.path.join(first, author, filename)
        new_path = new_path.replace('\x00', '')
        new_path = new_path.replace('\n', ' ').replace('  ', ' ')
        new_path = ox.decode_html(new_path)
        if self.path == new_path:
            return
        h = ''
        # on (case-insensitive) collisions append a growing prefix of the
        # sha1 until the name is free or points at this very file
        while utils.iexists(os.path.join(prefix, new_path)):
            h = self.sha1[:len(h)+1]
            filename = '%s.%s.%s' % (title, h, extension)
            new_path = os.path.join(first, author, filename)
            if current_path == os.path.join(prefix, new_path):
                break
        # Compute the target unconditionally: the cleanup loop below uses
        # `path` even when the file is not physically moved. Previously it
        # was assigned only inside the branch, raising NameError whenever
        # same_path() matched while self.path differed (e.g. by case).
        path = os.path.join(prefix, new_path)
        if not same_path(self.path, new_path):
            ox.makedirs(os.path.dirname(path))
            # make writable before moving (library files are kept read-only)
            mode = 0o644
            try:
                os.chmod(current_path, mode)
                shutil.move(current_path, path)
            except Exception:
                logger.debug('failed to move %s to %s', current_path, path, exc_info=True)
                # drop a partial target so we don't leave duplicates behind
                if os.path.exists(path):
                    os.unlink(path)
                return
            logger.debug('mv "%s" "%s"', self.path, new_path)
        self.path = new_path
        self.save()
        # prune now-empty folders at both the old and the new location
        for folder in set(os.path.dirname(p) for p in [current_path, path]):
            remove_empty_folders(folder)
        self.make_readonly()

    def save(self):
        state.db.session.add(self)
        state.db.session.commit()
|
2014-05-21 22:41:29 +00:00
|
|
|
|
2016-01-14 13:09:56 +00:00
|
|
|
def remove_unused_names():
    """Delete Person rows whose sortname no longer backs any item author."""
    used = set()
    for item in Item.query:
        for author in item.meta.get('author', []):
            used.add(get_sort_name(author))
    unused = Person.query.filter(Person.sortname.notin_(list(used)))
    for person in unused:
        state.db.session.delete(person)
    state.db.session.commit()
|
2016-01-16 15:57:15 +00:00
|
|
|
|
|
|
|
def update_sort_table():
    """
    Migrate the 'sort' table to match the currently configured sort keys:
    drop stale columns, add missing ones with indexes, backfill missing
    indexes, and make sure the 'find' table has its 'sortvalue' column.
    """
    current = db.get_table_columns('sort')
    # columns present in the table but no longer configured
    drop_columns = list(set(current) - set(Item.sort_keys+['item_id']))
    if drop_columns:
        db.drop_columns('sort', drop_columns)
    # configured keys that have no column yet
    add_columns = list(set(Item.sort_keys)-set(current+['item_id']))
    if add_columns:
        # derive each column's DDL from the CREATE TABLE statement that
        # SQLAlchemy would emit for the Sort model (lines are tab-indented,
        # comma-terminated — hence the '\t%s ' match and [:-1] strip)
        create_table = str(CreateTable(Sort.__table__).compile(db.engine)).split('\n')
        sql = []
        for col in add_columns:
            add = [r for r in create_table if '\t%s ' % col in r][0].strip()[:-1]
            sql.append('ALTER TABLE sort ADD '+add)
            sql.append('CREATE INDEX ix_sort_{col} ON sort ({col})'.format(col=col))
        with db.session() as s:
            for q in sql:
                s.connection().execute(q)
            s.commit()
    sql = []
    # create any indexes still missing for existing sort columns
    layout = db.get_layout()
    sort_indexes = [i[len('ix_sort_'):] for i in layout['indexes'] if i.startswith('ix_sort_')]
    for col in set(Item.sort_keys)-set(sort_indexes):
        sql.append('CREATE INDEX ix_sort_{col} ON sort ({col})'.format(col=col))
    # older databases lack find.sortvalue; add it the same way
    if 'sortvalue' not in db.get_table_columns('find'):
        create_table = str(CreateTable(Find.__table__).compile(db.engine)).split('\n')
        col = 'sortvalue'
        add = [r for r in create_table if '\t%s ' % col in r][0].strip()[:-1]
        sql.append('ALTER TABLE find ADD '+add)
    if sql:
        with db.session() as s:
            for q in sql:
                s.connection().execute(q)
            s.commit()
|
2016-01-19 10:05:16 +00:00
|
|
|
|
2019-01-31 09:33:54 +00:00
|
|
|
def download_cover(id):
    """
    Fetch the cover image for item `id` from its 'cover' metadata url,
    store it in the icon cache and record its aspect ratio in item.info.

    On failure (no url, fetch or decode error) the cover icon is removed
    and coverRatio falls back to previewRatio when one is known.
    """
    key = 'cover:%s' % id
    cover = None
    ratio = None
    with db.session():
        i = Item.get(id)
        if i:
            url = i.meta.get('cover')
        else:
            url = None

    logger.debug('download cover %s %s', id, url)
    if url:
        try:
            cover = ox.net.read_url(url)
            ratio = get_ratio(cover)
        except Exception:
            logger.debug('unable to read cover url %s', url)
            cover = None
    with db.session():
        i = Item.get(id, for_update=True)
        if i:
            if cover:
                # reuse the ratio computed above — the original called
                # get_ratio(cover) a second time, decoding the image twice
                i.info['coverRatio'] = ratio
            else:
                if 'previewRatio' in i.info:
                    i.info['coverRatio'] = i.info['previewRatio']
                elif 'coverRatio' in i.info:
                    del i.info['coverRatio']
            i.save()

    if cover:
        icons[key] = cover
    else:
        del icons[key]
    # drop any cached resized variants ('cover:<id>:<size>')
    icons.clear('cover:%s:' % id)
    logger.debug('%s update_cover done', id)
|
2016-01-19 10:05:16 +00:00
|
|
|
|
2016-02-11 06:14:40 +00:00
|
|
|
def get_cover(id):
    """Download the cover for `id` now if online, otherwise retry later."""
    delay = 60
    if not state.online:
        # offline: re-queue the task after a delay and bail out
        state.main.call_later(delay, lambda: state.tasks.queue('getcover', id))
        return
    # logger.debug('get_cover(%s)', id)
    download_cover(id)
|
|
|
|
|
|
|
|
|
2016-01-24 07:44:43 +00:00
|
|
|
def get_preview(id):
    """Generate the preview for `id` if online, otherwise defer and retry."""
    if not state.online:
        # offline: put the task back on the queue and back off briefly
        state.tasks.queue('getpreview', id)
        time.sleep(0.5)
        return
    # logger.debug('get_preview(%s)', id)
    with db.session():
        item = Item.get(id)
        if item:
            item.get_preview()
|
2016-01-24 07:44:43 +00:00
|
|
|
|
2016-01-19 10:05:16 +00:00
|
|
|
def sync_metadata(ids=None):
    """
    Refresh metadata for the given item ids (all items when None).

    Processes at most `step` ids now; the remainder is re-queued as a
    'syncmetadata' task — after `delay` seconds normally, immediately
    when the application is shutting down.
    """
    # logger.debug('sync_metadata(%s)', len(ids) if len(ids) > 10 else ids)
    step = 1000
    delay = 10
    with db.session():
        if not ids:
            ids = [i.id for i in Item.query.options(load_only('id'))]
        if len(ids) > step:
            later = ids[step:]
            ids = ids[:step]
        else:
            later = []
        if ids:
            done = set()
            for i in Item.query.filter(Item.id.in_(ids)):
                i.sync_metadata()
                done.add(i.id)
                if state.shutdown:
                    # re-queue everything not processed yet and stop the
                    # loop; without the break the next iteration would
                    # evaluate set(None) and raise TypeError
                    later = list((set(later) | set(ids)) - done)
                    if later and state.tasks:
                        state.tasks.queue('syncmetadata', later)
                    later = None
                    break
    if later:
        if state.main and state.tasks:
            state.main.call_later(delay, lambda: state.tasks.queue('syncmetadata', later))
    # else:
    #     logger.debug('sync_metadata done')
|