# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4

import os

import settings
import db
from db import run_sql

from user.models import List, User


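# Create the initial SQLite schema if the database file does not exist yet,
# seed the default lists and stamp the db version, then reconcile the schema.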
def create_db():
    if not os.path.exists(settings.db_path):
        sql = '''
CREATE TABLE item (
    created DATETIME,
    modified DATETIME,
    id VARCHAR(32) NOT NULL,
    info BLOB,
    meta BLOB,
    added DATETIME,
    accessed DATETIME,
    timesaccessed INTEGER,
    PRIMARY KEY (id)
);
CREATE TABLE changelog (
    id INTEGER NOT NULL,
    created DATETIME,
    timestamp BIGINT,
    user_id VARCHAR(43),
    revision BIGINT,
    data TEXT,
    sig VARCHAR(96),
    PRIMARY KEY (id)
);
CREATE TABLE user (
    created DATETIME,
    modified DATETIME,
    id VARCHAR(43) NOT NULL,
    info BLOB,
    nickname VARCHAR(256),
    pending VARCHAR(64),
    queued BOOLEAN,
    peered BOOLEAN,
    online BOOLEAN,
    PRIMARY KEY (id),
    UNIQUE (nickname),
    CHECK (queued IN (0, 1)),
    CHECK (peered IN (0, 1)),
    CHECK (online IN (0, 1))
);
CREATE TABLE metadata (
    created DATETIME,
    modified DATETIME,
    id INTEGER NOT NULL,
    "key" VARCHAR(256),
    value VARCHAR(256),
    data BLOB,
    PRIMARY KEY (id)
);
CREATE TABLE person (
    name VARCHAR(1024) NOT NULL,
    sortname VARCHAR,
    numberofnames INTEGER,
    PRIMARY KEY (name)
);
CREATE TABLE find (
    id INTEGER NOT NULL,
    item_id VARCHAR(32),
    "key" VARCHAR(200),
    value TEXT,
    findvalue TEXT,
    sortvalue TEXT,
    PRIMARY KEY (id),
    FOREIGN KEY(item_id) REFERENCES item (id)
);
CREATE INDEX ix_find_key ON find ("key");
CREATE TABLE list (
    id INTEGER NOT NULL,
    name VARCHAR,
    index_ INTEGER,
    type VARCHAR(64),
    "query" BLOB,
    user_id VARCHAR(43),
    PRIMARY KEY (id),
    FOREIGN KEY(user_id) REFERENCES user (id)
);
CREATE TABLE useritem (
    user_id VARCHAR(43),
    item_id VARCHAR(32),
    FOREIGN KEY(item_id) REFERENCES item (id),
    FOREIGN KEY(user_id) REFERENCES user (id)
);
CREATE TABLE sort (
    item_id VARCHAR(32) NOT NULL,
    title VARCHAR(1000),
    author VARCHAR(1000),
    publisher VARCHAR(1000),
    place VARCHAR(1000),
    date VARCHAR(1000),
    language VARCHAR(1000),
    pages BIGINT,
    extension VARCHAR(1000),
    size BIGINT,
    created DATETIME,
    added DATETIME,
    modified DATETIME,
    accessed DATETIME,
    timesaccessed BIGINT,
    mediastate VARCHAR(1000),
    transferadded DATETIME,
    transferprogress FLOAT,
    id VARCHAR(1000),
    isbn VARCHAR(1000),
    random BIGINT,
    PRIMARY KEY (item_id),
    FOREIGN KEY(item_id) REFERENCES item (id)
);
CREATE INDEX ix_sort_accessed ON sort (accessed);
CREATE INDEX ix_sort_added ON sort (added);
CREATE INDEX ix_sort_author ON sort (author);
CREATE INDEX ix_sort_created ON sort (created);
CREATE INDEX ix_sort_date ON sort (date);
CREATE INDEX ix_sort_extension ON sort (extension);
CREATE INDEX ix_sort_id ON sort (id);
CREATE INDEX ix_sort_isbn ON sort (isbn);
CREATE INDEX ix_sort_language ON sort (language);
CREATE INDEX ix_sort_mediastate ON sort (mediastate);
CREATE INDEX ix_sort_modified ON sort (modified);
CREATE INDEX ix_sort_pages ON sort (pages);
CREATE INDEX ix_sort_place ON sort (place);
CREATE INDEX ix_sort_publisher ON sort (publisher);
CREATE INDEX ix_sort_random ON sort (random);
CREATE INDEX ix_sort_size ON sort (size);
CREATE INDEX ix_sort_timesaccessed ON sort (timesaccessed);
CREATE INDEX ix_sort_title ON sort (title);
CREATE INDEX ix_sort_transferadded ON sort (transferadded);
CREATE INDEX ix_sort_transferprogress ON sort (transferprogress);
CREATE TABLE file (
    created DATETIME,
    modified DATETIME,
    sha1 VARCHAR(32) NOT NULL,
    path VARCHAR(2048),
    info BLOB,
    item_id VARCHAR(32),
    PRIMARY KEY (sha1),
    FOREIGN KEY(item_id) REFERENCES item (id)
);
CREATE TABLE listitem (
    list_id INTEGER,
    item_id VARCHAR(32),
    FOREIGN KEY(item_id) REFERENCES item (id),
    FOREIGN KEY(list_id) REFERENCES list (id)
);
PRAGMA journal_mode=WAL
'''
        # the PRAGMA has no trailing ';', so it ends up as the last statement of the split
        for statement in sql.split(';'):
            run_sql(statement)
        create_default_lists()
        settings.server['db_version'] = settings.DB_VERSION
    update_database()


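# Run one-off data/schema migrations. `old` and `new` are release version
# strings (e.g. '20160110-581-e08780a'); each guarded block runs only when
# upgrading across the corresponding release.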
def upgrade_db(old, new=None):
    def run_after(v):
        return old <= v and new > v

    if new:
        if old <= '20140525-92-eac91e7' and new > '20140525-92-eac91e7':
            with db.session():
                import user.models
                for u in user.models.User.query:
                    u.update_name()
                    u.save()
                import item.models
                for f in item.models.File.query:
                    changed = False
                    for key in ('mediastate', 'coverRatio', 'previewRatio'):
                        if key in f.info:
                            del f.info[key]
                            changed = True
                    if changed:
                        f.save()
        if old <= '20140526-118-d451eb3' and new > '20140526-118-d451eb3':
            with db.session():
                import item.models
                item.models.Find.query.filter_by(key='list').delete()
        if old <= '20160109-573-094097b' and new > '20160109-573-094097b':
            import sys
            if sys.platform.startswith('linux'):
                import integration
                integration.install_xdg()
        if old <= '20160110-581-e08780a' and new > '20160110-581-e08780a':
            with db.session():
                import item.models
                from item.icons import icons
                for f in item.models.File.query:
                    if f.info['extension'] == 'epub':
                        i = f.item
                        key = 'cover:%s' % i.id
                        cover = icons[key]
                        key = 'preview:%s' % i.id
                        preview = i.extract_preview()
                        update_item = False
                        if not preview:
                            del icons[key]
                            for resolution in (128, 256, 512):
                                del icons['%s:%s' % (key, resolution)]
                            if 'previewRatio' in i.info:
                                del i.info['previewRatio']
                                update_item = True
                        if not cover and 'coverRatio' in i.info:
                            del i.info['coverRatio']
                            update_item = True
                        if update_item:
                            i.save()
        if old <= '20160110-583-59a3709' and new > '20160110-583-59a3709':
            with db.session() as session:
                import ox
                from item.models import Item, Find
                from meta.utils import decode_html_data

                def cleanup_description(data):
                    if 'description' in data:
                        description = data['description']
                        description = description.replace('<br>', '\n').replace('</p><p>', '\n\n')
                        data['description'] = ox.strip_tags(description).strip()

                for i in Item.query:
                    changed = False
                    meta = decode_html_data(i.meta.copy())
                    cleanup_description(meta)
                    if meta != i.meta:
                        #print(i, i.meta, '\n', meta, '\n\n')
                        i.meta = meta
                        changed = True
                    info = decode_html_data(i.info.copy())
                    cleanup_description(info)
                    if info != i.info:
                        #print(i, i.info, '\n', info, '\n\n')
                        i.info = info
                        changed = True
                    if changed:
                        i.update_sort()
                        i.update_find()
                        i.save()
                # re-index items whose find values still contain HTML entities
                for tag in ('&#39;', '&quot;', '&#'):
                    items = set([f.item_id for f in Find.query.filter(Find.value.ilike('%'+tag+'%'))])
                    if items:
                        for i in Item.query.filter(Item.id.in_(items)):
                            #print(tag, i)
                            i.update_sort()
                            i.update_find()
                session.commit()
        if old <= '20160111-603-90648f9' and new > '20160111-603-90648f9':
            for f in settings.ui['filters']:
                if f['id'] == 'classification':
                    f['id'] = 'categories'
            settings.ui._save()
            if not db.table_exists('user_metadata'):
                run_sql('''CREATE TABLE user_metadata (
                    created DATETIME,
                    modified DATETIME,
                    id INTEGER NOT NULL,
                    item_id VARCHAR(32),
                    user_id VARCHAR(43),
                    data_hash VARCHAR(40),
                    data BLOB,
                    PRIMARY KEY (id),
                    FOREIGN KEY(user_id) REFERENCES user (id)
                )''')
                run_sql('CREATE UNIQUE INDEX IF NOT EXISTS user_metadata_index ON user_metadata(id, user_id)')
                run_sql('CREATE INDEX ix_user_metadata_data_hash ON user_metadata (data_hash)')
            from meta.utils import to_isbn13
            from item.models import Item
            from user.models import Metadata
            with db.session() as session:
                for i in Item.query:
                    update = False
                    if 'primaryid' in i.meta:
                        del i.meta['primaryid']
                        update = True
                    if 'primaryid' in i.info:
                        del i.info['primaryid']
                        update = True
                    for key in i.meta_keys:
                        if key not in i.meta and key in i.info:
                            i.meta[key] = i.info[key]
                            update = True
                    if 'isbn' in i.meta and isinstance(i.meta['isbn'], list):
                        isbns = [to_isbn13(isbn) for isbn in i.meta['isbn']]
                        isbns = [isbn for isbn in isbns if isbn]
                        if isbns:
                            i.meta['isbn'] = isbns[0]
                            if 'isbn' in i.info:
                                i.info['isbn'] = i.meta['isbn']
                        else:
                            del i.meta['isbn']
                            if 'isbn' in i.info:
                                del i.info['isbn']
                        update = True
                    if 'isbn' in i.meta and not i.meta['isbn']:
                        del i.meta['isbn']
                        update = True
                    if update:
                        session.add(i)
                        for u in i.users:
                            if u.id != settings.USER_ID:
                                Metadata.get_or_create(u.id, i.id, i.meta, False)
                session.commit()
        if run_after('20160111-617-206e39c'):
            from item.models import File
            import media
            with db.session() as session:
                for f in File.query:
                    if f.info.get('extension') == 'epub':
                        if 'tableofcontents' not in f.item.meta:
                            f.info = media.metadata(f.fullpath())
                            if 'tableofcontents' in f.info:
                                f.item.meta['tableofcontents'] = f.info['tableofcontents']
                                f.item.update()
                                session.add(f.item)
                            session.add(f)
                session.commit()
        if run_after('20160112-651-de984a3'):
            from item.models import File
            import media
            with db.session() as session:
                for f in File.query:
                    if f.info.get('extension') == 'pdf':
                        if 'tableofcontents' not in f.item.meta:
                            f.info = media.metadata(f.fullpath())
                            if 'tableofcontents' in f.info:
                                f.item.meta['tableofcontents'] = f.info['tableofcontents']
                                f.item.update()
                                session.add(f.item)
                            session.add(f)
                session.commit()

    if old <= '20151118-346-7e86e68':
        old_key = os.path.join(settings.data_path, 'node.ssl.key')
        if os.path.exists(old_key):
            os.unlink(old_key)
        key_path = os.path.join(settings.data_path, 'node.key')
        if os.path.exists(key_path):
            # rewrite rows that still reference the user id derived from the
            # old ed25519 node key to the current settings.USER_ID
            import ed25519
            with open(key_path, 'rb') as fd:
                sk = ed25519.SigningKey(fd.read())
            vk = sk.get_verifying_key()
            OLD_USER_ID = vk.to_ascii(encoding='base64').decode()
            statements = [
                "UPDATE user SET id = '{nid}' WHERE id = '{oid}'",
                "UPDATE list SET user_id = '{nid}' WHERE user_id = '{oid}'",
                "UPDATE useritem SET user_id = '{nid}' WHERE user_id = '{oid}'",
                "UPDATE changelog SET user_id = '{nid}' WHERE user_id = '{oid}'",
            ]
            for sql in statements:
                run_sql(sql.format(oid=OLD_USER_ID, nid=settings.USER_ID))
    if old <= '20151201-384-03c2439':
        with db.session():
            import item.models
            for i in item.models.Item.query:
                for f in i.files.all():
                    f.move()
    if old <= '20160103-423-05ca6c9':
        with db.session():
            import item.models
            for i in item.models.Item.query:
                if 'id' in i.meta:
                    del i.meta['id']
                    i.save()
    if old <= '20160106-497-c86ba8a':
        with db.session() as session:
            u = User.get(settings.USER_ID)
            if u:
                l = u.library
                for i in u.items.all():
                    if i not in l.items:
                        l.items.append(i)
                session.add(l)
            for u in User.query.filter_by(peered=True):
                l = u.library
                for i in u.items.all():
                    if i not in l.items:
                        l.items.append(i)
                session.add(l)
                l.items_count()
            session.commit()
    if old <= '20160107-508-a0c1970':
        with db.session() as session:
            for l in List.query.filter_by(name=' [2]'):
                if not len(l.items):
                    l.remove()
    if old <= '20160107-509-e0857fc':
        add_useritem_index()
        add_listitem_index()
    if old <= '20160111-603-90648f9' and not new:
        if not db.table_exists('user_metadata'):
            run_sql('''CREATE TABLE user_metadata (
                created DATETIME,
                modified DATETIME,
                id INTEGER NOT NULL,
                item_id VARCHAR(32),
                user_id VARCHAR(43),
                data_hash VARCHAR(40),
                data BLOB,
                PRIMARY KEY (id),
                FOREIGN KEY(user_id) REFERENCES user (id)
            )''')
            run_sql('CREATE UNIQUE INDEX IF NOT EXISTS user_metadata_index ON user_metadata(id, user_id)')
            run_sql('CREATE INDEX ix_user_metadata_data_hash ON user_metadata (data_hash)')


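# Make sure the configured default lists exist for a user (the local user by default).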
def create_default_lists(user_id=None):
    with db.session():
        user_id = user_id or settings.USER_ID
        user = User.get_or_create(user_id)
        user.update_name()
        for list in settings.config['lists']:
            l = List.get(user_id, list['title'])
            if not l:
                l = List.create(user_id, list['title'], list.get('query'))


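# Reconcile the live SQLite schema with the current models: create missing
# tables and indexes, drop stale indexes and refresh the sort table.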
def update_database():
    import sqlalchemy as sa
    from sqlalchemy.schema import CreateTable

    import changelog
    import item.models
    import item.person
    import user.models
    tables = [
        changelog.Changelog,
        item.models.File,
        item.models.Find,
        item.models.Item,
        item.models.Sort,
        item.models.user_items,
        item.person.Person,
        user.models.List,
        user.models.list_items,
        user.models.Metadata,
        user.models.User,
    ]
    indexes = [
        'CREATE INDEX ix_find_findvalue ON find (findvalue)',
        'CREATE INDEX ix_find_key ON find ("key")',
        'CREATE INDEX ix_useritem_user ON useritem ("user_id")',
        'CREATE INDEX ix_user_metadata_data_hash ON user_metadata (data_hash)',
        'CREATE UNIQUE INDEX listitem_index on listitem(list_id, item_id)',
        'CREATE UNIQUE INDEX useritem_index on useritem(user_id, item_id)',
        'CREATE UNIQUE INDEX user_metadata_index ON user_metadata(item_id, user_id)',
        'CREATE UNIQUE INDEX list_username_index on list(user_id, name)',
    ]
    layout = db.get_layout()
    sql = []
    for t in tables:
        if isinstance(t, sa.Table):
            table = t
        else:
            table = t.__table__
        name = table.name
        if name not in layout['tables']:
            create_table = str(CreateTable(table).compile(db.engine))
            sql.append(create_table)
    index_names = set()
    for index in indexes:
        name = index.split('INDEX ')[1].split()[0]
        if name not in layout['indexes']:
            if name == 'useritem_index':
                add_useritem_index()
            elif name == 'listitem_index':
                add_listitem_index()
            elif name == 'list_username_index':
                add_list_username_index()
            else:
                sql.append(index)
        index_names.add(name)
    # drop indexes that are no longer expected; ix_sort_* indexes are left alone
    for index in set(name for name in layout['indexes'] if not name.startswith('ix_sort_')) - index_names:
        sql.append('DROP INDEX ' + index)
    if sql:
        with db.session() as s:
            for q in sql:
                s.connection().execute(q)
            s.commit()

    import item.models
    item.models.update_sort_table()


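# Deduplicate listitem rows, then add a unique index on (list_id, item_id).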
def add_listitem_index():
    if db.table_exists('listitem'):
        with db.session() as session:
            sql = "SELECT COUNT(*) AS c,list_id,item_id FROM listitem GROUP BY list_id, item_id HAVING c>1"
            doubles = [r for r in session.execute(sql)]
            for r in doubles:
                params = {'list_id': r[1], 'item_id': r[2]}
                sql = "DELETE FROM listitem WHERE list_id = :list_id AND item_id = :item_id"
                session.execute(sql, params)
                sql = "INSERT INTO listitem (list_id, item_id) VALUES (:list_id, :item_id)"
                session.execute(sql, params)
            session.commit()
            sql = 'CREATE UNIQUE INDEX IF NOT EXISTS listitem_index on listitem(list_id,item_id)'
            session.execute(sql)
            session.commit()


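# Deduplicate useritem rows, then add a unique index on (user_id, item_id).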
def add_useritem_index():
    if db.table_exists('useritem'):
        with db.session() as session:
            sql = "SELECT COUNT(*) AS c,user_id,item_id FROM useritem GROUP BY user_id, item_id HAVING c>1"
            doubles = [r for r in session.execute(sql)]
            for r in doubles:
                params = {'user_id': r[1], 'item_id': r[2]}
                sql = "DELETE FROM useritem WHERE user_id = :user_id AND item_id = :item_id"
                session.execute(sql, params)
                sql = "INSERT INTO useritem (user_id, item_id) VALUES (:user_id, :item_id)"
                session.execute(sql, params)
            session.commit()
            sql = 'CREATE UNIQUE INDEX IF NOT EXISTS useritem_index on useritem(user_id,item_id)'
            session.execute(sql)
            session.commit()


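# Merge lists that share a public id into the first one, then enforce
# uniqueness of (user_id, name) on the list table.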
def add_list_username_index():
    from sqlalchemy.orm import load_only
    import user.models
    with db.session() as session:
        lists = {}
        for l in user.models.List.query.order_by('id'):
            if l.public_id in lists:
                ids = [i.id for i in l.get_items().options(load_only('id'))]
                lists[l.public_id].add_items(ids)
                session.delete(l)
            else:
                lists[l.public_id] = l
        session.commit()
        sql = 'CREATE UNIQUE INDEX IF NOT EXISTS list_username_index on list(user_id, name)'
        session.connection().execute(sql)