# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
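
# This module creates and migrates the local SQLite database: create_db()
# builds the initial schema, upgrade_db() applies release-specific one-off
# migrations, and update_database() keeps tables and indexes in sync with
# the SQLAlchemy models.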

import os

import settings
import db
from db import run_sql

from user.models import List, User

def create_db():
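    '''
    Create the initial database schema if the database file does not exist
    yet, then run update_database() to apply any newer schema changes.
    '''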
    if not os.path.exists(settings.db_path):
        sql = '''
        CREATE TABLE item (
            created DATETIME,
            modified DATETIME,
            id VARCHAR(32) NOT NULL,
            info BLOB,
            meta BLOB,
            added DATETIME,
            accessed DATETIME,
            timesaccessed INTEGER,
            PRIMARY KEY (id)
        );
        CREATE TABLE changelog (
            id INTEGER NOT NULL,
            created DATETIME,
            timestamp BIGINT,
            user_id VARCHAR(43),
            revision BIGINT,
            data TEXT,
            sig VARCHAR(96),
            PRIMARY KEY (id)
        );
        CREATE TABLE user (
            created DATETIME,
            modified DATETIME,
            id VARCHAR(43) NOT NULL,
            info BLOB,
            nickname VARCHAR(256),
            pending VARCHAR(64),
            queued BOOLEAN,
            peered BOOLEAN,
            online BOOLEAN,
            PRIMARY KEY (id),
            UNIQUE (nickname),
            CHECK (queued IN (0, 1)),
            CHECK (peered IN (0, 1)),
            CHECK (online IN (0, 1))
        );
        CREATE TABLE metadata (
            created DATETIME,
            modified DATETIME,
            id INTEGER NOT NULL,
            "key" VARCHAR(256),
            value VARCHAR(256),
            data BLOB,
            PRIMARY KEY (id)
        );
        CREATE TABLE person (
            name VARCHAR(1024) NOT NULL,
            sortname VARCHAR,
            numberofnames INTEGER,
            PRIMARY KEY (name)
        );
        CREATE TABLE find (
            id INTEGER NOT NULL,
            item_id VARCHAR(32),
            "key" VARCHAR(200),
            value TEXT,
            findvalue TEXT,
            sortvalue TEXT,
            PRIMARY KEY (id),
            FOREIGN KEY(item_id) REFERENCES item (id)
        );
        CREATE INDEX ix_find_key ON find ("key");
        CREATE TABLE list (
            id INTEGER NOT NULL,
            name VARCHAR,
            index_ INTEGER,
            type VARCHAR(64),
            "query" BLOB,
            user_id VARCHAR(43),
            PRIMARY KEY (id),
            FOREIGN KEY(user_id) REFERENCES user (id)
        );
        CREATE TABLE useritem (
            user_id VARCHAR(43),
            item_id VARCHAR(32),
            FOREIGN KEY(item_id) REFERENCES item (id),
            FOREIGN KEY(user_id) REFERENCES user (id)
        );
        CREATE TABLE sort (
            item_id VARCHAR(32) NOT NULL,
            title VARCHAR(1000),
            author VARCHAR(1000),
            publisher VARCHAR(1000),
            place VARCHAR(1000),
            date VARCHAR(1000),
            language VARCHAR(1000),
            pages BIGINT,
            extension VARCHAR(1000),
            size BIGINT,
            created DATETIME,
            added DATETIME,
            modified DATETIME,
            accessed DATETIME,
            timesaccessed BIGINT,
            mediastate VARCHAR(1000),
            transferadded DATETIME,
            transferprogress FLOAT,
            id VARCHAR(1000),
            isbn VARCHAR(1000),
            random BIGINT,
            PRIMARY KEY (item_id),
            FOREIGN KEY(item_id) REFERENCES item (id)
        );
        CREATE INDEX ix_sort_accessed ON sort (accessed);
        CREATE INDEX ix_sort_added ON sort (added);
        CREATE INDEX ix_sort_author ON sort (author);
        CREATE INDEX ix_sort_created ON sort (created);
        CREATE INDEX ix_sort_date ON sort (date);
        CREATE INDEX ix_sort_extension ON sort (extension);
        CREATE INDEX ix_sort_id ON sort (id);
        CREATE INDEX ix_sort_isbn ON sort (isbn);
        CREATE INDEX ix_sort_language ON sort (language);
        CREATE INDEX ix_sort_mediastate ON sort (mediastate);
        CREATE INDEX ix_sort_modified ON sort (modified);
        CREATE INDEX ix_sort_pages ON sort (pages);
        CREATE INDEX ix_sort_place ON sort (place);
        CREATE INDEX ix_sort_publisher ON sort (publisher);
        CREATE INDEX ix_sort_random ON sort (random);
        CREATE INDEX ix_sort_size ON sort (size);
        CREATE INDEX ix_sort_timesaccessed ON sort (timesaccessed);
        CREATE INDEX ix_sort_title ON sort (title);
        CREATE INDEX ix_sort_transferadded ON sort (transferadded);
        CREATE INDEX ix_sort_transferprogress ON sort (transferprogress);
        CREATE TABLE file (
            created DATETIME,
            modified DATETIME,
            sha1 VARCHAR(32) NOT NULL,
            path VARCHAR(2048),
            info BLOB,
            item_id VARCHAR(32),
            PRIMARY KEY (sha1),
            FOREIGN KEY(item_id) REFERENCES item (id)
        );
        CREATE TABLE listitem (
            list_id INTEGER,
            item_id VARCHAR(32),
            FOREIGN KEY(item_id) REFERENCES item (id),
            FOREIGN KEY(list_id) REFERENCES list (id)
        );
        PRAGMA journal_mode=WAL
        '''
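        # run each schema statement separately, one statement per run_sql() call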
        for statement in sql.split(';'):
            run_sql(statement)
        create_default_lists()
        settings.server['db_version'] = settings.DB_VERSION
    update_database()

def upgrade_db(old, new=None):
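    '''
    Apply one-off migrations when moving from release `old` to release `new`.
    Release ids look like '20160111-603-90648f9' and are compared as strings.
    '''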

    def run_after(v):
        # a migration tagged with release id v still has to run when
        # upgrading from a release at or before v to a release after it
        return old <= v and new > v

    if new:
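        # 20140525-92: refresh user names and drop cached media state
        # ('mediastate', 'coverRatio', 'previewRatio') from file info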
        if old <= '20140525-92-eac91e7' and new > '20140525-92-eac91e7':
            with db.session():
                import user.models
                for u in user.models.User.query:
                    u.update_name()
                    u.save()
                import item.models
                for f in item.models.File.query:
                    changed = False
                    for key in ('mediastate', 'coverRatio', 'previewRatio'):
                        if key in f.info:
                            del f.info[key]
                            changed = True
                    if changed:
                        f.save()
        if old <= '20140526-118-d451eb3' and new > '20140526-118-d451eb3':
            with db.session():
                import item.models
                item.models.Find.query.filter_by(key='list').delete()
        if old <= '20160109-573-094097b' and new > '20160109-573-094097b':
            import sys
            if sys.platform.startswith('linux'):
                import integration
                integration.install_xdg()
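        # 20160110-581: for epub files without an extractable preview, drop
        # stale preview icons and cached cover/preview ratios from item info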
        if old <= '20160110-581-e08780a' and new > '20160110-581-e08780a':
            with db.session():
                import item.models
                from item.icons import icons
                for f in item.models.File.query:
                    if f.info['extension'] == 'epub':
                        i = f.item
                        key = 'cover:%s' % i.id
                        cover = icons[key]
                        key = 'preview:%s' % i.id
                        preview = i.extract_preview()
                        update_item = False
                        if not preview:
                            del icons[key]
                            for resolution in (128, 256, 512):
                                del icons['%s:%s' % (key, resolution)]
                            if 'previewRatio' in i.info:
                                del i.info['previewRatio']
                                update_item = True
                        if not cover and 'coverRatio' in i.info:
                            del i.info['coverRatio']
                            update_item = True
                        if update_item:
                            i.save()
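        # 20160110-583: strip HTML tags and entities from item descriptions
        # and re-index items whose find values still contain encoded entities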
        if old <= '20160110-583-59a3709' and new > '20160110-583-59a3709':
            with db.session() as session:
                import ox
                from item.models import Item, Find
                from meta.utils import decode_html_data

                def cleanup_description(data):
                    if 'description' in data:
                        description = data['description']
                        description = description.replace('<br>', '\n').replace('</p><p>', '\n\n')
                        data['description'] = ox.strip_tags(description).strip()

                for i in Item.query:
                    changed = False
                    meta = decode_html_data(i.meta.copy())
                    cleanup_description(meta)
                    if meta != i.meta:
                        #print(i, i.meta, '\n', meta, '\n\n')
                        i.meta = meta
                        changed = True
                    info = decode_html_data(i.info.copy())
                    cleanup_description(info)
                    if info != i.info:
                        #print(i, i.info, '\n', info, '\n\n')
                        i.info = info
                        changed = True
                    if changed:
                        i.update_sort()
                        i.update_find()
                        i.save()
                for tag in ('&apos;', '&quot;', '&#'):
                    items = set([f.item_id for f in Find.query.filter(Find.value.ilike('%' + tag + '%'))])
                    if items:
                        for i in Item.query.filter(Item.id.in_(items)):
                            #print(tag, i)
                            i.update_sort()
                            i.update_find()
                session.commit()
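        # 20160111-603: rename the 'classification' filter to 'categories',
        # create the user_metadata table if needed and normalize isbn
        # metadata to a single ISBN-13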
        if old <= '20160111-603-90648f9' and new > '20160111-603-90648f9':
            for f in settings.ui['filters']:
                if f['id'] == 'classification':
                    f['id'] = 'categories'
            settings.ui._save()
            if not db.table_exists('user_metadata'):
                run_sql('''CREATE TABLE user_metadata (
                    created DATETIME,
                    modified DATETIME,
                    id INTEGER NOT NULL,
                    item_id VARCHAR(32),
                    user_id VARCHAR(43),
                    data_hash VARCHAR(40),
                    data BLOB,
                    PRIMARY KEY (id),
                    FOREIGN KEY(user_id) REFERENCES user (id)
                )''')
                run_sql('CREATE UNIQUE INDEX IF NOT EXISTS user_metadata_index ON user_metadata(id, user_id)')
                run_sql('CREATE INDEX ix_user_metadata_data_hash ON user_metadata (data_hash)')
            from meta.utils import to_isbn13
            from item.models import Item
            from user.models import Metadata
            with db.session() as session:
                for i in Item.query:
                    update = False
                    if 'primaryid' in i.meta:
                        del i.meta['primaryid']
                        update = True
                    if 'primaryid' in i.info:
                        del i.info['primaryid']
                        update = True
                    for key in i.meta_keys:
                        if key not in i.meta and key in i.info:
                            i.meta[key] = i.info[key]
                            update = True
                    if 'isbn' in i.meta and isinstance(i.meta['isbn'], list):
                        isbns = [to_isbn13(isbn) for isbn in i.meta['isbn']]
                        isbns = [isbn for isbn in isbns if isbn]
                        if isbns:
                            i.meta['isbn'] = isbns[0]
                            if 'isbn' in i.info:
                                i.info['isbn'] = i.meta['isbn']
                        else:
                            del i.meta['isbn']
                            if 'isbn' in i.info:
                                del i.info['isbn']
                        update = True
                    if 'isbn' in i.meta and not i.meta['isbn']:
                        del i.meta['isbn']
                        update = True
                    if update:
                        session.add(i)
                        for u in i.users:
                            if u.id != settings.USER_ID:
                                Metadata.get_or_create(u.id, i.id, i.meta, False)
                session.commit()
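        # 20160111-617: extract the table of contents for epub files whose
        # items do not have one yet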
        if run_after('20160111-617-206e39c'):
            from item.models import File
            import media
            with db.session() as session:
                for f in File.query:
                    if f.info.get('extension') == 'epub':
                        if 'tableofcontents' not in f.item.meta:
                            f.info = media.metadata(f.fullpath())
                            if 'tableofcontents' in f.info:
                                f.item.meta['tableofcontents'] = f.info['tableofcontents']
                                f.item.update()
                                session.add(f.item)
                            session.add(f)
                session.commit()
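        # 20160112-651: same as above, for pdf files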
        if run_after('20160112-651-de984a3'):
            from item.models import File
            import media
            with db.session() as session:
                for f in File.query:
                    if f.info.get('extension') == 'pdf':
                        if 'tableofcontents' not in f.item.meta:
                            f.info = media.metadata(f.fullpath())
                            if 'tableofcontents' in f.info:
                                f.item.meta['tableofcontents'] = f.info['tableofcontents']
                                f.item.update()
                                session.add(f.item)
                            session.add(f)
                session.commit()
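    # 20151118-346: remove the obsolete node.ssl.key and, if an ed25519
    # node.key exists, move database rows from the user id derived from that
    # key to the current settings.USER_ID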
    if old <= '20151118-346-7e86e68':
        old_key = os.path.join(settings.data_path, 'node.ssl.key')
        if os.path.exists(old_key):
            os.unlink(old_key)
        key_path = os.path.join(settings.data_path, 'node.key')
        if os.path.exists(key_path):
            import ed25519
            with open(key_path, 'rb') as fd:
                sk = ed25519.SigningKey(fd.read())
            vk = sk.get_verifying_key()
            OLD_USER_ID = vk.to_ascii(encoding='base64').decode()
            statements = [
                "UPDATE user SET id = '{nid}' WHERE id = '{oid}'",
                "UPDATE list SET user_id = '{nid}' WHERE user_id = '{oid}'",
                "UPDATE useritem SET user_id = '{nid}' WHERE user_id = '{oid}'",
                "UPDATE changelog SET user_id = '{nid}' WHERE user_id = '{oid}'",
            ]
            for sql in statements:
                run_sql(sql.format(oid=OLD_USER_ID, nid=settings.USER_ID))
    if old <= '20151201-384-03c2439':
        with db.session():
            import item.models
            for i in item.models.Item.query:
                for f in i.files.all():
                    f.move()
    if old <= '20160103-423-05ca6c9':
        with db.session():
            import item.models
            for i in item.models.Item.query:
                if 'id' in i.meta:
                    del i.meta['id']
                    i.save()
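    # 20160106-497: make sure every item a user has is also part of that
    # user's library list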
    if old <= '20160106-497-c86ba8a':
        with db.session() as session:
            u = User.get(settings.USER_ID)
            if u:
                l = u.library
                for i in u.items.all():
                    if i not in l.items:
                        l.items.append(i)
                        session.add(l)
            for u in User.query.filter_by(peered=True):
                l = u.library
                for i in u.items.all():
                    if i not in l.items:
                        l.items.append(i)
                        session.add(l)
                l.items_count()
            session.commit()
    if old <= '20160107-508-a0c1970':
        with db.session() as session:
            for l in List.query.filter_by(name=' [2]'):
                if not len(l.items):
                    l.remove()
    if old <= '20160107-509-e0857fc':
        add_useritem_index()
        add_listitem_index()
    if old <= '20160111-603-90648f9' and not new:
        if not db.table_exists('user_metadata'):
            run_sql('''CREATE TABLE user_metadata (
                created DATETIME,
                modified DATETIME,
                id INTEGER NOT NULL,
                item_id VARCHAR(32),
                user_id VARCHAR(43),
                data_hash VARCHAR(40),
                data BLOB,
                PRIMARY KEY (id),
                FOREIGN KEY(user_id) REFERENCES user (id)
            )''')
            run_sql('CREATE UNIQUE INDEX IF NOT EXISTS user_metadata_index ON user_metadata(id, user_id)')
            run_sql('CREATE INDEX ix_user_metadata_data_hash ON user_metadata (data_hash)')

def create_default_lists(user_id=None):
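    '''
    Create the lists defined in settings.config['lists'] for the given user
    (the local user by default) if they do not exist yet.
    '''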
    with db.session():
        user_id = user_id or settings.USER_ID
        user = User.get_or_create(user_id)
        user.update_name()
        for list in settings.config['lists']:
            l = List.get(user_id, list['title'])
            if not l:
                l = List.create(user_id, list['title'], list.get('query'))

def update_database():
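    '''
    Compare the current database layout with the SQLAlchemy models and the
    expected indexes, create whatever is missing and drop indexes that are
    no longer defined here (sort table indexes are left alone).
    '''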
    import sqlalchemy as sa
    from sqlalchemy.schema import CreateTable

    import changelog
    import item.models
    import item.person
    import user.models
    tables = [
        changelog.Changelog,
        item.models.File,
        item.models.Find,
        item.models.Item,
        item.models.Sort,
        item.models.user_items,
        item.person.Person,
        user.models.List,
        user.models.list_items,
        user.models.Metadata,
        user.models.User,
    ]
    indexes = [
        'CREATE INDEX ix_find_findvalue ON find (findvalue)',
        'CREATE INDEX ix_find_key ON find ("key")',
        'CREATE INDEX ix_useritem_user ON useritem ("user_id")',
        'CREATE INDEX ix_user_metadata_data_hash ON user_metadata (data_hash)',
        'CREATE UNIQUE INDEX listitem_index on listitem(list_id, item_id)',
        'CREATE UNIQUE INDEX useritem_index on useritem(user_id, item_id)',
        'CREATE UNIQUE INDEX user_metadata_index ON user_metadata(item_id, user_id)',
    ]
    layout = db.get_layout()
    sql = []
    for t in tables:
        if isinstance(t, sa.Table):
            table = t
        else:
            table = t.__table__
        name = table.name
        if name not in layout['tables']:
            create_table = str(CreateTable(table).compile(db.engine))
            sql.append(create_table)
    index_names = set()
    for index in indexes:
        name = index.split('INDEX ')[1].split()[0]
        if name not in layout['indexes']:
            if name == 'useritem_index':
                add_useritem_index()
            elif name == 'listitem_index':
                add_listitem_index()
            else:
                sql.append(index)
        index_names.add(name)
    for index in set(name for name in layout['indexes'] if not name.startswith('ix_sort_')) - index_names:
        sql.append('DROP INDEX ' + index)
    if sql:
        with db.session() as s:
            for q in sql:
                s.connection().execute(q)
            s.commit()

    import item.models
    item.models.update_sort_table()

def add_listitem_index():
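    '''
    Remove duplicate (list_id, item_id) rows from listitem, then add a unique
    index so duplicates cannot be created again.
    '''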
    with db.session() as session:
        sql = "SELECT COUNT(*) AS c,list_id,item_id FROM listitem GROUP BY list_id, item_id HAVING c>1"
        doubles = [r for r in session.execute(sql)]
        for r in doubles:
            params = {'list_id': r[1], 'item_id': r[2]}
            sql = "DELETE FROM listitem WHERE list_id = :list_id AND item_id = :item_id"
            session.execute(sql, params)
            sql = "INSERT INTO listitem (list_id, item_id) VALUES (:list_id, :item_id)"
            session.execute(sql, params)
            session.commit()
        sql = 'CREATE UNIQUE INDEX IF NOT EXISTS listitem_index on listitem(list_id,item_id)'
        session.execute(sql)
        session.commit()

def add_useritem_index():
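    '''
    Same cleanup as add_listitem_index, for (user_id, item_id) rows in
    useritem.
    '''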
    with db.session() as session:
        sql = "SELECT COUNT(*) AS c,user_id,item_id FROM useritem GROUP BY user_id, item_id HAVING c>1"
        doubles = [r for r in session.execute(sql)]
        for r in doubles:
            params = {'user_id': r[1], 'item_id': r[2]}
            sql = "DELETE FROM useritem WHERE user_id = :user_id AND item_id = :item_id"
            session.execute(sql, params)
            sql = "INSERT INTO useritem (user_id, item_id) VALUES (:user_id, :item_id)"
            session.execute(sql, params)
            session.commit()
        sql = 'CREATE UNIQUE INDEX IF NOT EXISTS useritem_index on useritem(user_id,item_id)'
        session.execute(sql)
        session.commit()