implement sharemetadata, fixes 165

parent 8c5e39448b
commit 7a42473919

11 changed files with 162 additions and 43 deletions
```diff
@@ -160,25 +160,17 @@ class Changelog(db.Model):
             i.modified = ts2datetime(timestamp)
         if user not in i.users:
             i.add_user(user)
-            i.info['_from'] = user.id
         i.update()
         return True
 
     def action_edititem(self, user, timestamp, itemid, meta):
         from user.models import Metadata
         m = Metadata.get_or_create(user.id, itemid)
-        m.edit(meta)
-        #FIXME: "sometimes" update item too...
+        m.edit(meta, modified=ts2datetime(timestamp))
         from item.models import Item
         i = Item.get(itemid)
         if i:
-            update = False
-            if len(i.users) == 1 and user in i.users:
-                update = True
-            if i.info.get('_from') == user.id:
-                update = True
-            if update:
-                i.edit(meta, ts2datetime(timestamp))
+            i.sync_metadata()
         return True
 
     def action_removeitem(self, user, timestamp, itemid):
```
```diff
@@ -140,7 +140,7 @@ def edit(data):
     for id in ids:
         item = models.Item.get(id)
         if item and item.json()['mediastate'] == 'available':
-            item.edit(data, reset_from=True)
+            item.edit(data)
             response = item.json()
             edited.append(id)
         else:
```
```diff
@@ -3,6 +3,7 @@
 from datetime import datetime
 import base64
 import hashlib
 import json
 import os
 import re
 import shutil
```
```diff
@@ -10,6 +11,7 @@ import stat
 import unicodedata
 
 import ox
+from sqlalchemy.orm import load_only
 from sqlalchemy.schema import CreateTable
 import sqlalchemy as sa
 
```
```diff
@@ -136,7 +138,7 @@ class Item(db.Model):
             for k in list(j):
                 if k not in keys:
                     del j[k]
-        for key in [k['id'] for k in settings.config['itemKeys'] if isinstance(k['type'], list)]:
+        for key in self.array_keys:
             if key in j and not isinstance(j[key], list):
                 j[key] = [j[key]]
         return j
```
```diff
@@ -272,11 +274,12 @@ class Item(db.Model):
         'place',
         'publisher',
         'series',
+        'sharemetadata',
         'tableofcontents',
         'title'
     )
 
-    def update_meta(self, data, modified=None, reset_from=False):
+    def update_metadata(self, data, modified=None):
         update = False
         record = {}
         for key in self.meta_keys:
```
```diff
@@ -289,9 +292,6 @@ class Item(db.Model):
             if key not in self.meta_keys:
                 del self.meta[key]
                 update = True
-        if reset_from and '_from' in self.info:
-            del self.info['_from']
-            update = True
         if update:
             self.update(modified)
             self.save()
```
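For reference, `update_metadata()` (renamed from `update_meta`, with the `reset_from`/`_from` bookkeeping dropped) keeps only keys listed in `meta_keys`. A compact, self-contained illustration of that pruning with a plain dict; `meta_keys` below is a shortened stand-in for the tuple defined on `Item`, and `prune_meta` is an illustrative name, not part of the codebase:

```python
# Illustration of the key pruning in update_metadata(): anything not in
# meta_keys is dropped from the stored metadata.
meta_keys = ('author', 'publisher', 'sharemetadata', 'title')  # shortened stand-in

def prune_meta(meta):
    for key in list(meta):
        if key not in meta_keys:
            del meta[key]
    return meta

print(prune_meta({'title': 'Example', 'mediastate': 'available', 'sharemetadata': True}))
# -> {'title': 'Example', 'sharemetadata': True}
```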
```diff
@@ -300,13 +300,58 @@ class Item(db.Model):
         user = state.user()
         if record and user in self.users:
             Changelog.record_ts(user, modified, 'edititem', self.id, record)
+        logger.debug('edititem got sharemetadata? %s', record)
+        if 'sharemetadata' in record and not record['sharemetadata']:
+            logger.debug('sharemetadata no longer set, sync up')
+            self.sync_metadata()
 
-    def edit(self, data, modified=None, reset_from=False):
+    def edit(self, data, modified=None):
         Scrape.query.filter_by(item_id=self.id).delete()
-        self.update_meta(data, modified, reset_from=reset_from)
+        self.update_metadata(data, modified)
         for f in self.files.all():
             f.move()
 
+    def get_hash(self):
+        return utils.get_meta_hash(self.meta.copy())
+
+    def sync_metadata(self):
+        from user.models import Metadata
+        if self.meta.get('sharemetadata'):
+            return
+        peers = [u for u in self.users if u.id != settings.USER_ID]
+        peers.sort(key=lambda u: ox.sort_string(str(u.info.get('index', ''))
+                                                + 'Z' + (u.info.get('name') or '')))
+        sync_from = None
+        first_peer = None
+        # get first peer with sharemetadata set
+        for u in peers:
+            m = Metadata.get(u.id, self.id)
+            if m:
+                if m.data.get('sharemetadata'):
+                    sync_from = m
+                    break
+                if not first_peer:
+                    first_peer = m
+        # or fall back to first peer that has this item
+        # in case it's not available locally
+        if not sync_from and self.info['mediastate'] != 'available' and first_peer:
+            logger.debug('syncing from first peer that has item %s', first_peer)
+            sync_from = first_peer
+        if sync_from:
+            if self.get_hash() != sync_from.data_hash:
+                logger.debug('update %s with metadata from %s', self, sync_from.user_id)
+                record = {}
+                for key in sync_from.data:
+                    if key != 'sharemetadata' and self.meta.get(key) != sync_from.data[key]:
+                        record[key] = self.meta[key] = sync_from.data[key]
+                for key in set(self.meta) - set(sync_from.data):
+                    record[key] = self.meta[key] = [] if key in self.array_keys else ''
+                self.update(sync_from.modified)
+                self.save()
+                user = state.user()
+                if record and user in self.users:
+                    Changelog.record_ts(user, self.modified, 'edititem', self.id, record)
+
     def extract_preview(self):
         path = self.get_path()
         if path:
```
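For reference, a self-contained sketch of the peer-selection order that `Item.sync_metadata()` above implements: plain dicts stand in for the User and Metadata models, built-in `sorted()` stands in for `ox.sort_string`, and the names `pick_sync_source` and `media_available` are illustrative, not part of the codebase.

```python
# Rough sketch of the peer-selection order used by Item.sync_metadata():
# take the first peer (in index/name order) that has sharemetadata set,
# otherwise fall back to the first peer with metadata for the item when
# the item is not available locally.
def pick_sync_source(peers, metadata_by_user, media_available):
    # peers: list of dicts with 'id', 'index', 'name'
    # metadata_by_user: user id -> metadata dict (may include 'sharemetadata')
    peers = sorted(peers, key=lambda u: str(u.get('index', '')) + 'Z' + (u.get('name') or ''))
    sync_from = None
    first_peer = None
    for u in peers:
        m = metadata_by_user.get(u['id'])
        if m:
            if m.get('sharemetadata'):
                sync_from = m          # first peer that explicitly shares metadata wins
                break
            if not first_peer:
                first_peer = m
    if not sync_from and not media_available and first_peer:
        sync_from = first_peer         # fall back if the item is not available locally
    return sync_from

peers = [
    {'id': 'b', 'index': '2', 'name': 'beta'},
    {'id': 'a', 'index': '1', 'name': 'alpha'},
]
meta = {'a': {'title': 'One'}, 'b': {'title': 'Two', 'sharemetadata': True}}
print(pick_sync_source(peers, meta, media_available=True))
# -> {'title': 'Two', 'sharemetadata': True}
```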
```diff
@@ -493,6 +538,7 @@ for key in config['itemKeys']:
 Item.id_keys = ['isbn', 'lccn', 'olid', 'oclc', 'asin']
 Item.item_keys = config['itemKeys']
 Item.filter_keys = [k['id'] for k in config['itemKeys'] if k.get('filter')]
+Item.array_keys = [k['id'] for k in config['itemKeys'] if isinstance(k['type'], list)]
 
 class Find(db.Model):
     __tablename__ = 'find'
```
```diff
@@ -814,3 +860,25 @@ def update_sort_table():
         for q in sql:
             s.connection().execute(q)
         s.commit()
+
+
+def sync_metadata(ids=None):
+    #logger.debug('sync_metadata(%s)', len(ids) if ids else ids)
+    step = 1000
+    delay = 10
+    with db.session():
+        if not ids:
+            ids = [i.id for i in Item.query.options(load_only('id'))]
+        if len(ids) > step:
+            later = ids[step:]
+            ids = ids[:step]
+        else:
+            later = []
+        if ids:
+            for i in Item.query.filter(Item.id.in_(ids)):
+                i.sync_metadata()
+        if later:
+            if state.main and state.tasks:
+                state.main.call_later(delay, lambda: state.tasks.queue('syncmetadata', [later]))
+        #else:
+        #    logger.debug('sync_metadata done')
```
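The module-level `sync_metadata(ids)` above caps each run at `step` items and re-queues the remainder through the task queue after a short delay. A generic, self-contained sketch of that batch-and-requeue pattern, with `schedule` and `process_in_batches` as stand-ins for `state.main.call_later` plus `state.tasks.queue` (not the actual OML API):

```python
# Batch-and-requeue: handle at most `step` ids now, hand the rest back to
# the scheduler so the task thread is never blocked for long.
import threading

def process_in_batches(ids, handle, schedule, step=1000, delay=10):
    batch, later = ids[:step], ids[step:]
    for id in batch:
        handle(id)
    if later:
        # re-queue the remainder instead of processing everything at once
        schedule(delay, lambda: process_in_batches(later, handle, schedule, step, delay))

def schedule(delay, fn):
    threading.Timer(delay, fn).start()

process_in_batches(list(range(2500)), handle=lambda id: None,
                   schedule=schedule, step=1000, delay=0.01)
```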
```diff
@@ -101,4 +101,4 @@ USER_AGENT = 'OpenMediaLibrary/%s' % VERSION
 
 DEBUG_HTTP = server.get('debug_http', False)
 
-DB_VERSION = 4
+DB_VERSION = 5
```
```diff
@@ -477,7 +477,7 @@ def update_database():
         'CREATE INDEX ix_user_metadata_data_hash ON user_metadata (data_hash)',
         'CREATE UNIQUE INDEX listitem_index on listitem(list_id, item_id)',
         'CREATE UNIQUE INDEX useritem_index on useritem(user_id, item_id)',
-        'CREATE UNIQUE INDEX user_metadata_index ON user_metadata(id, user_id)',
+        'CREATE UNIQUE INDEX user_metadata_index ON user_metadata(item_id, user_id)',
     ]
     layout = db.get_layout()
     sql = []
```
```diff
@@ -22,6 +22,7 @@ class Tasks(Thread):
 
     def run(self):
         import item.scan
+        from item.models import sync_metadata
         from user.models import export_list, update_user_peering
         while self.connected:
             m = self.q.get()
```
```diff
@@ -40,6 +41,10 @@ class Tasks(Thread):
                     item.scan.import_folder()
                 elif action == 'peering':
                     update_user_peering(*data)
+                elif action == 'syncmetadata':
+                    if not data:
+                        data = []
+                    sync_metadata(*data)
                 else:
                     trigger_event('error', {'error': 'unknown action'})
             except:
```
```diff
@@ -303,6 +303,8 @@ class Update(Thread):
             db_version = migrate_3()
         if db_version < 4:
             db_version = migrate_4()
+        if db_version < 5:
+            db_version = migrate_5()
         settings.server['db_version'] = settings.DB_VERSION
 
     def run(self):
```
```diff
@@ -383,3 +385,29 @@ def migrate_4():
             session.add(i)
         session.commit()
     return 4
+
+def migrate_5():
+    db.run_sql([
+        'DROP INDEX IF EXISTS user_metadata_index',
+        'CREATE UNIQUE INDEX user_metadata_index ON user_metadata(item_id, user_id)'
+    ])
+    with db.session() as session:
+        import user.models
+        for m in user.models.Metadata.query:
+            data_hash = m.get_hash()
+            if m.data_hash != data_hash:
+                m.data_hash = data_hash
+                session.add(m)
+        session.commit()
+        import item.models
+        for i in item.models.Item.query:
+            update = False
+            if '_from' in i.info:
+                del i.info['_from']
+                update = True
+            if update:
+                session.add(i)
+        session.commit()
+        for i in item.models.Item.query:
+            i.sync_metadata()
+    return 5
```
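`migrate_5()` rebuilds `user_metadata_index` so uniqueness is enforced per `(item_id, user_id)` rather than `(id, user_id)`. A minimal sqlite3 illustration of what the new index guarantees, against a simplified stand-in table (not the real `user_metadata` schema):

```python
# One metadata row per item per user: a second row for the same
# (item_id, user_id) pair is rejected by the unique index.
import sqlite3

con = sqlite3.connect(':memory:')
con.execute('CREATE TABLE user_metadata (id INTEGER PRIMARY KEY, item_id TEXT, user_id TEXT, data TEXT)')
con.execute('CREATE UNIQUE INDEX user_metadata_index ON user_metadata(item_id, user_id)')
con.execute("INSERT INTO user_metadata (item_id, user_id, data) VALUES ('item1', 'userA', '{}')")
try:
    con.execute("INSERT INTO user_metadata (item_id, user_id, data) VALUES ('item1', 'userA', '{}')")
except sqlite3.IntegrityError as e:
    print('duplicate (item_id, user_id) rejected:', e)
```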
```diff
@@ -314,6 +314,8 @@ def sortUsers(data):
         n += 1
         state.db.session.add(u)
     state.db.session.commit()
+    if state.tasks:
+        state.tasks.queue('syncmetadata')
     return {}
 actions.register(sortUsers, cache=False)
 
```
```diff
@@ -380,7 +380,7 @@ class Metadata(db.Model):
 
     @classmethod
     def get(cls, user_id, item_id):
-        return cls.query.filter_by(user_id=user_id,item_id=item_id).first()
+        return cls.query.filter_by(item_id=item_id, user_id=user_id).first()
 
     @classmethod
     def get_or_create(cls, user_id, item_id, data=None, commit=True):
```
```diff
@@ -392,19 +392,24 @@ class Metadata(db.Model):
                 m.data = data
             else:
                 m.data = {}
-            m.save(commit)
+            m.save(commit=commit)
         elif data:
-            m.edit(data, commit)
+            m.edit(data, commit=commit)
         return m
 
-    def save(self, commit=True):
-        self.modified = datetime.utcnow()
-        self.data_hash = hashlib.sha1(json.dumps(self.data, ensure_ascii=False, sort_keys=True).encode()).hexdigest()
+    def get_hash(self):
+        return utils.get_meta_hash(self.data.copy())
+
+    def save(self, commit=True, modified=None):
+        if modified is None:
+            self.modified = datetime.utcnow()
+        else:
+            self.modified = modified
         state.db.session.add(self)
         if commit:
             state.db.session.commit()
 
-    def edit(self, data, commit=True):
+    def edit(self, data, commit=True, modified=True):
         changes = {}
         if 'isbn' in data and isinstance(data['isbn'], list):
             isbns = [utils.to_isbn13(isbn) for isbn in data['isbn']]
```
```diff
@@ -420,7 +425,8 @@ class Metadata(db.Model):
                 self.data[key] = data[key]
                 changes[key] = data[key]
         if changes:
-            self.save(commit)
+            self.data_hash = self.get_hash()
+            self.save(commit=commit, modified=modified)
         return changes
 
     def delete(self):
```
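With `Metadata.save()` now accepting a `modified` timestamp and `edit()` passing it through, a remote edit recorded in the changelog keeps the originating timestamp instead of "now". A minimal sketch of that propagation using a plain class (`MetadataSketch` is illustrative, not the real SQLAlchemy model, and the hash handling is left out here):

```python
# Remote edits keep the changelog timestamp; local edits fall back to utcnow().
from datetime import datetime

class MetadataSketch:
    def __init__(self):
        self.data = {}
        self.modified = None

    def save(self, modified=None):
        self.modified = modified if modified is not None else datetime.utcnow()

    def edit(self, data, modified=None):
        changes = {k: v for k, v in data.items() if self.data.get(k) != v}
        if changes:
            self.data.update(changes)
            self.save(modified=modified)
        return changes

m = MetadataSketch()
m.edit({'title': 'Example'}, modified=datetime(2015, 1, 1))
print(m.modified)  # 2015-01-01 00:00:00 -- the passed-in timestamp, not utcnow()
```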
oml/utils.py
```diff
@@ -2,18 +2,19 @@
 # vi:si:et:sw=4:sts=4:ts=4
 
 
-import os
-import sys
-from PIL import Image
-from io import StringIO, BytesIO
-import re
-import stdnum.isbn
-import socket
-import time
-import hashlib
 from datetime import datetime
-import subprocess
+from io import StringIO, BytesIO
+from PIL import Image
+import base64
+import hashlib
+import json
+import os
+import re
+import socket
+import stdnum.isbn
+import subprocess
+import sys
+import time
 
 import ox
 from OpenSSL.crypto import (
```
```diff
@@ -391,3 +392,13 @@ def get_ratio(data):
             return img.size[0]/img.size[1]
         except:
             return 1
+
+
+def get_meta_hash(data):
+    if 'sharemetadata' in data:
+        del data['sharemetadata']
+    for key in list(data):
+        if not data[key]:
+            del data[key]
+    return hashlib.sha1(json.dumps(data,
+        ensure_ascii=False, sort_keys=True).encode()).hexdigest()
```
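`get_meta_hash()` is the comparison both `Item.get_hash()` and `Metadata.get_hash()` now rely on: the `sharemetadata` flag and empty values are dropped before hashing, so two records that differ only in those respects hash identically. A standalone copy with an illustrative usage (callers in the diff pass a `.copy()` because the function mutates its argument):

```python
import hashlib
import json

def get_meta_hash(data):
    # drop the flag itself and any empty values before hashing
    if 'sharemetadata' in data:
        del data['sharemetadata']
    for key in list(data):
        if not data[key]:
            del data[key]
    return hashlib.sha1(json.dumps(data,
        ensure_ascii=False, sort_keys=True).encode()).hexdigest()

a = get_meta_hash({'title': 'Example', 'author': ['A. Author'], 'sharemetadata': True})
b = get_meta_hash({'title': 'Example', 'author': ['A. Author'], 'description': ''})
print(a == b)  # True: only non-empty metadata values contribute to the hash
```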
```diff
@@ -295,17 +295,24 @@ oml.ui.infoView = function(externalData, isMixed) {
         return $element;
     }
 
-    function renderShareButton() {
+    function renderShareButton(data) {
         return Ox.Checkbox({
             style: 'squared',
             title: Ox._('Share Metadata'),
-            value: false,
+            value: data.sharemetadata,
             width: 128,
         })
         .css({marginTop: '8px'})
         .bindEvent({
-            change: function(data) {
-                // ...
+            change: function(changeData) {
+                oml.api.edit({
+                    id: data.id,
+                    sharemetadata: changeData.value
+                }, function(result) {
+                    if (!changeData.value) {
+                        that.updateElement(result.data, [$icon, $info, $data]);
+                    }
+                });
             }
         });
     }
```
```diff
@@ -764,7 +771,7 @@ oml.ui.infoView = function(externalData, isMixed) {
         });
 
         if (data.mediastate == 'available') {
-            renderShareButton().appendTo($data);
+            renderShareButton(data).appendTo($data);
         }
 
         $('<div>').css({height: '16px'}).appendTo($data);
```