Compare commits
No commits in common. "1c8a5c3764d9637b877f498ac248dacc4bf59b44" and "3d882425e467c4c57d2e4d42f8237166303f32a7" have entirely different histories.
1c8a5c3764
...
3d882425e4
10 changed files with 14 additions and 80 deletions

@@ -241,12 +241,6 @@
             "format": {"type": "boolean", "args": []},
             "sort": true
         },
-        {
-            "id": "fulltext",
-            "title": "Full Text",
-            "find": true,
-            "type": "text"
-        },
         {
             "id": "random",
             "title": "Random",

@@ -1,28 +0,0 @@
-import logging
-import os
-import subprocess
-import sys
-
-
-logger = logging.getLogger(__name__)
-
-def find_fulltext_macos(query):
-    import settings
-    from item.models import File
-    prefix = os.path.join(os.path.expanduser(settings.preferences['libraryPath']), 'Books/')
-    cmd = ["mdfind", "-onlyin", prefix, query]
-    books = subprocess.check_output(cmd).decode().strip().split('\n')
-    books = [path[len(prefix):] for path in books]
-    ids = [b[0] for b in File.query.filter(operators.in_op(File.path, books)).values('sha1')]
-    return ids
-
-def find_fulltext(query):
-    ids = []
-    if sys.platform == 'darwin':
-        ids = find_fulltext_macos(query)
-    else:
-        logger.debug('missing fulltext search implementation for %s', sys.platform)
-    return ids
-
-def platform_supported():
-    return sys.platform == 'darwin'
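
The deleted module above queries macOS Spotlight via mdfind and maps the hits back to File rows by library-relative path. A rough, self-contained sketch of just the Spotlight step, for context; the helper name and example path are illustrative and not from the repository:

    # Standalone sketch of the Spotlight lookup used by the deleted module (macOS only).
    # mdfind returns absolute paths, which is why the original code strips the
    # library prefix before matching them against File.path.
    import subprocess

    def spotlight_search(library_prefix, query):
        out = subprocess.check_output(["mdfind", "-onlyin", library_prefix, query])
        paths = [p for p in out.decode().strip().split('\n') if p]
        return [p[len(library_prefix):] for p in paths if p.startswith(library_prefix)]

    # Example call (hypothetical path):
    # spotlight_search('/Users/me/Library/Books/', 'baroque counterpoint')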

@@ -3,7 +3,6 @@
 import json
 import hashlib
 import os
-import unicodedata
 
 from sqlalchemy.orm import load_only
 from sqlalchemy.sql.expression import text

@@ -146,15 +145,13 @@ def edit(data):
         ids = [ids]
     edited = []
     for id in ids:
-        state.db.session.begin(subtransactions=True)
-        item = models.Item.get(id, for_update=True)
+        item = models.Item.get(id)
         if item and item.json().get('mediastate') == 'available':
             item.edit(data)
             response = item.json()
             edited.append(id)
         else:
             logger.info('can only edit available items %s', id)
-        state.db.session.commit()
     if len(ids) > 1:
         response = data
         response['id'] = edited

@@ -214,7 +211,6 @@ def autocomplete(data):
     qs = qs.filter(models.Find.item_id.in_(items))
     if data['value']:
         value = data['value'].lower()
-        value = unicodedata.normalize('NFKD', value)
         qs = qs.filter(models.Find.key.is_(data['key']))
         if op == '=':
             qs = qs.filter(models.Find.findvalue.contains(value))

@@ -23,7 +23,7 @@ from .icons import icons
 from .person import get_sort_name, Person
 from queryparser import Parser
 from settings import config
-from utils import remove_empty_folders, get_ratio, same_path
+from utils import remove_empty_folders, get_ratio
 from websocket import trigger_event
 import db
 import media

@@ -75,13 +75,10 @@ class Item(db.Model):
         self.meta = {}
 
     @classmethod
-    def get(cls, id, for_update=False):
+    def get(cls, id):
         if isinstance(id, list):
             id = base64.b32encode(hashlib.sha1(''.join(id)).digest())
-        qs = cls.query.filter_by(id=id)
-        if for_update:
-            qs = qs.with_for_update()
-        return qs.first()
+        return cls.query.filter_by(id=id).first()
 
     @classmethod
     def get_or_create(cls, id, info=None):
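
For reference, the for_update path removed here is SQLAlchemy's pessimistic row lock. A minimal sketch of what the left-hand version did, assuming this repository's Item model and its Flask-SQLAlchemy-style query property (names taken from the hunk, not verified independently):

    # Sketch only: the equivalent of the removed Item.get(id, for_update=True) branch.
    # with_for_update() emits SELECT ... FOR UPDATE on backends that support row
    # locks; the lock is held until the surrounding session commit in edit().
    def get_item_locked(item_id):
        qs = Item.query.filter_by(id=item_id)
        qs = qs.with_for_update()
        return qs.first()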

@@ -791,7 +788,7 @@ class File(db.Model):
             new_path = os.path.join(first, author, filename)
             if current_path == os.path.join(prefix, new_path):
                 break
-        if not same_path(self.path, new_path):
+        if unicodedata.normalize('NFD', self.path) != unicodedata.normalize('NFD', new_path):
             path = os.path.join(prefix, new_path)
             ox.makedirs(os.path.dirname(path))
             mode = 0o644

@@ -14,7 +14,7 @@ import ox
 from changelog import add_record
 from item.models import File, Item
 from user.models import List
-from utils import remove_empty_folders, same_path
+from utils import remove_empty_folders
 from websocket import trigger_event
 import db
 import media

@@ -50,7 +50,7 @@ def remove_missing(books=None):
         if dirty:
             state.db.session.commit()
             dirty = False
-    nfd_books = {unicodedata.normalize('NFD', path) for path in books}
+    nfd_books = {unicodedata.normalize('NFD', path) for path in nfd_books}
     removed = [
         path for path in db_paths
         if unicodedata.normalize('NFD', path) not in nfd_books

@@ -132,6 +132,8 @@ def collect_books(prefix, status=None):
     logger.debug('found %s books', len(books))
     return books
 
+def nfd_same(f1, f2):
+    return unicodedata.normalize('NFD', f1) == unicodedata.normalize('NFD', f2)
 
 def run_scan():
     logger.debug('run_scan')

@@ -153,7 +155,7 @@ def run_scan():
         if file:
             f1 = file.fullpath()
             f2 = os.path.join(prefix, f)
-            if not same_path(f1, f2) and os.path.exists(f1) and os.path.exists(f2):
+            if not nfd_same(f1, f2) and os.path.exists(f1) and os.path.exists(f2):
                 logger.debug('file exists in multiple locations %s', id)
                 logger.debug('"%s" vs "%s"', f1, f2)
                 os.chmod(f2, stat.S_IWRITE)

@@ -164,7 +166,7 @@ def run_scan():
         if file:
             f1 = file.fullpath()
             f2 = os.path.join(prefix, f)
-            if not same_path(f1, f2) and os.path.exists(f1) and os.path.exists(f2):
+            if not nfd_same(f1, f2) and os.path.exists(f1) and os.path.exists(f2):
                 logger.debug('"%s" vs "%s"', f1, f2)
                 os.chmod(f2, stat.S_IWRITE)
                 os.unlink(f2)

@@ -209,7 +209,7 @@ def info(pdf):
     if settings.server['extract_text']:
         text = extract_text(pdf)
         data['textsize'] = len(text)
-        if 'isbn' not in data:
+        if not 'isbn' in data:
            isbn = extract_isbn(text)
            if isbn:
                data['isbn'] = isbn

@@ -9,7 +9,6 @@ from sqlalchemy.sql.expression import text
 
 import utils
 import settings
-from fulltext import find_fulltext
 
 import logging
 logger = logging.getLogger(__name__)

@@ -26,7 +25,7 @@ def get_operator(op, type='str'):
         '$': operators.endswith_op,
         '&': operators.in_op,
     },
     'int': {
         '==': operators.eq,
         '>': operators.gt,
         '>=': operators.ge,

@@ -66,7 +65,7 @@ class Parser(object):
         ...
         '''
         #logger.debug('parse_condition %s', condition)
-        if 'value' not in condition:
+        if not 'value' in condition:
             return None
         k = condition.get('key', '*')
         if not k:

@@ -123,18 +122,6 @@ class Parser(object):
             in_op = operators.notin_op if exclude else operators.in_op
             q = in_op(self._model.id, ids)
             return q
-        elif k == 'fulltext':
-            ids = find_fulltext(v)
-            if ids:
-                in_op = operators.notin_op if exclude else operators.in_op
-                q = in_op(self._model.id, ids)
-            else:
-                # nothing
-                q = operators.eq(self._model.id, -1)
-            if exclude:
-                q = ~q
-            return q
-
         elif key_type in ("string", "text"):
             if isinstance(v, str):
                 v = unicodedata.normalize('NFKD', v).lower()
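
The branch removed above turns fulltext hits into an id filter with operators.in_op, so that exclude can simply swap in notin_op. A small self-contained illustration of that operator; the model definition and ids below are made up for the example and are not the repository's code:

    # operators.in_op(column, values) builds the same expression as column.in_(values).
    from sqlalchemy import Column, String
    from sqlalchemy.orm import declarative_base
    from sqlalchemy.sql import operators

    Base = declarative_base()

    class Item(Base):
        __tablename__ = 'item'
        id = Column(String(32), primary_key=True)

    ids = ['ABC123', 'DEF456']                # placeholder ids
    print(operators.in_op(Item.id, ids))      # renders roughly as: item.id IN (...)
    print(operators.notin_op(Item.id, ids))   # renders roughly as: item.id NOT IN (...)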

@@ -6,7 +6,6 @@ import os
 
 from oml.pdict import pdict
 from oml.utils import get_user_id
-from oml import fulltext
 
 base_dir = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), '..'))
 static_path = os.path.join(base_dir, 'static')

@@ -91,10 +90,3 @@ DEBUG_HTTP = server.get('debug_http', False)
 DEBUG_API = server.get('debug_api', False)
 
 DB_VERSION = 13
-
-
-FULLTEXT_SUPPORT = fulltext.platform_supported()
-
-if not FULLTEXT_SUPPORT:
-    config['itemKeys'] = [k for k in config['itemKeys'] if k['id'] != 'fulltext']
-

@@ -35,8 +35,6 @@ def init(data):
     if os.path.exists(settings.oml_data_path):
         with open(settings.oml_data_path) as fd:
             config = json.load(fd)
-        if not settings.FULLTEXT_SUPPORT:
-            config['itemKeys'] = [k for k in config['itemKeys'] if k['id'] != 'fulltext']
     else:
         config = {}
     response['config'] = config

@@ -15,7 +15,6 @@ import stdnum.isbn
 import subprocess
 import sys
 import time
-import unicodedata
 
 import ox
 from OpenSSL.crypto import (

@@ -463,6 +462,3 @@ def iexists(path):
         return False
     files = {os.path.basename(f).lower() for f in files}
     return name in files
-
-def same_path(f1, f2):
-    return unicodedata.normalize('NFC', f1) == unicodedata.normalize('NFC', f2)
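
The NFC-based same_path removed here and the NFD-based nfd_same added in the scan hunk above are interchangeable for pure equality checks: either normalization maps precomposed and decomposed spellings of a name to a single canonical form. A quick self-contained illustration; the filename is made up:

    # Why NFC- and NFD-based comparisons agree for equality: both normalize
    # precomposed (U+00E9) and decomposed ('e' + U+0301) spellings to one form.
    import unicodedata

    precomposed = 'Béla Bartók.pdf'
    decomposed = unicodedata.normalize('NFD', precomposed)

    assert precomposed != decomposed                       # raw strings differ
    assert unicodedata.normalize('NFC', precomposed) == unicodedata.normalize('NFC', decomposed)
    assert unicodedata.normalize('NFD', precomposed) == unicodedata.normalize('NFD', decomposed)
    print('NFC and NFD comparisons both treat the two spellings as the same path')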