2014-05-04 17:26:43 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
# vi:si:et:sw=4:sts=4:ts=4
|
2014-09-02 22:32:44 +00:00
|
|
|
|
2014-05-04 17:26:43 +00:00
|
|
|
|
2014-08-12 08:16:57 +00:00
|
|
|
from datetime import datetime
|
2014-05-04 17:26:43 +00:00
|
|
|
import os
|
|
|
|
import shutil
|
2014-05-28 15:42:19 +00:00
|
|
|
import time
|
2014-05-04 17:26:43 +00:00
|
|
|
|
|
|
|
import ox
|
|
|
|
|
|
|
|
from changelog import Changelog
|
2016-02-13 11:40:37 +00:00
|
|
|
from item.models import File, Item
|
2014-08-12 08:16:57 +00:00
|
|
|
from user.models import List
|
|
|
|
from utils import remove_empty_folders
|
2014-05-04 17:26:43 +00:00
|
|
|
from websocket import trigger_event
|
2014-08-12 08:16:57 +00:00
|
|
|
import db
|
|
|
|
import media
|
|
|
|
import settings
|
2014-05-17 00:14:15 +00:00
|
|
|
import state
|
2014-05-18 23:24:04 +00:00
|
|
|
|
|
|
|
import logging
|
2015-11-29 14:56:38 +00:00
|
|
|
logger = logging.getLogger(__name__)
|
2014-05-04 17:26:43 +00:00
|
|
|
|
2015-03-14 07:35:15 +00:00
|
|
|
extensions = ['epub', 'pdf', 'txt', 'cbr', 'cbz']
|
2014-05-16 14:30:16 +00:00
|
|
|
|
2016-02-13 11:40:37 +00:00
|
|
|
def remove_missing(books=None):
    """Prune database entries whose files are gone from disk.

    books: optional list of absolute paths currently present in the
        library folder; if None, the folder is walked via collect_books().

    Returns early (without committing) if a shutdown is in progress.
    """
    dirty = False
    logger.debug('remove missing')
    prefix = get_prefix()
    if books is None:
        books = collect_books(prefix)
    with db.session():
        # Only prune when the library folder itself exists: if the whole
        # prefix is missing (e.g. an unmounted drive) treating every file
        # as removed would wipe the database.
        if os.path.exists(prefix):
            logger.debug('scan for removed files')
            db_paths = []
            items = {}
            for f in File.query:
                if state.shutdown:
                    return
                path = f.fullpath()
                db_paths.append(path)
                # map full path -> sha1 so removed paths can be resolved
                # back to item/file ids below
                items[path] = f.sha1
            # Paths known to the database but absent on disk.
            removed = set(db_paths) - set(books)
            if removed:
                logger.debug('%s files removed', len(removed))
                ids = [items[path] for path in removed]
                # ids without a matching Item row are orphaned File rows.
                orphaned = set(ids)
                for i in Item.query.filter(Item.id.in_(ids)):
                    i.remove_file()
                    orphaned.remove(i.id)
                    dirty = True
                if orphaned:
                    logger.debug('%s files orphaned', len(orphaned))
                    for f in File.query.filter(File.sha1.in_(orphaned)):
                        state.db.session.delete(f)
                    dirty = True
        if dirty:
            state.db.session.commit()
            # Invalidate cached group queries after deletions.
            state.cache.clear('group:')
        logger.debug('update filenames')
        for f in File.query:
            if state.shutdown:
                return
            # Relocate files whose canonical path changed (e.g. after
            # metadata edits) -- File.move() is a no-op otherwise.
            f.move()
        logger.debug('remove empty folders')
        remove_empty_folders(prefix, True)
    logger.debug('remove missing done')
|
2014-05-04 17:26:43 +00:00
|
|
|
|
2016-02-11 06:15:17 +00:00
|
|
|
def add_file(id, f, prefix, from_=None, commit=True):
    """Create the File/Item records for the file at absolute path *f*.

    id: content id of the file (as computed by media.get_id)
    f: absolute path inside *prefix*
    prefix: library root; stripped off to get the relative path
    from_: optional original path the file was imported from
    commit: forwarded to item.update() to allow batched commits

    Returns the created (or fetched) File row.
    """
    user = state.user()
    # store paths relative to the library root
    path = f[len(prefix):]
    logger.debug('%s extract metadata %s', id, path)
    metadata = media.metadata(f, from_)
    logger.debug('%s create file %s', id, path)
    db_file = File.get_or_create(id, metadata, path)
    book = db_file.item
    book.add_user(user)
    book.added = datetime.utcnow()
    logger.debug('%s load metadata %s', id, path)
    book.load_metadata()
    # record both the addition and the initial metadata in the changelog
    Changelog.record(user, 'additem', book.id, db_file.info)
    Changelog.record(user, 'edititem', book.id, book.meta)
    logger.debug('%s extract icons %s', id, path)
    book.update_icons()
    book.modified = datetime.utcnow()
    logger.debug('%s save item', id)
    book.update(commit=commit)
    logger.debug('%s added', id)
    return db_file
|
|
|
|
|
2016-02-14 08:32:07 +00:00
|
|
|
def get_prefix():
    """Return the absolute path of the 'Books' library folder.

    The returned path always ends with os.sep so relative paths can be
    derived by simple slicing.
    """
    library_path = os.path.expanduser(settings.preferences['libraryPath'])
    prefix = os.path.join(library_path, 'Books' + os.sep)
    # defensive: guarantee the trailing separator
    if not prefix.endswith(os.sep):
        prefix += os.sep
    assert isinstance(prefix, str)
    return prefix
|
|
|
|
|
2016-02-14 08:55:54 +00:00
|
|
|
def collect_books(prefix, status=None):
    """Walk *prefix* and return the absolute paths of supported ebooks.

    prefix: library folder to scan
    status: optional callable invoked with the running file count; if it
        returns a falsy value the scan is cancelled and None is returned.

    Returns [] if a shutdown is requested mid-scan (distinct from the
    None returned on cancellation via *status*).
    """
    logger.debug('collect books')
    books = []
    count = 0
    for root, folders, files in os.walk(prefix):
        for f in files:
            if state.shutdown:
                return []
            # skip hidden files
            if f.startswith('.'):
                continue
            # Derive the extension from the bare filename, not the joined
            # path: splitting the full path on '.' picks up dots in
            # directory names (e.g. 'J. Doe/readme' -> ' Doe/readme')
            # and returns the whole name for extension-less files.
            ext = os.path.splitext(f)[1].lstrip('.').lower()
            f = os.path.join(root, f)
            # Kobo epubs use the .kepub extension but are regular epubs.
            if ext == 'kepub':
                ext = 'epub'
            if ext in extensions:
                books.append(f)
            count += 1
            if status and not status(count):
                return None
    logger.debug('found %s books', len(books))
    return books
|
|
|
|
|
|
|
|
def run_scan():
    """Scan the library folder: prune missing files, then import new ones."""
    logger.debug('run_scan')
    prefix = get_prefix()
    books = collect_books(prefix)
    # drop database entries whose files disappeared before adding new ones
    remove_missing(books)
    added = 0
    for path in ox.sorted_strings(books):
        if state.shutdown:
            return
        if not os.path.exists(path):
            continue
        book_id = media.get_id(path)
        with db.session():
            known = File.get(book_id)
            if not known:
                known = add_file(book_id, path, prefix, path)
                added += 1
    if added:
        trigger_event('change', {})
        logger.debug('imported %s unknown books', added)
|
2014-05-04 17:26:43 +00:00
|
|
|
|
2016-01-27 06:28:11 +00:00
|
|
|
def change_path(old, new):
    """Move the 'Books' library folder from *old* to *new*.

    If no 'Books' folder exists at the destination yet, the old one is
    moved over wholesale; otherwise the destination is (re)created and a
    full rescan picks up its contents.
    """
    new_books = os.path.join(new, 'Books')
    if not os.path.exists(new_books):
        # NOTE(review): creates *new* (the parent), not new_books, so that
        # shutil.move can place 'Books' inside it -- presumably
        # ox.makedirs tolerates existing directories; confirm.
        ox.makedirs(new)
        shutil.move(os.path.join(old, 'Books'), new_books)
        remove_empty_folders(old)
    else:
        # destination already has a Books folder: keep it and re-index
        ox.makedirs(new_books)
        run_scan()
    trigger_event('change', {})
|
|
|
|
|
2014-05-16 14:30:16 +00:00
|
|
|
def run_import(options=None):
    """Import books from a folder into the library.

    options (all optional):
        path: folder to import from (default: importPath preference)
        mode: 'move' to move files out of the source folder, otherwise copy
        list: name of a user list to add the imported items to

    Progress, errors and completion are reported via 'activity' events;
    state.activity doubles as the cancellation channel (its 'cancel' key
    is polled at every checkpoint).
    """
    options = options or {}
    logger.debug('run_import')
    # a cancel issued before we even started: consume it and bail out
    if state.activity.get('cancel'):
        logger.debug('import canceled')
        state.activity = {}
        return
    state.activity = {}
    prefs = settings.preferences
    prefix = os.path.expanduser(options.get('path', prefs['importPath']))
    if os.path.islink(prefix):
        prefix = os.path.realpath(prefix)
    # normalize to a trailing separator so str.replace below maps
    # source paths onto the import prefix cleanly
    if not prefix[-1] == os.sep:
        prefix += os.sep
    prefix_books = get_prefix()
    # imported files are staged under Books/.import/ before File.move()
    # sorts them into their final location
    prefix_imported = os.path.join(prefix_books, '.import' + os.sep)
    # refuse to import the library into itself (either nesting direction)
    if prefix_books.startswith(prefix) or prefix.startswith(prefix_books):
        error = 'invalid path'
    elif not os.path.exists(prefix):
        error = 'path not found'
    elif not os.path.isdir(prefix):
        error = 'path must be a folder'
    else:
        error = None
    if error:
        trigger_event('activity', {
            'activity': 'import',
            'progress': [0, 0],
            'status': {'code': 404, 'text': error}
        })
        state.activity = {}
        return
    listname = options.get('list')
    if listname:
        listitems = []
    assert isinstance(prefix, str)
    books = []

    def activity(count):
        # status callback for collect_books: report every 100 files and
        # honor cancellation (returning False aborts the scan)
        if count % 100 == 0:
            state.activity = {
                'activity': 'import',
                'path': prefix,
                'progress': [0, count],
            }
            trigger_event('activity', state.activity)
        if state.activity.get('cancel'):
            logger.debug('active import canceled')
            state.activity = {}
            return False
        return True

    books = collect_books(prefix, status=activity)
    # None means the scan itself was cancelled
    if books is None:
        return
    state.activity = {
        'activity': 'import',
        'path': prefix,
        'progress': [0, len(books)],
    }
    trigger_event('activity', state.activity)
    position = 0
    added = 0
    last = 0
    for f in ox.sorted_strings(books):
        position += 1
        if not os.path.exists(f):
            continue
        with db.session():
            id = media.get_id(f)
            file = File.get(id)
            # remember the source path; f is rebound to the staging path
            f_import = f
            if not file:
                f = f.replace(prefix, prefix_imported)
                ox.makedirs(os.path.dirname(f))
                if options.get('mode') == 'move':
                    try:
                        shutil.move(f_import, f)
                    except:
                        # cross-device move (or similar) failed: fall back
                        # to copying, leaving the source in place
                        shutil.copy2(f_import, f)
                else:
                    shutil.copy2(f_import, f)
                file = add_file(id, f, prefix_books, f_import)
                # sort the staged file into its final library location
                file.move()
                added += 1
            elif options.get('mode') == 'move':
                # already in the library: just remove the duplicate source
                os.unlink(f_import)
            if listname:
                listitems.append(file.item.id)
        if state.activity.get('cancel'):
            state.activity = {}
            return
        if state.shutdown:
            return
        # throttle progress events to one every 5 seconds
        if time.time() - last > 5:
            last = time.time()
            state.activity = {
                'activity': 'import',
                'progress': [position, len(books)],
                'path': prefix,
                'added': added,
            }
            trigger_event('activity', state.activity)
    if listname and listitems:
        with db.session():
            l = List.get(settings.USER_ID, listname)
            if l:
                l.add_items(listitems)
    # final status event: 200 marks a completed import
    trigger_event('activity', {
        'activity': 'import',
        'progress': [position, len(books)],
        'path': prefix,
        'status': {'code': 200, 'text': ''},
        'added': added,
    })
    state.activity = {}
    remove_empty_folders(prefix_books)
    if options.get('mode') == 'move':
        remove_empty_folders(prefix, True)
|
|
|
|
|
|
|
|
def import_folder():
    """Periodic task: import books dropped into the importPath folder.

    Skips the scan while an interactive import is running, then always
    re-queues itself to run again in 10 minutes.
    """
    if not (state.activity and state.activity.get('activity') == 'import'):
        import_path = settings.preferences['importPath']
        logger.debug('scan importPath %s', import_path)
        if os.path.exists(import_path):
            run_import({
                'path': import_path,
                'mode': 'move'
            })
            remove_empty_folders(import_path, True)
    # reschedule (only when the main loop is up, e.g. not during shutdown)
    if state.main:
        state.main.call_later(10*60, lambda: state.tasks.queue('scanimport'))
|