2014-05-04 17:26:43 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
# vi:si:et:sw=4:sts=4:ts=4
|
2014-09-02 22:32:44 +00:00
|
|
|
|
2014-05-04 17:26:43 +00:00
|
|
|
|
2014-08-12 08:16:57 +00:00
|
|
|
from datetime import datetime
|
2014-05-04 17:26:43 +00:00
|
|
|
import os
|
|
|
|
import shutil
|
2014-05-28 15:42:19 +00:00
|
|
|
import time
|
2014-05-04 17:26:43 +00:00
|
|
|
|
|
|
|
import ox
|
|
|
|
|
|
|
|
from changelog import Changelog
|
2016-01-18 12:02:42 +00:00
|
|
|
from item.models import File
|
2014-08-12 08:16:57 +00:00
|
|
|
from user.models import List
|
|
|
|
from utils import remove_empty_folders
|
2014-05-04 17:26:43 +00:00
|
|
|
from websocket import trigger_event
|
2014-08-12 08:16:57 +00:00
|
|
|
import db
|
|
|
|
import media
|
|
|
|
import settings
|
2014-05-17 00:14:15 +00:00
|
|
|
import state
|
2014-05-18 23:24:04 +00:00
|
|
|
|
|
|
|
import logging
|
2015-11-29 14:56:38 +00:00
|
|
|
logger = logging.getLogger(__name__)
|
2014-05-04 17:26:43 +00:00
|
|
|
|
2015-03-14 07:35:15 +00:00
|
|
|
extensions = ['epub', 'pdf', 'txt', 'cbr', 'cbz']
|
2014-05-16 14:30:16 +00:00
|
|
|
|
2014-05-04 17:26:43 +00:00
|
|
|
def remove_missing():
    """Reconcile the database with the files actually present on disk.

    Inside one db session: File rows whose item path no longer exists
    are detached from their item, and orphan File rows (no item at
    all) are deleted.  Afterwards every remaining file is moved to its
    canonical location and empty folders under the Books prefix are
    pruned.  Returns early whenever the task queue disconnects.
    """
    changed = False
    with db.session():
        prefs = settings.preferences
        prefix = os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books' + os.sep)
        if os.path.exists(prefix):
            for record in File.query:
                # bail out as soon as the task runner loses its connection
                if not state.tasks.connected:
                    return
                if record.item:
                    item_path = record.item.get_path()
                    if not os.path.exists(item_path):
                        changed = True
                        record.item.remove_file()
                else:
                    # file row without an item is an orphan: purge it
                    state.db.session.delete(record)
                    changed = True
            if changed:
                state.db.session.commit()
        for record in File.query:
            if not state.tasks.connected:
                return
            record.move()
        remove_empty_folders(prefix, True)
|
2014-05-04 17:26:43 +00:00
|
|
|
|
2014-05-27 14:08:14 +00:00
|
|
|
def add_file(id, f, prefix, from_=None):
    """Create (or fetch) the File record for the book at path ``f``.

    id     -- media id of the file
    f      -- absolute path to the file inside the library
    prefix -- library prefix; the stored path is ``f`` relative to it
    from_  -- original source path the file was imported from, if any

    Extracts metadata, attaches the item to the current user, records
    the addition in the changelog and regenerates icons.  Returns the
    File instance.
    """
    current_user = state.user()
    path = f[len(prefix):]
    logger.debug('%s extract metadata %s', id, path)
    file_data = media.metadata(f, from_)
    logger.debug('%s create file %s', id, path)
    book_file = File.get_or_create(id, file_data, path)
    book_item = book_file.item
    book_item.add_user(current_user)
    book_item.added = datetime.utcnow()
    logger.debug('%s load metadata %s', id, path)
    book_item.load_metadata()
    Changelog.record(current_user, 'additem', book_item.id, book_file.info)
    Changelog.record(current_user, 'edititem', book_item.id, book_item.meta)
    logger.debug('%s extract icons %s', id, path)
    book_item.update_icons()
    book_item.modified = datetime.utcnow()
    logger.debug('%s save item', id)
    book_item.update()
    logger.debug('%s added', id)
    return book_file
|
|
|
|
|
2014-05-04 17:26:43 +00:00
|
|
|
def run_scan():
    """Scan the Books library folder and register any unknown files.

    First drops records for files that went missing, then walks the
    library prefix collecting files whose extension is listed in
    ``extensions`` ('kepub' is treated as 'epub').  Every file not yet
    in the database is added via add_file() and a 'change' event is
    triggered.  Aborts as soon as the task queue disconnects.
    """
    remove_missing()
    prefs = settings.preferences
    prefix = os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books' + os.sep)
    if not prefix[-1] == os.sep:
        prefix += os.sep
    assert isinstance(prefix, str)
    books = []
    for root, folders, files in os.walk(prefix):
        for name in files:
            if not state.tasks.connected:
                return
            # skip hidden files (covers '._*' resource forks and .DS_Store)
            if name.startswith('.'):
                continue
            path = os.path.join(root, name)
            # lower-case so files with upper-case extensions ('.PDF') match too
            ext = path.split('.')[-1].lower()
            if ext == 'kepub':
                ext = 'epub'
            if ext in extensions:
                books.append(path)

    for f in ox.sorted_strings(books):
        if not state.tasks.connected:
            return
        with db.session():
            id = media.get_id(f)
            file = File.get(id)
            if not file:
                file = add_file(id, f, prefix, f)
                trigger_event('change', {})
|
2014-05-04 17:26:43 +00:00
|
|
|
|
2014-05-16 14:30:16 +00:00
|
|
|
def run_import(options=None):
    """Import books from a folder into the library.

    options:
        path -- folder to import from (default: importPath preference)
        mode -- 'move' removes originals after import, anything else copies
        list -- name of a list to add all imported items to

    Progress is published via ``state.activity`` and 'activity'
    events; the import can be canceled by setting
    ``state.activity['cancel']``.  Returns early on error, cancel, or
    task-queue disconnect.
    """
    options = options or {}
    logger.debug('run_import')
    if state.activity.get('cancel'):
        logger.debug('import canceled')
        state.activity = {}
        return
    state.activity = {}
    prefs = settings.preferences
    prefix = os.path.expanduser(options.get('path', prefs['importPath']))
    if os.path.islink(prefix):
        prefix = os.path.realpath(prefix)
    if not prefix[-1] == os.sep:
        prefix += os.sep
    prefix_books = os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books' + os.sep)
    prefix_imported = os.path.join(prefix_books, '.import' + os.sep)
    # refuse to import from inside the library (or the library from itself)
    if prefix_books.startswith(prefix) or prefix.startswith(prefix_books):
        error = 'invalid path'
    elif not os.path.exists(prefix):
        error = 'path not found'
    elif not os.path.isdir(prefix):
        error = 'path must be a folder'
    else:
        error = None
    if error:
        trigger_event('activity', {
            'activity': 'import',
            'progress': [0, 0],
            'status': {'code': 404, 'text': error}
        })
        state.activity = {}
        return
    listname = options.get('list')
    if listname:
        listitems = []
    assert isinstance(prefix, str)
    # collect candidate files, publishing a progress update every 100 hits
    books = []
    count = 0
    for root, folders, files in os.walk(prefix):
        for f in files:
            if not state.tasks.connected:
                return
            # skip hidden files (covers '._*' resource forks and .DS_Store)
            if f.startswith('.'):
                continue
            f = os.path.join(root, f)
            # lower-case so files with upper-case extensions ('.PDF') match too
            ext = f.split('.')[-1].lower()
            if ext in extensions:
                books.append(f)
                count += 1
                if state.activity.get('cancel'):
                    logger.debug('active import canceled')
                    state.activity = {}
                    return
                if count % 100 == 0:
                    state.activity = {
                        'activity': 'import',
                        'path': prefix,
                        'progress': [0, count],
                    }
                    trigger_event('activity', state.activity)
    state.activity = {
        'activity': 'import',
        'path': prefix,
        'progress': [0, len(books)],
    }
    trigger_event('activity', state.activity)
    position = 0
    added = 0
    last = 0
    for f in ox.sorted_strings(books):
        position += 1
        if not os.path.exists(f):
            continue
        with db.session():
            id = media.get_id(f)
            file = File.get(id)
            f_import = f
            if not file:
                # Stage the file below Books/.import before adding it.
                # Rewrite only the leading prefix: str.replace() would
                # also mangle any later occurrence of the prefix substring.
                f = prefix_imported + f[len(prefix):]
                ox.makedirs(os.path.dirname(f))
                if options.get('mode') == 'move':
                    try:
                        shutil.move(f_import, f)
                    except Exception:
                        # cross-device or permission failure: fall back to copy
                        shutil.copy2(f_import, f)
                else:
                    shutil.copy2(f_import, f)
                file = add_file(id, f, prefix_books, f_import)
                file.move()
                added += 1
            elif options.get('mode') == 'move':
                # already in the library, just remove the source copy
                os.unlink(f_import)
            if listname:
                listitems.append(file.item.id)
        # throttle progress events to one every 5 seconds
        if time.time() - last > 5:
            last = time.time()
            state.activity = {
                'activity': 'import',
                'progress': [position, len(books)],
                'path': prefix,
                'added': added,
            }
            trigger_event('activity', state.activity)
        if state.activity.get('cancel'):
            state.activity = {}
            return
    with db.session():
        if listname and listitems:
            l = List.get(settings.USER_ID, listname)
            if l:
                l.add_items(listitems)
    trigger_event('activity', {
        'activity': 'import',
        'progress': [position, len(books)],
        'path': prefix,
        'status': {'code': 200, 'text': ''},
        'added': added,
    })
    state.activity = {}
    remove_empty_folders(prefix_books)
    if options.get('mode') == 'move':
        remove_empty_folders(prefix, True)
|
|
|
|
|
|
|
|
def import_folder():
    """Import the configured importPath once, then reschedule the task.

    Skips the run while another import is already active.  Always
    re-queues the 'scanimport' task to fire again in ten minutes.
    """
    import_active = state.activity and state.activity.get('activity') == 'import'
    if not import_active:
        import_path = settings.preferences['importPath']
        logger.debug('scan importPath %s', import_path)
        if os.path.exists(import_path):
            run_import({'path': import_path, 'mode': 'move'})
            remove_empty_folders(import_path, True)
    if state.main:
        state.main.call_later(10*60, lambda: state.tasks.queue('scanimport'))
|