commit feddea0ccd
parent c0cab079bc

lots of stuff

24 changed files with 1385 additions and 226 deletions

@@ -31,6 +31,9 @@ class Changelog(db.Model):
editcontact string
addpeer peerid peername
removepeer peerid peername

editmeta key, value data (i.e. 'isbn', '0000000000', {title: 'Example'})
resetmeta key, value
'''
id = db.Column(db.Integer(), primary_key=True)

@@ -164,13 +167,16 @@ class Changelog(db.Model):
keys = filter(lambda k: k in Item.id_keys, meta.keys())
if keys:
key = keys[0]
if not meta[key] and i.meta.get('mainid') == key:
logger.debug('remove id mapping %s currently %s', key, meta[key], i.meta[key])
i.update_mainid(key, meta[key])
elif meta[key] and (i.meta.get('mainid') != key or meta[key] != i.meta.get(key)):
logger.debug('new mapping %s %s currently %s %s', key, meta[key], i.meta.get('mainid'), i.meta.get(i.meta.get('mainid')))
i.update_mainid(key, meta[key])
primary = [key, meta[key]]
if not meta[key] and i.meta.get('primaryid', [''])[0] == key:
logger.debug('remove id mapping %s %s', i.id, primary)
i.update_primaryid(*primary)
elif meta[key] and i.meta.get('primaryid') != primary:
logger.debug('edit mapping %s %s', i.id, primary)
i.update_primaryid(*primary)
else:
if 'primaryid' in i.meta:
return True
i.update_meta(meta)
i.modified = ts2datetime(timestamp)
i.save()

@@ -261,3 +267,20 @@ class Changelog(db.Model):
user.save()
#fixme, remove from User table if no other connection exists
return True

def action_editmeta(self, user, timestamp, key, value, data):
from item.models import Metadata
m = Metadata.get(key, value)
if not m or m.timestamp < timestamp:
if not m:
m = Metadata.get_or_create(key, value)
if m.edit(data):
m.update_items()
return True

def action_resetmeta(self, user, timestamp, key, value):
from item.models import Metadata
m = Metadata.get(key, value)
if m and m.timestamp < timestamp:
m.reset()
return True

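For orientation, a minimal sketch of how one recorded changelog entry would be replayed on a receiving peer, assuming the usual action_<name> dispatch; the dispatcher name apply_change is hypothetical, only Changelog.record() and the action_editmeta()/action_resetmeta() handlers above appear in this commit:

    # hypothetical replay of a single changelog entry on the receiving peer
    def apply_change(changelog, user, timestamp, action, args):
        # e.g. action='editmeta', args=('isbn', '0000000000', {'title': 'Example'})
        # would end up in Changelog.action_editmeta(user, timestamp, key, value, data)
        handler = getattr(changelog, 'action_' + action)
        return handler(user, timestamp, *args)
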
@@ -81,7 +81,24 @@ class PostUpdate(Command):
]

def run(selfi, old, new):
pass
if old <= '20140506-2-796c77b' and new > '20140506-2-796c77b':
print 'migrate database content'
import item.models
for i in item.models.Item.query:
if 'mainid' in i.meta:
mainid = i.meta.pop('mainid')
pid = {'isbn10': 'isbn', 'isbn13': 'isbn'}.get(mainid, mainid)
i.meta['primaryid'] = [pid, i.meta[mainid]]
isbns = i.meta.get('isbn', [])
for key in ('isbn10', 'isbn13'):
if key in i.meta:
isbns.append(i.meta.pop(key))
if isbns:
i.meta['isbn'] = isbns
for key in ('asin', 'lccn', 'olid', 'oclc'):
if key in i.meta and isinstance(i.meta[key], basestring):
i.meta[key] = [i.meta[key]]
i.update()

class Setup(Command):
"""

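A worked example of what the migration in run() above does to a single item's meta dict (values illustrative):

    # before the 20140506-2-796c77b migration
    meta = {'mainid': 'isbn13', 'isbn13': '9781906497262', 'asin': 'B00ABC1234'}
    # after it: 'mainid' is replaced by 'primaryid', isbn10/isbn13 values are
    # folded into a single 'isbn' list, and the other id keys become lists
    meta = {'primaryid': ['isbn', '9781906497262'],
            'isbn': ['9781906497262'],
            'asin': ['B00ABC1234']}
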
@@ -38,7 +38,7 @@ class Downloads(Thread):
while self._running:
if state.online:
self.download_next()
time.sleep(10)
time.sleep(0.5)
else:
time.sleep(20)

@@ -113,24 +113,21 @@ def edit(data):
setting identifier or base metadata is possible not both at the same time
'''
response = {}
logger.debug('edit %s', data)
item = models.Item.get(data['id'])
keys = filter(lambda k: k in models.Item.id_keys, data.keys())
logger.debug('edit of %s id keys: %s', item, keys)
if item and item.json()['mediastate'] == 'available':
if keys:
key = keys[0]
logger.debug('update mainid %s %s', key, data[key])
if key in ('isbn10', 'isbn13'):
data[key] = utils.normalize_isbn(data[key])
item.update_mainid(key, data[key])
response = item.json()
elif not item.meta.get('mainid'):
logger.debug('setting chustom metadata %s', data)
item.update_meta(data)
if 'primaryid' in data:
if data['primaryid']:
key, value = data['primaryid']
logger.debug('update primaryid %s %s', key, value)
if key == 'isbn':
value = utils.normalize_isbn(value)
item.update_primaryid(key, value)
else:
item.update_primaryid()
response = item.json()
else:
logger.debug('invalid metadata %s', data)
item.edit_metadata(data)
response = item.json()
else:
logger.info('can only edit available items')
return response

@@ -154,10 +151,7 @@ actions.register(remove, cache=False)
def findMetadata(data):
'''
takes {
title: string,
author: [string],
publisher: string,
date: string
query: string,
}
returns {
items: [{

@@ -168,28 +162,42 @@ def findMetadata(data):
'''
response = {}
logger.debug('findMetadata %s', data)
response['items'] = meta.find(**data)
response['items'] = meta.find(data['query'])
return response
actions.register(findMetadata)

def getMetadata(data):
'''
takes {
key: value
includeEdits: boolean
}
key can be one of the supported identifiers: isbn10, isbn13, oclc, olid,...
'''
logger.debug('getMetadata %s', data)
if 'includeEdits' in data:
include_edits = data.pop('includeEdits')
else:
include_edits = False
key, value = data.iteritems().next()
if key in ('isbn10', 'isbn13'):
if key == 'isbn':
value = utils.normalize_isbn(value)
response = meta.lookup(key, value)
if include_edits:
response.update(models.Metadata.load(key, value))
if response:
response['mainid'] = key
response['primaryid'] = [key, value]
return response
actions.register(getMetadata)

def resetMetadata(data):
item = models.Item.get(data['id'])
if item and 'primaryid' in item.meta:
meta = models.Metadata.get(*item.meta['primaryid'])
if meta:
meta.reset()
return {}
actions.register(resetMetadata)

def download(data):
'''

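The two kinds of payloads edit() accepts after this change, with the item id and values purely illustrative:

    # set or clear the primary identifier (goes through item.update_primaryid)
    edit({'id': 'a1b2c3', 'primaryid': ['isbn', '0000000000']})
    edit({'id': 'a1b2c3', 'primaryid': []})
    # or edit base metadata (goes through item.edit_metadata)
    edit({'id': 'a1b2c3', 'title': 'Example', 'author': ['Some Author']})
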
@@ -16,9 +16,12 @@ from oxtornado import run_async
from utils import resize_image

from settings import covers_db_path
from settings import icons_db_path

class Covers(dict):
import logging
logger = logging.getLogger('oml.item.icons')

class Icons(dict):
def __init__(self, db):
self._db = db
self.create()

@@ -30,7 +33,7 @@ class Covers(dict):
def create(self):
conn = self.connect()
c = conn.cursor()
c.execute(u'CREATE TABLE IF NOT EXISTS cover (id varchar(64) unique, data blob)')
c.execute(u'CREATE TABLE IF NOT EXISTS icon (id varchar(64) unique, data blob)')
c.execute(u'CREATE TABLE IF NOT EXISTS setting (key varchar(256) unique, value text)')
if int(self.get_setting(c, 'version', 0)) < 1:
self.set_setting(c, 'version', 1)

@@ -53,7 +56,7 @@ class Covers(dict):
return data

def __getitem__(self, id, default=None):
sql = u'SELECT data FROM cover WHERE id=?'
sql = u'SELECT data FROM icon WHERE id=?'
conn = self.connect()
c = conn.cursor()
c.execute(sql, (id, ))

@@ -66,7 +69,7 @@ class Covers(dict):
return data

def __setitem__(self, id, data):
sql = u'INSERT OR REPLACE INTO cover values (?, ?)'
sql = u'INSERT OR REPLACE INTO icon values (?, ?)'
conn = self.connect()
c = conn.cursor()
data = sqlite3.Binary(data)

@@ -76,7 +79,7 @@ class Covers(dict):
conn.close()

def __delitem__(self, id):
sql = u'DELETE FROM cover WHERE id = ?'
sql = u'DELETE FROM icon WHERE id = ?'
conn = self.connect()
c = conn.cursor()
c.execute(sql, (id, ))

@@ -84,51 +87,64 @@ class Covers(dict):
c.close()
conn.close()

covers = Covers(covers_db_path)
icons = Icons(icons_db_path)

@run_async
def get_cover(app, id, size, callback):
def get_icon(app, id, type_, size, callback):
with app.app_context():
from item.models import Item
item = Item.get(id)
if not item:
callback('')
else:
if type_ == 'cover' and not item.meta.get('cover'):
type_ = 'preview'
if type_ == 'preview' and not item.files.count():
type_ = 'cover'
if size:
skey = '%s:%s:%s' % (type_, id, size)
key = '%s:%s' % (type_, id)
data = None
if size:
data = covers['%s:%s' % (id, size)]
data = icons[skey]
if data:
size = None
if not data:
data = covers[id]
data = icons[key]
if not data:
data = item.update_cover()
if not data:
data = covers.black()
data = icons.black()
size = None
if size:
data = covers['%s:%s' % (id, size)] = resize_image(data, size=size)
data = str(data)
if not 'coverRatio' in item.info:
img = Image.open(StringIO(data))
item.info['coverRatio'] = img.size[0]/img.size[1]
item.save()
data = data or ''
data = icons[skey] = resize_image(data, size=size)
data = str(data) or ''
callback(data)

class CoverHandler(tornado.web.RequestHandler):
class IconHandler(tornado.web.RequestHandler):

def initialize(self, app):
self._app = app

@tornado.web.asynchronous
@tornado.gen.coroutine
def get(self, id, size=None):
size = int(size) if size else None
response = yield tornado.gen.Task(get_cover, self._app, id, size)
if not response:
def get(self, id, type_, size=None):
def fail():
self.set_status(404)
self.write('')
else:
self.set_header('Content-Type', 'image/jpeg')
self.write(response)
self.finish()

size = int(size) if size else None

if type_ not in ('cover', 'preview'):
fail()
return

self.set_header('Content-Type', 'image/jpeg')

response = yield tornado.gen.Task(get_icon, self._app, id, type_, size)
if not response:
fail()
return
if self._finished:
return
self.write(response)
self.finish()

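A note on the cache layout this rewrite introduces: the single icons store keys images by type, item id and optional size (the item id below is illustrative):

    icons['cover:a1b2c3']        # full-size cover
    icons['preview:a1b2c3']      # full-size preview extracted from the file
    icons['cover:a1b2c3:128']    # resized variant cached by get_icon()
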
@@ -28,7 +28,7 @@ import utils

from oxflask.db import MutableDict

from covers import covers
from icons import icons
from changelog import Changelog
from websocket import trigger_event
from utils import remove_empty_folders

@@ -105,7 +105,7 @@ class Item(db.Model):

@property
def timestamp(self):
return self.modified.strftime('%s')
return utils.datetime2ts(self.modified)

def __repr__(self):
return self.id

@@ -155,7 +155,7 @@ class Item(db.Model):
if self.meta:
j.update(self.meta)

for key in self.id_keys + ['mainid']:
for key in self.id_keys + ['primaryid']:
if key not in self.meta and key in j:
del j[key]
'''

@@ -213,7 +213,7 @@ class Item(db.Model):
db.session.add(f)

for key in config['itemKeys']:
if key.get('find') or key.get('filter'):
if key.get('find') or key.get('filter') or key.get('type') in [['string'], 'string']:
value = self.json().get(key['id'], None)
if key.get('filterMap') and value:
value = re.compile(key.get('filterMap')).findall(value)

@@ -248,7 +248,7 @@ class Item(db.Model):
db.session.add(f)

def update(self):
for key in ('mediastate', 'coverRatio'):
for key in ('mediastate', 'coverRatio', 'previewRatio'):
if key in self.meta:
if key not in self.info:
self.info[key] = self.meta[key]

@@ -259,6 +259,9 @@ class Item(db.Model):
self.info['mediastate'] = 'transferring'
else:
self.info['mediastate'] = 'available' if settings.USER_ID in users else 'unavailable'
#fixme: also load metadata for other ids?
if 'primaryid' in self.meta:
self.meta.update(Metadata.load(*self.meta['primaryid']))
self.update_sort()
self.update_find()
self.update_lists()

@@ -269,86 +272,123 @@ class Item(db.Model):
db.session.add(self)
db.session.commit()

meta_keys = ('title', 'author', 'date', 'publisher', 'edition', 'language')

def update_meta(self, data):
if data != self.meta:
self.meta = data
update = False
record = {}
for key in self.meta_keys:
if key in data:
self.meta[key] = data[key]
record[key] = data[key]
update = True
for key in self.meta.keys():
if key not in self.meta_keys:
del self.meta[key]
update = True
if update:
self.update()
self.modified = datetime.utcnow()
self.save()
user = state.user()
if user in self.users:
Changelog.record(user, 'edititem', self.id, data)
Changelog.record(user, 'edititem', self.id, record)

def update_mainid(self, key, id):
def update_primaryid(self, key=None, id=None):
if key is None and id is None:
if 'primaryid' not in self.meta:
return
else:
key = self.meta['primaryid'][0]
record = {}
if id:
self.meta[key] = id
self.meta['mainid'] = key
self.meta['primaryid'] = [key, id]
record[key] = id
else:
if key in self.meta:
del self.meta[key]
if 'mainid' in self.meta:
del self.meta['mainid']
if 'primaryid' in self.meta:
del self.meta['primaryid']
record[key] = ''
for k in self.id_keys:
if k != key:
if k in self.meta:
del self.meta[k]
logger.debug('mainid %s %s', 'mainid' in self.meta, self.meta.get('mainid'))
logger.debug('key %s %s', key, self.meta.get(key))
logger.debug('set primaryid %s %s', key, id)

# get metadata from external resources
self.scrape()
self.update()
self.update_cover()
self.update_icons()
self.modified = datetime.utcnow()
self.save()
user = state.user()
if user in self.users:
Changelog.record(user, 'edititem', self.id, record)

def extract_cover(self):
def edit_metadata(self, data):
if 'primaryid' in self.meta:
m = Metadata.get_or_create(*self.meta['primaryid'])
m.edit(data)
m.update_items()
else:
self.update_meta(data)

def extract_preview(self):
path = self.get_path()
if path:
return getattr(media, self.info['extension']).cover(path)

def update_cover(self):
def update_icons(self):
def get_ratio(data):
img = Image.open(StringIO(data))
return img.size[0]/img.size[1]
key = 'cover:%s'%self.id
cover = None
if 'cover' in self.meta and self.meta['cover']:
cover = ox.cache.read_url(self.meta['cover'])
#covers[self.id] = requests.get(self.meta['cover']).content
if cover:
covers[self.id] = cover
icons[key] = cover
self.info['coverRatio'] = get_ratio(cover)
else:
if covers[self.id]:
del covers[self.id]
if icons[key]:
del icons[key]
path = self.get_path()
if not cover and path:
cover = self.extract_cover()
if cover:
covers[self.id] = cover
if cover:
img = Image.open(StringIO(cover))
self.info['coverRatio'] = img.size[0]/img.size[1]
for p in (':128', ':256', ':512'):
del covers['%s%s' % (self.id, p)]
return cover
key = 'preview:%s'%self.id
if path:
preview = self.extract_preview()
if preview:
icons[key] = preview
self.info['previewRatio'] = get_ratio(preview)
if not cover:
self.info['coverRatio'] = self.info['previewRatio']
elif cover:
self.info['previewRatio'] = self.info['coverRatio']
for key in ('cover', 'preview'):
key = '%s:%s' % (key, self.id)
for resolution in (128, 256, 512):
del icons['%s:%s' % (key, resolution)]

def scrape(self):
mainid = self.meta.get('mainid')
logger.debug('scrape %s %s', mainid, self.meta.get(mainid))
if mainid:
m = meta.lookup(mainid, self.meta[mainid])
self.meta.update(m)
primaryid = self.meta.get('primaryid')
logger.debug('scrape %s', primaryid)
if primaryid:
m = meta.lookup(*primaryid)
m['primaryid'] = primaryid
self.meta = m
self.update()

def queue_download(self):
u = state.user()
if not u in self.users:
logger.debug('queue %s for download', self.id)
self.transferprogress = 0
self.transferadded = datetime.utcnow()
self.users.append(u)
else:
logger.debug('%s already queued for download? %s %s', self.id, self.transferprogress, self.transferadded)

def save_file(self, content):
u = state.user()

@@ -372,7 +412,7 @@ class Item(db.Model):
Changelog.record(u, 'additem', self.id, self.info)
self.update()
f.move()
self.update_cover()
self.update_icons()
trigger_event('transfer', {
'id': self.id, 'progress': 1
})

@@ -416,7 +456,7 @@ for key in config['itemKeys']:
col = db.Column(db.String(1000), index=True)
setattr(Item, 'sort_%s' % key['id'], col)

Item.id_keys = ['isbn10', 'isbn13', 'lccn', 'olid', 'oclc', 'asin']
Item.id_keys = ['isbn', 'lccn', 'olid', 'oclc', 'asin']
Item.item_keys = config['itemKeys']
Item.filter_keys = [k['id'] for k in config['itemKeys'] if k.get('filter')]

@@ -529,3 +569,71 @@ class File(db.Model):
def save(self):
db.session.add(self)
db.session.commit()

class Metadata(db.Model):

created = db.Column(db.DateTime())
modified = db.Column(db.DateTime())

id = db.Column(db.Integer(), primary_key=True)

key = db.Column(db.String(256))
value = db.Column(db.String(256))

data = db.Column(MutableDict.as_mutable(db.PickleType(pickler=json)))

def __repr__(self):
return '='.join([self.key, self.value])

@property
def timestamp(self):
return utils.datetime2ts(self.modified)

@classmethod
def get(cls, key, value):
return cls.query.filter_by(key=key, value=value).first()

@classmethod
def get_or_create(cls, key, value):
m = cls.get(key, value)
if not m:
m = cls(key=key, value=value)
m.created = datetime.utcnow()
m.data = {}
m.save()
return m

def save(self):
self.modified = datetime.utcnow()
db.session.add(self)
db.session.commit()

def reset(self):
user = state.user()
Changelog.record(user, 'resetmeta', self.key, self.value)
db.session.delete(self)
db.session.commit()
self.update_items()

def edit(self, data):
changed = {}
for key in data:
if key not in data or data[key] != self.data.get(key):
self.data[key] = data[key]
changed[key] = data[key]
if changed:
self.save()
user = state.user()
Changelog.record(user, 'editmeta', self.key, self.value, changed)
return changed

def update_items(self):
for f in Find.query.filter_by(key=self.key, value=self.value):
f.item.scrape()

@classmethod
def load(self, key, value):
m = self.get(key, value)
if m:
return m.data
return {}

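A short usage sketch for the new Metadata model above; the isbn value is the illustrative one from the Changelog docstring:

    m = Metadata.get_or_create('isbn', '0000000000')
    if m.edit({'title': 'Example'}):     # records an 'editmeta' changelog entry
        m.update_items()                 # re-scrapes items using this identifier
    Metadata.load('isbn', '0000000000')  # -> {'title': 'Example'}
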
@@ -46,22 +46,19 @@ def add_file(id, f, prefix):
data = media.metadata(f)
file = File.get_or_create(id, data, path)
item = file.item
if 'mainid' in file.info:
del file.info['mainid']
if 'primaryid' in file.info:
del file.info['primaryid']
db.session.add(file)
if 'mainid' in item.info:
item.meta['mainid'] = item.info.pop('mainid')
item.meta[item.meta['mainid']] = item.info[item.meta['mainid']]
if 'primaryid' in item.info:
item.meta['primaryid'] = item.info.pop('primaryid')
db.session.add(item)
item.users.append(user)
Changelog.record(user, 'additem', item.id, item.info)
if item.meta.get('mainid'):
Changelog.record(user, 'edititem', item.id, {
item.meta['mainid']: item.meta[item.meta['mainid']]
})
if item.meta.get('primaryid'):
Changelog.record(user, 'edititem', item.id, dict([item.meta['primaryid']]))
item.added = datetime.utcnow()
item.scrape()
item.update_cover()
item.update_icons()
item.save()
return file

@@ -168,10 +165,6 @@ def run_import(options=None):
added += 1
if state.activity.get('cancel'):
state.activity = {}
trigger_event('activity', {
'activity': 'import',
'status': {'code': 200, 'text': 'canceled'}
})
return
state.activity = {
'activity': 'import',

@@ -46,13 +46,7 @@ def metadata(f):
data[key] = info[key]

if 'isbn' in data:
value = data.pop('isbn')
if len(value) == 10:
data['isbn10'] = value
data['mainid'] = 'isbn10'
else:
data['isbn13'] = value
data['mainid'] = 'isbn13'
data['primaryid'] = ['isbn', data['isbn'][0]]
if not 'title' in data:
data['title'] = os.path.splitext(os.path.basename(f))[0]
if 'author' in data and isinstance(data['author'], basestring):

@@ -21,7 +21,7 @@ def cover(path):
z = zipfile.ZipFile(path)
data = None
for f in z.filelist:
if 'cover' in f.filename and f.filename.split('.')[-1] in ('jpg', 'jpeg', 'png'):
if 'cover' in f.filename.lower() and f.filename.split('.')[-1] in ('jpg', 'jpeg', 'png'):
logger.debug('using %s', f.filename)
data = z.read(f.filename)
break

@@ -31,7 +31,12 @@ def cover(path):
info = ET.fromstring(z.read(opf[0]))
manifest = info.findall('{http://www.idpf.org/2007/opf}manifest')[0]
for e in manifest.getchildren():
if 'html' in e.attrib['media-type']:
if 'image' in e.attrib['media-type']:
filename = e.attrib['href']
filename = os.path.normpath(os.path.join(os.path.dirname(opf[0]), filename))
data = z.read(filename)
break
elif 'html' in e.attrib['media-type']:
filename = e.attrib['href']
filename = os.path.normpath(os.path.join(os.path.dirname(opf[0]), filename))
html = z.read(filename)

@@ -66,7 +71,7 @@ def info(epub):
if key == 'identifier':
value = normalize_isbn(value)
if stdnum.isbn.is_valid(value):
data['isbn'] = value
data['isbn'] = [value]
else:
data[key] = e.text
text = extract_text(epub)

@@ -74,7 +79,7 @@ def info(epub):
if not 'isbn' in data:
isbn = extract_isbn(text)
if isbn:
data['isbn'] = isbn
data['isbn'] = [isbn]
if 'date' in data and 'T' in data['date']:
data['date'] = data['date'].split('T')[0]
return data

@@ -99,7 +99,7 @@ def info(pdf):
if 'identifier' in data:
value = normalize_isbn(data['identifier'])
if stdnum.isbn.is_valid(value):
data['isbn'] = value
data['isbn'] = [value]
del data['identifier']
'''
cmd = ['pdfinfo', pdf]

@@ -120,7 +120,7 @@ def info(pdf):
if not 'isbn' in data:
isbn = extract_isbn(text)
if isbn:
data['isbn'] = isbn
data['isbn'] = [isbn]
return data

'''

@@ -23,7 +23,7 @@ def info(path):
text = extract_text(path)
isbn = extract_isbn(text)
if isbn:
data['isbn'] = isbn
data['isbn'] = [isbn]
data['textsize'] = len(text)
return data

@@ -3,6 +3,7 @@
from __future__ import division

import stdnum.isbn
import ox

import abebooks
import loc

@@ -21,27 +22,23 @@ providers = [
('loc', 'lccn'),
('worldcat', 'oclc'),
('lookupbyisbn', 'asin'),
('abebooks', 'isbn10')
('abebooks', 'isbn')
]

def find(**kargs):
title = kargs.get('title')
author = kargs.get('author')
publisher = kargs.get('publisher')
date = kargs.get('date')
#results = google.find(title=title, author=author, publisher=publisher, date=date)
results = duckduckgo.find(title=title, author=author, publisher=publisher, date=date)
def find(query):
#results = google.find(query)
results = duckduckgo.find(query)
'''
results = openlibrary.find(title=title, author=author, publisher=publisher, date=date)
results = openlibrary.find(query)
for r in results:
r['mainid'] = 'olid'
r['primaryid'] = 'olid'
'''
return results

def lookup(key, value):
if not isvalid_id(key, value):
return {}
data = {key: value}
data = {key: [value]}
ids = [(key, value)]
provider_data = {}
done = False

@@ -53,11 +50,17 @@ def lookup(key, value):
if not kv in ids:
ids.append(kv)
done = False
logger.debug('lookup %s=%s => %s', ids[0][0], ids[0][1], ids)
logger.debug('FIXME: sort ids')
ids.sort(key=lambda i: ox.sort_string(u''.join(i)))
logger.debug('IDS %s', ids)
for k, v in ids:
for provider, id in providers:
if id == k and provider not in provider_data:
provider_data[provider] = globals()[provider].lookup(v)
if id == k:
if provider not in provider_data:
provider_data[provider] = {}
for k_, v_ in globals()[provider].lookup(v).iteritems():
if k_ not in provider_data[provider]:
provider_data[provider][k_] = v_
for provider in sorted(
provider_data.keys(),
key=lambda x: -len(provider_data[x])

@@ -66,11 +69,16 @@ def lookup(key, value):
for k_, v_ in provider_data[provider].iteritems():
if not k_ in data:
data[k_] = v_
for k, v in ids:
if k not in data:
data[k] = []
if v not in data[k]:
data[k].append(v)
return data

def isvalid_id(key, value):
if key in ('isbn10', 'isbn13'):
if 'isbn%d'%len(value) != key or not stdnum.isbn.is_valid(value):
if key == 'isbn':
if len(value) not in (10, 13) or not stdnum.isbn.is_valid(value):
return False
if key == 'asin' and len(value) != 10:
return False

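Illustrative result shape of lookup() after this change: every identifier comes back as a list, seeded with the id that was looked up (the olid value here is hypothetical):

    lookup('isbn', '0000000000')
    # -> {'isbn': ['0000000000'], 'olid': ['OL123M'], 'title': '...', ...}
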
@@ -13,7 +13,7 @@ base = 'http://www.abebooks.com'

def get_ids(key, value):
ids = []
if key in ('isbn10', 'isbn13'):
if key == 'isbn':
url = '%s/servlet/SearchResults?isbn=%s&sts=t' % (base, id)
data = read_url(url)
urls = re.compile('href="(/servlet/BookDetailsPL[^"]+)"').findall(data)

oml/meta/dewey.py (new file, 960 lines added)

@@ -0,0 +1,960 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4

def get_classification(id):
name = u'%s' % id
base = str(int(id.split('/')[0].split('.')[0]))
if base in DEWEY:
name = u'%s %s' % (name, DEWEY[base].decode('utf-8'))
return name
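# Example: with the DEWEY table below, get_classification('510.2')
# returns u'510.2 Mathematics'.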

DEWEY = {
"0": "Computer science, information & general works",
|
||||
"1": "Philosophy & psychology",
|
||||
"10": "Philosophy",
|
||||
"100": "Philosophy, parapsychology and occultism, psychology",
|
||||
"101": "Theory of philosophy",
|
||||
"102": "Miscellany of philosophy",
|
||||
"103": "Dictionaries, encyclopedias, concordances of philosophy",
|
||||
"105": "Serial publications",
|
||||
"106": "Organizations and management of philosophy",
|
||||
"107": "Education, research, related topics of philosophy",
|
||||
"108": "Groups of people",
|
||||
"109": "Historical and collected persons treatment of philosophy",
|
||||
"11": "Metaphysics",
|
||||
"110": "Metaphysics",
|
||||
"111": "Ontology",
|
||||
"113": "Cosmology (Philosophy of nature)",
|
||||
"114": "Space",
|
||||
"115": "Time",
|
||||
"116": "Change",
|
||||
"117": "Structure",
|
||||
"118": "Force and energy",
|
||||
"119": "Number and quantity",
|
||||
"12": "Epistemology",
|
||||
"120": "Epistemology, causation & humankind",
|
||||
"121": "Epistemology (Theory of knowledge)",
|
||||
"122": "Causation",
|
||||
"123": "Determinism and indeterminism",
|
||||
"124": "Teleology",
|
||||
"126": "The self",
|
||||
"127": "The unconscious and the subconscious",
|
||||
"128": "Humankind",
|
||||
"129": "Origin and destiny of individual souls",
|
||||
"13": "Parapsychology & occultism",
|
||||
"130": "Parapsychology and occultism",
|
||||
"131": "Parapsychological and occult techniques for achieving well-being, happiness, success",
|
||||
"133": "Specific topics in parapsychology & occultism",
|
||||
"135": "Dreams and mysteries",
|
||||
"137": "Divinatory graphology",
|
||||
"138": "Physiognomy",
|
||||
"139": "Phrenology",
|
||||
"14": "Philosophical schools of thought",
|
||||
"140": "Specific philosophical schools",
|
||||
"141": "Idealism & related systems",
|
||||
"142": "Critical philosophy",
|
||||
"143": "Bergsonism and intuitionism",
|
||||
"144": "Humanism and related systems and doctrines",
|
||||
"145": "Sensationalism",
|
||||
"146": "Naturalism and related systems and doctrines",
|
||||
"147": "Pantheism and related systems and doctrines",
|
||||
"148": "Dogmatism, eclecticism, liberalism, syncretism, traditionalism",
|
||||
"149": "Other philosophical systems",
|
||||
"15": "Psychology",
|
||||
"150": "Psychology",
|
||||
"152": "Sensory perception, movement, emotions, physiological drives",
|
||||
"153": "Conscious mental processes and intelligence",
|
||||
"154": "Subconscious and altered states and processes",
|
||||
"155": "Differential and developmental psychology",
|
||||
"156": "Comparative psychology",
|
||||
"158": "Applied psychology",
|
||||
"16": "Philosophical logic",
|
||||
"160": "Logic",
|
||||
"161": "Induction",
|
||||
"162": "Deduction",
|
||||
"165": "Fallacies and sources of error",
|
||||
"166": "Syllogisms",
|
||||
"167": "Hypotheses",
|
||||
"168": "Argument and persuasion",
|
||||
"169": "Analogy",
|
||||
"17": "Ethics",
|
||||
"170": "Ethics",
|
||||
"171": "Ethical systems",
|
||||
"172": "Political ethics",
|
||||
"173": "Ethics of family relationships",
|
||||
"174": "Occupational ethics",
|
||||
"175": "Ethics of recreation, leisure, public performances, communication",
|
||||
"176": "Ethics of sex and reproduction",
|
||||
"177": "Ethics of social relations",
|
||||
"178": "Ethics of consumption",
|
||||
"179": "Other ethical norms",
|
||||
"18": "Ancient, medieval & eastern philosophy",
|
||||
"180": "Ancient, medieval, eastern philosophy",
|
||||
"181": "Eastern philosophy",
|
||||
"182": "Pre-Socratic Greek philosophies",
|
||||
"183": "Sophistic, Socratic, related Greek philosophies",
|
||||
"184": "Platonic philosophy",
|
||||
"185": "Aristotelian philosophy",
|
||||
"186": "Skeptic and Neoplatonic philosophies",
|
||||
"187": "Epicurean philosophy",
|
||||
"188": "Stoic philosophy",
|
||||
"189": "Medieval western philosophy",
|
||||
"19": "Modern western philosophy",
|
||||
"190": "Modern western and other noneastern philosophy",
|
||||
"191": "United States and Canada",
|
||||
"192": "Philosophy of British Isles",
|
||||
"193": "Philosophy of Germany and Austria",
|
||||
"194": "Philosophy of France",
|
||||
"195": "Philosophy of Italy",
|
||||
"196": "Philosophy of Spain and Portugal",
|
||||
"197": "Philosophy of Russia",
|
||||
"198": "Philosophy of Scandinavia and Finland",
|
||||
"199": "Philosophy in other geographic areas",
|
||||
"2": "Religion",
|
||||
"20": "Religion",
|
||||
"200": "Religion",
|
||||
"201": "Religious mythology, general classes of religion, interreligious relations and attitudes, social theology",
|
||||
"202": "Doctrines",
|
||||
"203": "Public worship and other practices",
|
||||
"204": "Religious experience, life, practice",
|
||||
"205": "Religious ethics",
|
||||
"206": "Leaders & organization",
|
||||
"207": "Missions & religious education",
|
||||
"208": "Sources",
|
||||
"209": "Sects and reform movements",
|
||||
"21": "Philosophy & theory of religion",
|
||||
"210": "Philosophy & theory of religion",
|
||||
"211": "Concepts of God",
|
||||
"212": "Existence of God, ways of knowing God, attributes of God",
|
||||
"213": "Creation",
|
||||
"214": "Theodicy",
|
||||
"215": "Science and religion",
|
||||
"218": "Humankind",
|
||||
"22": "The Bible",
|
||||
"220": "Bible",
|
||||
"221": "Old Testament (Tanakh)",
|
||||
"222": "Historical books of Old Testament",
|
||||
"223": "Poetic books of Old Testament",
|
||||
"224": "Prophetic books of Old Testament",
|
||||
"225": "New Testament",
|
||||
"226": "Gospels and Acts",
|
||||
"227": "Epistles",
|
||||
"228": "Revelation (Apocalypse)",
|
||||
"229": "Apocrypha & pseudepigrapha",
|
||||
"23": "Christianity",
|
||||
"230": "Christianity Christian theology",
|
||||
"231": "God",
|
||||
"232": "Jesus Christ and his family",
|
||||
"233": "Humankind",
|
||||
"234": "Salvation and grace",
|
||||
"235": "Spiritual beings",
|
||||
"236": "Eschatology",
|
||||
"238": "Creeds, confessions of faith, covenants, catechisms",
|
||||
"239": "Apologetics and polemics",
|
||||
"24": "Christian practice & observance",
|
||||
"240": "Christian moral & devotional theology",
|
||||
"241": "Christian ethics",
|
||||
"242": "Devotional literature",
|
||||
"243": "Evangelistic writings for individuals and families",
|
||||
"246": "Use of art in Christianity",
|
||||
"247": "Church furnishings and related articles",
|
||||
"248": "Christian experience, practice, life",
|
||||
"249": "Christian observances in family life",
|
||||
"25": "Christian pastoral practice & religious orders",
|
||||
"250": "Local Christian church and Christian religious orders",
|
||||
"251": "Preaching (Homiletics)",
|
||||
"252": "Texts of sermons",
|
||||
"253": "Pastoral office and work (Pastoral theology)",
|
||||
"254": "Parish administration",
|
||||
"255": "Religious congregations & orders",
|
||||
"259": "Pastoral care of specific kinds of persons",
|
||||
"26": "Christian organization, social work & worship",
|
||||
"260": "Christian social and ecclesiastical theology",
|
||||
"261": "Social theology and interreligious relations and attitudes",
|
||||
"262": "Ecclesiology",
|
||||
"263": "Days, times & places of observance",
|
||||
"264": "Public worship",
|
||||
"265": "Sacraments, other rites and acts",
|
||||
"266": "Missions",
|
||||
"267": "Associations for religious work",
|
||||
"268": "Religious education",
|
||||
"269": "Spiritual renewal",
|
||||
"27": "History of Christianity",
|
||||
"270": "History of Christianity & Christian church",
|
||||
"271": "Religious congregations and orders in church history",
|
||||
"272": "Persecutions in general church history",
|
||||
"273": "Doctrinal controversies and heresies in general church history",
|
||||
"274": "Christianity in Europe",
|
||||
"275": "History of Christianity in Asia",
|
||||
"276": "Christianity in Africa",
|
||||
"277": "Christianity in North America",
|
||||
"278": "Christianity in South America",
|
||||
"279": "Christianity in Australasia, Pacific Ocean islands, Atlantic Ocean islands, Arctic islands, Antarctica",
|
||||
"28": "Christian denominations",
|
||||
"280": "Denominations and sects of Christian church",
|
||||
"281": "Early church and Eastern churches",
|
||||
"282": "Roman Catholic Church",
|
||||
"283": "Anglican churches",
|
||||
"284": "Protestant denominations of Continental origin and related bodies",
|
||||
"285": "Presbyterian churches, Reformed churches centered in America, Congregational churches, Puritanism",
|
||||
"286": "Baptist, Restoration movement, Adventist churches",
|
||||
"287": "Methodist churches; churches related to Methodism",
|
||||
"289": "Other denominations & sects",
|
||||
"29": "Other religions",
|
||||
"290": "Other religions",
|
||||
"292": "Classical religion (Greek and Roman religion)",
|
||||
"293": "Germanic religion",
|
||||
"294": "Religions of Indic origin",
|
||||
"295": "Zoroastrianism (Mazdaism, Parseeism)",
|
||||
"296": "Judaism",
|
||||
"297": "Islam, Babism, Bahai Faith",
|
||||
"299": "Religions not provided for elsewhere",
|
||||
"3": "Social sciences",
|
||||
"30": "Social sciences, sociology & anthropology",
|
||||
"300": "Social sciences",
|
||||
"301": "Sociology and anthropology",
|
||||
"302": "Social interaction",
|
||||
"303": "Social processes",
|
||||
"304": "Factors affecting social behavior",
|
||||
"305": "Groups of people",
|
||||
"306": "Culture and institutions",
|
||||
"307": "Communities",
|
||||
"31": "Statistics",
|
||||
"310": "Collections of general statistics",
|
||||
"314": "General statistics of Europe",
|
||||
"315": "General statistics of Asia",
|
||||
"316": "General statistics of Africa",
|
||||
"317": "General statistics of North America",
|
||||
"318": "General statistics of South America",
|
||||
"319": "General statistics of other parts of the world Of Pacific Ocean islands",
|
||||
"32": "Political science",
|
||||
"320": "Political science (Politics and government)",
|
||||
"321": "Systems of governments and states",
|
||||
"322": "Relation of state to organized groups",
|
||||
"323": "Civil and political rights",
|
||||
"324": "The political process",
|
||||
"325": "International migration and colonization",
|
||||
"326": "Slavery and emancipation",
|
||||
"327": "International relations",
|
||||
"328": "The legislative process",
|
||||
"33": "Economics",
|
||||
"330": "Economics",
|
||||
"331": "Labor economics",
|
||||
"332": "Financial economics",
|
||||
"333": "Economics of land and energy",
|
||||
"334": "Cooperatives",
|
||||
"335": "Socialism and related systems",
|
||||
"336": "Public finance",
|
||||
"337": "International economics",
|
||||
"338": "Production",
|
||||
"339": "Macroeconomics and related topics",
|
||||
"34": "Law",
|
||||
"340": "Law",
|
||||
"341": "Law of nations",
|
||||
"342": "Constitutional and administrative law",
|
||||
"343": "Military, defense, public property, public finance, tax, commerce (trade), industrial law",
|
||||
"344": "Labor, social, education & cultural law",
|
||||
"345": "Criminal law",
|
||||
"346": "Private law",
|
||||
"347": "Procedure and courts",
|
||||
"348": "Laws, regulations, cases",
|
||||
"349": "Law of specific jurisdictions, areas, socioeconomic regions, regional intergovernmental organizations",
|
||||
"35": "Public administration & military science",
|
||||
"350": "Public administration and military science",
|
||||
"351": "Public administration",
|
||||
"352": "General considerations of public administration",
|
||||
"353": "Specific fields of public administration",
|
||||
"354": "Public administration of economy and environment",
|
||||
"355": "Military science",
|
||||
"356": "Foot forces and warfare",
|
||||
"357": "Mounted forces & warfare",
|
||||
"358": "Air and other specialized forces and warfare; engineering and related services",
|
||||
"359": "Sea forces and warfare",
|
||||
"36": "Social problems & social services",
|
||||
"360": "Social problems & social services",
|
||||
"361": "Social problems & social welfare in general",
|
||||
"362": "Social welfare problems and services",
|
||||
"363": "Other social problems and services",
|
||||
"364": "Criminology",
|
||||
"365": "Penal and related institutions",
|
||||
"366": "Secret associations and societies",
|
||||
"367": "General clubs",
|
||||
"368": "Insurance",
|
||||
"369": "Miscellaneous kinds of associations",
|
||||
"37": "Education",
|
||||
"370": "Education",
|
||||
"371": "Schools and their activities; special education",
|
||||
"372": "Primary education (Elementary education)",
|
||||
"373": "Secondary education",
|
||||
"374": "Adult education",
|
||||
"375": "Curricula",
|
||||
"378": "Higher education (Tertiary education)",
|
||||
"379": "Public policy issues in education",
|
||||
"38": "Commerce, communications & transportation",
|
||||
"380": "Commerce, communications, transportation",
|
||||
"381": "Commerce (Trade)",
|
||||
"382": "International commerce (Foreign trade)",
|
||||
"383": "Postal communication",
|
||||
"384": "Communications",
|
||||
"385": "Railroad transportation",
|
||||
"386": "Inland waterway & ferry transportation",
|
||||
"387": "Water, air & space transportation",
|
||||
"388": "Transportation",
|
||||
"389": "Metrology and standardization",
|
||||
"39": "Customs, etiquette & folklore",
|
||||
"390": "Customs, etiquette, folklore",
|
||||
"391": "Costume and personal appearance",
|
||||
"392": "Customs of life cycle and domestic life",
|
||||
"393": "Death customs",
|
||||
"394": "General customs",
|
||||
"395": "Etiquette (Manners)",
|
||||
"398": "Folklore",
|
||||
"399": "Customs of war and diplomacy",
|
||||
"4": "Language",
|
||||
"40": "Language",
|
||||
"400": "Language",
|
||||
"401": "Philosophy and theory; international languages",
|
||||
"402": "Miscellany",
|
||||
"403": "Dictionaries, encyclopedias, concordances",
|
||||
"404": "Special topics of language",
|
||||
"405": "Serial publications",
|
||||
"406": "Organizations and management",
|
||||
"407": "Education, research & related topics",
|
||||
"408": "Groups of people",
|
||||
"409": "Geographic treatment and biography",
|
||||
"41": "Linguistics",
|
||||
"410": "Linguistics",
|
||||
"411": "Writing systems",
|
||||
"412": "Etymology of standard forms of languages",
|
||||
"413": "Dictionaries of standard forms of languages",
|
||||
"414": "Phonology & phonetics",
|
||||
"415": "Grammar of standard forms of languages",
|
||||
"417": "Dialectology and historical linguistics",
|
||||
"418": "Standard usage (Prescriptive linguistics)",
|
||||
"419": "Sign languages",
|
||||
"42": "English & Old English languages",
|
||||
"420": "English & Old English languages",
|
||||
"421": "Writing system, phonology, phonetics of standard English",
|
||||
"422": "Etymology of standard English",
|
||||
"423": "Dictionaries of standard English",
|
||||
"425": "Grammar of standard English",
|
||||
"427": "Historical and geographic variations, modern nongeographic variations of English",
|
||||
"428": "Standard English usage (Prescriptive linguistics)",
|
||||
"429": "Old English (Anglo-Saxon)",
|
||||
"43": "German & related languages",
|
||||
"430": "German & related languages",
|
||||
"431": "German writing systems & phonology",
|
||||
"432": "Etymology of standard German",
|
||||
"433": "Dictionaries of standard German",
|
||||
"435": "Grammar of standard German",
|
||||
"437": "Historical and geographic variations, modern nongeographic variations of German",
|
||||
"438": "Standard German usage",
|
||||
"439": "Other Germanic languages",
|
||||
"44": "French & related languages",
|
||||
"440": "Romance languages French",
|
||||
"441": "Writing systems, phonology, phonetics of standard French",
|
||||
"442": "Etymology of standard French",
|
||||
"443": "Dictionaries of standard French",
|
||||
"445": "Grammar of standard French",
|
||||
"447": "Historical and geographic variations, modern nongeographic variations of French",
|
||||
"448": "Standard French usage (Prescriptive linguistics)",
|
||||
"449": "Occitan, Catalan, Franco-Provençal",
|
||||
"45": "Italian, Romanian & related languages",
|
||||
"450": "Italian, Dalmatian, Romanian, Rhaetian, Sardinian, Corsican",
|
||||
"451": "Writing systems, phonology, phonetics of standard Italian",
|
||||
"452": "Etymology of standard Italian",
|
||||
"453": "Dictionaries of standard Italian",
|
||||
"455": "Grammar of standard Italian",
|
||||
"457": "Historical and geographic variations, modern nongeographic variations of Italian",
|
||||
"458": "Standard Italian usage",
|
||||
"459": "Sardinian",
|
||||
"46": "Spanish, Portuguese, Galician",
|
||||
"460": "Spanish, Portuguese, Galician",
|
||||
"461": "Writing systems, phonology, phonetics of standard Spanish",
|
||||
"462": "Etymology of standard Spanish",
|
||||
"463": "Dictionaries of standard Spanish",
|
||||
"465": "Grammar of standard Spanish",
|
||||
"467": "Historical and geographic variations, modern nongeographic variations of Spanish",
|
||||
"468": "Standard Spanish usage",
|
||||
"469": "Portuguese",
|
||||
"47": "Latin & Italic languages",
|
||||
"470": "Italic languages Latin",
|
||||
"471": "Writing systems, phonology, phonetics of classical Latin",
|
||||
"472": "Classical Latin etymology",
|
||||
"473": "Dictionaries of classical Latin",
|
||||
"475": "Grammar of classical Latin",
|
||||
"477": "Old, postclassical & Vulgar Latin",
|
||||
"478": "Classical Latin usage (Prescriptive linguistics)",
|
||||
"479": "Other Italic languages",
|
||||
"48": "Classical & modern Greek languages",
|
||||
"480": "Classical Greek and related Hellenic languages",
|
||||
"481": "Writing systems, phonology, phonetics of classical Greek",
|
||||
"482": "Etymology of classical Greek",
|
||||
"483": "Dictionaries of classical Greek",
|
||||
"485": "Grammar of classical Greek",
|
||||
"487": "Preclassical and postclassical Greek",
|
||||
"488": "Classical Greek usage (Prescriptive linguistics)",
|
||||
"489": "Other Hellenic languages",
|
||||
"49": "Other languages",
|
||||
"490": "Other languages",
|
||||
"491": "East Indo-European and Celtic languages",
|
||||
"492": "Afro-Asiatic languages",
|
||||
"493": "Non-Semitic Afro-Asiatic languages",
|
||||
"494": "Altaic, Uralic, Hyperborean, Dravidian languages, miscellaneous languages of south Asia",
|
||||
"495": "Languages of east and southeast Asia",
|
||||
"496": "African languages",
|
||||
"497": "North American native languages",
|
||||
"498": "South American native languages",
|
||||
"499": "Austronesian & other languages",
|
||||
"5": "Science",
|
||||
"50": "Science",
|
||||
"500": "Science",
|
||||
"501": "Philosophy & theory",
|
||||
"502": "Miscellany",
|
||||
"503": "Dictionaries, encyclopedias, concordances",
|
||||
"505": "Serial publications",
|
||||
"506": "Organizations and management",
|
||||
"507": "Education, research, related topics",
|
||||
"508": "Natural history",
|
||||
"509": "Historical, geographic & persons treatment",
|
||||
"51": "Mathematics",
|
||||
"510": "Mathematics",
|
||||
"511": "General principles of mathematics",
|
||||
"512": "Algebra",
|
||||
"513": "Arithmetic",
|
||||
"514": "Topology",
|
||||
"515": "Analysis",
|
||||
"516": "Geometry",
|
||||
"518": "Numerical analysis",
|
||||
"519": "Probabilities and applied mathematics",
|
||||
"52": "Astronomy",
|
||||
"520": "Astronomy and allied sciences",
|
||||
"521": "Celestial mechanics",
|
||||
"522": "Techniques, procedures, apparatus, equipment, materials",
|
||||
"523": "Specific celestial bodies and phenomena",
|
||||
"525": "Earth (Astronomical geography)",
|
||||
"526": "Mathematical geography",
|
||||
"527": "Celestial navigation",
|
||||
"528": "Ephemerides",
|
||||
"529": "Chronology",
|
||||
"53": "Physics",
|
||||
"530": "Physics",
|
||||
"531": "Classical mechanics",
|
||||
"532": "Fluid mechanics; liquid mechanics",
|
||||
"533": "Pneumatics (Gas mechanics)",
|
||||
"534": "Sound and related vibrations",
|
||||
"535": "Light and infrared and ultraviolet phenomena",
|
||||
"536": "Heat",
|
||||
"537": "Electricity & electronics",
|
||||
"538": "Magnetism",
|
||||
"539": "Modern physics",
|
||||
"54": "Chemistry",
|
||||
"540": "Chemistry and allied sciences",
|
||||
"541": "Physical chemistry",
|
||||
"542": "Techniques, equipment & materials",
|
||||
"543": "Analytical chemistry",
|
||||
"546": "Inorganic chemistry",
|
||||
"547": "Organic chemistry",
|
||||
"548": "Crystallography",
|
||||
"549": "Mineralogy",
|
||||
"55": "Earth sciences & geology",
|
||||
"550": "Earth sciences",
|
||||
"551": "Geology, hydrology, meteorology",
|
||||
"552": "Petrology",
|
||||
"553": "Economic geology",
|
||||
"554": "Earth sciences of Europe",
|
||||
"555": "Earth sciences of Asia",
|
||||
"556": "Earth sciences of Africa",
|
||||
"557": "Earth sciences of North America",
|
||||
"558": "Earth sciences of South America",
|
||||
"559": "Earth sciences of Australasia, Pacific Ocean islands, Atlantic Ocean islands, Arctic islands, Antarctica, extraterrestrial worlds",
|
||||
"56": "Fossils & prehistoric life",
|
||||
"560": "Paleontology",
|
||||
"561": "Paleobotany; fossil microorganisms",
|
||||
"562": "Fossil invertebrates",
|
||||
"563": "Miscellaneous fossil marine and seashore invertebrates",
|
||||
"564": "Fossil Mollusca and Molluscoidea",
|
||||
"565": "Fossil Arthropoda",
|
||||
"566": "Fossil Chordata",
|
||||
"567": "Fossil cold-blooded vertebrates",
|
||||
"568": "Fossil birds",
|
||||
"569": "Fossil mammals",
|
||||
"57": "Biology",
|
||||
"570": "Life sciences Biology",
|
||||
"571": "Physiology and related subjects",
|
||||
"572": "Biochemistry",
|
||||
"573": "Specific physiological systems in animals, regional histology and physiology in animals",
|
||||
"575": "Specific parts of and physiological systems in plants",
|
||||
"576": "Genetics and evolution",
|
||||
"577": "Ecology",
|
||||
"578": "Natural history of organisms and related subjects",
|
||||
"579": "Microorganisms, fungi, algae",
|
||||
"58": "Plants (Botany)",
|
||||
"580": "Plants",
|
||||
"581": "Specific topics in natural history of plants",
|
||||
"582": "Plants noted for specific vegetative characteristics and flowers",
|
||||
"583": "Dicotyledons",
|
||||
"584": "Monocotyledons",
|
||||
"585": "Pinophyta (Gymnosperms)",
|
||||
"586": "Seedless plants",
|
||||
"587": "Vascular seedless plants",
|
||||
"588": "Bryophyta",
|
||||
"59": "Animals (Zoology)",
|
||||
"590": "Animals",
|
||||
"591": "Specific topics in natural history",
|
||||
"592": "Invertebrates",
|
||||
"593": "Miscellaneous marine and seashore invertebrates",
|
||||
"594": "Mollusks & molluscoids",
|
||||
"595": "Arthropoda",
|
||||
"596": "Chordata",
|
||||
"597": "Cold-blooded vertebrates",
|
||||
"598": "Aves (Birds)",
|
||||
"599": "Mammalia (Mammals)",
|
||||
"6": "Technology",
|
||||
"60": "Technology",
|
||||
"600": "Technology",
|
||||
"601": "Philosophy and theory",
|
||||
"602": "Miscellany",
|
||||
"603": "Dictionaries & encyclopedias",
|
||||
"604": "Technical drawing, hazardous materials technology; groups of people",
|
||||
"605": "Serial publications",
|
||||
"606": "Organizations",
|
||||
"607": "Education, research, related topics",
|
||||
"608": "Patents",
|
||||
"609": "Historical, geographic, persons treatment",
|
||||
"61": "Medicine & health",
|
||||
"610": "Medicine and health",
|
||||
"611": "Human anatomy, cytology, histology",
|
||||
"612": "Human physiology",
|
||||
"613": "Personal health and safety",
|
||||
"614": "Forensic medicine; incidence of injuries, wounds, disease; public preventive medicine",
|
||||
"615": "Pharmacology and therapeutics",
|
||||
"616": "Diseases",
|
||||
"617": "Surgery, regional medicine, dentistry, ophthalmology, otology, audiology",
|
||||
"618": "Other branches of medicine Gynecology and obstetrics",
|
||||
"62": "Engineering",
|
||||
"620": "Engineering and allied operations",
|
||||
"621": "Applied physics",
|
||||
"622": "Mining and related operations",
|
||||
"623": "Military and nautical engineering",
|
||||
"624": "Civil engineering",
|
||||
"625": "Engineering of railroads & roads",
|
||||
"627": "Hydraulic engineering",
|
||||
"628": "Sanitary engineering",
|
||||
"629": "Other branches of engineering",
|
||||
"63": "Agriculture",
|
||||
"630": "Agriculture and related technologies",
|
||||
"631": "Specific techniques; apparatus, equipment, materials",
|
||||
"632": "Plant injuries, diseases, pests",
|
||||
"633": "Field and plantation crops",
|
||||
"634": "Orchards, fruits, forestry",
|
||||
"635": "Garden crops (Horticulture)",
|
||||
"636": "Animal husbandry",
|
||||
"637": "Processing dairy & related products",
|
||||
"638": "Insect culture",
|
||||
"639": "Hunting, fishing, conservation, related technologies",
|
||||
"64": "Home & family management",
|
||||
"640": "Home and family management",
|
||||
"641": "Food & drink",
|
||||
"642": "Meals and table service",
|
||||
"643": "Housing and household equipment",
|
||||
"644": "Household utilities",
|
||||
"645": "Household furnishings",
|
||||
"646": "Sewing, clothing, management of personal and family life",
|
||||
"647": "Management of public households (Institutional housekeeping)",
|
||||
"648": "Housekeeping",
|
||||
"649": "Child rearing; home care of people with disabilities and illnesses",
|
||||
"65": "Management & public relations",
|
||||
"650": "Management and auxiliary services",
|
||||
"651": "Office services",
|
||||
"652": "Processes of written communication",
|
||||
"653": "Shorthand",
|
||||
"657": "Accounting",
|
||||
"658": "General management",
|
||||
"659": "Advertising and public relations",
|
||||
"66": "Chemical engineering",
|
||||
"660": "Chemical engineering and related technologies",
|
||||
"661": "Technology of industrial chemicals",
|
||||
"662": "Technology of explosives, fuels, related products",
|
||||
"663": "Beverage technology",
|
||||
"664": "Food technology",
|
||||
"665": "Technology of industrial oils, fats, waxes, gases",
|
||||
"666": "Ceramic and allied technologies",
|
||||
"667": "Cleaning, color, coating, related technologies",
|
||||
"668": "Technology of other organic products",
|
||||
"669": "Metallurgy",
|
||||
"67": "Manufacturing",
|
||||
"670": "Manufacturing",
|
||||
"671": "Metalworking processes and primary metal products",
|
||||
"672": "Iron, steel, other iron alloys",
|
||||
"673": "Nonferrous metals",
|
||||
"674": "Lumber processing, wood products, cork",
|
||||
"675": "Leather and fur processing",
|
||||
"676": "Pulp and paper technology",
|
||||
"677": "Textiles",
|
||||
"678": "Elastomers and elastomer products",
|
||||
"679": "Other products of specific materials",
|
||||
"68": "Manufacture for specific uses",
|
||||
"680": "Manufacture of products for specific uses",
|
||||
"681": "Precision instruments and other devices",
|
||||
"682": "Small forge work (Blacksmithing)",
|
||||
"683": "Hardware and household appliances",
|
||||
"684": "Furnishings and home workshops",
|
||||
"685": "Leather and fur goods, and related products",
|
||||
"686": "Printing and related activities",
|
||||
"687": "Clothing and accessories",
|
||||
"688": "Other final products & packaging",
|
||||
"69": "Construction of buildings",
|
||||
"690": "Buildings",
|
||||
"691": "Building materials",
|
||||
"692": "Auxiliary construction practices",
|
||||
"693": "Construction in specific types of materials and for specific purposes",
|
||||
"694": "Wood construction",
|
||||
"695": "Roof covering",
|
||||
"696": "Utilities",
|
||||
"697": "Heating, ventilating & air-conditioning",
|
||||
"698": "Detail finishing",
|
||||
"7": "Arts & recreation",
|
||||
"70": "Arts",
|
||||
"700": "Arts",
|
||||
"701": "Philosophy and theory of fine and decorative arts",
|
||||
"702": "Miscellany of fine and decorative arts",
|
||||
"703": "Dictionaries, encyclopedias, concordances of fine and decorative arts",
|
||||
"704": "Special topics in fine and decorative arts",
|
||||
"705": "Serial publications of fine and decorative arts",
|
||||
"706": "Organizations and management of fine and decorative arts",
|
||||
"707": "Education, research, related topics of fine and decorative arts",
|
||||
"708": "Galleries, museums, private collections of fine and decorative arts",
|
||||
"709": "Historical, geographic & persons treatment",
|
||||
"71": "Area planning & landscape architecture",
|
||||
"710": "Area planning and landscape architecture",
|
||||
"711": "Area planning (Civic art)",
|
||||
"712": "Landscape architecture (Landscape design)",
|
||||
"713": "Landscape architecture of trafficways",
|
||||
"714": "Water features in landscape architecture",
|
||||
"715": "Woody plants in landscape architecture",
|
||||
"716": "Herbaceous plants in landscape architecture",
|
||||
"717": "Structures in landscape architecture",
|
||||
"718": "Landscape design of cemeteries",
|
||||
"719": "Natural landscapes",
|
||||
"72": "Architecture",
|
||||
"720": "Architecture",
|
||||
"721": "Architectural materials and structural elements",
|
||||
"722": "Architecture from earliest times to ca. 300",
|
||||
"723": "Architecture from ca. 300 to 1399",
|
||||
"724": "Architecture from 1400",
|
||||
"725": "Public structures",
|
||||
"726": "Buildings for religious purposes",
|
||||
"727": "Buildings for educational and research purposes",
|
||||
"728": "Residential and related buildings",
|
||||
"729": "Design and decoration of structures and accessories",
|
||||
"73": "Sculpture, ceramics & metalwork",
|
||||
"730": "Plastic arts Sculpture",
|
||||
"731": "Processes, forms & subjects of sculpture",
|
||||
"732": "Sculpture from earliest times to ca. 500, sculpture of nonliterate peoples",
|
||||
"733": "Greek, Etruscan, Roman sculpture",
|
||||
"734": "Sculpture from ca. 500 to 1399",
|
||||
"735": "Sculpture from 1400",
|
||||
"736": "Carving and carvings",
|
||||
"737": "Numismatics and sigillography",
|
||||
"738": "Ceramic arts",
|
||||
"739": "Art metalwork",
|
||||
"74": "Graphic arts & decorative arts",
|
||||
"740": "Graphic arts",
|
||||
"741": "Drawing and drawings",
|
||||
"742": "Perspective in drawing",
|
||||
"743": "Drawing and drawings by subject",
|
||||
"745": "Decorative arts",
|
||||
"746": "Textile arts",
|
||||
"747": "Interior decoration",
|
||||
"748": "Glass",
|
||||
"749": "Furniture and accessories",
|
||||
"75": "Painting",
|
||||
"750": "Painting and paintings",
|
||||
"751": "Techniques, procedures, apparatus, equipment, materials, forms",
|
||||
"752": "Color",
|
||||
"753": "Symbolism, allegory, mythology, legend",
|
||||
"754": "Genre paintings",
|
||||
"755": "Religion",
|
||||
"757": "Human figures",
|
||||
"758": "Nature, architectural subjects and cityscapes, other specific subjects",
|
||||
"759": "History, geographic treatment, biography",
|
||||
"76": "Printmaking & prints",
|
||||
"760": "Printmaking and prints",
|
||||
"761": "Relief processes (Block printing)",
|
||||
"763": "Lithographic processes (Planographic processes)",
|
||||
"764": "Chromolithography and serigraphy",
|
||||
"765": "Metal engraving",
|
||||
"766": "Mezzotinting, aquatinting, related processes",
|
||||
"767": "Etching and drypoint",
|
||||
"769": "Prints",
|
||||
"77": "Photography, computer art, film, video",
|
||||
"770": "Photography, computer art, cinematography, videography",
|
||||
"771": "Techniques, procedures, apparatus, equipment, materials",
|
||||
"772": "Metallic salt processes",
|
||||
"773": "Pigment processes of printing",
|
||||
"774": "Holography",
|
||||
"775": "Digital photography",
|
||||
"776": "Computer art (Digital art)",
|
||||
"777": "Cinematography and videography",
|
||||
"778": "Specific fields and special kinds of photography",
|
||||
"779": "Photographs",
|
||||
"78": "Music",
|
||||
"780": "Music",
|
||||
"781": "General principles & musical forms",
|
||||
"782": "Vocal music",
|
||||
"783": "Music for single voices",
|
||||
"784": "Instruments & instrumental ensembles",
|
||||
"785": "Ensembles with only one instrument per part",
|
||||
"786": "Keyboard, mechanical, electrophonic, percussion instruments",
|
||||
"787": "Stringed instruments (Chordophones)",
|
||||
"788": "Wind instruments (Aerophones)",
|
||||
"79": "Sports, games & entertainment",
|
||||
"790": "Recreational and performing arts",
|
||||
"791": "Public performances",
|
||||
"792": "Stage presentations",
|
||||
"793": "Indoor games and amusements",
|
||||
"794": "Indoor games of skill",
|
||||
"795": "Games of chance",
|
||||
"796": "Athletic and outdoor sports and games",
|
||||
"797": "Aquatic & air sports",
|
||||
"798": "Equestrian sports and animal racing",
|
||||
"799": "Fishing, hunting, shooting",
|
||||
"8": "Literature",
|
||||
"80": "Literature, rhetoric & criticism",
|
||||
"800": "Literature (Belles-lettres) and rhetoric",
|
||||
"801": "Philosophy and theory",
|
||||
"802": "Miscellany",
|
||||
"803": "Dictionaries, encyclopedias, concordances",
|
||||
"805": "Serial publications",
|
||||
"806": "Organizations and management",
|
||||
"807": "Education, research, related topics",
|
||||
"808": "Rhetoric and collections of literary texts from more than two literatures",
|
||||
"809": "History, description, critical appraisal of more than two literatures",
|
||||
"81": "American literature in English",
|
||||
"810": "American literature in English",
|
||||
"811": "American poetry in English",
|
||||
"812": "American drama in English",
|
||||
"813": "American fiction in English",
|
||||
"814": "American essays in English",
|
||||
"815": "American speeches in English",
|
||||
"816": "American letters in English",
|
||||
"817": "American humor and satire in English",
|
||||
"818": "American miscellaneous writings",
|
||||
"82": "English & Old English literatures",
|
||||
"820": "English and Old English (Anglo-Saxon) literatures",
|
||||
"821": "English poetry",
|
||||
"822": "English drama",
|
||||
"823": "English fiction",
|
||||
"824": "English essays",
|
||||
"825": "English speeches",
|
||||
"826": "English letters",
|
||||
"827": "English humor and satire",
|
||||
"828": "English miscellaneous writings",
|
||||
"829": "Old English (Anglo-Saxon) literature",
|
||||
"83": "German & related literatures",
|
||||
"830": "Literatures of Germanic languages German literature",
|
||||
"831": "German poetry",
|
||||
"832": "German drama",
|
||||
"833": "German fiction",
|
||||
"834": "German essays",
|
||||
"835": "German speeches",
|
||||
"836": "German letters",
|
||||
"837": "German humor & satire",
|
||||
"838": "German miscellaneous writings",
|
||||
"839": "Other Germanic literatures",
|
||||
"84": "French & related literatures",
|
||||
"840": "French literature and literatures of related Romance languages",
|
||||
"841": "French poetry",
|
||||
"842": "French drama",
|
||||
"843": "French fiction",
|
||||
"844": "French essays",
|
||||
"845": "French speeches",
|
||||
"846": "French letters",
|
||||
"847": "French humor & satire",
|
||||
"848": "French miscellaneous writings",
|
||||
"849": "Occitan, Catalan, Franco-Provençal literatures",
|
||||
"85": "Italian, Romanian & related literatures",
|
||||
"850": "Literatures of Italian, Dalmatian, Romanian, Rhaetian, Sardinian, Corsican languages",
|
||||
"851": "Italian poetry",
|
||||
"852": "Italian drama",
|
||||
"853": "Italian fiction",
|
||||
"854": "Italian essays",
|
||||
"855": "Italian speeches",
|
||||
"856": "Italian letters",
|
||||
"857": "Italian humor and satire",
|
||||
"858": "Italian miscellaneous writings",
|
||||
"859": "Literatures of Romanian, Rhaetian, Sardinian, Corsican languages",
|
||||
"86": "Spanish, Portuguese, Galician literatures",
|
||||
"860": "Spanish & Portuguese literatures",
|
||||
"861": "Spanish poetry",
|
||||
"862": "Spanish drama",
|
||||
"863": "Spanish fiction",
|
||||
"864": "Spanish essays",
|
||||
"865": "Spanish speeches",
|
||||
"866": "Spanish letters",
|
||||
"867": "Spanish humor and satire",
|
||||
"868": "Spanish miscellaneous writings",
|
||||
"869": "Literatures of Portuguese and Galician languages",
|
||||
"87": "Latin & Italic literatures",
|
||||
"870": "Latin & Italic literatures",
|
||||
"871": "Latin poetry",
|
||||
"872": "Latin dramatic poetry and drama",
|
||||
"873": "Latin epic poetry and fiction",
|
||||
"874": "Latin lyric poetry",
|
||||
"875": "Latin speeches",
|
||||
"876": "Latin letters",
|
||||
"877": "Latin humor and satire",
|
||||
"878": "Latin miscellaneous writings",
|
||||
"879": "Literatures of other Italic languages",
|
||||
"88": "Classical & modern Greek literatures",
|
||||
"880": "Literatures of Hellenic languages Classical Greek literature",
|
||||
"881": "Classical Greek poetry",
|
||||
"882": "Classical Greek dramatic poetry and drama",
|
||||
"883": "Classical Greek epic poetry and fiction",
|
||||
"884": "Classical Greek lyric poetry",
|
||||
"885": "Classical Greek speeches",
|
||||
"886": "Classical Greek letters",
|
||||
"887": "Classical Greek humor and satire",
|
||||
"888": "Classical Greek miscellaneous writings",
|
||||
"889": "Modern Greek literature",
|
||||
"89": "Other literatures",
|
||||
"890": "Literatures of other specific languages and language families",
|
||||
"891": "East Indo-European and Celtic literatures",
|
||||
"892": "Afro-Asiatic literatures",
|
||||
"893": "Non-Semitic Afro-Asiatic literatures",
|
||||
"894": "Literatures of Altaic, Uralic, Hyperborean, Dravidian languages; literatures of miscellaneous languages of south Asia",
|
||||
"895": "Literatures of East and Southeast Asia",
|
||||
"896": "African literatures",
|
||||
"897": "North American native literatures",
|
||||
"898": "Literatures of South American native languages",
|
||||
"899": "Literatures of non-Austronesian languages of Oceania, of Austronesian languages, of miscellaneous languages",
|
||||
"9": "History & geography",
|
||||
"90": "History",
|
||||
"900": "History, geography, and auxiliary disciplines",
|
||||
"901": "Philosophy and theory of history",
|
||||
"902": "Miscellany",
|
||||
"903": "Dictionaries, encyclopedias, concordances of history",
|
||||
"904": "Collected accounts of events",
|
||||
"905": "Serial publications of history",
|
||||
"906": "Organizations and management of history",
|
||||
"907": "Education, research & related topics",
|
||||
"908": "History with respect to groups of people",
|
||||
"909": "World history",
|
||||
"91": "Geography & travel",
|
||||
"910": "Geography and travel",
|
||||
"911": "Historical geography",
|
||||
"912": "Graphic representations of surface of earth and of extraterrestrial worlds",
|
||||
"913": "Geography of and travel in ancient world",
|
||||
"914": "Geography of and travel in Europe",
|
||||
"915": "Geography of and travel in Asia",
|
||||
"916": "Geography of and travel in Africa",
|
||||
"917": "Geography of and travel in North America",
|
||||
"918": "Geography of & travel in South America",
|
||||
"919": "Geography of and travel in Australasia, Pacific Ocean islands, Atlantic Ocean islands, Arctic islands, Antarctica and on extraterrestrial worlds",
|
||||
"92": "Biography & genealogy",
|
||||
"920": "Biography, genealogy, insignia",
|
||||
"929": "Genealogy, names, insignia",
|
||||
"93": "History of ancient world (to ca. 499)",
|
||||
"930": "History of ancient world to ca. 499",
|
||||
"931": "China to 420",
|
||||
"932": "Egypt to 640",
|
||||
"933": "Palestine to 70",
|
||||
"934": "South Asia to 647",
|
||||
"935": "Mesopotamia to 637 and Iranian Plateau to 637",
|
||||
"936": "Europe north and west of Italian Peninsula to ca. 499",
|
||||
"937": "Italian Peninsula to 476 and adjacent territories to 476",
|
||||
"938": "Greece to 323",
|
||||
"939": "Other parts of ancient world to ca. 640",
|
||||
"94": "History of Europe",
|
||||
"940": "History of Europe",
|
||||
"941": "British Isles",
|
||||
"942": "England and Wales",
|
||||
"943": "Germany and neighboring central European countries",
|
||||
"944": "France and Monaco",
|
||||
"945": "Italy, San Marino, Vatican City, Malta",
|
||||
"946": "Spain, Andorra, Gibraltar, Portugal",
|
||||
"947": "Russia and neighboring east European countries",
|
||||
"948": "Scandinavia",
|
||||
"949": "Other parts of Europe",
|
||||
"95": "History of Asia",
|
||||
"950": "History of Asia",
|
||||
"951": "China and adjacent areas",
|
||||
"952": "Japan",
|
||||
"953": "Arabian Peninsula and adjacent areas",
|
||||
"954": "India and neighboring south Asian countries",
|
||||
"955": "Iran",
|
||||
"956": "Middle East (Near East)",
|
||||
"957": "Siberia (Asiatic Russia)",
|
||||
"958": "Central Asia",
|
||||
"959": "Southeast Asia",
|
||||
"96": "History of Africa",
|
||||
"960": "History of Africa",
|
||||
"961": "Tunisia & Libya",
|
||||
"962": "Egypt, Sudan, South Sudan",
|
||||
"963": "Ethiopia and Eritrea",
|
||||
"964": "Northwest African coast & offshore islands",
|
||||
"965": "Algeria",
|
||||
"966": "West Africa and offshore islands",
|
||||
"967": "Central Africa and offshore islands",
|
||||
"968": "Republic of South Africa and neighboring southern African countries",
|
||||
"969": "South Indian Ocean islands",
|
||||
"97": "History of North America",
|
||||
"970": "History of North America",
|
||||
"971": "Canada",
|
||||
"972": "Middle America; Mexico",
|
||||
"973": "United States",
|
||||
"974": "Northeastern United States (New England and Middle Atlantic states)",
|
||||
"975": "Southeastern United States (South Atlantic states)",
|
||||
"976": "South central United States Gulf Coast states",
|
||||
"977": "North central United States",
|
||||
"978": "Western United States",
|
||||
"979": "Great Basin and Pacific Slope region of United States",
|
||||
"98": "History of South America",
|
||||
"980": "History of South America",
|
||||
"981": "Brazil",
|
||||
"982": "Argentina",
|
||||
"983": "Chile",
|
||||
"984": "Bolivia",
|
||||
"985": "Peru",
|
||||
"986": "Colombia and Ecuador",
|
||||
"987": "Venezuela",
|
||||
"988": "Guiana",
|
||||
"989": "Paraguay and Uruguay",
|
||||
"99": "History of other areas",
|
||||
"990": "History of Australasia, Pacific Ocean islands, Atlantic Ocean islands, Arctic islands, Antarctica, extraterrestrial worlds",
|
||||
"993": "New Zealand",
|
||||
"994": "Australia",
|
||||
"995": "New Guinea and neighboring countries of Melanesia",
|
||||
"996": "Other parts of Pacific Polynesia",
|
||||
"997": "Atlantic Ocean islands",
|
||||
"998": "Arctic islands and Antarctica",
|
||||
"999": "Extraterrestrial worlds"
|
||||
}


if __name__ == '__main__':
    import json
    import re
    from ox.cache import read_url

    dewey = {}
    for i in range(0, 1000):
        url = 'http://dewey.info/class/%s/about.en.json' % i
        print url
        data = json.loads(read_url(url))
        for d in data.values():
            if 'http://www.w3.org/2004/02/skos/core#prefLabel' in d:
                value = d['http://www.w3.org/2004/02/skos/core#prefLabel'][0]['value']
                dewey[str(i)] = value
                break

    data = json.dumps(dewey, indent=4, ensure_ascii=False, sort_keys=True).encode('utf-8')
    with open(__file__) as f:
        pydata = f.read()
    pydata = re.sub(
        re.compile('\nDEWEY = {.*?}\n\n', re.DOTALL),
        '\nDEWEY = %s\n\n' % data, pydata)

    with open(__file__, 'w') as f:
        f.write(pydata)
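Note (not part of the commit): the loc and openlibrary modules below import get_classification from this dewey module to turn a raw DDC code into a readable label. The function itself is outside the pasted hunks; what follows is only a minimal sketch of how it could resolve codes against the DEWEY table, and the fall-back-to-shorter-prefix behaviour is an assumption:

def get_classification(code):
    # hypothetical sketch: reduce '813.54' to '813', then fall back to '81' and '8'
    # until a code that exists in DEWEY is found; return the bare code otherwise
    value = code.strip().split('.')[0]
    while value:
        if value in DEWEY:
            return u'%s %s' % (value, DEWEY[value])
        value = value[:-1]
    return code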
@ -11,13 +11,8 @@ import logging

logger = logging.getLogger('meta.duckduckgo')


def find(title, author=None, publisher=None, date=None):
    logger.debug('find %s %s %s %s', title, author, publisher, date)
    query = title
    if author:
        if isinstance(author, list):
            author = ' '.join(author)
        query += ' ' + author
def find(query):
    logger.debug('find %s', query)
    query += ' isbn'
    isbns = []
    for r in ox.web.duckduckgo.find(query):

@ -26,12 +21,9 @@ def find(title, author=None, publisher=None, date=None):
    done = set()
    for isbn in isbns:
        if isbn not in done:
            key = 'isbn%d'%len(isbn)
            #r = lookup(key, isbn)
            #r['mainid'] = key
            r = {
                key: isbn,
                'mainid': key
                'isbn': [isbn],
                'primaryid': ['isbn', isbn]
            }
            results.append(r)
            done.add(isbn)
@ -11,13 +11,8 @@ import logging

logger = logging.getLogger('meta.google')


def find(title, author=None, publisher=None, date=None):
    logger.debug('find %s %s %s %s', title, author, publisher, date)
    query = title
    if author:
        if isinstance(author, list):
            author = ' '.join(author)
        query += ' ' + author
def find(query):
    logger.debug('find %s', query)
    query += ' isbn'
    isbns = []
    for r in ox.web.google.find(query):

@ -27,17 +22,14 @@ def find(title, author=None, publisher=None, date=None):
    done = set()
    for isbn in isbns:
        if isbn not in done:
            key = 'isbn%d'%len(isbn)
            #r = lookup(key, isbn)
            #r['mainid'] = key
            r = {
                key: isbn,
                'mainid': key
                'isbn': isbn,
                'primaryid': ['isbn', isbn]
            }
            results.append(r)
            done.add(isbn)
            if len(isbn) == 10:
                done.add(stdnum.isbn.to_isbn13(isbn))
            if len(isbn) == 13:
            if len(isbn) == 13 and isbn.startswith('978'):
                done.add(stdnum.isbn.to_isbn10(isbn))
    return results
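The isbn.startswith('978') guard added above matters because only 978-prefixed ISBN-13s have an ISBN-10 counterpart; 979-prefixed numbers cannot be converted. A small illustrative use of the stdnum.isbn helpers this module relies on (the sample number is arbitrary):

import stdnum.isbn

isbn10 = '0465026567'
isbn13 = stdnum.isbn.to_isbn13(isbn10)    # '9780465026562'
# the reverse conversion only exists for the 978 Bookland prefix
assert stdnum.isbn.to_isbn10(isbn13) == isbn10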
@ -9,18 +9,25 @@ import xml.etree.ElementTree as ET

from utils import normalize_isbn
from marc_countries import COUNTRIES
from dewey import get_classification

import logging
logger = logging.getLogger('meta.loc')

def get_ids(key, value):
    ids = []
    if key in ['isbn10', 'isbn13']:
    if key == 'isbn':
        url = 'http://www.loc.gov/search/?q=%s&all=true' % value
        html = ox.cache.read_url(url)
        match = re.search('"http://lccn.loc.gov/(\d+)"', html)
        if match:
            ids.append(('lccn', match.group(1)))
    elif key == 'lccn':
        info = lookup(value)
        for key in ('oclc', 'isbn'):
            if key in info:
                for value in info[key]:
                    ids.append((key, value))
    if ids:
        logger.debug('get_ids %s,%s => %s', key, value, ids)
    return ids
@ -33,7 +40,7 @@ def lookup(id):
    mods = ET.fromstring(data)

    info = {
        'lccn': id
        'lccn': [id]
    }
    title = mods.findall(ns + 'titleInfo')
    if not title:

@ -55,16 +62,20 @@ def lookup(id):
        info['publisher'] = publisher[0]
        info['date'] = ''.join([e.text for e in origin[0].findall(ns + 'dateIssued')])
    for i in mods.findall(ns + 'identifier'):
        if i.attrib['type'] == 'oclc':
            info['oclc'] = i.text.replace('ocn', '')
        if i.attrib['type'] == 'lccn':
            info['lccn'] = i.text
        if i.attrib['type'] == 'isbn':
            isbn = normalize_isbn(i.text)
            info['isbn%s'%len(isbn)] = isbn
        key = i.attrib['type']
        value = i.text
        if key in ('oclc', 'lccn', 'isbn'):
            if i.attrib['type'] == 'oclc':
                value = value.replace('ocn', '').replace('ocm', '')
            if i.attrib['type'] == 'isbn':
                value = normalize_isbn(i.text)
            if not key in info:
                info[key] = []
            if value not in info[key]:
                info[key].append(value)
    for i in mods.findall(ns + 'classification'):
        if i.attrib['authority'] == 'ddc':
            info['classification'] = i.text
            info['classification'] = get_classification(i.text.split('/')[0])
    info['author'] = []
    for a in mods.findall(ns + 'name'):
        if a.attrib.get('usage') == 'primary':
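Taken together, get_ids and lookup give the loc backend a two-step chain: an ISBN is first mapped to an LCCN via the search page, and the LCCN is then expanded into list-valued metadata from the MODS record. A hedged usage sketch (the import path is assumed from the 'meta.loc' logger name, and the values are placeholders):

from meta import loc   # assumed package layout

ids = loc.get_ids('isbn', '9780465026562')   # e.g. [('lccn', '...')]
for key, value in ids:
    if key == 'lccn':
        info = loc.lookup(value)             # {'lccn': [...], 'isbn': [...], 'title': ..., ...}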
@ -3,6 +3,8 @@ from ox import find_re, strip_tags, decode_html
import re
import stdnum.isbn

from utils import find_isbns

import logging
logger = logging.getLogger('meta.lookupbyisbn')

@ -10,18 +12,32 @@ base = 'http://www.lookupbyisbn.com'

def get_ids(key, value):
    ids = []
    if key in ('isbn10', 'isbn13', 'asin'):

    def add_other_isbn(v):
        if len(v) == 10:
            ids.append(('isbn', stdnum.isbn.to_isbn13(v)))
        if len(v) == 13 and v.startswith('978'):
            ids.append(('isbn', stdnum.isbn.to_isbn10(v)))

    if key in ('isbn', 'asin'):
        url = '%s/Search/Book/%s/1' % (base, value)
        data = read_url(url).decode('utf-8')
        m = re.compile('href="(/Lookup/Book/[^"]+?)"').findall(data)
        if m:
            asin = m[0].split('/')[-3]
            ids.append(('asin', asin))
        if key == 'isbn10':
            ids.append(('isbn13', stdnum.isbn.to_isbn13(value)))
            if not stdnum.isbn.is_valid(asin):
                ids.append(('asin', asin))
        if key == 'isbn':
            add_other_isbn(value)
    if key == 'asin':
        if stdnum.isbn.is_valid(value):
            ids.append(('isbn10', value))
            ids.append(('isbn', value))
            add_other_isbn(value)
        else:
            for isbn in amazon_lookup(value):
                if stdnum.isbn.is_valid(isbn):
                    ids.append(('isbn', isbn))
                    add_other_isbn(isbn)
    if ids:
        logger.debug('get_ids %s, %s => %s', key, value, ids)
    return ids
@ -29,7 +45,7 @@ def get_ids(key, value):
def lookup(id):
    logger.debug('lookup %s', id)
    r = {
        'asin': id
        'asin': [id]
    }
    url = '%s/Lookup/Book/%s/%s/1' % (base, id, id)
    data = read_url(url).decode('utf-8')

@ -64,3 +80,6 @@ def lookup(id):
        r['description'] = ''
    return r

def amazon_lookup(asin):
    html = read_url('http://www.amazon.com/dp/%s' % asin)
    return list(set(find_isbns(find_re(html, 'Formats</h3>.*?</table'))))
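amazon_lookup above depends on find_isbns (and the other backends on normalize_isbn) from utils, which are imported but not shown in this diff. A minimal sketch of what such helpers could look like, purely as an assumption and not the project's actual implementation:

import re
import stdnum.isbn

def normalize_isbn(value):
    # sketch: keep digits plus a possible trailing X check digit
    return ''.join([c for c in value.upper() if c in '0123456789X'])

def find_isbns(text):
    # sketch: pull 10/13-digit candidates out of free text and validate them
    candidates = re.compile('[\d-]{9,17}[\dxX]').findall(text)
    isbns = [normalize_isbn(c) for c in candidates]
    return [isbn for isbn in isbns if len(isbn) in (10, 13) and stdnum.isbn.is_valid(isbn)]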
@ -7,6 +7,7 @@ from ox.cache import read_url
import json

from marc_countries import COUNTRIES
from dewey import get_classification
from utils import normalize_isbn

import logging

@ -16,11 +17,11 @@ KEYS = {
    'authors': 'author',
    'covers': 'cover',
    'dewey_decimal_class': 'classification',
    'isbn_10': 'isbn10',
    'isbn_13': 'isbn13',
    'languages': 'language',
    'isbn_10': 'isbn',
    'isbn_13': 'isbn',
    'lccn': 'lccn',
    'number_of_pages': 'pages',
    'languages': 'language',
    'oclc_numbers': 'oclc',
    'publish_country': 'country',
    'publish_date': 'date',
@ -30,21 +31,7 @@ KEYS = {
    'title': 'title',
}

def find(*args, **kargs):
    args = [a.replace(':', ' ') for a in args]
    for k in ('date', 'publisher'):
        if k in kargs:
            logger.debug('ignoring %s on openlibrary %s', k, kargs[k])
            del kargs[k]
    for k, v in kargs.iteritems():
        key = KEYS.keys()[KEYS.values().index(k)]
        if v:
            if not isinstance(v, list):
                v = [v]
            #v = ['%s:"%s"' % (key, value.replace(':', '\:')) for value in v]
            v = ['"%s"' % value.replace(':', ' ') for value in v]
            args += v
    query = ' '.join(args)
def find(query):
    query = query.strip()
    logger.debug('find %s', query)
    r = api.search(query)

@ -54,7 +41,8 @@ def find(*args, **kargs):
    for olid, value in books.iteritems():
        olid = olid.split('/')[-1]
        book = format(value)
        book['olid'] = olid
        book['olid'] = [olid]
        book['primaryid'] = ['olid', olid]
        results.append(book)
    return results
@ -62,15 +50,17 @@ def find(*args, **kargs):
def get_ids(key, value):
    ids = []
    if key == 'olid':
        data = lookup(value, True)
        for id in ('isbn10', 'isbn13', 'lccn', 'oclc'):
        data = lookup(value)
        for id in ('isbn', 'lccn', 'oclc'):
            if id in data:
                for v in data[id]:
                    if (id, v) not in ids:
                        ids.append((id, v))
    elif key in ('isbn10', 'isbn13', 'oclc', 'lccn'):
    elif key in ('isbn', 'oclc', 'lccn'):
        logger.debug('get_ids %s %s', key, value)
        r = api.things({'type': '/type/edition', key.replace('isbn', 'isbn_'): value})
        if key == 'isbn':
            key = 'isbn_%s'%len(value)
        r = api.things({'type': '/type/edition', key: value})
        for b in r.get('result', []):
            if b.startswith('/books'):
                olid = b.split('/')[-1]
@ -87,7 +77,10 @@ def lookup(id, return_all=False):
    #url = 'https://openlibrary.org/books/%s.json' % id
    #info = json.loads(read_url(url))
    data = format(info, return_all)
    data['olid'] = id
    if 'olid' not in data:
        data['olid'] = []
    if id not in data['olid']:
        data['olid'] = [id]
    logger.debug('lookup %s => %s', id, data.keys())
    return data

@ -105,14 +98,20 @@ def format(info, return_all=False):
            value = 'https://covers.openlibrary.org/b/id/%s.jpg' % value[0]
        elif key == 'languages':
            value = resolve_names(value)
        elif not return_all and isinstance(value, list) and key not in ('publish_places'):
        elif key in ('isbn_10', 'isbn_13'):
            if not isinstance(value, list):
                value = [value]
            value = map(normalize_isbn, value)
            if KEYS[key] in data:
                value = data[KEYS[key]] + value
        elif isinstance(value, list) and key not in ('publish_places', 'lccn', 'oclc_numbers'):
            value = value[0]
        if key in ('isbn_10', 'isbn_13'):
            if isinstance(value, list):
                value = map(normalize_isbn, value)
            else:
                value = normalize_isbn(value)
        data[KEYS[key]] = value
    if 'classification' in data:
        value = data['classification']
        if isinstance(value, list):
            value = value[0]
        data['classification'] = get_classification(value.split('/')[0])
    return data

def resolve_names(objects, key='name'):
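With these changes the openlibrary backend also deals only in list-valued ids: api.things maps an isbn/oclc/lccn query to edition keys, and lookup/format fold the record back through the KEYS mapping. A hedged sketch of how the chain might be driven (the import path is an assumption, and the exact tuples returned by get_ids depend on code outside this hunk):

import meta.openlibrary as openlibrary   # assumed import path

ids = openlibrary.get_ids('isbn', '9780465026562')   # presumably [('olid', 'OL...M'), ...]
for key, value in ids:
    if key == 'olid':
        book = openlibrary.lookup(value)             # dict with list-valued 'olid', 'isbn', ...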
@ -15,21 +15,21 @@ base_url = 'http://www.worldcat.org'

def get_ids(key, value):
    ids = []
    if key in ['isbn10', 'isbn13']:
    if key == 'isbn':
        url = '%s/search?qt=worldcat_org_bks&q=%s' % (base_url, value)
        html = read_url(url)
        matches = re.compile('/title.*?oclc/(\d+).*?"').findall(html)
        if matches:
            info = lookup(matches[0])
            ids.append(('oclc', matches[0]))
            for k in ['isbn10', 'isbn13']:
                if k in info and k != key:
                    ids.append((k, info[k]))
            for v in info.get('isbn', []):
                if v != value:
                    ids.append(('isbn', v))
    elif key == 'oclc':
        info = lookup(value)
        for k in ['isbn10', 'isbn13']:
            if k in info:
                ids.append((k, info[k]))
        if 'isbn' in info:
            for value in info['isbn']:
                ids.append(('isbn', value))
    if ids:
        logger.debug('get_ids %s %s', key, value)
        logger.debug('%s', ids)

@ -37,7 +37,7 @@ def get_ids(key, value):

def lookup(id):
    data = {
        'oclc': id
        'oclc': [id]
    }
    url = '%s/oclc/%s' % (base_url, id)
    html = read_url(url).decode('utf-8')

@ -58,9 +58,14 @@ def lookup(id):
    for isbn in data.pop('isxn').split(' '):
        isbn = normalize_isbn(isbn)
        if stdnum.isbn.is_valid(isbn):
            data['isbn%d'%len(isbn)] = isbn
            if not 'isbn' in data:
                data['isbn'] = []
            if isbn not in data['isbn']:
                data['isbn'].append(isbn)
    if 'author' in data:
        data['author'] = [data['author']]
    if 'title' in data:
        data['title'] = data['title'].replace(' : ', ': ')
    logger.debug('lookup %s => %s', id, data.keys())
    return data
@ -15,7 +15,7 @@ import websocket
import state
import node.server
import oxtornado
from item.covers import CoverHandler
from item.icons import IconHandler
from item.handlers import EpubHandler

def run():

@ -34,7 +34,7 @@ def run():
        (r'/(favicon.ico)', StaticFileHandler, {'path': static_path}),
        (r'/static/(.*)', StaticFileHandler, {'path': static_path}),
        (r'/(.*)/epub/(.*)', EpubHandler, dict(app=app)),
        (r'/(.*)/cover(\d*).jpg', CoverHandler, dict(app=app)),
        (r'/(.*)/(cover|preview)(\d*).jpg', IconHandler, dict(app=app)),
        (r'/api/', oxtornado.ApiHandler, dict(app=app)),
        (r'/ws', websocket.Handler),
        (r".*", FallbackHandler, dict(fallback=tr)),
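For reference, the new (cover|preview) route hands three groups to IconHandler: what looks like the item id, the icon type, and an optional size. A quick check of the pattern itself with plain re, outside Tornado (illustrative values):

import re

pattern = re.compile(r'/(.*)/(cover|preview)(\d*).jpg')
print pattern.match('/a1b2c3/preview256.jpg').groups()   # ('a1b2c3', 'preview', '256')
print pattern.match('/a1b2c3/cover.jpg').groups()        # ('a1b2c3', 'cover', '')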
@ -19,7 +19,7 @@ if not os.path.exists(config_dir):
    os.makedirs(config_dir)

db_path = os.path.join(config_dir, 'openmedialibrary.db')
covers_db_path = os.path.join(config_dir, 'covers.db')
icons_db_path = os.path.join(config_dir, 'icons.db')
key_path = os.path.join(config_dir, 'node.key')
ssl_cert_path = os.path.join(config_dir, 'node.ssl.crt')
ssl_key_path = os.path.join(config_dir, 'node.ssl.key')
@ -189,11 +189,10 @@ def addListItems(data):
    '''
    if data['list'] == ':':
        from item.models import Item
        user = state.user()
        for item_id in data['items']:
            i = Item.get(item_id)
            if user not in i.users:
                i.queue_download()
            i.queue_download()
            i.update()
    elif data['list']:
        l = models.List.get_or_create(data['list'])
        if l:

oml/utils.py
@ -10,6 +10,8 @@ import stdnum.isbn
import socket
import cStringIO
import gzip
import time
from datetime import datetime

import ox
import ed25519

@ -154,3 +156,11 @@ def remove_empty_tree(leaf):
            os.rmdir(leaf)
        else:
            break

utc_0 = int(time.mktime(datetime(1970, 01, 01).timetuple()))

def datetime2ts(dt):
    return int(time.mktime(dt.utctimetuple())) - utc_0

def ts2datetime(ts):
    return datetime.utcfromtimestamp(float(ts))
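datetime2ts and ts2datetime convert between naive UTC datetimes and Unix timestamps; subtracting utc_0 cancels the local-timezone offset that time.mktime would otherwise mix in. A quick illustrative round trip, assuming the two helpers above are importable (e.g. from oml/utils.py):

from datetime import datetime

d = datetime(2014, 5, 6, 12, 0, 0)
ts = datetime2ts(d)        # roughly 1399377600, i.e. seconds since 1970-01-01 UTC
print ts, ts2datetime(ts)  # should give back the original naive UTC datetime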