add protocol version, reset migrations, move db

This commit is contained in:
j 2014-05-22 00:41:29 +02:00
parent e14c686dac
commit 046af0e777
32 changed files with 424 additions and 907 deletions

View file

@ -1,28 +0,0 @@
"""empty message
Revision ID: 1a7c813a17c2
Revises: 7bb11a24276
Create Date: 2014-05-14 01:41:03.495320
"""
# revision identifiers, used by Alembic.
revision = '1a7c813a17c2'
down_revision = '7bb11a24276'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('item', sa.Column('sort_asin', sa.String(length=1000), nullable=True))
op.create_index(op.f('ix_item_sort_asin'), 'item', ['sort_asin'], unique=False)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_item_sort_asin'), table_name='item')
op.drop_column('item', 'sort_asin')
### end Alembic commands ###

View file

@ -1,26 +0,0 @@
"""empty message
Revision ID: 1ead68a53597
Revises: 348720abe06e
Create Date: 2014-05-11 17:12:04.427336
"""
# revision identifiers, used by Alembic.
revision = '1ead68a53597'
down_revision = '348720abe06e'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
pass
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
pass
### end Alembic commands ###

View file

@ -1,38 +0,0 @@
"""empty message
Revision ID: 1fe914156ac0
Revises: 4480ecc50e04
Create Date: 2014-05-20 18:29:36.352416
"""
# revision identifiers, used by Alembic.
revision = '1fe914156ac0'
down_revision = '4480ecc50e04'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('item', sa.Column('sort_isbn', sa.String(length=1000), nullable=True))
op.create_index(op.f('ix_item_sort_isbn'), 'item', ['sort_isbn'], unique=False)
#op.drop_column('item', 'sort_isbn13')
#op.drop_column('item', 'sort_isbn10')
#op.drop_index('ix_item_sort_isbn10', table_name='item')
#op.drop_index('ix_item_sort_isbn13', table_name='item')
### end Alembic commands ###
pass
def downgrade():
    """Reverse upgrade(): remove the unified sort_isbn column and index.

    The autogenerated body also re-created the sort_isbn10/13 columns and
    their indexes, but upgrade() never dropped them (those commands are
    commented out), so re-adding them would fail with duplicate column /
    duplicate index errors — and the original even created the indexes
    before the columns existed. Only the commands that mirror what
    upgrade() actually did are kept.
    """
    ### commands auto generated by Alembic - please adjust! ###
    # Disabled: columns/indexes below still exist, see docstring.
    #op.create_index('ix_item_sort_isbn13', 'item', ['sort_isbn13'], unique=False)
    #op.create_index('ix_item_sort_isbn10', 'item', ['sort_isbn10'], unique=False)
    #op.add_column('item', sa.Column('sort_isbn10', sa.VARCHAR(length=1000), nullable=True))
    #op.add_column('item', sa.Column('sort_isbn13', sa.VARCHAR(length=1000), nullable=True))
    op.drop_index(op.f('ix_item_sort_isbn'), table_name='item')
    op.drop_column('item', 'sort_isbn')
    ### end Alembic commands ###

View file

@ -1,36 +0,0 @@
"""empty message
Revision ID: 21589282102d
Revises: 2350803a5a2d
Create Date: 2014-05-13 15:47:29.747858
"""
# revision identifiers, used by Alembic.
revision = '21589282102d'
down_revision = '2350803a5a2d'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('filter',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('item_id', sa.String(length=32), nullable=True),
sa.Column('key', sa.String(length=200), nullable=True),
sa.Column('value', sa.Text(), nullable=True),
sa.Column('findvalue', sa.Text(), nullable=True),
sa.ForeignKeyConstraint(['item_id'], ['item.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_filter_key'), 'filter', ['key'], unique=False)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_filter_key'), table_name='filter')
op.drop_table('filter')
### end Alembic commands ###

View file

@ -1,26 +0,0 @@
"""empty message
Revision ID: 2350803a5a2d
Revises: 1ead68a53597
Create Date: 2014-05-13 15:43:51.840049
"""
# revision identifiers, used by Alembic.
revision = '2350803a5a2d'
down_revision = '1ead68a53597'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
pass
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
pass
### end Alembic commands ###

View file

@ -1,26 +0,0 @@
"""empty message
Revision ID: 3169519dc1e5
Revises: 1a7c813a17c2
Create Date: 2014-05-18 03:28:03.950996
"""
# revision identifiers, used by Alembic.
revision = '3169519dc1e5'
down_revision = '1a7c813a17c2'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('user', sa.Column('queued', sa.Boolean(), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('user', 'queued')
### end Alembic commands ###

View file

@ -1,214 +0,0 @@
"""empty message
Revision ID: 348720abe06e
Revises: None
Create Date: 2014-05-11 12:24:57.346130
"""
# revision identifiers, used by Alembic.
revision = '348720abe06e'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('user',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('modified', sa.DateTime(), nullable=True),
sa.Column('id', sa.String(length=43), nullable=False),
sa.Column('info', sa.PickleType(), nullable=True),
sa.Column('nickname', sa.String(length=256), nullable=True),
sa.Column('pending', sa.String(length=64), nullable=True),
sa.Column('peered', sa.Boolean(), nullable=True),
sa.Column('online', sa.Boolean(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('person',
sa.Column('name', sa.String(length=1024), nullable=False),
sa.Column('sortname', sa.String(), nullable=True),
sa.Column('numberofnames', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('name')
)
op.create_table('work',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('modified', sa.DateTime(), nullable=True),
sa.Column('id', sa.String(length=32), nullable=False),
sa.Column('meta', sa.PickleType(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('changelog',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('user_id', sa.String(length=43), nullable=True),
sa.Column('revision', sa.BigInteger(), nullable=True),
sa.Column('data', sa.Text(), nullable=True),
sa.Column('sig', sa.String(length=96), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('list',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(), nullable=True),
sa.Column('position', sa.Integer(), nullable=True),
sa.Column('type', sa.String(length=64), nullable=True),
sa.Column('query', sa.PickleType(), nullable=True),
sa.Column('user_id', sa.String(length=43), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('edition',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('modified', sa.DateTime(), nullable=True),
sa.Column('id', sa.String(length=32), nullable=False),
sa.Column('meta', sa.PickleType(), nullable=True),
sa.Column('work_id', sa.String(length=32), nullable=True),
sa.ForeignKeyConstraint(['work_id'], ['work.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('item',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('modified', sa.DateTime(), nullable=True),
sa.Column('id', sa.String(length=32), nullable=False),
sa.Column('info', sa.PickleType(), nullable=True),
sa.Column('meta', sa.PickleType(), nullable=True),
sa.Column('added', sa.DateTime(), nullable=True),
sa.Column('accessed', sa.DateTime(), nullable=True),
sa.Column('timesaccessed', sa.Integer(), nullable=True),
sa.Column('transferadded', sa.DateTime(), nullable=True),
sa.Column('transferprogress', sa.Float(), nullable=True),
sa.Column('edition_id', sa.String(length=32), nullable=True),
sa.Column('work_id', sa.String(length=32), nullable=True),
sa.Column('sort_title', sa.String(length=1000), nullable=True),
sa.Column('sort_author', sa.String(length=1000), nullable=True),
sa.Column('sort_language', sa.String(length=1000), nullable=True),
sa.Column('sort_publisher', sa.String(length=1000), nullable=True),
sa.Column('sort_place', sa.String(length=1000), nullable=True),
sa.Column('sort_country', sa.String(length=1000), nullable=True),
sa.Column('sort_date', sa.String(length=1000), nullable=True),
sa.Column('sort_pages', sa.BigInteger(), nullable=True),
sa.Column('sort_classification', sa.String(length=1000), nullable=True),
sa.Column('sort_id', sa.String(length=1000), nullable=True),
sa.Column('sort_isbn10', sa.String(length=1000), nullable=True),
sa.Column('sort_isbn13', sa.String(length=1000), nullable=True),
sa.Column('sort_lccn', sa.String(length=1000), nullable=True),
sa.Column('sort_olid', sa.String(length=1000), nullable=True),
sa.Column('sort_oclc', sa.String(length=1000), nullable=True),
sa.Column('sort_extension', sa.String(length=1000), nullable=True),
sa.Column('sort_size', sa.BigInteger(), nullable=True),
sa.Column('sort_created', sa.DateTime(), nullable=True),
sa.Column('sort_added', sa.DateTime(), nullable=True),
sa.Column('sort_modified', sa.DateTime(), nullable=True),
sa.Column('sort_accessed', sa.DateTime(), nullable=True),
sa.Column('sort_timesaccessed', sa.BigInteger(), nullable=True),
sa.Column('sort_mediastate', sa.String(length=1000), nullable=True),
sa.Column('sort_transferadded', sa.DateTime(), nullable=True),
sa.Column('sort_transferprogress', sa.Float(), nullable=True),
sa.Column('sort_random', sa.BigInteger(), nullable=True),
sa.ForeignKeyConstraint(['edition_id'], ['edition.id'], ),
sa.ForeignKeyConstraint(['work_id'], ['work.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_item_sort_accessed'), 'item', ['sort_accessed'], unique=False)
op.create_index(op.f('ix_item_sort_added'), 'item', ['sort_added'], unique=False)
op.create_index(op.f('ix_item_sort_author'), 'item', ['sort_author'], unique=False)
op.create_index(op.f('ix_item_sort_classification'), 'item', ['sort_classification'], unique=False)
op.create_index(op.f('ix_item_sort_country'), 'item', ['sort_country'], unique=False)
op.create_index(op.f('ix_item_sort_created'), 'item', ['sort_created'], unique=False)
op.create_index(op.f('ix_item_sort_date'), 'item', ['sort_date'], unique=False)
op.create_index(op.f('ix_item_sort_extension'), 'item', ['sort_extension'], unique=False)
op.create_index(op.f('ix_item_sort_id'), 'item', ['sort_id'], unique=False)
op.create_index(op.f('ix_item_sort_isbn10'), 'item', ['sort_isbn10'], unique=False)
op.create_index(op.f('ix_item_sort_isbn13'), 'item', ['sort_isbn13'], unique=False)
op.create_index(op.f('ix_item_sort_language'), 'item', ['sort_language'], unique=False)
op.create_index(op.f('ix_item_sort_lccn'), 'item', ['sort_lccn'], unique=False)
op.create_index(op.f('ix_item_sort_mediastate'), 'item', ['sort_mediastate'], unique=False)
op.create_index(op.f('ix_item_sort_modified'), 'item', ['sort_modified'], unique=False)
op.create_index(op.f('ix_item_sort_oclc'), 'item', ['sort_oclc'], unique=False)
op.create_index(op.f('ix_item_sort_olid'), 'item', ['sort_olid'], unique=False)
op.create_index(op.f('ix_item_sort_pages'), 'item', ['sort_pages'], unique=False)
op.create_index(op.f('ix_item_sort_place'), 'item', ['sort_place'], unique=False)
op.create_index(op.f('ix_item_sort_publisher'), 'item', ['sort_publisher'], unique=False)
op.create_index(op.f('ix_item_sort_random'), 'item', ['sort_random'], unique=False)
op.create_index(op.f('ix_item_sort_size'), 'item', ['sort_size'], unique=False)
op.create_index(op.f('ix_item_sort_timesaccessed'), 'item', ['sort_timesaccessed'], unique=False)
op.create_index(op.f('ix_item_sort_title'), 'item', ['sort_title'], unique=False)
op.create_index(op.f('ix_item_sort_transferadded'), 'item', ['sort_transferadded'], unique=False)
op.create_index(op.f('ix_item_sort_transferprogress'), 'item', ['sort_transferprogress'], unique=False)
op.create_table('useritem',
sa.Column('user_id', sa.String(length=43), nullable=True),
sa.Column('item_id', sa.String(length=32), nullable=True),
sa.ForeignKeyConstraint(['item_id'], ['item.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], )
)
op.create_table('find',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('item_id', sa.String(length=32), nullable=True),
sa.Column('key', sa.String(length=200), nullable=True),
sa.Column('value', sa.Text(), nullable=True),
sa.ForeignKeyConstraint(['item_id'], ['item.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_find_key'), 'find', ['key'], unique=False)
op.create_table('file',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('modified', sa.DateTime(), nullable=True),
sa.Column('sha1', sa.String(length=32), nullable=False),
sa.Column('path', sa.String(length=2048), nullable=True),
sa.Column('info', sa.PickleType(), nullable=True),
sa.Column('item_id', sa.String(length=32), nullable=True),
sa.ForeignKeyConstraint(['item_id'], ['item.id'], ),
sa.PrimaryKeyConstraint('sha1')
)
op.create_table('listitem',
sa.Column('list_id', sa.Integer(), nullable=True),
sa.Column('item_id', sa.String(length=32), nullable=True),
sa.ForeignKeyConstraint(['item_id'], ['item.id'], ),
sa.ForeignKeyConstraint(['list_id'], ['list.id'], )
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('listitem')
op.drop_table('file')
op.drop_index(op.f('ix_find_key'), table_name='find')
op.drop_table('find')
op.drop_table('useritem')
op.drop_index(op.f('ix_item_sort_transferprogress'), table_name='item')
op.drop_index(op.f('ix_item_sort_transferadded'), table_name='item')
op.drop_index(op.f('ix_item_sort_title'), table_name='item')
op.drop_index(op.f('ix_item_sort_timesaccessed'), table_name='item')
op.drop_index(op.f('ix_item_sort_size'), table_name='item')
op.drop_index(op.f('ix_item_sort_random'), table_name='item')
op.drop_index(op.f('ix_item_sort_publisher'), table_name='item')
op.drop_index(op.f('ix_item_sort_place'), table_name='item')
op.drop_index(op.f('ix_item_sort_pages'), table_name='item')
op.drop_index(op.f('ix_item_sort_olid'), table_name='item')
op.drop_index(op.f('ix_item_sort_oclc'), table_name='item')
op.drop_index(op.f('ix_item_sort_modified'), table_name='item')
op.drop_index(op.f('ix_item_sort_mediastate'), table_name='item')
op.drop_index(op.f('ix_item_sort_lccn'), table_name='item')
op.drop_index(op.f('ix_item_sort_language'), table_name='item')
op.drop_index(op.f('ix_item_sort_isbn13'), table_name='item')
op.drop_index(op.f('ix_item_sort_isbn10'), table_name='item')
op.drop_index(op.f('ix_item_sort_id'), table_name='item')
op.drop_index(op.f('ix_item_sort_extension'), table_name='item')
op.drop_index(op.f('ix_item_sort_date'), table_name='item')
op.drop_index(op.f('ix_item_sort_created'), table_name='item')
op.drop_index(op.f('ix_item_sort_country'), table_name='item')
op.drop_index(op.f('ix_item_sort_classification'), table_name='item')
op.drop_index(op.f('ix_item_sort_author'), table_name='item')
op.drop_index(op.f('ix_item_sort_added'), table_name='item')
op.drop_index(op.f('ix_item_sort_accessed'), table_name='item')
op.drop_table('item')
op.drop_table('edition')
op.drop_table('list')
op.drop_table('changelog')
op.drop_table('work')
op.drop_table('person')
op.drop_table('user')
### end Alembic commands ###

View file

@ -1,44 +0,0 @@
"""empty message
Revision ID: 3822b1700859
Revises: 1fe914156ac0
Create Date: 2014-05-20 23:25:34.942115
"""
# revision identifiers, used by Alembic.
revision = '3822b1700859'
down_revision = '1fe914156ac0'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('metadata',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('modified', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('key', sa.String(length=256), nullable=True),
sa.Column('value', sa.String(length=256), nullable=True),
sa.Column('data', sa.PickleType(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
#op.drop_column(u'item', 'sort_isbn13')
#op.drop_column(u'item', 'sort_isbn10')
#op.create_index(op.f('ix_item_sort_isbn'), 'item', ['sort_isbn'], unique=False)
#op.drop_index('ix_item_sort_isbn10', table_name='item')
#op.drop_index('ix_item_sort_isbn13', table_name='item')
### end Alembic commands ###
def downgrade():
    """Reverse upgrade(): drop the 'metadata' table.

    upgrade() only created 'metadata' — its isbn commands are commented
    out — so only the table drop belongs here. The autogenerated body also
    re-added the sort_isbn10/13 columns (which still exist, so add_column
    would fail with duplicate-column errors) and even created their
    indexes before the add_column calls. Those commands are disabled.
    """
    ### commands auto generated by Alembic - please adjust! ###
    # Disabled: these do not mirror anything upgrade() actually did.
    #op.create_index('ix_item_sort_isbn13', 'item', ['sort_isbn13'], unique=False)
    #op.create_index('ix_item_sort_isbn10', 'item', ['sort_isbn10'], unique=False)
    #op.drop_index(op.f('ix_item_sort_isbn'), table_name='item')
    #op.add_column(u'item', sa.Column('sort_isbn10', sa.VARCHAR(length=1000), nullable=True))
    #op.add_column(u'item', sa.Column('sort_isbn13', sa.VARCHAR(length=1000), nullable=True))
    op.drop_table('metadata')
    ### end Alembic commands ###

View file

@ -0,0 +1,218 @@
"""empty message
Revision ID: 3c8686a285f5
Revises: None
Create Date: 2014-05-21 23:43:13.065858
"""
# revision identifiers, used by Alembic.
revision = '3c8686a285f5'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('item',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('modified', sa.DateTime(), nullable=True),
sa.Column('id', sa.String(length=32), nullable=False),
sa.Column('info', sa.PickleType(), nullable=True),
sa.Column('meta', sa.PickleType(), nullable=True),
sa.Column('added', sa.DateTime(), nullable=True),
sa.Column('accessed', sa.DateTime(), nullable=True),
sa.Column('timesaccessed', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('changelog',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('timestamp', sa.BigInteger(), nullable=True),
sa.Column('user_id', sa.String(length=43), nullable=True),
sa.Column('revision', sa.BigInteger(), nullable=True),
sa.Column('data', sa.Text(), nullable=True),
sa.Column('sig', sa.String(length=96), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('user',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('modified', sa.DateTime(), nullable=True),
sa.Column('id', sa.String(length=43), nullable=False),
sa.Column('info', sa.PickleType(), nullable=True),
sa.Column('nickname', sa.String(length=256), nullable=True),
sa.Column('pending', sa.String(length=64), nullable=True),
sa.Column('queued', sa.Boolean(), nullable=True),
sa.Column('peered', sa.Boolean(), nullable=True),
sa.Column('online', sa.Boolean(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('nickname')
)
op.create_table('metadata',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('modified', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('key', sa.String(length=256), nullable=True),
sa.Column('value', sa.String(length=256), nullable=True),
sa.Column('data', sa.PickleType(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('person',
sa.Column('name', sa.String(length=1024), nullable=False),
sa.Column('sortname', sa.String(), nullable=True),
sa.Column('numberofnames', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('name')
)
op.create_table('transfer',
sa.Column('item_id', sa.String(length=32), nullable=False),
sa.Column('added', sa.DateTime(), nullable=True),
sa.Column('progress', sa.Float(), nullable=True),
sa.ForeignKeyConstraint(['item_id'], ['item.id'], ),
sa.PrimaryKeyConstraint('item_id')
)
op.create_table('find',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('item_id', sa.String(length=32), nullable=True),
sa.Column('key', sa.String(length=200), nullable=True),
sa.Column('value', sa.Text(), nullable=True),
sa.Column('findvalue', sa.Text(), nullable=True),
sa.ForeignKeyConstraint(['item_id'], ['item.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_find_key'), 'find', ['key'], unique=False)
op.create_table('list',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(), nullable=True),
sa.Column('index_', sa.Integer(), nullable=True),
sa.Column('type', sa.String(length=64), nullable=True),
sa.Column('query', sa.PickleType(), nullable=True),
sa.Column('user_id', sa.String(length=43), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('useritem',
sa.Column('user_id', sa.String(length=43), nullable=True),
sa.Column('item_id', sa.String(length=32), nullable=True),
sa.ForeignKeyConstraint(['item_id'], ['item.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], )
)
op.create_table('sort',
sa.Column('item_id', sa.String(length=32), nullable=False),
sa.Column('title', sa.String(length=1000), nullable=True),
sa.Column('author', sa.String(length=1000), nullable=True),
sa.Column('publisher', sa.String(length=1000), nullable=True),
sa.Column('place', sa.String(length=1000), nullable=True),
sa.Column('country', sa.String(length=1000), nullable=True),
sa.Column('date', sa.String(length=1000), nullable=True),
sa.Column('language', sa.String(length=1000), nullable=True),
sa.Column('pages', sa.BigInteger(), nullable=True),
sa.Column('classification', sa.String(length=1000), nullable=True),
sa.Column('extension', sa.String(length=1000), nullable=True),
sa.Column('size', sa.BigInteger(), nullable=True),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('added', sa.DateTime(), nullable=True),
sa.Column('modified', sa.DateTime(), nullable=True),
sa.Column('accessed', sa.DateTime(), nullable=True),
sa.Column('timesaccessed', sa.BigInteger(), nullable=True),
sa.Column('mediastate', sa.String(length=1000), nullable=True),
sa.Column('transferadded', sa.DateTime(), nullable=True),
sa.Column('transferprogress', sa.Float(), nullable=True),
sa.Column('id', sa.String(length=1000), nullable=True),
sa.Column('isbn', sa.String(length=1000), nullable=True),
sa.Column('asin', sa.String(length=1000), nullable=True),
sa.Column('lccn', sa.String(length=1000), nullable=True),
sa.Column('olid', sa.String(length=1000), nullable=True),
sa.Column('oclc', sa.String(length=1000), nullable=True),
sa.Column('random', sa.BigInteger(), nullable=True),
sa.ForeignKeyConstraint(['item_id'], ['item.id'], ),
sa.PrimaryKeyConstraint('item_id')
)
op.create_index(op.f('ix_sort_accessed'), 'sort', ['accessed'], unique=False)
op.create_index(op.f('ix_sort_added'), 'sort', ['added'], unique=False)
op.create_index(op.f('ix_sort_asin'), 'sort', ['asin'], unique=False)
op.create_index(op.f('ix_sort_author'), 'sort', ['author'], unique=False)
op.create_index(op.f('ix_sort_classification'), 'sort', ['classification'], unique=False)
op.create_index(op.f('ix_sort_country'), 'sort', ['country'], unique=False)
op.create_index(op.f('ix_sort_created'), 'sort', ['created'], unique=False)
op.create_index(op.f('ix_sort_date'), 'sort', ['date'], unique=False)
op.create_index(op.f('ix_sort_extension'), 'sort', ['extension'], unique=False)
op.create_index(op.f('ix_sort_id'), 'sort', ['id'], unique=False)
op.create_index(op.f('ix_sort_isbn'), 'sort', ['isbn'], unique=False)
op.create_index(op.f('ix_sort_language'), 'sort', ['language'], unique=False)
op.create_index(op.f('ix_sort_lccn'), 'sort', ['lccn'], unique=False)
op.create_index(op.f('ix_sort_mediastate'), 'sort', ['mediastate'], unique=False)
op.create_index(op.f('ix_sort_modified'), 'sort', ['modified'], unique=False)
op.create_index(op.f('ix_sort_oclc'), 'sort', ['oclc'], unique=False)
op.create_index(op.f('ix_sort_olid'), 'sort', ['olid'], unique=False)
op.create_index(op.f('ix_sort_pages'), 'sort', ['pages'], unique=False)
op.create_index(op.f('ix_sort_place'), 'sort', ['place'], unique=False)
op.create_index(op.f('ix_sort_publisher'), 'sort', ['publisher'], unique=False)
op.create_index(op.f('ix_sort_random'), 'sort', ['random'], unique=False)
op.create_index(op.f('ix_sort_size'), 'sort', ['size'], unique=False)
op.create_index(op.f('ix_sort_timesaccessed'), 'sort', ['timesaccessed'], unique=False)
op.create_index(op.f('ix_sort_title'), 'sort', ['title'], unique=False)
op.create_index(op.f('ix_sort_transferadded'), 'sort', ['transferadded'], unique=False)
op.create_index(op.f('ix_sort_transferprogress'), 'sort', ['transferprogress'], unique=False)
op.create_table('file',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('modified', sa.DateTime(), nullable=True),
sa.Column('sha1', sa.String(length=32), nullable=False),
sa.Column('path', sa.String(length=2048), nullable=True),
sa.Column('info', sa.PickleType(), nullable=True),
sa.Column('item_id', sa.String(length=32), nullable=True),
sa.ForeignKeyConstraint(['item_id'], ['item.id'], ),
sa.PrimaryKeyConstraint('sha1')
)
op.create_table('listitem',
sa.Column('list_id', sa.Integer(), nullable=True),
sa.Column('item_id', sa.String(length=32), nullable=True),
sa.ForeignKeyConstraint(['item_id'], ['item.id'], ),
sa.ForeignKeyConstraint(['list_id'], ['list.id'], )
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('listitem')
op.drop_table('file')
op.drop_index(op.f('ix_sort_transferprogress'), table_name='sort')
op.drop_index(op.f('ix_sort_transferadded'), table_name='sort')
op.drop_index(op.f('ix_sort_title'), table_name='sort')
op.drop_index(op.f('ix_sort_timesaccessed'), table_name='sort')
op.drop_index(op.f('ix_sort_size'), table_name='sort')
op.drop_index(op.f('ix_sort_random'), table_name='sort')
op.drop_index(op.f('ix_sort_publisher'), table_name='sort')
op.drop_index(op.f('ix_sort_place'), table_name='sort')
op.drop_index(op.f('ix_sort_pages'), table_name='sort')
op.drop_index(op.f('ix_sort_olid'), table_name='sort')
op.drop_index(op.f('ix_sort_oclc'), table_name='sort')
op.drop_index(op.f('ix_sort_modified'), table_name='sort')
op.drop_index(op.f('ix_sort_mediastate'), table_name='sort')
op.drop_index(op.f('ix_sort_lccn'), table_name='sort')
op.drop_index(op.f('ix_sort_language'), table_name='sort')
op.drop_index(op.f('ix_sort_isbn'), table_name='sort')
op.drop_index(op.f('ix_sort_id'), table_name='sort')
op.drop_index(op.f('ix_sort_extension'), table_name='sort')
op.drop_index(op.f('ix_sort_date'), table_name='sort')
op.drop_index(op.f('ix_sort_created'), table_name='sort')
op.drop_index(op.f('ix_sort_country'), table_name='sort')
op.drop_index(op.f('ix_sort_classification'), table_name='sort')
op.drop_index(op.f('ix_sort_author'), table_name='sort')
op.drop_index(op.f('ix_sort_asin'), table_name='sort')
op.drop_index(op.f('ix_sort_added'), table_name='sort')
op.drop_index(op.f('ix_sort_accessed'), table_name='sort')
op.drop_table('sort')
op.drop_table('useritem')
op.drop_table('list')
op.drop_index(op.f('ix_find_key'), table_name='find')
op.drop_table('find')
op.drop_table('transfer')
op.drop_table('person')
op.drop_table('metadata')
op.drop_table('user')
op.drop_table('changelog')
op.drop_table('item')
### end Alembic commands ###

View file

@ -1,35 +0,0 @@
"""empty message
Revision ID: 3ea9f03f386f
Revises: 3822b1700859
Create Date: 2014-05-20 23:37:03.959948
"""
# revision identifiers, used by Alembic.
revision = '3ea9f03f386f'
down_revision = '3822b1700859'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('metadata')
op.create_table('metadata',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('modified', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('key', sa.String(length=256), nullable=True),
sa.Column('value', sa.String(length=256), nullable=True),
sa.Column('data', sa.PickleType(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###
def downgrade():
pass
### commands auto generated by Alembic - please adjust! ###
### end Alembic commands ###

View file

@ -1,26 +0,0 @@
"""empty message
Revision ID: 4480ecc50e04
Revises: 3169519dc1e5
Create Date: 2014-05-20 02:20:20.283739
"""
# revision identifiers, used by Alembic.
revision = '4480ecc50e04'
down_revision = '3169519dc1e5'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('changelog', sa.Column('timestamp', sa.BigInteger(), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('changelog', 'timestamp')
### end Alembic commands ###

View file

@ -1,36 +0,0 @@
"""empty message
Revision ID: 7bb11a24276
Revises: 21589282102d
Create Date: 2014-05-13 18:28:46.214059
"""
# revision identifiers, used by Alembic.
revision = '7bb11a24276'
down_revision = '21589282102d'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('filter')
op.add_column('find', sa.Column('findvalue', sa.Text(), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('find', 'findvalue')
op.create_table('filter',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('item_id', sa.VARCHAR(length=32), nullable=True),
sa.Column('key', sa.VARCHAR(length=200), nullable=True),
sa.Column('value', sa.TEXT(), nullable=True),
sa.Column('findvalue', sa.TEXT(), nullable=True),
sa.ForeignKeyConstraint(['item_id'], [u'item.id'], ),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###

View file

@ -8,7 +8,6 @@ from flask.ext.migrate import Migrate, MigrateCommand
import logging import logging
import oxflask.api
import settings import settings
from settings import db from settings import db
@ -31,7 +30,6 @@ logging.basicConfig(level=logging.DEBUG)
app = Flask('openmedialibrary', static_folder=settings.static_path) app = Flask('openmedialibrary', static_folder=settings.static_path)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////%s' % settings.db_path app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////%s' % settings.db_path
app.register_blueprint(oxflask.api.app)
app.register_blueprint(item.views.app) app.register_blueprint(item.views.app)
db.init_app(app) db.init_app(app)

View file

@ -81,24 +81,9 @@ class PostUpdate(Command):
] ]
def run(selfi, old, new): def run(selfi, old, new):
if old <= '20140506-2-796c77b' and new > '20140506-2-796c77b': if old <= '20140521-65-e14c686' and new > '20140521-65-e14c686':
print 'migrate database content' if not os.path.exists(settings.db_path):
import item.models r('./ctl', 'setup')
for i in item.models.Item.query:
if 'mainid' in i.meta:
mainid = i.meta.pop('mainid')
pid = {'isbn10': 'isbn', 'isbn13': 'isbn'}.get(mainid, mainid)
i.meta['primaryid'] = [pid, i.meta[mainid]]
isbns = i.meta.get('isbn', [])
for key in ('isbn10', 'isbn13'):
if key in i.meta:
isbns.append(i.meta.pop(key))
if isbns:
i.meta['isbn'] = isbns
for key in ('asin', 'lccn', 'olid', 'oclc'):
if key in i.meta and isinstance(i.meta[key], basestring):
i.meta[key] = [i.meta[key]]
i.update()
class Setup(Command): class Setup(Command):
""" """
@ -137,7 +122,7 @@ class Release(Command):
os.chdir(root_dir) os.chdir(root_dir)
with open(os.path.expanduser('~/Private/openmedialibrary_release.key')) as fd: with open(os.path.expanduser('~/.openmedialibrary_release.key')) as fd:
SIG_KEY=ed25519.SigningKey(fd.read()) SIG_KEY=ed25519.SigningKey(fd.read())
SIG_ENCODING='base64' SIG_ENCODING='base64'

View file

@ -21,13 +21,13 @@ class Downloads(Thread):
def download_next(self): def download_next(self):
import item.models import item.models
for i in item.models.Item.query.filter( for t in item.models.Transfer.query.filter(
item.models.Item.transferadded!=None).filter( item.models.Transfer.added!=None,
item.models.Item.transferprogress<1).order_by(item.models.Item.transferadded): item.models.Transfer.progress<1).order_by(item.models.Transfer.added):
for u in i.users: for u in t.item.users:
if state.nodes.check_online(u.id): if state.nodes.is_online(u.id):
logger.debug('DOWNLOAD %s %s', i, u) logger.debug('DOWNLOAD %s %s', t.item, u)
r = state.nodes.download(u.id, i) r = state.nodes.download(u.id, t.item)
logger.debug('download ok? %s', r) logger.debug('download ok? %s', r)
return True return True
return False return False

View file

@ -84,7 +84,7 @@ def find(data):
#from sqlalchemy.sql import func #from sqlalchemy.sql import func
#models.db.session.query(func.sum(models.Item.sort_size).label("size")) #models.db.session.query(func.sum(models.Item.sort_size).label("size"))
#response['size'] = x.scalar() #response['size'] = x.scalar()
response['size'] = sum([i.sort_size or 0 for i in q['qs'].options(load_only('id', 'sort_size'))]) response['size'] = sum([i.info.get('size', 0) for i in q['qs'].join(models.Sort).options(load_only('id', 'info'))])
return response return response
actions.register(find) actions.register(find)
@ -225,8 +225,10 @@ def cancelDownloads(data):
ids = data['ids'] ids = data['ids']
if ids: if ids:
for item in models.Item.query.filter(models.Item.id.in_(ids)): for item in models.Item.query.filter(models.Item.id.in_(ids)):
item.transferprogress = None t = models.Transfer.get(item.id)
item.transferadded = None t.progress = None
t.added = None
t.save()
p = state.user() p = state.user()
if p in item.users: if p in item.users:
item.users.remove(p) item.users.remove(p)

View file

@ -83,7 +83,8 @@ class ReaderHandler(OMLHandler):
self.set_status(404) self.set_status(404)
self.finish() self.finish()
return return
item.sort_accessed = item.accessed = datetime.utcnow() item.accessed = datetime.utcnow()
item.sort_timesaccessed = item.timesaccessed = (item.timesaccessed or 0) + 1 item.timesaccessed = (item.timesaccessed or 0) + 1
item.update_sort()
item.save() item.save()
return serve_static(self, os.path.join(settings.static_path, html), 'text/html') return serve_static(self, os.path.join(settings.static_path, html), 'text/html')

View file

@ -15,6 +15,8 @@ import logging
import Image import Image
import ox import ox
from db import MutableDict
import settings import settings
from settings import db, config from settings import db, config
@ -26,7 +28,6 @@ import meta
import state import state
import utils import utils
from oxflask.db import MutableDict
from icons import icons from icons import icons
from changelog import Changelog from changelog import Changelog
@ -35,42 +36,6 @@ from utils import remove_empty_folders
logger = logging.getLogger('oml.item.model') logger = logging.getLogger('oml.item.model')
class Work(db.Model):
created = db.Column(db.DateTime())
modified = db.Column(db.DateTime())
id = db.Column(db.String(32), primary_key=True)
meta = db.Column(MutableDict.as_mutable(db.PickleType(pickler=json)))
def __repr__(self):
return self.id
def __init__(self, id):
self.id = id
self.created = datetime.utcnow()
self.modified = datetime.utcnow()
class Edition(db.Model):
created = db.Column(db.DateTime())
modified = db.Column(db.DateTime())
id = db.Column(db.String(32), primary_key=True)
meta = db.Column(MutableDict.as_mutable(db.PickleType(pickler=json)))
work_id = db.Column(db.String(32), db.ForeignKey('work.id'))
work = db.relationship('Work', backref=db.backref('editions', lazy='dynamic'))
def __repr__(self):
return self.id
def __init__(self, id):
self.id = id
self.created = datetime.utcnow()
self.modified = datetime.utcnow()
user_items = db.Table('useritem', user_items = db.Table('useritem',
db.Column('user_id', db.String(43), db.ForeignKey('user.id')), db.Column('user_id', db.String(43), db.ForeignKey('user.id')),
@ -87,22 +52,14 @@ class Item(db.Model):
info = db.Column(MutableDict.as_mutable(db.PickleType(pickler=json))) info = db.Column(MutableDict.as_mutable(db.PickleType(pickler=json)))
meta = db.Column(MutableDict.as_mutable(db.PickleType(pickler=json))) meta = db.Column(MutableDict.as_mutable(db.PickleType(pickler=json)))
# why is this in db and not in i.e. info?
added = db.Column(db.DateTime()) # added to local library added = db.Column(db.DateTime()) # added to local library
accessed = db.Column(db.DateTime()) accessed = db.Column(db.DateTime())
timesaccessed = db.Column(db.Integer()) timesaccessed = db.Column(db.Integer())
transferadded = db.Column(db.DateTime())
transferprogress = db.Column(db.Float())
users = db.relationship('User', secondary=user_items, users = db.relationship('User', secondary=user_items,
backref=db.backref('items', lazy='dynamic')) backref=db.backref('items', lazy='dynamic'))
edition_id = db.Column(db.String(32), db.ForeignKey('edition.id'))
edition = db.relationship('Edition', backref=db.backref('items', lazy='dynamic'))
work_id = db.Column(db.String(32), db.ForeignKey('work.id'))
work = db.relationship('Work', backref=db.backref('items', lazy='dynamic'))
@property @property
def timestamp(self): def timestamp(self):
return utils.datetime2ts(self.modified) return utils.datetime2ts(self.modified)
@ -146,8 +103,10 @@ class Item(db.Model):
j['timesaccessed'] = self.timesaccessed j['timesaccessed'] = self.timesaccessed
j['accessed'] = self.accessed j['accessed'] = self.accessed
j['added'] = self.added j['added'] = self.added
j['transferadded'] = self.transferadded t = Transfer.get(self.id)
j['transferprogress'] = self.transferprogress if t:
j['transferadded'] = t.added
j['transferprogress'] = t.progress
j['users'] = map(str, list(self.users)) j['users'] = map(str, list(self.users))
if self.info: if self.info:
@ -158,13 +117,6 @@ class Item(db.Model):
for key in self.id_keys + ['primaryid']: for key in self.id_keys + ['primaryid']:
if key not in self.meta and key in j: if key not in self.meta and key in j:
del j[key] del j[key]
'''
if self.work_id:
j['work'] = {
'olid': self.work_id
}
j['work'].update(self.work.meta)
'''
if keys: if keys:
for k in j.keys(): for k in j.keys():
if k not in keys: if k not in keys:
@ -176,6 +128,7 @@ class Item(db.Model):
return f.fullpath() if f else None return f.fullpath() if f else None
def update_sort(self): def update_sort(self):
s = Sort.get_or_create(self.id)
for key in config['itemKeys']: for key in config['itemKeys']:
if key.get('sort'): if key.get('sort'):
value = self.json().get(key['id'], None) value = self.json().get(key['id'], None)
@ -202,7 +155,8 @@ class Item(db.Model):
value = ox.sort_string(value).lower() value = ox.sort_string(value).lower()
elif isinstance(value, list): #empty list elif isinstance(value, list): #empty list
value = '' value = ''
setattr(self, 'sort_%s' % key['id'], value) setattr(s, key['id'], value)
db.session.add(s)
def update_find(self): def update_find(self):
@ -255,11 +209,11 @@ class Item(db.Model):
del self.meta[key] del self.meta[key]
users = map(str, list(self.users)) users = map(str, list(self.users))
self.info['mediastate'] = 'available' # available, unavailable, transferring self.info['mediastate'] = 'available' # available, unavailable, transferring
if self.transferadded and self.transferprogress < 1: t = Transfer.get(self.id)
if t and t.added and t.progress < 1:
self.info['mediastate'] = 'transferring' self.info['mediastate'] = 'transferring'
else: else:
self.info['mediastate'] = 'available' if settings.USER_ID in users else 'unavailable' self.info['mediastate'] = 'available' if settings.USER_ID in users else 'unavailable'
#fixme: also load metadata for other ids?
if 'primaryid' in self.meta: if 'primaryid' in self.meta:
self.meta.update(Metadata.load(*self.meta['primaryid'])) self.meta.update(Metadata.load(*self.meta['primaryid']))
self.update_sort() self.update_sort()
@ -382,13 +336,12 @@ class Item(db.Model):
def queue_download(self): def queue_download(self):
u = state.user() u = state.user()
t = Transfer.get_or_create(self.id)
if not u in self.users: if not u in self.users:
logger.debug('queue %s for download', self.id) logger.debug('queue %s for download', self.id)
self.transferprogress = 0
self.transferadded = datetime.utcnow()
self.users.append(u) self.users.append(u)
else: else:
logger.debug('%s already queued for download? %s %s', self.id, self.transferprogress, self.transferadded) logger.debug('%s already queued for download? %s %s', self.id, t.progress, t.added)
def save_file(self, content): def save_file(self, content):
u = state.user() u = state.user()
@ -407,7 +360,9 @@ class Item(db.Model):
fd.write(content) fd.write(content)
if u not in self.users: if u not in self.users:
self.users.append(u) self.users.append(u)
self.transferprogress = 1 t = Transfer.get_or_create(self.id)
t.progress = 1
t.save()
self.added = datetime.utcnow() self.added = datetime.utcnow()
Changelog.record(u, 'additem', self.id, self.info) Changelog.record(u, 'additem', self.id, self.info)
self.update() self.update()
@ -419,7 +374,9 @@ class Item(db.Model):
return True return True
else: else:
logger.debug('TRIED TO SAVE EXISTING FILE!!!') logger.debug('TRIED TO SAVE EXISTING FILE!!!')
self.transferprogress = 1 t = Transfer.get_or_create(self.id)
t.progress = 1
t.save()
self.update() self.update()
return False return False
@ -443,6 +400,26 @@ class Item(db.Model):
self.update() self.update()
Changelog.record(user, 'removeitem', self.id) Changelog.record(user, 'removeitem', self.id)
class Sort(db.Model):
item_id = db.Column(db.String(32), db.ForeignKey('item.id'), primary_key=True)
item = db.relationship('Item', backref=db.backref('sort', lazy='dynamic'))
def __repr__(self):
return '%s_sort' % self.item_id
@classmethod
def get(cls, item_id):
return cls.query.filter_by(item_id=item_id).first()
@classmethod
def get_or_create(cls, item_id):
f = cls.get(item_id)
if not f:
f = cls(item_id=item_id)
db.session.add(f)
db.session.commit()
return f
for key in config['itemKeys']: for key in config['itemKeys']:
if key.get('sort'): if key.get('sort'):
sort_type = key.get('sortType', key['type']) sort_type = key.get('sortType', key['type'])
@ -454,7 +431,7 @@ for key in config['itemKeys']:
col = db.Column(db.DateTime(), index=True) col = db.Column(db.DateTime(), index=True)
else: else:
col = db.Column(db.String(1000), index=True) col = db.Column(db.String(1000), index=True)
setattr(Item, 'sort_%s' % key['id'], col) setattr(Sort, '%s' % key['id'], col)
Item.id_keys = ['isbn', 'lccn', 'olid', 'oclc', 'asin'] Item.id_keys = ['isbn', 'lccn', 'olid', 'oclc', 'asin']
Item.item_keys = config['itemKeys'] Item.item_keys = config['itemKeys']
@ -570,6 +547,36 @@ class File(db.Model):
db.session.add(self) db.session.add(self)
db.session.commit() db.session.commit()
class Transfer(db.Model):
item_id = db.Column(db.String(32), db.ForeignKey('item.id'), primary_key=True)
item = db.relationship('Item', backref=db.backref('transfer', lazy='dynamic'))
added = db.Column(db.DateTime())
progress = db.Column(db.Float())
def __repr__(self):
return '='.join(map(str, [self.item_id, self.progress]))
@classmethod
def get(cls, item_id):
return cls.query.filter_by(item_id=item_id).first()
@classmethod
def get_or_create(cls, item_id):
t = cls.get(item_id)
if not t:
t = cls(item_id=item_id)
t.added = datetime.utcnow()
t.progress = 0
t.save()
return t
def save(self):
db.session.add(self)
db.session.commit()
class Metadata(db.Model): class Metadata(db.Model):
created = db.Column(db.DateTime()) created = db.Column(db.DateTime())

View file

@ -5,7 +5,8 @@ from __future__ import division
import settings import settings
import models import models
import utils import utils
import oxflask.query from queryparser import Parser
from sqlalchemy.sql.expression import nullslast from sqlalchemy.sql.expression import nullslast
@ -18,12 +19,12 @@ def parse(data):
if key in data: if key in data:
query[key] = data[key] query[key] = data[key]
#print data #print data
query['qs'] = oxflask.query.Parser(models.Item).find(data) query['qs'] = Parser(models.Item).find(data)
if not 'group' in query: if not 'group' in query:
query['qs'] = order(query['qs'], query['sort']) query['qs'] = order(query['qs'], query['sort'])
return query return query
def order(qs, sort, prefix='sort_'): def order(qs, sort, prefix='sort.'):
order_by = [] order_by = []
if len(sort) == 1: if len(sort) == 1:
additional_sort = settings.config['user']['ui']['listSort'] additional_sort = settings.config['user']['ui']['listSort']
@ -51,5 +52,5 @@ def order(qs, sort, prefix='sort_'):
_order_by.append(nulls) _order_by.append(nulls)
_order_by.append(order) _order_by.append(order)
order_by = _order_by order_by = _order_by
qs = qs.order_by(*order_by) qs = qs.join(models.Sort).order_by(*order_by)
return qs return qs

View file

@ -39,7 +39,8 @@ def reader(id, filename=''):
html = 'html/txt.html' html = 'html/txt.html'
else: else:
abort(404) abort(404)
item.sort_accessed = item.accessed = datetime.utcnow() item.accessed = datetime.utcnow()
item.sort_timesaccessed = item.timesaccessed = (item.timesaccessed or 0) + 1 item.timesaccessed = (item.timesaccessed or 0) + 1
item.update_sort()
item.save() item.save()
return app.send_static_file(html) return app.send_static_file(html)

View file

@ -3,7 +3,7 @@
def get_classification(id): def get_classification(id):
name = u'%s' % id name = u'%s' % id
base = str(int(id.split('/')[0].split('.')[0])) base = ''.join([s for s in id.split('/')[0].split('.')[0] if s.isdigit()])
if base in DEWEY: if base in DEWEY:
name = u'%s %s' % (name, DEWEY[base].decode('utf-8')) name = u'%s %s' % (name, DEWEY[base].decode('utf-8'))
return name return name

View file

@ -18,6 +18,7 @@ from utils import valid, get_public_ipv6
import nodeapi import nodeapi
import cert import cert
from websocket import trigger_event from websocket import trigger_event
from oxtornado import run_async
import logging import logging
logger = logging.getLogger('oml.node.server') logger = logging.getLogger('oml.node.server')
@ -28,58 +29,74 @@ class NodeHandler(tornado.web.RequestHandler):
self.app = app self.app = app
@tornado.web.asynchronous
@tornado.gen.coroutine
def post(self): def post(self):
request = self.request '''
if request.method == 'POST': API
''' pullChanges [userid] from [to]
API pushChanges [index, change]
pullChanges [userid] from [to] requestPeering username message
pushChanges [index, change] acceptPeering username message
requestPeering username message rejectPeering message
acceptPeering username message removePeering message
rejectPeering message
removePeering message
ping responds public ip ping responds public ip
''' '''
key = str(request.headers['X-Ed25519-Key']) key = str(self.request.headers['X-Ed25519-Key'])
sig = str(request.headers['X-Ed25519-Signature']) sig = str(self.request.headers['X-Ed25519-Signature'])
data = request.body data = self.request.body
content = {} content = {}
self.set_header('X-Node-Protocol', settings.NODE_PROTOCOL)
if self.request.headers.get('X-Node-Protocol', None) > settings.NODE_PROTOCOL:
state.update_required = True
if self.request.headers.get('X-Node-Protocol', None) != settings.NODE_PROTOCOL:
content = settings.release
else:
if valid(key, data, sig): if valid(key, data, sig):
action, args = json.loads(data) action, args = json.loads(data)
logger.debug('NODE action %s %s (%s)', action, args, key) logger.debug('NODE action %s %s (%s)', action, args, key)
if action == 'ping': if action == 'ping':
content = { content = {
'ip': request.remote_addr 'ip': self.request.remote_addr
} }
else: else:
with self.app.app_context(): content = yield tornado.gen.Task(api_call, self.app, action, key, args)
u = user.models.User.get(key) if content is None:
if action in ( content = {'status': 'not peered'}
'requestPeering', 'acceptPeering', 'rejectPeering', 'removePeering' logger.debug('PEER %s IS UNKNOWN SEND 403', key)
) or (u and u.peered): self.set_status(403)
content = getattr(nodeapi, 'api_' + action)(self.app, key, *args) content = json.dumps(content)
else: sig = settings.sk.sign(content, encoding='base64')
if u and u.pending: self.set_header('X-Ed25519-Signature', sig)
logger.debug('ignore request from pending peer[%s] %s (%s)', key, action, args) self.set_header('X-Node-Protocol', settings.NODE_PROTOCOL)
content = {} self.write(content)
else: self.finish()
logger.debug('PEER %s IS UNKNOWN SEND 403', key)
self.set_status(403)
content = {
'status': 'not peered'
}
content = json.dumps(content)
sig = settings.sk.sign(content, encoding='base64')
self.set_header('X-Ed25519-Signature', sig)
self.write(content)
self.finish()
def get(self): def get(self):
self.set_header('X-Node-Protocol', settings.NODE_PROTOCOL)
if self.request.headers.get('X-Node-Protocol', None) > settings.NODE_PROTOCOL:
state.update_required = True
self.write('Open Media Library') self.write('Open Media Library')
self.finish() self.finish()
@run_async
def api_call(app, action, key, args, callback):
with app.app_context():
u = user.models.User.get(key)
if action in (
'requestPeering', 'acceptPeering', 'rejectPeering', 'removePeering'
) or (u and u.peered):
content = getattr(nodeapi, 'api_' + action)(app, key, *args)
else:
if u and u.pending:
logger.debug('ignore request from pending peer[%s] %s (%s)', key, action, args)
content = {}
else:
content = None
callback(content)
class ShareHandler(tornado.web.RequestHandler): class ShareHandler(tornado.web.RequestHandler):
def initialize(self, app): def initialize(self, app):
@ -149,7 +166,7 @@ def check_nodes(app):
if state.online: if state.online:
with app.app_context(): with app.app_context():
for u in user.models.User.query.filter_by(queued=True): for u in user.models.User.query.filter_by(queued=True):
if not state.nodes.check_online(u.id): if not state.nodes.is_online(u.id):
logger.debug('queued peering message for %s trying to connect...', u.id) logger.debug('queued peering message for %s trying to connect...', u.id)
state.nodes.queue('add', u.id) state.nodes.queue('add', u.id)

View file

@ -25,6 +25,7 @@ import directory
from websocket import trigger_event from websocket import trigger_event
from localnodes import LocalNodes from localnodes import LocalNodes
from ssl_request import get_opener from ssl_request import get_opener
import state
import logging import logging
logger = logging.getLogger('oml.nodes') logger = logging.getLogger('oml.nodes')
@ -124,6 +125,7 @@ class Node(Thread):
sig = settings.sk.sign(content, encoding=ENCODING) sig = settings.sk.sign(content, encoding=ENCODING)
headers = { headers = {
'User-Agent': settings.USER_AGENT, 'User-Agent': settings.USER_AGENT,
'X-Node-Protocol': settings.NODE_PROTOCOL,
'Accept': 'text/plain', 'Accept': 'text/plain',
'Accept-Encoding': 'gzip', 'Accept-Encoding': 'gzip',
'Content-Type': 'application/json', 'Content-Type': 'application/json',
@ -154,6 +156,15 @@ class Node(Thread):
data = r.read() data = r.read()
if r.headers.get('content-encoding', None) == 'gzip': if r.headers.get('content-encoding', None) == 'gzip':
data = gzip.GzipFile(fileobj=StringIO(data)).read() data = gzip.GzipFile(fileobj=StringIO(data)).read()
version = r.headers.get('X-Node-Protocol', None)
if version != settings.NODE_PROTOCOL:
logger.debug('version does not match local: %s remote %s', settings.NODE_PROTOCOL, version)
self.online = False
if version > settings.NODE_PROTOCOL:
state.update_required = True
return None
sig = r.headers.get('X-Ed25519-Signature') sig = r.headers.get('X-Ed25519-Signature')
if sig and self._valid(data, sig): if sig and self._valid(data, sig):
response = json.loads(data) response = json.loads(data)
@ -177,7 +188,17 @@ class Node(Thread):
def can_connect(self): def can_connect(self):
try: try:
logger.debug('try to connect to %s', self.url) logger.debug('try to connect to %s', self.url)
headers = {
'User-Agent': settings.USER_AGENT,
'X-Node-Protocol': settings.NODE_PROTOCOL,
'Accept-Encoding': 'gzip',
}
self._opener.addheaders = zip(headers.keys(), headers.values())
r = self._opener.open(self.url, timeout=1) r = self._opener.open(self.url, timeout=1)
version = r.headers.get('X-Node-Protocol', None)
if version != settings.NODE_PROTOCOL:
logger.debug('version does not match local: %s remote %s', settings.NODE_PROTOCOL, version)
return False
c = r.read() c = r.read()
logger.debug('ok') logger.debug('ok')
return True return True
@ -261,6 +282,7 @@ class Node(Thread):
return True return True
def download(self, item): def download(self, item):
from item.models import Transfer
url = '%s/get/%s' % (self.url, item.id) url = '%s/get/%s' % (self.url, item.id)
headers = { headers = {
'User-Agent': settings.USER_AGENT, 'User-Agent': settings.USER_AGENT,
@ -281,11 +303,12 @@ class Node(Thread):
''' '''
content = '' content = ''
for chunk in iter(lambda: r.read(1024*1024), ''): for chunk in iter(lambda: r.read(1024*1024), ''):
t = Transfer.get(item.id)
content += chunk content += chunk
item.transferprogress = len(content) / item.info['size'] t.progress = len(content) / item.info['size']
item.save() t.save()
trigger_event('transfer', { trigger_event('transfer', {
'id': item.id, 'progress': item.transferprogress 'id': item.id, 'progress': t.progress
}) })
''' '''
content = r.read() content = r.read()
@ -337,7 +360,7 @@ class Nodes(Thread):
def queue(self, *args): def queue(self, *args):
self._q.put(list(args)) self._q.put(list(args))
def check_online(self, id): def is_online(self, id):
return id in self._nodes and self._nodes[id].online return id in self._nodes and self._nodes[id].online
def download(self, id, item): def download(self, id, item):

View file

@ -1,163 +0,0 @@
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from __future__ import division, with_statement
import inspect
import sys
import json
from flask import request, Blueprint
from .shortcuts import render_to_json_response, json_response
import logging
logger = logging.getLogger('oxflask.api')
app = Blueprint('oxflask', __name__)
@app.route('/api/', methods=['POST', 'OPTIONS'])
def api():
if request.host not in request.headers['origin']:
logger.debug('reject cross site attempt to access api %s', request)
return ''
if request.method == "OPTIONS":
response = render_to_json_response({'status': {'code': 200, 'text': 'use POST'}})
#response.headers['Access-Control-Allow-Origin'] = '*'
return response
if not 'action' in request.form:
methods = actions.keys()
api = []
for f in sorted(methods):
api.append({'name': f,
'doc': actions.doc(f).replace('\n', '<br>\n')})
return render_to_json_response(api)
action = request.form['action']
logger.debug('API %s', action)
f = actions.get(action)
if f:
response = f(request)
else:
response = render_to_json_response(json_response(status=400,
text='Unknown action %s' % action))
#response.headers['Access-Control-Allow-Origin'] = '*'
return response
def trim(docstring):
if not docstring:
return ''
# Convert tabs to spaces (following the normal Python rules)
# and split into a list of lines:
lines = docstring.expandtabs().splitlines()
# Determine minimum indentation (first line doesn't count):
indent = sys.maxint
for line in lines[1:]:
stripped = line.lstrip()
if stripped:
indent = min(indent, len(line) - len(stripped))
# Remove indentation (first line is special):
trimmed = [lines[0].strip()]
if indent < sys.maxint:
for line in lines[1:]:
trimmed.append(line[indent:].rstrip())
# Strip off trailing and leading blank lines:
while trimmed and not trimmed[-1]:
trimmed.pop()
while trimmed and not trimmed[0]:
trimmed.pop(0)
# Return a single string:
return '\n'.join(trimmed)
class ApiActions(dict):
properties = {}
versions = {}
def __init__(self):
def api(request):
'''
returns list of all known api actions
param data {
docs: bool
}
if docs is true, action properties contain docstrings
return {
status: {'code': int, 'text': string},
data: {
actions: {
'api': {
cache: true,
doc: 'recursion'
},
'hello': {
cache: true,
..
}
...
}
}
}
'''
data = json.loads(request.form.get('data', '{}'))
docs = data.get('docs', False)
code = data.get('code', False)
version = getattr(request, 'version', None)
if version:
_actions = self.versions.get(version, {}).keys()
_actions = list(set(_actions + self.keys()))
else:
_actions = self.keys()
_actions.sort()
actions = {}
for a in _actions:
actions[a] = self.properties[a]
if docs:
actions[a]['doc'] = self.doc(a, version)
if code:
actions[a]['code'] = self.code(a, version)
response = json_response({'actions': actions})
return render_to_json_response(response)
self.register(api)
def doc(self, name, version=None):
if version:
f = self.versions[version].get(name, self.get(name))
else:
f = self[name]
return trim(f.__doc__)
def code(self, name, version=None):
if version:
f = self.versions[version].get(name, self.get(name))
else:
f = self[name]
if name != 'api' and hasattr(f, 'func_closure') and f.func_closure:
fc = filter(lambda c: hasattr(c.cell_contents, '__call__'), f.func_closure)
f = fc[len(fc)-1].cell_contents
info = f.func_code.co_filename
info = u'%s:%s' % (info, f.func_code.co_firstlineno)
return info, trim(inspect.getsource(f))
def register(self, method, action=None, cache=True, version=None):
if not action:
action = method.func_name
if version:
if not version in self.versions:
self.versions[version] = {}
self.versions[version][action] = method
else:
self[action] = method
self.properties[action] = {'cache': cache}
def unregister(self, action):
if action in self:
del self[action]
actions = ApiActions()
def error(request):
'''
this action is used to test api error codes, it should return a 503 error
'''
success = error_is_success
return render_to_json_response({})
actions.register(error)

View file

@ -1,34 +0,0 @@
from functools import wraps
import datetime
import json
from flask import Response
def json_response(data=None, status=200, text='ok'):
if not data:
data = {}
return {'status': {'code': status, 'text': text}, 'data': data}
def _to_json(python_object):
if isinstance(python_object, datetime.datetime):
if python_object.year < 1900:
tt = python_object.timetuple()
return '%d-%02d-%02dT%02d:%02d%02dZ' % tuple(list(tt)[:6])
return python_object.strftime('%Y-%m-%dT%H:%M:%SZ')
raise TypeError(u'%s %s is not JSON serializable' % (repr(python_object), type(python_object)))
def json_dumps(obj):
indent = 2
return json.dumps(obj, indent=indent, default=_to_json, ensure_ascii=False).encode('utf-8')
def render_to_json_response(obj, content_type="text/json", status=200):
resp = Response(json_dumps(obj), status=status, content_type=content_type)
return resp
def returns_json(f):
@wraps(f)
def decorated_function(*args, **kwargs):
r = f(*args, **kwargs)
return render_to_json_response(json_response(r))
return decorated_function

View file

@ -1,8 +0,0 @@
def get_by_key(objects, key, value):
obj = filter(lambda o: o.get(key) == value, objects)
return obj and obj[0] or None
def get_by_id(objects, id):
return get_by_key(objects, 'id', id)

View file

@ -1,17 +1,16 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4 # vi:si:et:sw=4:sts=4:ts=4
from sqlalchemy.sql.expression import and_, not_, or_, ClauseElement
from datetime import datetime from datetime import datetime
import unicodedata import unicodedata
from sqlalchemy.sql import operators, extract from sqlalchemy.sql import operators
from sqlalchemy.orm import load_only from sqlalchemy.orm import load_only
import utils import utils
import settings import settings
import logging import logging
logger = logging.getLogger('oxflask.query') logger = logging.getLogger('queryparser')
def get_operator(op, type='str'): def get_operator(op, type='str'):
return { return {
@ -115,7 +114,8 @@ class Parser(object):
nickname, name = v.split(':', 1) nickname, name = v.split(':', 1)
if nickname: if nickname:
p = self._user.query.filter_by(nickname=nickname).first() p = self._user.query.filter_by(nickname=nickname).first()
v = '%s:%s' % (p.id, name) if p:
v = '%s:%s' % (p.id, name)
else: else:
p = self._user.query.filter_by(id=settings.USER_ID).first() p = self._user.query.filter_by(id=settings.USER_ID).first()
v = ':%s' % name v = ':%s' % name

View file

@ -18,7 +18,7 @@ config_dir = os.path.normpath(os.path.join(base_dir, '..', 'config'))
if not os.path.exists(config_dir): if not os.path.exists(config_dir):
os.makedirs(config_dir) os.makedirs(config_dir)
db_path = os.path.join(config_dir, 'openmedialibrary.db') db_path = os.path.join(config_dir, 'data.db')
icons_db_path = os.path.join(config_dir, 'icons.db') icons_db_path = os.path.join(config_dir, 'icons.db')
key_path = os.path.join(config_dir, 'node.key') key_path = os.path.join(config_dir, 'node.key')
ssl_cert_path = os.path.join(config_dir, 'node.ssl.crt') ssl_cert_path = os.path.join(config_dir, 'node.ssl.crt')
@ -67,8 +67,12 @@ else:
USER_ID = vk.to_ascii(encoding='base64') USER_ID = vk.to_ascii(encoding='base64')
if 'modules' in release and 'openmedialibrary' in release['modules']: if 'modules' in release and 'openmedialibrary' in release['modules']:
VERSION = release['modules']['openmedialibrary']['version'] MINOR_VERSION = release['modules']['openmedialibrary']['version']
else: else:
VERSION = 'git' MINOR_VERSION = 'git'
NODE_PROTOCOL="0.1"
VERSION="%s.%s" % (NODE_PROTOCOL, MINOR_VERSION)
USER_AGENT = 'OpenMediaLibrary/%s' % VERSION USER_AGENT = 'OpenMediaLibrary/%s' % VERSION

View file

@ -3,8 +3,8 @@
import json import json
from oxflask.db import MutableDict from db import MutableDict
import oxflask.query from queryparser import Parser
from changelog import Changelog from changelog import Changelog
import settings import settings
@ -19,11 +19,11 @@ class User(db.Model):
created = db.Column(db.DateTime()) created = db.Column(db.DateTime())
modified = db.Column(db.DateTime()) modified = db.Column(db.DateTime())
id = db.Column(db.String(43), primary_key=True) id = db.Column(db.String(43), primary_key=True)
info = db.Column(MutableDict.as_mutable(db.PickleType(pickler=json))) info = db.Column(MutableDict.as_mutable(db.PickleType(pickler=json)))
#nickname = db.Column(db.String(256), unique=True) nickname = db.Column(db.String(256), unique=True)
nickname = db.Column(db.String(256))
pending = db.Column(db.String(64)) # sent|received pending = db.Column(db.String(64)) # sent|received
queued = db.Column(db.Boolean()) queued = db.Column(db.Boolean())
@ -58,12 +58,12 @@ class User(db.Model):
if self.pending: if self.pending:
j['pending'] = self.pending j['pending'] = self.pending
j['peered'] = self.peered j['peered'] = self.peered
j['online'] = self.check_online() j['online'] = self.is_online()
j['nickname'] = self.nickname j['nickname'] = self.nickname
return j return j
def check_online(self): def is_online(self):
return state.nodes and state.nodes.check_online(self.id) return state.nodes and state.nodes.is_online(self.id)
def lists_json(self): def lists_json(self):
return [{ return [{
@ -72,7 +72,7 @@ class User(db.Model):
'type': 'library', 'type': 'library',
'items': self.items.count(), 'items': self.items.count(),
'user': self.nickname if self.id != settings.USER_ID else settings.preferences['username'], 'user': self.nickname if self.id != settings.USER_ID else settings.preferences['username'],
}] + [l.json() for l in self.lists.order_by('position')] }] + [l.json() for l in self.lists.order_by('index_')]
def update_peering(self, peered, username=None): def update_peering(self, peered, username=None):
was_peering = self.peered was_peering = self.peered
@ -125,7 +125,7 @@ list_items = db.Table('listitem',
class List(db.Model): class List(db.Model):
id = db.Column(db.Integer(), primary_key=True) id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String()) name = db.Column(db.String())
position = db.Column(db.Integer()) index_ = db.Column(db.Integer())
type = db.Column(db.String(64)) type = db.Column(db.String(64))
_query = db.Column('query', MutableDict.as_mutable(db.PickleType(pickler=json))) _query = db.Column('query', MutableDict.as_mutable(db.PickleType(pickler=json)))
@ -171,7 +171,7 @@ class List(db.Model):
l = cls(user_id=user_id, name=name) l = cls(user_id=user_id, name=name)
l._query = query l._query = query
l.type = 'smart' if l._query else 'static' l.type = 'smart' if l._query else 'static'
l.position = cls.query.filter_by(user_id=user_id).count() l.index_ = cls.query.filter_by(user_id=user_id).count()
if user_id == settings.USER_ID: if user_id == settings.USER_ID:
p = User.get(settings.USER_ID) p = User.get(settings.USER_ID)
if not l._query: if not l._query:
@ -251,7 +251,7 @@ class List(db.Model):
from item.models import Item from item.models import Item
if self._query: if self._query:
data = self._query data = self._query
return oxflask.query.Parser(Item).find({'query': data}).count() return Parser(Item).find({'query': data}).count()
else: else:
return len(self.items) return len(self.items)
@ -260,7 +260,7 @@ class List(db.Model):
'id': self.public_id, 'id': self.public_id,
'user': self.user.nickname if self.user_id != settings.USER_ID else settings.preferences['username'], 'user': self.user.nickname if self.user_id != settings.USER_ID else settings.preferences['username'],
'name': self.name, 'name': self.name,
'index': self.position, 'index': self.index_,
'items': self.items_count(), 'items': self.items_count(),
'type': self.type 'type': self.type
} }

View file

@ -8,7 +8,7 @@ from Queue import Queue
import json import json
from threading import Thread from threading import Thread
from oxflask.shortcuts import json_dumps from oxtornado import json_dumps
import state import state