From 046af0e777495fba55d0f2401347a0e529a8f32e Mon Sep 17 00:00:00 2001 From: j Date: Thu, 22 May 2014 00:41:29 +0200 Subject: [PATCH] add protocol version, reset migrations, move db --- migrations/versions/1a7c813a17c2_.py | 28 --- migrations/versions/1ead68a53597_.py | 26 --- migrations/versions/1fe914156ac0_.py | 38 ---- migrations/versions/21589282102d_.py | 36 ---- migrations/versions/2350803a5a2d_.py | 26 --- migrations/versions/3169519dc1e5_.py | 26 --- migrations/versions/348720abe06e_.py | 214 ---------------------- migrations/versions/3822b1700859_.py | 44 ----- migrations/versions/3c8686a285f5_.py | 218 +++++++++++++++++++++++ migrations/versions/3ea9f03f386f_.py | 35 ---- migrations/versions/4480ecc50e04_.py | 26 --- migrations/versions/7bb11a24276_.py | 36 ---- oml/app.py | 2 - oml/commands.py | 23 +-- oml/{oxflask => }/db.py | 0 oml/downloads.py | 14 +- oml/item/api.py | 8 +- oml/item/handlers.py | 5 +- oml/item/models.py | 135 +++++++------- oml/item/query.py | 9 +- oml/item/views.py | 5 +- oml/meta/dewey.py | 2 +- oml/node/server.py | 95 ++++++---- oml/nodes.py | 31 +++- oml/oxflask/__init__.py | 0 oml/oxflask/api.py | 163 ----------------- oml/oxflask/shortcuts.py | 34 ---- oml/oxflask/utils.py | 8 - oml/{oxflask/query.py => queryparser.py} | 8 +- oml/settings.py | 10 +- oml/user/models.py | 24 +-- oml/websocket.py | 2 +- 32 files changed, 424 insertions(+), 907 deletions(-) delete mode 100644 migrations/versions/1a7c813a17c2_.py delete mode 100644 migrations/versions/1ead68a53597_.py delete mode 100644 migrations/versions/1fe914156ac0_.py delete mode 100644 migrations/versions/21589282102d_.py delete mode 100644 migrations/versions/2350803a5a2d_.py delete mode 100644 migrations/versions/3169519dc1e5_.py delete mode 100644 migrations/versions/348720abe06e_.py delete mode 100644 migrations/versions/3822b1700859_.py create mode 100644 migrations/versions/3c8686a285f5_.py delete mode 100644 migrations/versions/3ea9f03f386f_.py delete mode 100644 migrations/versions/4480ecc50e04_.py delete mode 100644 migrations/versions/7bb11a24276_.py rename oml/{oxflask => }/db.py (100%) delete mode 100644 oml/oxflask/__init__.py delete mode 100644 oml/oxflask/api.py delete mode 100644 oml/oxflask/shortcuts.py delete mode 100644 oml/oxflask/utils.py rename oml/{oxflask/query.py => queryparser.py} (97%) diff --git a/migrations/versions/1a7c813a17c2_.py b/migrations/versions/1a7c813a17c2_.py deleted file mode 100644 index 3f59366..0000000 --- a/migrations/versions/1a7c813a17c2_.py +++ /dev/null @@ -1,28 +0,0 @@ -"""empty message - -Revision ID: 1a7c813a17c2 -Revises: 7bb11a24276 -Create Date: 2014-05-14 01:41:03.495320 - -""" - -# revision identifiers, used by Alembic. -revision = '1a7c813a17c2' -down_revision = '7bb11a24276' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - ### commands auto generated by Alembic - please adjust! ### - op.add_column('item', sa.Column('sort_asin', sa.String(length=1000), nullable=True)) - op.create_index(op.f('ix_item_sort_asin'), 'item', ['sort_asin'], unique=False) - ### end Alembic commands ### - - -def downgrade(): - ### commands auto generated by Alembic - please adjust! 
### - op.drop_index(op.f('ix_item_sort_asin'), table_name='item') - op.drop_column('item', 'sort_asin') - ### end Alembic commands ### diff --git a/migrations/versions/1ead68a53597_.py b/migrations/versions/1ead68a53597_.py deleted file mode 100644 index e62fffa..0000000 --- a/migrations/versions/1ead68a53597_.py +++ /dev/null @@ -1,26 +0,0 @@ -"""empty message - -Revision ID: 1ead68a53597 -Revises: 348720abe06e -Create Date: 2014-05-11 17:12:04.427336 - -""" - -# revision identifiers, used by Alembic. -revision = '1ead68a53597' -down_revision = '348720abe06e' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - ### commands auto generated by Alembic - please adjust! ### - pass - ### end Alembic commands ### - - -def downgrade(): - ### commands auto generated by Alembic - please adjust! ### - pass - ### end Alembic commands ### diff --git a/migrations/versions/1fe914156ac0_.py b/migrations/versions/1fe914156ac0_.py deleted file mode 100644 index 18abd20..0000000 --- a/migrations/versions/1fe914156ac0_.py +++ /dev/null @@ -1,38 +0,0 @@ -"""empty message - -Revision ID: 1fe914156ac0 -Revises: 4480ecc50e04 -Create Date: 2014-05-20 18:29:36.352416 - -""" - -# revision identifiers, used by Alembic. -revision = '1fe914156ac0' -down_revision = '4480ecc50e04' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - ### commands auto generated by Alembic - please adjust! ### - op.add_column('item', sa.Column('sort_isbn', sa.String(length=1000), nullable=True)) - op.create_index(op.f('ix_item_sort_isbn'), 'item', ['sort_isbn'], unique=False) - - #op.drop_column('item', 'sort_isbn13') - #op.drop_column('item', 'sort_isbn10') - #op.drop_index('ix_item_sort_isbn10', table_name='item') - #op.drop_index('ix_item_sort_isbn13', table_name='item') - ### end Alembic commands ### - pass - - -def downgrade(): - ### commands auto generated by Alembic - please adjust! ### - op.create_index('ix_item_sort_isbn13', 'item', ['sort_isbn13'], unique=False) - op.create_index('ix_item_sort_isbn10', 'item', ['sort_isbn10'], unique=False) - op.drop_index(op.f('ix_item_sort_isbn'), table_name='item') - op.add_column('item', sa.Column('sort_isbn10', sa.VARCHAR(length=1000), nullable=True)) - op.add_column('item', sa.Column('sort_isbn13', sa.VARCHAR(length=1000), nullable=True)) - op.drop_column('item', 'sort_isbn') - ### end Alembic commands ### diff --git a/migrations/versions/21589282102d_.py b/migrations/versions/21589282102d_.py deleted file mode 100644 index 6aaa857..0000000 --- a/migrations/versions/21589282102d_.py +++ /dev/null @@ -1,36 +0,0 @@ -"""empty message - -Revision ID: 21589282102d -Revises: 2350803a5a2d -Create Date: 2014-05-13 15:47:29.747858 - -""" - -# revision identifiers, used by Alembic. -revision = '21589282102d' -down_revision = '2350803a5a2d' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - ### commands auto generated by Alembic - please adjust! ### - op.create_table('filter', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('item_id', sa.String(length=32), nullable=True), - sa.Column('key', sa.String(length=200), nullable=True), - sa.Column('value', sa.Text(), nullable=True), - sa.Column('findvalue', sa.Text(), nullable=True), - sa.ForeignKeyConstraint(['item_id'], ['item.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index(op.f('ix_filter_key'), 'filter', ['key'], unique=False) - ### end Alembic commands ### - - -def downgrade(): - ### commands auto generated by Alembic - please adjust! 
### - op.drop_index(op.f('ix_filter_key'), table_name='filter') - op.drop_table('filter') - ### end Alembic commands ### diff --git a/migrations/versions/2350803a5a2d_.py b/migrations/versions/2350803a5a2d_.py deleted file mode 100644 index 9d13c2f..0000000 --- a/migrations/versions/2350803a5a2d_.py +++ /dev/null @@ -1,26 +0,0 @@ -"""empty message - -Revision ID: 2350803a5a2d -Revises: 1ead68a53597 -Create Date: 2014-05-13 15:43:51.840049 - -""" - -# revision identifiers, used by Alembic. -revision = '2350803a5a2d' -down_revision = '1ead68a53597' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - ### commands auto generated by Alembic - please adjust! ### - pass - ### end Alembic commands ### - - -def downgrade(): - ### commands auto generated by Alembic - please adjust! ### - pass - ### end Alembic commands ### diff --git a/migrations/versions/3169519dc1e5_.py b/migrations/versions/3169519dc1e5_.py deleted file mode 100644 index 8424a9c..0000000 --- a/migrations/versions/3169519dc1e5_.py +++ /dev/null @@ -1,26 +0,0 @@ -"""empty message - -Revision ID: 3169519dc1e5 -Revises: 1a7c813a17c2 -Create Date: 2014-05-18 03:28:03.950996 - -""" - -# revision identifiers, used by Alembic. -revision = '3169519dc1e5' -down_revision = '1a7c813a17c2' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - ### commands auto generated by Alembic - please adjust! ### - op.add_column('user', sa.Column('queued', sa.Boolean(), nullable=True)) - ### end Alembic commands ### - - -def downgrade(): - ### commands auto generated by Alembic - please adjust! ### - op.drop_column('user', 'queued') - ### end Alembic commands ### diff --git a/migrations/versions/348720abe06e_.py b/migrations/versions/348720abe06e_.py deleted file mode 100644 index 7c02ee9..0000000 --- a/migrations/versions/348720abe06e_.py +++ /dev/null @@ -1,214 +0,0 @@ -"""empty message - -Revision ID: 348720abe06e -Revises: None -Create Date: 2014-05-11 12:24:57.346130 - -""" - -# revision identifiers, used by Alembic. -revision = '348720abe06e' -down_revision = None - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - ### commands auto generated by Alembic - please adjust! 
### - op.create_table('user', - sa.Column('created', sa.DateTime(), nullable=True), - sa.Column('modified', sa.DateTime(), nullable=True), - sa.Column('id', sa.String(length=43), nullable=False), - sa.Column('info', sa.PickleType(), nullable=True), - sa.Column('nickname', sa.String(length=256), nullable=True), - sa.Column('pending', sa.String(length=64), nullable=True), - sa.Column('peered', sa.Boolean(), nullable=True), - sa.Column('online', sa.Boolean(), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('person', - sa.Column('name', sa.String(length=1024), nullable=False), - sa.Column('sortname', sa.String(), nullable=True), - sa.Column('numberofnames', sa.Integer(), nullable=True), - sa.PrimaryKeyConstraint('name') - ) - op.create_table('work', - sa.Column('created', sa.DateTime(), nullable=True), - sa.Column('modified', sa.DateTime(), nullable=True), - sa.Column('id', sa.String(length=32), nullable=False), - sa.Column('meta', sa.PickleType(), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('changelog', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('created', sa.DateTime(), nullable=True), - sa.Column('user_id', sa.String(length=43), nullable=True), - sa.Column('revision', sa.BigInteger(), nullable=True), - sa.Column('data', sa.Text(), nullable=True), - sa.Column('sig', sa.String(length=96), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('list', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(), nullable=True), - sa.Column('position', sa.Integer(), nullable=True), - sa.Column('type', sa.String(length=64), nullable=True), - sa.Column('query', sa.PickleType(), nullable=True), - sa.Column('user_id', sa.String(length=43), nullable=True), - sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('edition', - sa.Column('created', sa.DateTime(), nullable=True), - sa.Column('modified', sa.DateTime(), nullable=True), - sa.Column('id', sa.String(length=32), nullable=False), - sa.Column('meta', sa.PickleType(), nullable=True), - sa.Column('work_id', sa.String(length=32), nullable=True), - sa.ForeignKeyConstraint(['work_id'], ['work.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('item', - sa.Column('created', sa.DateTime(), nullable=True), - sa.Column('modified', sa.DateTime(), nullable=True), - sa.Column('id', sa.String(length=32), nullable=False), - sa.Column('info', sa.PickleType(), nullable=True), - sa.Column('meta', sa.PickleType(), nullable=True), - sa.Column('added', sa.DateTime(), nullable=True), - sa.Column('accessed', sa.DateTime(), nullable=True), - sa.Column('timesaccessed', sa.Integer(), nullable=True), - sa.Column('transferadded', sa.DateTime(), nullable=True), - sa.Column('transferprogress', sa.Float(), nullable=True), - sa.Column('edition_id', sa.String(length=32), nullable=True), - sa.Column('work_id', sa.String(length=32), nullable=True), - sa.Column('sort_title', sa.String(length=1000), nullable=True), - sa.Column('sort_author', sa.String(length=1000), nullable=True), - sa.Column('sort_language', sa.String(length=1000), nullable=True), - sa.Column('sort_publisher', sa.String(length=1000), nullable=True), - sa.Column('sort_place', sa.String(length=1000), nullable=True), - sa.Column('sort_country', sa.String(length=1000), nullable=True), - sa.Column('sort_date', sa.String(length=1000), nullable=True), - sa.Column('sort_pages', sa.BigInteger(), nullable=True), - sa.Column('sort_classification', 
sa.String(length=1000), nullable=True), - sa.Column('sort_id', sa.String(length=1000), nullable=True), - sa.Column('sort_isbn10', sa.String(length=1000), nullable=True), - sa.Column('sort_isbn13', sa.String(length=1000), nullable=True), - sa.Column('sort_lccn', sa.String(length=1000), nullable=True), - sa.Column('sort_olid', sa.String(length=1000), nullable=True), - sa.Column('sort_oclc', sa.String(length=1000), nullable=True), - sa.Column('sort_extension', sa.String(length=1000), nullable=True), - sa.Column('sort_size', sa.BigInteger(), nullable=True), - sa.Column('sort_created', sa.DateTime(), nullable=True), - sa.Column('sort_added', sa.DateTime(), nullable=True), - sa.Column('sort_modified', sa.DateTime(), nullable=True), - sa.Column('sort_accessed', sa.DateTime(), nullable=True), - sa.Column('sort_timesaccessed', sa.BigInteger(), nullable=True), - sa.Column('sort_mediastate', sa.String(length=1000), nullable=True), - sa.Column('sort_transferadded', sa.DateTime(), nullable=True), - sa.Column('sort_transferprogress', sa.Float(), nullable=True), - sa.Column('sort_random', sa.BigInteger(), nullable=True), - sa.ForeignKeyConstraint(['edition_id'], ['edition.id'], ), - sa.ForeignKeyConstraint(['work_id'], ['work.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index(op.f('ix_item_sort_accessed'), 'item', ['sort_accessed'], unique=False) - op.create_index(op.f('ix_item_sort_added'), 'item', ['sort_added'], unique=False) - op.create_index(op.f('ix_item_sort_author'), 'item', ['sort_author'], unique=False) - op.create_index(op.f('ix_item_sort_classification'), 'item', ['sort_classification'], unique=False) - op.create_index(op.f('ix_item_sort_country'), 'item', ['sort_country'], unique=False) - op.create_index(op.f('ix_item_sort_created'), 'item', ['sort_created'], unique=False) - op.create_index(op.f('ix_item_sort_date'), 'item', ['sort_date'], unique=False) - op.create_index(op.f('ix_item_sort_extension'), 'item', ['sort_extension'], unique=False) - op.create_index(op.f('ix_item_sort_id'), 'item', ['sort_id'], unique=False) - op.create_index(op.f('ix_item_sort_isbn10'), 'item', ['sort_isbn10'], unique=False) - op.create_index(op.f('ix_item_sort_isbn13'), 'item', ['sort_isbn13'], unique=False) - op.create_index(op.f('ix_item_sort_language'), 'item', ['sort_language'], unique=False) - op.create_index(op.f('ix_item_sort_lccn'), 'item', ['sort_lccn'], unique=False) - op.create_index(op.f('ix_item_sort_mediastate'), 'item', ['sort_mediastate'], unique=False) - op.create_index(op.f('ix_item_sort_modified'), 'item', ['sort_modified'], unique=False) - op.create_index(op.f('ix_item_sort_oclc'), 'item', ['sort_oclc'], unique=False) - op.create_index(op.f('ix_item_sort_olid'), 'item', ['sort_olid'], unique=False) - op.create_index(op.f('ix_item_sort_pages'), 'item', ['sort_pages'], unique=False) - op.create_index(op.f('ix_item_sort_place'), 'item', ['sort_place'], unique=False) - op.create_index(op.f('ix_item_sort_publisher'), 'item', ['sort_publisher'], unique=False) - op.create_index(op.f('ix_item_sort_random'), 'item', ['sort_random'], unique=False) - op.create_index(op.f('ix_item_sort_size'), 'item', ['sort_size'], unique=False) - op.create_index(op.f('ix_item_sort_timesaccessed'), 'item', ['sort_timesaccessed'], unique=False) - op.create_index(op.f('ix_item_sort_title'), 'item', ['sort_title'], unique=False) - op.create_index(op.f('ix_item_sort_transferadded'), 'item', ['sort_transferadded'], unique=False) - op.create_index(op.f('ix_item_sort_transferprogress'), 'item', 
['sort_transferprogress'], unique=False) - op.create_table('useritem', - sa.Column('user_id', sa.String(length=43), nullable=True), - sa.Column('item_id', sa.String(length=32), nullable=True), - sa.ForeignKeyConstraint(['item_id'], ['item.id'], ), - sa.ForeignKeyConstraint(['user_id'], ['user.id'], ) - ) - op.create_table('find', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('item_id', sa.String(length=32), nullable=True), - sa.Column('key', sa.String(length=200), nullable=True), - sa.Column('value', sa.Text(), nullable=True), - sa.ForeignKeyConstraint(['item_id'], ['item.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index(op.f('ix_find_key'), 'find', ['key'], unique=False) - op.create_table('file', - sa.Column('created', sa.DateTime(), nullable=True), - sa.Column('modified', sa.DateTime(), nullable=True), - sa.Column('sha1', sa.String(length=32), nullable=False), - sa.Column('path', sa.String(length=2048), nullable=True), - sa.Column('info', sa.PickleType(), nullable=True), - sa.Column('item_id', sa.String(length=32), nullable=True), - sa.ForeignKeyConstraint(['item_id'], ['item.id'], ), - sa.PrimaryKeyConstraint('sha1') - ) - op.create_table('listitem', - sa.Column('list_id', sa.Integer(), nullable=True), - sa.Column('item_id', sa.String(length=32), nullable=True), - sa.ForeignKeyConstraint(['item_id'], ['item.id'], ), - sa.ForeignKeyConstraint(['list_id'], ['list.id'], ) - ) - ### end Alembic commands ### - - -def downgrade(): - ### commands auto generated by Alembic - please adjust! ### - op.drop_table('listitem') - op.drop_table('file') - op.drop_index(op.f('ix_find_key'), table_name='find') - op.drop_table('find') - op.drop_table('useritem') - op.drop_index(op.f('ix_item_sort_transferprogress'), table_name='item') - op.drop_index(op.f('ix_item_sort_transferadded'), table_name='item') - op.drop_index(op.f('ix_item_sort_title'), table_name='item') - op.drop_index(op.f('ix_item_sort_timesaccessed'), table_name='item') - op.drop_index(op.f('ix_item_sort_size'), table_name='item') - op.drop_index(op.f('ix_item_sort_random'), table_name='item') - op.drop_index(op.f('ix_item_sort_publisher'), table_name='item') - op.drop_index(op.f('ix_item_sort_place'), table_name='item') - op.drop_index(op.f('ix_item_sort_pages'), table_name='item') - op.drop_index(op.f('ix_item_sort_olid'), table_name='item') - op.drop_index(op.f('ix_item_sort_oclc'), table_name='item') - op.drop_index(op.f('ix_item_sort_modified'), table_name='item') - op.drop_index(op.f('ix_item_sort_mediastate'), table_name='item') - op.drop_index(op.f('ix_item_sort_lccn'), table_name='item') - op.drop_index(op.f('ix_item_sort_language'), table_name='item') - op.drop_index(op.f('ix_item_sort_isbn13'), table_name='item') - op.drop_index(op.f('ix_item_sort_isbn10'), table_name='item') - op.drop_index(op.f('ix_item_sort_id'), table_name='item') - op.drop_index(op.f('ix_item_sort_extension'), table_name='item') - op.drop_index(op.f('ix_item_sort_date'), table_name='item') - op.drop_index(op.f('ix_item_sort_created'), table_name='item') - op.drop_index(op.f('ix_item_sort_country'), table_name='item') - op.drop_index(op.f('ix_item_sort_classification'), table_name='item') - op.drop_index(op.f('ix_item_sort_author'), table_name='item') - op.drop_index(op.f('ix_item_sort_added'), table_name='item') - op.drop_index(op.f('ix_item_sort_accessed'), table_name='item') - op.drop_table('item') - op.drop_table('edition') - op.drop_table('list') - op.drop_table('changelog') - op.drop_table('work') - op.drop_table('person') - 
op.drop_table('user') - ### end Alembic commands ### diff --git a/migrations/versions/3822b1700859_.py b/migrations/versions/3822b1700859_.py deleted file mode 100644 index 7512a41..0000000 --- a/migrations/versions/3822b1700859_.py +++ /dev/null @@ -1,44 +0,0 @@ -"""empty message - -Revision ID: 3822b1700859 -Revises: 1fe914156ac0 -Create Date: 2014-05-20 23:25:34.942115 - -""" - -# revision identifiers, used by Alembic. -revision = '3822b1700859' -down_revision = '1fe914156ac0' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - ### commands auto generated by Alembic - please adjust! ### - op.create_table('metadata', - sa.Column('created', sa.DateTime(), nullable=True), - sa.Column('modified', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('key', sa.String(length=256), nullable=True), - sa.Column('value', sa.String(length=256), nullable=True), - sa.Column('data', sa.PickleType(), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - #op.drop_column(u'item', 'sort_isbn13') - #op.drop_column(u'item', 'sort_isbn10') - #op.create_index(op.f('ix_item_sort_isbn'), 'item', ['sort_isbn'], unique=False) - #op.drop_index('ix_item_sort_isbn10', table_name='item') - #op.drop_index('ix_item_sort_isbn13', table_name='item') - ### end Alembic commands ### - - -def downgrade(): - ### commands auto generated by Alembic - please adjust! ### - op.create_index('ix_item_sort_isbn13', 'item', ['sort_isbn13'], unique=False) - op.create_index('ix_item_sort_isbn10', 'item', ['sort_isbn10'], unique=False) - op.drop_index(op.f('ix_item_sort_isbn'), table_name='item') - op.add_column(u'item', sa.Column('sort_isbn10', sa.VARCHAR(length=1000), nullable=True)) - op.add_column(u'item', sa.Column('sort_isbn13', sa.VARCHAR(length=1000), nullable=True)) - op.drop_table('metadata') - ### end Alembic commands ### diff --git a/migrations/versions/3c8686a285f5_.py b/migrations/versions/3c8686a285f5_.py new file mode 100644 index 0000000..f312f44 --- /dev/null +++ b/migrations/versions/3c8686a285f5_.py @@ -0,0 +1,218 @@ +"""empty message + +Revision ID: 3c8686a285f5 +Revises: None +Create Date: 2014-05-21 23:43:13.065858 + +""" + +# revision identifiers, used by Alembic. +revision = '3c8686a285f5' +down_revision = None + +from alembic import op +import sqlalchemy as sa + + +def upgrade(): + ### commands auto generated by Alembic - please adjust! 
### + op.create_table('item', + sa.Column('created', sa.DateTime(), nullable=True), + sa.Column('modified', sa.DateTime(), nullable=True), + sa.Column('id', sa.String(length=32), nullable=False), + sa.Column('info', sa.PickleType(), nullable=True), + sa.Column('meta', sa.PickleType(), nullable=True), + sa.Column('added', sa.DateTime(), nullable=True), + sa.Column('accessed', sa.DateTime(), nullable=True), + sa.Column('timesaccessed', sa.Integer(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('changelog', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created', sa.DateTime(), nullable=True), + sa.Column('timestamp', sa.BigInteger(), nullable=True), + sa.Column('user_id', sa.String(length=43), nullable=True), + sa.Column('revision', sa.BigInteger(), nullable=True), + sa.Column('data', sa.Text(), nullable=True), + sa.Column('sig', sa.String(length=96), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('user', + sa.Column('created', sa.DateTime(), nullable=True), + sa.Column('modified', sa.DateTime(), nullable=True), + sa.Column('id', sa.String(length=43), nullable=False), + sa.Column('info', sa.PickleType(), nullable=True), + sa.Column('nickname', sa.String(length=256), nullable=True), + sa.Column('pending', sa.String(length=64), nullable=True), + sa.Column('queued', sa.Boolean(), nullable=True), + sa.Column('peered', sa.Boolean(), nullable=True), + sa.Column('online', sa.Boolean(), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('nickname') + ) + op.create_table('metadata', + sa.Column('created', sa.DateTime(), nullable=True), + sa.Column('modified', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('key', sa.String(length=256), nullable=True), + sa.Column('value', sa.String(length=256), nullable=True), + sa.Column('data', sa.PickleType(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('person', + sa.Column('name', sa.String(length=1024), nullable=False), + sa.Column('sortname', sa.String(), nullable=True), + sa.Column('numberofnames', sa.Integer(), nullable=True), + sa.PrimaryKeyConstraint('name') + ) + op.create_table('transfer', + sa.Column('item_id', sa.String(length=32), nullable=False), + sa.Column('added', sa.DateTime(), nullable=True), + sa.Column('progress', sa.Float(), nullable=True), + sa.ForeignKeyConstraint(['item_id'], ['item.id'], ), + sa.PrimaryKeyConstraint('item_id') + ) + op.create_table('find', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('item_id', sa.String(length=32), nullable=True), + sa.Column('key', sa.String(length=200), nullable=True), + sa.Column('value', sa.Text(), nullable=True), + sa.Column('findvalue', sa.Text(), nullable=True), + sa.ForeignKeyConstraint(['item_id'], ['item.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_find_key'), 'find', ['key'], unique=False) + op.create_table('list', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(), nullable=True), + sa.Column('index_', sa.Integer(), nullable=True), + sa.Column('type', sa.String(length=64), nullable=True), + sa.Column('query', sa.PickleType(), nullable=True), + sa.Column('user_id', sa.String(length=43), nullable=True), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('useritem', + sa.Column('user_id', sa.String(length=43), nullable=True), + sa.Column('item_id', sa.String(length=32), nullable=True), + 
sa.ForeignKeyConstraint(['item_id'], ['item.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ) + ) + op.create_table('sort', + sa.Column('item_id', sa.String(length=32), nullable=False), + sa.Column('title', sa.String(length=1000), nullable=True), + sa.Column('author', sa.String(length=1000), nullable=True), + sa.Column('publisher', sa.String(length=1000), nullable=True), + sa.Column('place', sa.String(length=1000), nullable=True), + sa.Column('country', sa.String(length=1000), nullable=True), + sa.Column('date', sa.String(length=1000), nullable=True), + sa.Column('language', sa.String(length=1000), nullable=True), + sa.Column('pages', sa.BigInteger(), nullable=True), + sa.Column('classification', sa.String(length=1000), nullable=True), + sa.Column('extension', sa.String(length=1000), nullable=True), + sa.Column('size', sa.BigInteger(), nullable=True), + sa.Column('created', sa.DateTime(), nullable=True), + sa.Column('added', sa.DateTime(), nullable=True), + sa.Column('modified', sa.DateTime(), nullable=True), + sa.Column('accessed', sa.DateTime(), nullable=True), + sa.Column('timesaccessed', sa.BigInteger(), nullable=True), + sa.Column('mediastate', sa.String(length=1000), nullable=True), + sa.Column('transferadded', sa.DateTime(), nullable=True), + sa.Column('transferprogress', sa.Float(), nullable=True), + sa.Column('id', sa.String(length=1000), nullable=True), + sa.Column('isbn', sa.String(length=1000), nullable=True), + sa.Column('asin', sa.String(length=1000), nullable=True), + sa.Column('lccn', sa.String(length=1000), nullable=True), + sa.Column('olid', sa.String(length=1000), nullable=True), + sa.Column('oclc', sa.String(length=1000), nullable=True), + sa.Column('random', sa.BigInteger(), nullable=True), + sa.ForeignKeyConstraint(['item_id'], ['item.id'], ), + sa.PrimaryKeyConstraint('item_id') + ) + op.create_index(op.f('ix_sort_accessed'), 'sort', ['accessed'], unique=False) + op.create_index(op.f('ix_sort_added'), 'sort', ['added'], unique=False) + op.create_index(op.f('ix_sort_asin'), 'sort', ['asin'], unique=False) + op.create_index(op.f('ix_sort_author'), 'sort', ['author'], unique=False) + op.create_index(op.f('ix_sort_classification'), 'sort', ['classification'], unique=False) + op.create_index(op.f('ix_sort_country'), 'sort', ['country'], unique=False) + op.create_index(op.f('ix_sort_created'), 'sort', ['created'], unique=False) + op.create_index(op.f('ix_sort_date'), 'sort', ['date'], unique=False) + op.create_index(op.f('ix_sort_extension'), 'sort', ['extension'], unique=False) + op.create_index(op.f('ix_sort_id'), 'sort', ['id'], unique=False) + op.create_index(op.f('ix_sort_isbn'), 'sort', ['isbn'], unique=False) + op.create_index(op.f('ix_sort_language'), 'sort', ['language'], unique=False) + op.create_index(op.f('ix_sort_lccn'), 'sort', ['lccn'], unique=False) + op.create_index(op.f('ix_sort_mediastate'), 'sort', ['mediastate'], unique=False) + op.create_index(op.f('ix_sort_modified'), 'sort', ['modified'], unique=False) + op.create_index(op.f('ix_sort_oclc'), 'sort', ['oclc'], unique=False) + op.create_index(op.f('ix_sort_olid'), 'sort', ['olid'], unique=False) + op.create_index(op.f('ix_sort_pages'), 'sort', ['pages'], unique=False) + op.create_index(op.f('ix_sort_place'), 'sort', ['place'], unique=False) + op.create_index(op.f('ix_sort_publisher'), 'sort', ['publisher'], unique=False) + op.create_index(op.f('ix_sort_random'), 'sort', ['random'], unique=False) + op.create_index(op.f('ix_sort_size'), 'sort', ['size'], unique=False) + 
op.create_index(op.f('ix_sort_timesaccessed'), 'sort', ['timesaccessed'], unique=False) + op.create_index(op.f('ix_sort_title'), 'sort', ['title'], unique=False) + op.create_index(op.f('ix_sort_transferadded'), 'sort', ['transferadded'], unique=False) + op.create_index(op.f('ix_sort_transferprogress'), 'sort', ['transferprogress'], unique=False) + op.create_table('file', + sa.Column('created', sa.DateTime(), nullable=True), + sa.Column('modified', sa.DateTime(), nullable=True), + sa.Column('sha1', sa.String(length=32), nullable=False), + sa.Column('path', sa.String(length=2048), nullable=True), + sa.Column('info', sa.PickleType(), nullable=True), + sa.Column('item_id', sa.String(length=32), nullable=True), + sa.ForeignKeyConstraint(['item_id'], ['item.id'], ), + sa.PrimaryKeyConstraint('sha1') + ) + op.create_table('listitem', + sa.Column('list_id', sa.Integer(), nullable=True), + sa.Column('item_id', sa.String(length=32), nullable=True), + sa.ForeignKeyConstraint(['item_id'], ['item.id'], ), + sa.ForeignKeyConstraint(['list_id'], ['list.id'], ) + ) + ### end Alembic commands ### + + +def downgrade(): + ### commands auto generated by Alembic - please adjust! ### + op.drop_table('listitem') + op.drop_table('file') + op.drop_index(op.f('ix_sort_transferprogress'), table_name='sort') + op.drop_index(op.f('ix_sort_transferadded'), table_name='sort') + op.drop_index(op.f('ix_sort_title'), table_name='sort') + op.drop_index(op.f('ix_sort_timesaccessed'), table_name='sort') + op.drop_index(op.f('ix_sort_size'), table_name='sort') + op.drop_index(op.f('ix_sort_random'), table_name='sort') + op.drop_index(op.f('ix_sort_publisher'), table_name='sort') + op.drop_index(op.f('ix_sort_place'), table_name='sort') + op.drop_index(op.f('ix_sort_pages'), table_name='sort') + op.drop_index(op.f('ix_sort_olid'), table_name='sort') + op.drop_index(op.f('ix_sort_oclc'), table_name='sort') + op.drop_index(op.f('ix_sort_modified'), table_name='sort') + op.drop_index(op.f('ix_sort_mediastate'), table_name='sort') + op.drop_index(op.f('ix_sort_lccn'), table_name='sort') + op.drop_index(op.f('ix_sort_language'), table_name='sort') + op.drop_index(op.f('ix_sort_isbn'), table_name='sort') + op.drop_index(op.f('ix_sort_id'), table_name='sort') + op.drop_index(op.f('ix_sort_extension'), table_name='sort') + op.drop_index(op.f('ix_sort_date'), table_name='sort') + op.drop_index(op.f('ix_sort_created'), table_name='sort') + op.drop_index(op.f('ix_sort_country'), table_name='sort') + op.drop_index(op.f('ix_sort_classification'), table_name='sort') + op.drop_index(op.f('ix_sort_author'), table_name='sort') + op.drop_index(op.f('ix_sort_asin'), table_name='sort') + op.drop_index(op.f('ix_sort_added'), table_name='sort') + op.drop_index(op.f('ix_sort_accessed'), table_name='sort') + op.drop_table('sort') + op.drop_table('useritem') + op.drop_table('list') + op.drop_index(op.f('ix_find_key'), table_name='find') + op.drop_table('find') + op.drop_table('transfer') + op.drop_table('person') + op.drop_table('metadata') + op.drop_table('user') + op.drop_table('changelog') + op.drop_table('item') + ### end Alembic commands ### diff --git a/migrations/versions/3ea9f03f386f_.py b/migrations/versions/3ea9f03f386f_.py deleted file mode 100644 index 3fe8e75..0000000 --- a/migrations/versions/3ea9f03f386f_.py +++ /dev/null @@ -1,35 +0,0 @@ -"""empty message - -Revision ID: 3ea9f03f386f -Revises: 3822b1700859 -Create Date: 2014-05-20 23:37:03.959948 - -""" - -# revision identifiers, used by Alembic. 
-revision = '3ea9f03f386f' -down_revision = '3822b1700859' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - ### commands auto generated by Alembic - please adjust! ### - op.drop_table('metadata') - op.create_table('metadata', - sa.Column('created', sa.DateTime(), nullable=True), - sa.Column('modified', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('key', sa.String(length=256), nullable=True), - sa.Column('value', sa.String(length=256), nullable=True), - sa.Column('data', sa.PickleType(), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - ### end Alembic commands ### - - -def downgrade(): - pass - ### commands auto generated by Alembic - please adjust! ### - ### end Alembic commands ### diff --git a/migrations/versions/4480ecc50e04_.py b/migrations/versions/4480ecc50e04_.py deleted file mode 100644 index 03d2c2a..0000000 --- a/migrations/versions/4480ecc50e04_.py +++ /dev/null @@ -1,26 +0,0 @@ -"""empty message - -Revision ID: 4480ecc50e04 -Revises: 3169519dc1e5 -Create Date: 2014-05-20 02:20:20.283739 - -""" - -# revision identifiers, used by Alembic. -revision = '4480ecc50e04' -down_revision = '3169519dc1e5' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - ### commands auto generated by Alembic - please adjust! ### - op.add_column('changelog', sa.Column('timestamp', sa.BigInteger(), nullable=True)) - ### end Alembic commands ### - - -def downgrade(): - ### commands auto generated by Alembic - please adjust! ### - op.drop_column('changelog', 'timestamp') - ### end Alembic commands ### diff --git a/migrations/versions/7bb11a24276_.py b/migrations/versions/7bb11a24276_.py deleted file mode 100644 index 2320851..0000000 --- a/migrations/versions/7bb11a24276_.py +++ /dev/null @@ -1,36 +0,0 @@ -"""empty message - -Revision ID: 7bb11a24276 -Revises: 21589282102d -Create Date: 2014-05-13 18:28:46.214059 - -""" - -# revision identifiers, used by Alembic. -revision = '7bb11a24276' -down_revision = '21589282102d' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - ### commands auto generated by Alembic - please adjust! ### - op.drop_table('filter') - op.add_column('find', sa.Column('findvalue', sa.Text(), nullable=True)) - ### end Alembic commands ### - - -def downgrade(): - ### commands auto generated by Alembic - please adjust! 
### - op.drop_column('find', 'findvalue') - op.create_table('filter', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('item_id', sa.VARCHAR(length=32), nullable=True), - sa.Column('key', sa.VARCHAR(length=200), nullable=True), - sa.Column('value', sa.TEXT(), nullable=True), - sa.Column('findvalue', sa.TEXT(), nullable=True), - sa.ForeignKeyConstraint(['item_id'], [u'item.id'], ), - sa.PrimaryKeyConstraint('id') - ) - ### end Alembic commands ### diff --git a/oml/app.py b/oml/app.py index e9759df..074f663 100644 --- a/oml/app.py +++ b/oml/app.py @@ -8,7 +8,6 @@ from flask.ext.migrate import Migrate, MigrateCommand import logging -import oxflask.api import settings from settings import db @@ -31,7 +30,6 @@ logging.basicConfig(level=logging.DEBUG) app = Flask('openmedialibrary', static_folder=settings.static_path) app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////%s' % settings.db_path -app.register_blueprint(oxflask.api.app) app.register_blueprint(item.views.app) db.init_app(app) diff --git a/oml/commands.py b/oml/commands.py index 72365f1..9c1a217 100644 --- a/oml/commands.py +++ b/oml/commands.py @@ -81,24 +81,9 @@ class PostUpdate(Command): ] def run(selfi, old, new): - if old <= '20140506-2-796c77b' and new > '20140506-2-796c77b': - print 'migrate database content' - import item.models - for i in item.models.Item.query: - if 'mainid' in i.meta: - mainid = i.meta.pop('mainid') - pid = {'isbn10': 'isbn', 'isbn13': 'isbn'}.get(mainid, mainid) - i.meta['primaryid'] = [pid, i.meta[mainid]] - isbns = i.meta.get('isbn', []) - for key in ('isbn10', 'isbn13'): - if key in i.meta: - isbns.append(i.meta.pop(key)) - if isbns: - i.meta['isbn'] = isbns - for key in ('asin', 'lccn', 'olid', 'oclc'): - if key in i.meta and isinstance(i.meta[key], basestring): - i.meta[key] = [i.meta[key]] - i.update() + if old <= '20140521-65-e14c686' and new > '20140521-65-e14c686': + if not os.path.exists(settings.db_path): + r('./ctl', 'setup') class Setup(Command): """ @@ -137,7 +122,7 @@ class Release(Command): os.chdir(root_dir) - with open(os.path.expanduser('~/Private/openmedialibrary_release.key')) as fd: + with open(os.path.expanduser('~/.openmedialibrary_release.key')) as fd: SIG_KEY=ed25519.SigningKey(fd.read()) SIG_ENCODING='base64' diff --git a/oml/oxflask/db.py b/oml/db.py similarity index 100% rename from oml/oxflask/db.py rename to oml/db.py diff --git a/oml/downloads.py b/oml/downloads.py index b3620a3..15be1d4 100644 --- a/oml/downloads.py +++ b/oml/downloads.py @@ -21,13 +21,13 @@ class Downloads(Thread): def download_next(self): import item.models - for i in item.models.Item.query.filter( - item.models.Item.transferadded!=None).filter( - item.models.Item.transferprogress<1).order_by(item.models.Item.transferadded): - for u in i.users: - if state.nodes.check_online(u.id): - logger.debug('DOWNLOAD %s %s', i, u) - r = state.nodes.download(u.id, i) + for t in item.models.Transfer.query.filter( + item.models.Transfer.added!=None, + item.models.Transfer.progress<1).order_by(item.models.Transfer.added): + for u in t.item.users: + if state.nodes.is_online(u.id): + logger.debug('DOWNLOAD %s %s', t.item, u) + r = state.nodes.download(u.id, t.item) logger.debug('download ok? 
%s', r) return True return False diff --git a/oml/item/api.py b/oml/item/api.py index 3a67855..7cfcfdf 100644 --- a/oml/item/api.py +++ b/oml/item/api.py @@ -84,7 +84,7 @@ def find(data): #from sqlalchemy.sql import func #models.db.session.query(func.sum(models.Item.sort_size).label("size")) #response['size'] = x.scalar() - response['size'] = sum([i.sort_size or 0 for i in q['qs'].options(load_only('id', 'sort_size'))]) + response['size'] = sum([i.info.get('size', 0) for i in q['qs'].join(models.Sort).options(load_only('id', 'info'))]) return response actions.register(find) @@ -225,8 +225,10 @@ def cancelDownloads(data): ids = data['ids'] if ids: for item in models.Item.query.filter(models.Item.id.in_(ids)): - item.transferprogress = None - item.transferadded = None + t = models.Transfer.get(item.id) + t.progress = None + t.added = None + t.save() p = state.user() if p in item.users: item.users.remove(p) diff --git a/oml/item/handlers.py b/oml/item/handlers.py index 7eb37ee..80087fd 100644 --- a/oml/item/handlers.py +++ b/oml/item/handlers.py @@ -83,7 +83,8 @@ class ReaderHandler(OMLHandler): self.set_status(404) self.finish() return - item.sort_accessed = item.accessed = datetime.utcnow() - item.sort_timesaccessed = item.timesaccessed = (item.timesaccessed or 0) + 1 + item.accessed = datetime.utcnow() + item.timesaccessed = (item.timesaccessed or 0) + 1 + item.update_sort() item.save() return serve_static(self, os.path.join(settings.static_path, html), 'text/html') diff --git a/oml/item/models.py b/oml/item/models.py index ea89a92..49460bf 100644 --- a/oml/item/models.py +++ b/oml/item/models.py @@ -15,6 +15,8 @@ import logging import Image import ox +from db import MutableDict + import settings from settings import db, config @@ -26,7 +28,6 @@ import meta import state import utils -from oxflask.db import MutableDict from icons import icons from changelog import Changelog @@ -35,42 +36,6 @@ from utils import remove_empty_folders logger = logging.getLogger('oml.item.model') -class Work(db.Model): - - created = db.Column(db.DateTime()) - modified = db.Column(db.DateTime()) - - id = db.Column(db.String(32), primary_key=True) - - meta = db.Column(MutableDict.as_mutable(db.PickleType(pickler=json))) - - def __repr__(self): - return self.id - - def __init__(self, id): - self.id = id - self.created = datetime.utcnow() - self.modified = datetime.utcnow() - -class Edition(db.Model): - - created = db.Column(db.DateTime()) - modified = db.Column(db.DateTime()) - - id = db.Column(db.String(32), primary_key=True) - - meta = db.Column(MutableDict.as_mutable(db.PickleType(pickler=json))) - - work_id = db.Column(db.String(32), db.ForeignKey('work.id')) - work = db.relationship('Work', backref=db.backref('editions', lazy='dynamic')) - - def __repr__(self): - return self.id - - def __init__(self, id): - self.id = id - self.created = datetime.utcnow() - self.modified = datetime.utcnow() user_items = db.Table('useritem', db.Column('user_id', db.String(43), db.ForeignKey('user.id')), @@ -87,22 +52,14 @@ class Item(db.Model): info = db.Column(MutableDict.as_mutable(db.PickleType(pickler=json))) meta = db.Column(MutableDict.as_mutable(db.PickleType(pickler=json))) + # why is this in db and not in i.e. info? 
added = db.Column(db.DateTime()) # added to local library accessed = db.Column(db.DateTime()) timesaccessed = db.Column(db.Integer()) - transferadded = db.Column(db.DateTime()) - transferprogress = db.Column(db.Float()) - users = db.relationship('User', secondary=user_items, backref=db.backref('items', lazy='dynamic')) - edition_id = db.Column(db.String(32), db.ForeignKey('edition.id')) - edition = db.relationship('Edition', backref=db.backref('items', lazy='dynamic')) - - work_id = db.Column(db.String(32), db.ForeignKey('work.id')) - work = db.relationship('Work', backref=db.backref('items', lazy='dynamic')) - @property def timestamp(self): return utils.datetime2ts(self.modified) @@ -146,8 +103,10 @@ class Item(db.Model): j['timesaccessed'] = self.timesaccessed j['accessed'] = self.accessed j['added'] = self.added - j['transferadded'] = self.transferadded - j['transferprogress'] = self.transferprogress + t = Transfer.get(self.id) + if t: + j['transferadded'] = t.added + j['transferprogress'] = t.progress j['users'] = map(str, list(self.users)) if self.info: @@ -158,13 +117,6 @@ class Item(db.Model): for key in self.id_keys + ['primaryid']: if key not in self.meta and key in j: del j[key] - ''' - if self.work_id: - j['work'] = { - 'olid': self.work_id - } - j['work'].update(self.work.meta) - ''' if keys: for k in j.keys(): if k not in keys: @@ -176,6 +128,7 @@ class Item(db.Model): return f.fullpath() if f else None def update_sort(self): + s = Sort.get_or_create(self.id) for key in config['itemKeys']: if key.get('sort'): value = self.json().get(key['id'], None) @@ -202,7 +155,8 @@ class Item(db.Model): value = ox.sort_string(value).lower() elif isinstance(value, list): #empty list value = '' - setattr(self, 'sort_%s' % key['id'], value) + setattr(s, key['id'], value) + db.session.add(s) def update_find(self): @@ -255,11 +209,11 @@ class Item(db.Model): del self.meta[key] users = map(str, list(self.users)) self.info['mediastate'] = 'available' # available, unavailable, transferring - if self.transferadded and self.transferprogress < 1: + t = Transfer.get(self.id) + if t and t.added and t.progress < 1: self.info['mediastate'] = 'transferring' else: self.info['mediastate'] = 'available' if settings.USER_ID in users else 'unavailable' - #fixme: also load metadata for other ids? if 'primaryid' in self.meta: self.meta.update(Metadata.load(*self.meta['primaryid'])) self.update_sort() @@ -382,13 +336,12 @@ class Item(db.Model): def queue_download(self): u = state.user() + t = Transfer.get_or_create(self.id) if not u in self.users: logger.debug('queue %s for download', self.id) - self.transferprogress = 0 - self.transferadded = datetime.utcnow() self.users.append(u) else: - logger.debug('%s already queued for download? %s %s', self.id, self.transferprogress, self.transferadded) + logger.debug('%s already queued for download? 
%s %s', self.id, t.progress, t.added) def save_file(self, content): u = state.user() @@ -407,7 +360,9 @@ class Item(db.Model): fd.write(content) if u not in self.users: self.users.append(u) - self.transferprogress = 1 + t = Transfer.get_or_create(self.id) + t.progress = 1 + t.save() self.added = datetime.utcnow() Changelog.record(u, 'additem', self.id, self.info) self.update() @@ -419,7 +374,9 @@ class Item(db.Model): return True else: logger.debug('TRIED TO SAVE EXISTING FILE!!!') - self.transferprogress = 1 + t = Transfer.get_or_create(self.id) + t.progress = 1 + t.save() self.update() return False @@ -443,6 +400,26 @@ class Item(db.Model): self.update() Changelog.record(user, 'removeitem', self.id) +class Sort(db.Model): + item_id = db.Column(db.String(32), db.ForeignKey('item.id'), primary_key=True) + item = db.relationship('Item', backref=db.backref('sort', lazy='dynamic')) + + def __repr__(self): + return '%s_sort' % self.item_id + + @classmethod + def get(cls, item_id): + return cls.query.filter_by(item_id=item_id).first() + + @classmethod + def get_or_create(cls, item_id): + f = cls.get(item_id) + if not f: + f = cls(item_id=item_id) + db.session.add(f) + db.session.commit() + return f + for key in config['itemKeys']: if key.get('sort'): sort_type = key.get('sortType', key['type']) @@ -454,7 +431,7 @@ for key in config['itemKeys']: col = db.Column(db.DateTime(), index=True) else: col = db.Column(db.String(1000), index=True) - setattr(Item, 'sort_%s' % key['id'], col) + setattr(Sort, '%s' % key['id'], col) Item.id_keys = ['isbn', 'lccn', 'olid', 'oclc', 'asin'] Item.item_keys = config['itemKeys'] @@ -570,6 +547,36 @@ class File(db.Model): db.session.add(self) db.session.commit() + +class Transfer(db.Model): + + item_id = db.Column(db.String(32), db.ForeignKey('item.id'), primary_key=True) + item = db.relationship('Item', backref=db.backref('transfer', lazy='dynamic')) + + added = db.Column(db.DateTime()) + progress = db.Column(db.Float()) + + def __repr__(self): + return '='.join(map(str, [self.item_id, self.progress])) + + @classmethod + def get(cls, item_id): + return cls.query.filter_by(item_id=item_id).first() + + @classmethod + def get_or_create(cls, item_id): + t = cls.get(item_id) + if not t: + t = cls(item_id=item_id) + t.added = datetime.utcnow() + t.progress = 0 + t.save() + return t + + def save(self): + db.session.add(self) + db.session.commit() + class Metadata(db.Model): created = db.Column(db.DateTime()) diff --git a/oml/item/query.py b/oml/item/query.py index 70d587f..a236ccd 100644 --- a/oml/item/query.py +++ b/oml/item/query.py @@ -5,7 +5,8 @@ from __future__ import division import settings import models import utils -import oxflask.query +from queryparser import Parser + from sqlalchemy.sql.expression import nullslast @@ -18,12 +19,12 @@ def parse(data): if key in data: query[key] = data[key] #print data - query['qs'] = oxflask.query.Parser(models.Item).find(data) + query['qs'] = Parser(models.Item).find(data) if not 'group' in query: query['qs'] = order(query['qs'], query['sort']) return query -def order(qs, sort, prefix='sort_'): +def order(qs, sort, prefix='sort.'): order_by = [] if len(sort) == 1: additional_sort = settings.config['user']['ui']['listSort'] @@ -51,5 +52,5 @@ def order(qs, sort, prefix='sort_'): _order_by.append(nulls) _order_by.append(order) order_by = _order_by - qs = qs.order_by(*order_by) + qs = qs.join(models.Sort).order_by(*order_by) return qs diff --git a/oml/item/views.py b/oml/item/views.py index b0765bb..2860656 100644 --- 
a/oml/item/views.py +++ b/oml/item/views.py @@ -39,7 +39,8 @@ def reader(id, filename=''): html = 'html/txt.html' else: abort(404) - item.sort_accessed = item.accessed = datetime.utcnow() - item.sort_timesaccessed = item.timesaccessed = (item.timesaccessed or 0) + 1 + item.accessed = datetime.utcnow() + item.timesaccessed = (item.timesaccessed or 0) + 1 + item.update_sort() item.save() return app.send_static_file(html) diff --git a/oml/meta/dewey.py b/oml/meta/dewey.py index f6ecefb..113b5ed 100644 --- a/oml/meta/dewey.py +++ b/oml/meta/dewey.py @@ -3,7 +3,7 @@ def get_classification(id): name = u'%s' % id - base = str(int(id.split('/')[0].split('.')[0])) + base = ''.join([s for s in id.split('/')[0].split('.')[0] if s.isdigit()]) if base in DEWEY: name = u'%s %s' % (name, DEWEY[base].decode('utf-8')) return name diff --git a/oml/node/server.py b/oml/node/server.py index dea0b8a..21d9351 100644 --- a/oml/node/server.py +++ b/oml/node/server.py @@ -18,6 +18,7 @@ from utils import valid, get_public_ipv6 import nodeapi import cert from websocket import trigger_event +from oxtornado import run_async import logging logger = logging.getLogger('oml.node.server') @@ -28,58 +29,74 @@ class NodeHandler(tornado.web.RequestHandler): self.app = app + @tornado.web.asynchronous + @tornado.gen.coroutine def post(self): - request = self.request - if request.method == 'POST': - ''' - API - pullChanges [userid] from [to] - pushChanges [index, change] - requestPeering username message - acceptPeering username message - rejectPeering message - removePeering message + ''' + API + pullChanges [userid] from [to] + pushChanges [index, change] + requestPeering username message + acceptPeering username message + rejectPeering message + removePeering message - ping responds public ip - ''' - key = str(request.headers['X-Ed25519-Key']) - sig = str(request.headers['X-Ed25519-Signature']) - data = request.body - content = {} + ping responds public ip + ''' + key = str(self.request.headers['X-Ed25519-Key']) + sig = str(self.request.headers['X-Ed25519-Signature']) + data = self.request.body + content = {} + + self.set_header('X-Node-Protocol', settings.NODE_PROTOCOL) + if self.request.headers.get('X-Node-Protocol', None) > settings.NODE_PROTOCOL: + state.update_required = True + if self.request.headers.get('X-Node-Protocol', None) != settings.NODE_PROTOCOL: + content = settings.release + else: if valid(key, data, sig): action, args = json.loads(data) logger.debug('NODE action %s %s (%s)', action, args, key) if action == 'ping': content = { - 'ip': request.remote_addr + 'ip': self.request.remote_addr } else: - with self.app.app_context(): - u = user.models.User.get(key) - if action in ( - 'requestPeering', 'acceptPeering', 'rejectPeering', 'removePeering' - ) or (u and u.peered): - content = getattr(nodeapi, 'api_' + action)(self.app, key, *args) - else: - if u and u.pending: - logger.debug('ignore request from pending peer[%s] %s (%s)', key, action, args) - content = {} - else: - logger.debug('PEER %s IS UNKNOWN SEND 403', key) - self.set_status(403) - content = { - 'status': 'not peered' - } - content = json.dumps(content) - sig = settings.sk.sign(content, encoding='base64') - self.set_header('X-Ed25519-Signature', sig) - self.write(content) - self.finish() + content = yield tornado.gen.Task(api_call, self.app, action, key, args) + if content is None: + content = {'status': 'not peered'} + logger.debug('PEER %s IS UNKNOWN SEND 403', key) + self.set_status(403) + content = json.dumps(content) + sig = 
settings.sk.sign(content, encoding='base64') + self.set_header('X-Ed25519-Signature', sig) + self.set_header('X-Node-Protocol', settings.NODE_PROTOCOL) + self.write(content) + self.finish() def get(self): + self.set_header('X-Node-Protocol', settings.NODE_PROTOCOL) + if self.request.headers.get('X-Node-Protocol', None) > settings.NODE_PROTOCOL: + state.update_required = True self.write('Open Media Library') self.finish() +@run_async +def api_call(app, action, key, args, callback): + with app.app_context(): + u = user.models.User.get(key) + if action in ( + 'requestPeering', 'acceptPeering', 'rejectPeering', 'removePeering' + ) or (u and u.peered): + content = getattr(nodeapi, 'api_' + action)(app, key, *args) + else: + if u and u.pending: + logger.debug('ignore request from pending peer[%s] %s (%s)', key, action, args) + content = {} + else: + content = None + callback(content) + class ShareHandler(tornado.web.RequestHandler): def initialize(self, app): @@ -149,7 +166,7 @@ def check_nodes(app): if state.online: with app.app_context(): for u in user.models.User.query.filter_by(queued=True): - if not state.nodes.check_online(u.id): + if not state.nodes.is_online(u.id): logger.debug('queued peering message for %s trying to connect...', u.id) state.nodes.queue('add', u.id) diff --git a/oml/nodes.py b/oml/nodes.py index 1a1894c..da6cc96 100644 --- a/oml/nodes.py +++ b/oml/nodes.py @@ -25,6 +25,7 @@ import directory from websocket import trigger_event from localnodes import LocalNodes from ssl_request import get_opener +import state import logging logger = logging.getLogger('oml.nodes') @@ -124,6 +125,7 @@ class Node(Thread): sig = settings.sk.sign(content, encoding=ENCODING) headers = { 'User-Agent': settings.USER_AGENT, + 'X-Node-Protocol': settings.NODE_PROTOCOL, 'Accept': 'text/plain', 'Accept-Encoding': 'gzip', 'Content-Type': 'application/json', @@ -154,6 +156,15 @@ class Node(Thread): data = r.read() if r.headers.get('content-encoding', None) == 'gzip': data = gzip.GzipFile(fileobj=StringIO(data)).read() + + version = r.headers.get('X-Node-Protocol', None) + if version != settings.NODE_PROTOCOL: + logger.debug('version does not match local: %s remote %s', settings.NODE_PROTOCOL, version) + self.online = False + if version > settings.NODE_PROTOCOL: + state.update_required = True + return None + sig = r.headers.get('X-Ed25519-Signature') if sig and self._valid(data, sig): response = json.loads(data) @@ -177,7 +188,17 @@ class Node(Thread): def can_connect(self): try: logger.debug('try to connect to %s', self.url) + headers = { + 'User-Agent': settings.USER_AGENT, + 'X-Node-Protocol': settings.NODE_PROTOCOL, + 'Accept-Encoding': 'gzip', + } + self._opener.addheaders = zip(headers.keys(), headers.values()) r = self._opener.open(self.url, timeout=1) + version = r.headers.get('X-Node-Protocol', None) + if version != settings.NODE_PROTOCOL: + logger.debug('version does not match local: %s remote %s', settings.NODE_PROTOCOL, version) + return False c = r.read() logger.debug('ok') return True @@ -261,6 +282,7 @@ class Node(Thread): return True def download(self, item): + from item.models import Transfer url = '%s/get/%s' % (self.url, item.id) headers = { 'User-Agent': settings.USER_AGENT, @@ -281,11 +303,12 @@ class Node(Thread): ''' content = '' for chunk in iter(lambda: r.read(1024*1024), ''): + t = Transfer.get(item.id) content += chunk - item.transferprogress = len(content) / item.info['size'] - item.save() + t.progress = len(content) / item.info['size'] + t.save() trigger_event('transfer', { 
- 'id': item.id, 'progress': item.transferprogress + 'id': item.id, 'progress': t.progress }) ''' content = r.read() @@ -337,7 +360,7 @@ class Nodes(Thread): def queue(self, *args): self._q.put(list(args)) - def check_online(self, id): + def is_online(self, id): return id in self._nodes and self._nodes[id].online def download(self, id, item): diff --git a/oml/oxflask/__init__.py b/oml/oxflask/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/oml/oxflask/api.py b/oml/oxflask/api.py deleted file mode 100644 index c74570f..0000000 --- a/oml/oxflask/api.py +++ /dev/null @@ -1,163 +0,0 @@ -# -*- coding: utf-8 -*- -# vi:si:et:sw=4:sts=4:ts=4 -from __future__ import division, with_statement - -import inspect -import sys -import json - -from flask import request, Blueprint -from .shortcuts import render_to_json_response, json_response - -import logging -logger = logging.getLogger('oxflask.api') - -app = Blueprint('oxflask', __name__) - -@app.route('/api/', methods=['POST', 'OPTIONS']) -def api(): - if request.host not in request.headers['origin']: - logger.debug('reject cross site attempt to access api %s', request) - return '' - - if request.method == "OPTIONS": - response = render_to_json_response({'status': {'code': 200, 'text': 'use POST'}}) - #response.headers['Access-Control-Allow-Origin'] = '*' - return response - if not 'action' in request.form: - methods = actions.keys() - api = [] - for f in sorted(methods): - api.append({'name': f, - 'doc': actions.doc(f).replace('\n', '
\n')}) - return render_to_json_response(api) - action = request.form['action'] - logger.debug('API %s', action) - f = actions.get(action) - if f: - response = f(request) - else: - response = render_to_json_response(json_response(status=400, - text='Unknown action %s' % action)) - #response.headers['Access-Control-Allow-Origin'] = '*' - return response - -def trim(docstring): - if not docstring: - return '' - # Convert tabs to spaces (following the normal Python rules) - # and split into a list of lines: - lines = docstring.expandtabs().splitlines() - # Determine minimum indentation (first line doesn't count): - indent = sys.maxint - for line in lines[1:]: - stripped = line.lstrip() - if stripped: - indent = min(indent, len(line) - len(stripped)) - # Remove indentation (first line is special): - trimmed = [lines[0].strip()] - if indent < sys.maxint: - for line in lines[1:]: - trimmed.append(line[indent:].rstrip()) - # Strip off trailing and leading blank lines: - while trimmed and not trimmed[-1]: - trimmed.pop() - while trimmed and not trimmed[0]: - trimmed.pop(0) - # Return a single string: - return '\n'.join(trimmed) - - -class ApiActions(dict): - properties = {} - versions = {} - def __init__(self): - - def api(request): - ''' - returns list of all known api actions - param data { - docs: bool - } - if docs is true, action properties contain docstrings - return { - status: {'code': int, 'text': string}, - data: { - actions: { - 'api': { - cache: true, - doc: 'recursion' - }, - 'hello': { - cache: true, - .. - } - ... - } - } - } - ''' - data = json.loads(request.form.get('data', '{}')) - docs = data.get('docs', False) - code = data.get('code', False) - version = getattr(request, 'version', None) - if version: - _actions = self.versions.get(version, {}).keys() - _actions = list(set(_actions + self.keys())) - else: - _actions = self.keys() - _actions.sort() - actions = {} - for a in _actions: - actions[a] = self.properties[a] - if docs: - actions[a]['doc'] = self.doc(a, version) - if code: - actions[a]['code'] = self.code(a, version) - response = json_response({'actions': actions}) - return render_to_json_response(response) - self.register(api) - - def doc(self, name, version=None): - if version: - f = self.versions[version].get(name, self.get(name)) - else: - f = self[name] - return trim(f.__doc__) - - def code(self, name, version=None): - if version: - f = self.versions[version].get(name, self.get(name)) - else: - f = self[name] - if name != 'api' and hasattr(f, 'func_closure') and f.func_closure: - fc = filter(lambda c: hasattr(c.cell_contents, '__call__'), f.func_closure) - f = fc[len(fc)-1].cell_contents - info = f.func_code.co_filename - info = u'%s:%s' % (info, f.func_code.co_firstlineno) - return info, trim(inspect.getsource(f)) - - def register(self, method, action=None, cache=True, version=None): - if not action: - action = method.func_name - if version: - if not version in self.versions: - self.versions[version] = {} - self.versions[version][action] = method - else: - self[action] = method - self.properties[action] = {'cache': cache} - - def unregister(self, action): - if action in self: - del self[action] - -actions = ApiActions() - -def error(request): - ''' - this action is used to test api error codes, it should return a 503 error - ''' - success = error_is_success - return render_to_json_response({}) -actions.register(error) diff --git a/oml/oxflask/shortcuts.py b/oml/oxflask/shortcuts.py deleted file mode 100644 index 0823fb8..0000000 --- a/oml/oxflask/shortcuts.py +++ 
/dev/null @@ -1,34 +0,0 @@ -from functools import wraps -import datetime -import json - -from flask import Response - -def json_response(data=None, status=200, text='ok'): - if not data: - data = {} - return {'status': {'code': status, 'text': text}, 'data': data} - -def _to_json(python_object): - if isinstance(python_object, datetime.datetime): - if python_object.year < 1900: - tt = python_object.timetuple() - return '%d-%02d-%02dT%02d:%02d%02dZ' % tuple(list(tt)[:6]) - return python_object.strftime('%Y-%m-%dT%H:%M:%SZ') - raise TypeError(u'%s %s is not JSON serializable' % (repr(python_object), type(python_object))) - -def json_dumps(obj): - indent = 2 - return json.dumps(obj, indent=indent, default=_to_json, ensure_ascii=False).encode('utf-8') - -def render_to_json_response(obj, content_type="text/json", status=200): - resp = Response(json_dumps(obj), status=status, content_type=content_type) - return resp - -def returns_json(f): - @wraps(f) - def decorated_function(*args, **kwargs): - r = f(*args, **kwargs) - return render_to_json_response(json_response(r)) - return decorated_function - diff --git a/oml/oxflask/utils.py b/oml/oxflask/utils.py deleted file mode 100644 index b114611..0000000 --- a/oml/oxflask/utils.py +++ /dev/null @@ -1,8 +0,0 @@ - -def get_by_key(objects, key, value): - obj = filter(lambda o: o.get(key) == value, objects) - return obj and obj[0] or None - -def get_by_id(objects, id): - return get_by_key(objects, 'id', id) - diff --git a/oml/oxflask/query.py b/oml/queryparser.py similarity index 97% rename from oml/oxflask/query.py rename to oml/queryparser.py index bf17a84..0fbeb05 100644 --- a/oml/oxflask/query.py +++ b/oml/queryparser.py @@ -1,17 +1,16 @@ # -*- coding: utf-8 -*- # vi:si:et:sw=4:sts=4:ts=4 -from sqlalchemy.sql.expression import and_, not_, or_, ClauseElement from datetime import datetime import unicodedata -from sqlalchemy.sql import operators, extract +from sqlalchemy.sql import operators from sqlalchemy.orm import load_only import utils import settings import logging -logger = logging.getLogger('oxflask.query') +logger = logging.getLogger('queryparser') def get_operator(op, type='str'): return { @@ -115,7 +114,8 @@ class Parser(object): nickname, name = v.split(':', 1) if nickname: p = self._user.query.filter_by(nickname=nickname).first() - v = '%s:%s' % (p.id, name) + if p: + v = '%s:%s' % (p.id, name) else: p = self._user.query.filter_by(id=settings.USER_ID).first() v = ':%s' % name diff --git a/oml/settings.py b/oml/settings.py index 860f858..462ba1b 100644 --- a/oml/settings.py +++ b/oml/settings.py @@ -18,7 +18,7 @@ config_dir = os.path.normpath(os.path.join(base_dir, '..', 'config')) if not os.path.exists(config_dir): os.makedirs(config_dir) -db_path = os.path.join(config_dir, 'openmedialibrary.db') +db_path = os.path.join(config_dir, 'data.db') icons_db_path = os.path.join(config_dir, 'icons.db') key_path = os.path.join(config_dir, 'node.key') ssl_cert_path = os.path.join(config_dir, 'node.ssl.crt') @@ -67,8 +67,12 @@ else: USER_ID = vk.to_ascii(encoding='base64') if 'modules' in release and 'openmedialibrary' in release['modules']: - VERSION = release['modules']['openmedialibrary']['version'] + MINOR_VERSION = release['modules']['openmedialibrary']['version'] else: - VERSION = 'git' + MINOR_VERSION = 'git' + +NODE_PROTOCOL="0.1" +VERSION="%s.%s" % (NODE_PROTOCOL, MINOR_VERSION) + USER_AGENT = 'OpenMediaLibrary/%s' % VERSION diff --git a/oml/user/models.py b/oml/user/models.py index 36d3bbc..21cf03b 100644 --- a/oml/user/models.py +++ 
b/oml/user/models.py @@ -3,8 +3,8 @@ import json -from oxflask.db import MutableDict -import oxflask.query +from db import MutableDict +from queryparser import Parser from changelog import Changelog import settings @@ -19,11 +19,11 @@ class User(db.Model): created = db.Column(db.DateTime()) modified = db.Column(db.DateTime()) + id = db.Column(db.String(43), primary_key=True) info = db.Column(MutableDict.as_mutable(db.PickleType(pickler=json))) - #nickname = db.Column(db.String(256), unique=True) - nickname = db.Column(db.String(256)) + nickname = db.Column(db.String(256), unique=True) pending = db.Column(db.String(64)) # sent|received queued = db.Column(db.Boolean()) @@ -58,12 +58,12 @@ class User(db.Model): if self.pending: j['pending'] = self.pending j['peered'] = self.peered - j['online'] = self.check_online() + j['online'] = self.is_online() j['nickname'] = self.nickname return j - def check_online(self): - return state.nodes and state.nodes.check_online(self.id) + def is_online(self): + return state.nodes and state.nodes.is_online(self.id) def lists_json(self): return [{ @@ -72,7 +72,7 @@ class User(db.Model): 'type': 'library', 'items': self.items.count(), 'user': self.nickname if self.id != settings.USER_ID else settings.preferences['username'], - }] + [l.json() for l in self.lists.order_by('position')] + }] + [l.json() for l in self.lists.order_by('index_')] def update_peering(self, peered, username=None): was_peering = self.peered @@ -125,7 +125,7 @@ list_items = db.Table('listitem', class List(db.Model): id = db.Column(db.Integer(), primary_key=True) name = db.Column(db.String()) - position = db.Column(db.Integer()) + index_ = db.Column(db.Integer()) type = db.Column(db.String(64)) _query = db.Column('query', MutableDict.as_mutable(db.PickleType(pickler=json))) @@ -171,7 +171,7 @@ class List(db.Model): l = cls(user_id=user_id, name=name) l._query = query l.type = 'smart' if l._query else 'static' - l.position = cls.query.filter_by(user_id=user_id).count() + l.index_ = cls.query.filter_by(user_id=user_id).count() if user_id == settings.USER_ID: p = User.get(settings.USER_ID) if not l._query: @@ -251,7 +251,7 @@ class List(db.Model): from item.models import Item if self._query: data = self._query - return oxflask.query.Parser(Item).find({'query': data}).count() + return Parser(Item).find({'query': data}).count() else: return len(self.items) @@ -260,7 +260,7 @@ class List(db.Model): 'id': self.public_id, 'user': self.user.nickname if self.user_id != settings.USER_ID else settings.preferences['username'], 'name': self.name, - 'index': self.position, + 'index': self.index_, 'items': self.items_count(), 'type': self.type } diff --git a/oml/websocket.py b/oml/websocket.py index 90c7900..31ed993 100644 --- a/oml/websocket.py +++ b/oml/websocket.py @@ -8,7 +8,7 @@ from Queue import Queue import json from threading import Thread -from oxflask.shortcuts import json_dumps +from oxtornado import json_dumps import state
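
Note: callers such as oml/websocket.py now import json_dumps from oxtornado instead of
the removed oxflask.shortcuts. The oxtornado module itself is not part of this diff, so
the following is only a sketch of the helper those callers expect, assuming it mirrors
the deleted oxflask/shortcuts.py implementation shown above (simplified; the removed
helper also special-cased pre-1900 dates):

    # sketch only: assumed equivalent of oxtornado.json_dumps
    import datetime
    import json

    def _to_json(python_object):
        # encode datetimes as UTC ISO 8601 strings; anything else is an error
        if isinstance(python_object, datetime.datetime):
            return python_object.strftime('%Y-%m-%dT%H:%M:%SZ')
        raise TypeError('%r is not JSON serializable' % (python_object,))

    def json_dumps(obj):
        # UTF-8 encoded, indented JSON, as sent over the websocket
        return json.dumps(obj, indent=2, default=_to_json, ensure_ascii=False).encode('utf-8')

    # usage, as in oml/websocket.py after this patch:
    #   from oxtornado import json_dumps
    #   data = json_dumps(['pong', {}])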