From 97fe8731b8c67ca9331a2ed5a9b3592cf7bcb0ce Mon Sep 17 00:00:00 2001
From: j
Date: Thu, 21 Jan 2016 12:38:02 +0530
Subject: [PATCH] remove unused scrape table

---
 oml/downloads.py   | 43 -------------------------------------------
 oml/item/models.py | 34 ----------------------------------
 oml/server.py      |  4 ----
 oml/setup.py       | 11 -----------
 oml/state.py       |  1 -
 oml/update.py      |  1 +
 6 files changed, 1 insertion(+), 93 deletions(-)

diff --git a/oml/downloads.py b/oml/downloads.py
index 70333e0..ca70476 100644
--- a/oml/downloads.py
+++ b/oml/downloads.py
@@ -67,46 +67,3 @@ class Downloads(Thread):
             time.sleep(step)
             timeout -= step
 
-class ScrapeThread(Thread):
-
-    def __init__(self):
-        self._running = True
-        Thread.__init__(self)
-        self.daemon = True
-        self.start()
-
-    def scrape_queue(self):
-        import item.models
-        scraped = False
-        for s in item.models.Scrape.query.filter(
-            item.models.Scrape.added!=None,
-        ).order_by(item.models.Scrape.added):
-            if not self._running:
-                return True
-            logger.debug('scrape %s', s.item)
-            try:
-                if s.item.scrape():
-                    for f in s.item.files:
-                        f.move()
-                    s.item.update_icons()
-                    s.item.save()
-                s.remove()
-                trigger_event('change', {})
-                scraped = True
-            except:
-                logger.debug('scrape failed %s', s.item, exc_info=1)
-        return scraped
-
-    def run(self):
-        time.sleep(2)
-        while self._running:
-            wait = False
-            with db.session():
-                if not self.scrape_queue():
-                    wait = True
-            if wait:
-                time.sleep(1)
-
-    def join(self):
-        self._running = False
-        return Thread.join(self)
diff --git a/oml/item/models.py b/oml/item/models.py
index 209969f..f74f36a 100644
--- a/oml/item/models.py
+++ b/oml/item/models.py
@@ -264,7 +264,6 @@ class Item(db.Model):
     def delete(self, commit=True):
         Sort.query.filter_by(item_id=self.id).delete()
         Transfer.query.filter_by(item_id=self.id).delete()
-        Scrape.query.filter_by(item_id=self.id).delete()
         state.db.session.delete(self)
         icons.clear('cover:%s' % self.id)
         icons.clear('preview:%s' % self.id)
@@ -314,7 +313,6 @@ class Item(db.Model):
         self.sync_metadata()
 
     def edit(self, data, modified=None):
-        Scrape.query.filter_by(item_id=self.id).delete()
         self.update_metadata(data, modified)
         for f in self.files.all():
             f.move()
@@ -710,38 +708,6 @@ class File(db.Model):
         state.db.session.add(self)
         state.db.session.commit()
 
-class Scrape(db.Model):
-
-    __tablename__ = 'scrape'
-
-    item_id = sa.Column(sa.String(32), sa.ForeignKey('item.id'), primary_key=True)
-    item = sa.orm.relationship('Item', backref=sa.orm.backref('scraping', lazy='dynamic'))
-
-    added = sa.Column(sa.DateTime())
-
-    def __repr__(self):
-        return '='.join(map(str, [self.item_id, self.added]))
-
-    @classmethod
-    def get(cls, item_id):
-        return cls.query.filter_by(item_id=item_id).first()
-
-    @classmethod
-    def get_or_create(cls, item_id):
-        t = cls.get(item_id)
-        if not t:
-            t = cls(item_id=item_id)
-            t.added = datetime.utcnow()
-            t.save()
-        return t
-
-    def save(self):
-        state.db.session.add(self)
-        state.db.session.commit()
-
-    def remove(self):
-        state.db.session.delete(self)
-        state.db.session.commit()
 class Transfer(db.Model):
 
     __tablename__ = 'transfer'
diff --git a/oml/server.py b/oml/server.py
index 4f80e9a..d239251 100644
--- a/oml/server.py
+++ b/oml/server.py
@@ -65,9 +65,6 @@ def shutdown():
     if state.downloads:
         logger.debug('shutdown downloads')
         state.downloads.join()
-    if state.scraping:
-        logger.debug('shutdown scraping')
-        state.scraping.join()
     logger.debug('shutdown http_server')
     state.http_server.stop()
     if state.tasks:
@@ -152,7 +149,6 @@ def run():
     state.tor = tor.Tor()
     state.node = node.server.start()
     state.downloads = downloads.Downloads()
-    #state.scraping = downloads.ScrapeThread()
     state.nodes = nodes.Nodes()
     def publish():
         if not state.tor.is_online():
diff --git a/oml/setup.py b/oml/setup.py
index 3cda03b..d987917 100644
--- a/oml/setup.py
+++ b/oml/setup.py
@@ -345,15 +345,6 @@ def upgrade_db(old, new=None):
     if old <= '20140527-120-3cb9819':
         run_sql('CREATE INDEX ix_find_findvalue ON find (findvalue)')
 
-    if old <= '20150307-272-557f4d3':
-        if not db.table_exists('scrape'):
-            run_sql('''CREATE TABLE scrape (
-    item_id VARCHAR(32) NOT NULL,
-    added DATETIME,
-    PRIMARY KEY (item_id),
-    FOREIGN KEY(item_id) REFERENCES item (id)
-)''')
-            run_sql('CREATE INDEX idx_scrape_added ON scrape (added)')
     if old <= '20151118-346-7e86e68':
         old_key = os.path.join(settings.data_path, 'node.ssl.key')
         if os.path.exists(old_key):
@@ -446,7 +437,6 @@ def update_database():
         item.models.File,
         item.models.Find,
         item.models.Item,
-        item.models.Scrape,
         item.models.Sort,
         item.models.Transfer,
         item.models.user_items,
@@ -458,7 +448,6 @@ def update_database():
         user.models.User,
     ]
     indexes = [
-        'CREATE INDEX idx_scrape_added ON scrape (added)',
         'CREATE INDEX ix_find_findvalue ON find (findvalue)',
         'CREATE INDEX ix_find_key ON find ("key")',
         'CREATE INDEX ix_useritem_user ON useritem ("user_id")',
diff --git a/oml/state.py b/oml/state.py
index bf7aa40..062c45c 100644
--- a/oml/state.py
+++ b/oml/state.py
@@ -5,7 +5,6 @@ nodes = False
 node = False
 online = False
 tasks = False
-scraping = False
 downloads = False
 tor = False
 update = False
diff --git a/oml/update.py b/oml/update.py
index b09680c..d10e5b5 100644
--- a/oml/update.py
+++ b/oml/update.py
@@ -417,5 +417,6 @@ def migrate_7():
             session.delete(c)
         session.commit()
     db.run_sql('DROP TABLE IF EXISTS metadata')
+    db.run_sql('DROP TABLE IF EXISTS scrape')
    db.run_sql('VACUUM')
     return 7
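
Not part of the patch itself: a minimal sketch of how the migration's effect could be checked against the SQLite database the run_sql/table_exists helpers above operate on. The db_path value is a hypothetical placeholder for the actual database file, and table_exists here is a local stand-in written with the stdlib sqlite3 module, not the project's helper.

# Sketch: confirm the 'scrape' table is gone after migrate_7 has run.
# Assumes a SQLite database file; db_path is a hypothetical placeholder.
import sqlite3

db_path = 'library.db'  # hypothetical: point at the actual oml database file

def table_exists(con, name):
    # sqlite_master lists every table defined in a SQLite database
    return con.execute(
        "SELECT 1 FROM sqlite_master WHERE type='table' AND name=?",
        (name,),
    ).fetchone() is not None

with sqlite3.connect(db_path) as con:
    # migrate_7 issues DROP TABLE IF EXISTS scrape, which is idempotent,
    # so the table stays absent no matter how often the migration reruns.
    assert not table_exists(con, 'scrape')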