remove unused scrape table

Author: j, 2016-01-21 12:38:02 +05:30
parent 5f8094bba3
commit 97fe8731b8
6 changed files with 1 addition and 93 deletions

@@ -67,46 +67,3 @@ class Downloads(Thread):
                 time.sleep(step)
                 timeout -= step
-
-class ScrapeThread(Thread):
-
-    def __init__(self):
-        self._running = True
-        Thread.__init__(self)
-        self.daemon = True
-        self.start()
-
-    def scrape_queue(self):
-        import item.models
-        scraped = False
-        for s in item.models.Scrape.query.filter(
-            item.models.Scrape.added!=None,
-        ).order_by(item.models.Scrape.added):
-            if not self._running:
-                return True
-            logger.debug('scrape %s', s.item)
-            try:
-                if s.item.scrape():
-                    for f in s.item.files:
-                        f.move()
-                    s.item.update_icons()
-                    s.item.save()
-                s.remove()
-                trigger_event('change', {})
-                scraped = True
-            except:
-                logger.debug('scrape failed %s', s.item, exc_info=1)
-        return scraped
-
-    def run(self):
-        time.sleep(2)
-        while self._running:
-            wait = False
-            with db.session():
-                if not self.scrape_queue():
-                    wait = True
-            if wait:
-                time.sleep(1)
-
-    def join(self):
-        self._running = False
-        return Thread.join(self)
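
The deleted class is a textbook stoppable-worker pattern: a daemon thread that polls for work, sleeps when idle, and overrides join() so shutdown can flip the running flag before waiting on the thread. A minimal self-contained sketch of that pattern, with a hypothetical process_queue() standing in for scrape_queue():

import time
from threading import Thread

class Worker(Thread):
    # Stoppable polling worker, sketched after the removed ScrapeThread.
    def __init__(self):
        self._running = True
        Thread.__init__(self)
        self.daemon = True  # do not keep the interpreter alive on exit
        self.start()

    def process_queue(self):
        # Hypothetical stand-in for scrape_queue(): return True if any
        # work was done, False if the queue was empty.
        return False

    def run(self):
        while self._running:
            if not self.process_queue():
                time.sleep(1)  # queue empty: back off before polling again

    def join(self):
        self._running = False      # ask run() to exit its loop...
        return Thread.join(self)   # ...then wait for the thread to finish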

@@ -264,7 +264,6 @@ class Item(db.Model):
     def delete(self, commit=True):
         Sort.query.filter_by(item_id=self.id).delete()
         Transfer.query.filter_by(item_id=self.id).delete()
-        Scrape.query.filter_by(item_id=self.id).delete()
         state.db.session.delete(self)
         icons.clear('cover:%s' % self.id)
         icons.clear('preview:%s' % self.id)
@@ -314,7 +313,6 @@ class Item(db.Model):
         self.sync_metadata()

     def edit(self, data, modified=None):
-        Scrape.query.filter_by(item_id=self.id).delete()
         self.update_metadata(data, modified)
         for f in self.files.all():
             f.move()
@@ -710,38 +708,6 @@ class File(db.Model):
         state.db.session.add(self)
         state.db.session.commit()

-class Scrape(db.Model):
-    __tablename__ = 'scrape'
-
-    item_id = sa.Column(sa.String(32), sa.ForeignKey('item.id'), primary_key=True)
-    item = sa.orm.relationship('Item', backref=sa.orm.backref('scraping', lazy='dynamic'))
-    added = sa.Column(sa.DateTime())
-
-    def __repr__(self):
-        return '='.join(map(str, [self.item_id, self.added]))
-
-    @classmethod
-    def get(cls, item_id):
-        return cls.query.filter_by(item_id=item_id).first()
-
-    @classmethod
-    def get_or_create(cls, item_id):
-        t = cls.get(item_id)
-        if not t:
-            t = cls(item_id=item_id)
-            t.added = datetime.utcnow()
-            t.save()
-        return t
-
-    def save(self):
-        state.db.session.add(self)
-        state.db.session.commit()
-
-    def remove(self):
-        state.db.session.delete(self)
-        state.db.session.commit()
-
 class Transfer(db.Model):
     __tablename__ = 'transfer'
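
The removed Scrape.get_or_create is the usual fetch-or-insert idiom on a primary key. A minimal sketch of the same idiom in plain SQLAlchemy 1.4+ (the standalone Base and the session argument are assumptions; the project itself uses its own db.Model base and state.db.session):

from datetime import datetime
import sqlalchemy as sa
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Scrape(Base):
    # Standalone re-declaration for illustration only.
    __tablename__ = 'scrape'
    item_id = sa.Column(sa.String(32), primary_key=True)
    added = sa.Column(sa.DateTime())

def get_or_create(session, item_id):
    # Fetch by primary key; insert a timestamped row if none exists.
    row = session.get(Scrape, item_id)
    if row is None:
        row = Scrape(item_id=item_id, added=datetime.utcnow())
        session.add(row)
        session.commit()
    return row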

@@ -65,9 +65,6 @@ def shutdown():
     if state.downloads:
         logger.debug('shutdown downloads')
         state.downloads.join()
-    if state.scraping:
-        logger.debug('shutdown scraping')
-        state.scraping.join()
     logger.debug('shutdown http_server')
     state.http_server.stop()
     if state.tasks:
@@ -152,7 +149,6 @@ def run():
     state.tor = tor.Tor()
     state.node = node.server.start()
     state.downloads = downloads.Downloads()
-    #state.scraping = downloads.ScrapeThread()
     state.nodes = nodes.Nodes()
     def publish():
         if not state.tor.is_online():

@@ -345,15 +345,6 @@ def upgrade_db(old, new=None):
     if old <= '20140527-120-3cb9819':
         run_sql('CREATE INDEX ix_find_findvalue ON find (findvalue)')
-    if old <= '20150307-272-557f4d3':
-        if not db.table_exists('scrape'):
-            run_sql('''CREATE TABLE scrape (
-                item_id VARCHAR(32) NOT NULL,
-                added DATETIME,
-                PRIMARY KEY (item_id),
-                FOREIGN KEY(item_id) REFERENCES item (id)
-            )''')
-            run_sql('CREATE INDEX idx_scrape_added ON scrape (added)')
     if old <= '20151118-346-7e86e68':
         old_key = os.path.join(settings.data_path, 'node.ssl.key')
         if os.path.exists(old_key):
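
The deleted upgrade step illustrates the idempotent-migration idiom: test for the table before creating it, so upgrade_db can be re-run against a database in any intermediate state. A sketch of that check using only the standard sqlite3 module (db.table_exists and run_sql are the project's own helpers; the connection and path below are illustrative):

import sqlite3

def table_exists(conn, name):
    # sqlite_master lists every table in a SQLite database.
    return conn.execute(
        "SELECT 1 FROM sqlite_master WHERE type='table' AND name=?",
        (name,)
    ).fetchone() is not None

conn = sqlite3.connect('data.db')  # illustrative path
if not table_exists(conn, 'scrape'):
    conn.execute('''CREATE TABLE scrape (
        item_id VARCHAR(32) NOT NULL,
        added DATETIME,
        PRIMARY KEY (item_id),
        FOREIGN KEY(item_id) REFERENCES item (id)
    )''')
    conn.execute('CREATE INDEX idx_scrape_added ON scrape (added)')
conn.commit()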
@@ -446,7 +437,6 @@ def update_database():
         item.models.File,
         item.models.Find,
         item.models.Item,
-        item.models.Scrape,
         item.models.Sort,
         item.models.Transfer,
         item.models.user_items,
@@ -458,7 +448,6 @@ def update_database():
         user.models.User,
     ]
     indexes = [
-        'CREATE INDEX idx_scrape_added ON scrape (added)',
         'CREATE INDEX ix_find_findvalue ON find (findvalue)',
         'CREATE INDEX ix_find_key ON find ("key")',
         'CREATE INDEX ix_useritem_user ON useritem ("user_id")',

@@ -5,7 +5,6 @@ nodes = False
 node = False
 online = False
 tasks = False
-scraping = False
 downloads = False
 tor = False
 update = False

@@ -417,5 +417,6 @@ def migrate_7():
         session.delete(c)
     session.commit()
     db.run_sql('DROP TABLE IF EXISTS metadata')
+    db.run_sql('DROP TABLE IF EXISTS scrape')
     db.run_sql('VACUUM')
     return 7
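
The single '+' line above is the commit's one addition: migrate_7 now drops the orphaned table. DROP TABLE IF EXISTS is a no-op when the table is absent, so the migration stays safe on fresh installs that never created it. A minimal equivalent using only the standard sqlite3 module (path and connection handling are illustrative):

import sqlite3

conn = sqlite3.connect('data.db')  # illustrative path
# IF EXISTS makes the drop a no-op on databases that never had the table.
conn.execute('DROP TABLE IF EXISTS scrape')
# VACUUM rewrites the file to reclaim the freed pages; it must run outside
# a transaction, which holds here since sqlite3 opens none for DDL.
conn.execute('VACUUM')
conn.close()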