disable background scraping

j 2016-01-05 21:46:50 +05:30
parent b6f5e9c0cc
commit aa3ab25a30
5 changed files with 16 additions and 8 deletions

@@ -150,7 +150,10 @@ class Changelog(db.Model):
         if i.timestamp > timestamp:
             logger.debug('ignore edititem change %s %s %s', timestamp, itemid, meta)
             return True
-        keys = [k for k in list(meta.keys()) if k in Item.id_keys]
+        if 'primaryid' in meta:
+            keys = [meta['primaryid'][0]]
+        else:
+            keys = [k for k in meta if k in Item.id_keys]
         if keys:
             key = keys[0]
             primary = [key, meta[key]]

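The Changelog hunk changes how a peer resolves the primary identifier when applying an incoming edititem change: an explicit primaryid entry in the payload now wins outright, with a scan of meta for known id keys only as the fallback. A minimal sketch of the rule, assuming Item.id_keys lists the recognised identifier namespaces (the concrete names below are illustrative):

# Stand-in for Item.id_keys; the real list lives on the Item model.
id_keys = ['isbn', 'asin']

def select_primary(meta):
    if 'primaryid' in meta:
        # 'primaryid' is stored as a [key, value] pair; trust its key.
        keys = [meta['primaryid'][0]]
    else:
        keys = [k for k in meta if k in id_keys]
    if keys:
        key = keys[0]
        return [key, meta[key]]
    return None

meta = {'primaryid': ['isbn', '9780000000002'],
        'isbn': '9780000000002',
        'asin': 'B000000000'}
print(select_primary(meta))  # ['isbn', '9780000000002']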

@@ -224,7 +224,7 @@ class Item(db.Model):
             if key == 'id':
                 continue
             self.meta[key] = m[key]
-            self.modified = datetime.utcnow()
+        self.modified = datetime.utcnow()
         self.update_sort()
         self.update_find()
         #self.modified = datetime.utcnow()
@@ -298,8 +298,8 @@ class Item(db.Model):
         self.update_icons()
         self.modified = datetime.utcnow()
         self.save()
-        if not scrape:
-            Scrape.get_or_create(self.id)
+        #if not scrape:
+        #    Scrape.get_or_create(self.id)
         for f in self.files.all():
             f.move()
         user = state.user()
@@ -307,6 +307,7 @@ class Item(db.Model):
         Changelog.record(user, 'edititem', self.id, record)

     def edit_metadata(self, data):
+        Scrape.query.filter_by(item_id=self.id).delete()
         if 'primaryid' in self.meta:
             logger.debug('m: %s', self.meta['primaryid'])
             m = Metadata.get_or_create(*self.meta['primaryid'])

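Two behavioural changes land in the Item model: edit() no longer queues a background Scrape job (the branch is commented out), and edit_metadata() now deletes any pending Scrape row for the item before applying a manual edit, so a queued lookup cannot later overwrite what the user just entered. The deletion is the ordinary Flask-SQLAlchemy bulk-delete idiom; a self-contained sketch with a stand-in model (the column layout is assumed, not taken from this diff):

from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://'
db = SQLAlchemy(app)

class Scrape(db.Model):
    # Minimal stand-in: the real model has more columns.
    item_id = db.Column(db.String(32), primary_key=True)

with app.app_context():
    db.create_all()
    db.session.add(Scrape(item_id='a1b2'))
    db.session.commit()
    # The idiom used in edit_metadata(): drop queued scrapes for one item.
    Scrape.query.filter_by(item_id='a1b2').delete()
    db.session.commit()
    print(Scrape.query.count())  # 0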

@@ -54,13 +54,15 @@ def add_file(id, f, prefix, from_=None):
     state.db.session.add(item)
     item.users.append(user)
     Changelog.record(user, 'additem', item.id, file.info)
-    if item.meta.get('primaryid'):
-        Changelog.record(user, 'edititem', item.id, dict([item.meta['primaryid']]))
     item.added = datetime.utcnow()
+    if state.online:
+        item.scrape()
+    #Changelog.record(user, 'edititem', item.id, dict([item.meta['primaryid']]))
+    Changelog.record(user, 'edititem', item.id, item.meta)
     item.update_icons()
     item.modified = datetime.utcnow()
     item.update()
-    Scrape.get_or_create(item.id)
+    #Scrape.get_or_create(item.id)
     return file

 def run_scan():

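In add_file(), the conditional edititem record for just the primaryid pair is dropped; instead the item is scraped inline when the node is online, and the changelog entry then carries the full item.meta. The old payload was built with dict([item.meta['primaryid']]), which turns the single [key, value] pair into a one-entry dict; a quick illustration of what the two payloads contain (values are made up):

meta = {'primaryid': ['isbn', '9780000000002'],
        'isbn': '9780000000002',
        'title': 'Some Book'}

old_payload = dict([meta['primaryid']])  # {'isbn': '9780000000002'}
new_payload = meta                       # everything known after the inline scrape

print(old_payload)
print(sorted(new_payload))  # ['isbn', 'primaryid', 'title']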

@@ -115,7 +115,7 @@ def run():
     state.tor = tor.Tor()
     state.node = node.server.start()
     state.downloads = downloads.Downloads()
-    state.scraping = downloads.ScrapeThread()
+    #state.scraping = downloads.ScrapeThread()
     state.nodes = nodes.Nodes()
     def publish():
         if not state.tor.is_online():

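With the assignment in run() commented out, nothing starts the background worker anymore; state.scraping keeps its module default (see the state hunk below). The diff does not show ScrapeThread itself, but such a worker is typically a daemon thread polling the scrape queue, which is exactly the work this commit moves into the import path. A generic sketch under that assumption, not the actual downloads.ScrapeThread:

import threading
import time

class ScrapeThread(threading.Thread):
    # Hypothetical shape of the disabled worker.
    def __init__(self, next_job, interval=10):
        super().__init__(daemon=True)
        self.next_job = next_job   # callable returning a queued scrape or None
        self.interval = interval
        self.start()

    def run(self):
        while True:
            job = self.next_job()
            if job is None:
                time.sleep(self.interval)  # queue empty, back off
            else:
                job()                      # fetch metadata for one item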

@@ -4,6 +4,8 @@ main = None
 nodes = False
 online = False
 tasks = False
+scraping = False
+downloads = False
 tor = False
 websockets = []
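The state module holds plain module-level defaults, so other code can test the attributes before run() replaces them with real objects; adding scraping = False and downloads = False keeps such guards working now that run() no longer assigns state.scraping. A small sketch of the pattern (the shutdown guard is hypothetical, not from this diff):

import state

def shutdown():
    # False until run() assigns a thread, so this check is safe either way.
    if state.scraping:
        state.scraping.join()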