disable background scraping

j 2016-01-05 21:46:50 +05:30
parent b6f5e9c0cc
commit aa3ab25a30
5 changed files with 16 additions and 8 deletions


@@ -150,7 +150,10 @@ class Changelog(db.Model):
         if i.timestamp > timestamp:
             logger.debug('ignore edititem change %s %s %s', timestamp, itemid, meta)
             return True
-        keys = [k for k in list(meta.keys()) if k in Item.id_keys]
+        if 'primaryid' in meta:
+            keys = [meta['primaryid'][0]]
+        else:
+            keys = [k for k in meta if k in Item.id_keys]
         if keys:
             key = keys[0]
             primary = [key, meta[key]]
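
Note: with this hunk, an edititem entry that carries an explicit primaryid pair resolves its identifier from that pair instead of scanning the payload for known id keys. A minimal sketch of the resolution order, assuming Item.id_keys is a list of identifier names such as isbn (ID_KEYS and the sample payloads below are hypothetical stand-ins, not oml's data):

    ID_KEYS = ['isbn', 'asin', 'lccn', 'olid', 'oclc']  # assumed stand-in for Item.id_keys

    def resolve_primary(meta):
        # explicit pair wins, e.g. meta['primaryid'] == ['isbn', '9780141439518']
        if 'primaryid' in meta:
            keys = [meta['primaryid'][0]]
        else:
            keys = [k for k in meta if k in ID_KEYS]
        if not keys:
            return None
        key = keys[0]
        # sketch choice: fall back to the primaryid value when the key
        # itself is not a top-level entry in the payload
        value = meta.get(key, meta.get('primaryid', [None, None])[1])
        return [key, value]

    print(resolve_primary({'primaryid': ['isbn', '9780141439518']}))         # ['isbn', '9780141439518']
    print(resolve_primary({'isbn': '9780141439518', 'title': 'Persuasion'})) # ['isbn', '9780141439518']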


@@ -298,8 +298,8 @@ class Item(db.Model):
         self.update_icons()
         self.modified = datetime.utcnow()
         self.save()
-        if not scrape:
-            Scrape.get_or_create(self.id)
+        #if not scrape:
+        #    Scrape.get_or_create(self.id)
         for f in self.files.all():
             f.move()
         user = state.user()
@@ -307,6 +307,7 @@ class Item(db.Model):
         Changelog.record(user, 'edititem', self.id, record)

     def edit_metadata(self, data):
+        Scrape.query.filter_by(item_id=self.id).delete()
         if 'primaryid' in self.meta:
             logger.debug('m: %s', self.meta['primaryid'])
             m = Metadata.get_or_create(*self.meta['primaryid'])
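
Note: taken together, the two hunks above stop update() from queuing background Scrape rows and have edit_metadata() drop any row still pending, so a manual edit is not overwritten by a later background pass. A sketch of the same bulk-delete pattern in plain SQLAlchemy (the model, engine, and session here are illustrative, not oml's):

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class Scrape(Base):
        __tablename__ = 'scrape'
        id = Column(Integer, primary_key=True)
        item_id = Column(String, index=True)

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add(Scrape(item_id='a1'))
        session.commit()
        # same shape as Scrape.query.filter_by(item_id=...).delete():
        # a bulk delete that clears any queued background-scrape row
        session.query(Scrape).filter_by(item_id='a1').delete()
        session.commit()
        print(session.query(Scrape).count())  # 0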


@@ -54,13 +54,15 @@ def add_file(id, f, prefix, from_=None):
         state.db.session.add(item)
     item.users.append(user)
     Changelog.record(user, 'additem', item.id, file.info)
+    if item.meta.get('primaryid'):
+        Changelog.record(user, 'edititem', item.id, dict([item.meta['primaryid']]))
     item.added = datetime.utcnow()
-    #Changelog.record(user, 'edititem', item.id, dict([item.meta['primaryid']]))
-    Changelog.record(user, 'edititem', item.id, item.meta)
+    if state.online:
+        item.scrape()
     item.update_icons()
     item.modified = datetime.utcnow()
     item.update()
-    Scrape.get_or_create(item.id)
+    #Scrape.get_or_create(item.id)
     return file

 def run_scan():
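
Note: in add_file() the queued Scrape row is gone; the item is scraped inline when the node is online, and the changelog records only the primary identifier rather than the full, possibly still unscraped, metadata dict. A self-contained sketch of that control flow (FakeItem and after_add are hypothetical stand-ins for oml's ORM-backed objects):

    class FakeItem:
        # hypothetical stand-in for oml's Item
        def __init__(self, meta):
            self.meta = meta
            self.scraped = False

        def scrape(self):
            self.scraped = True  # runs inline now, no background queue

    def after_add(item, online):
        records = []
        # record just the primary identifier, not the whole metadata dict
        if item.meta.get('primaryid'):
            records.append(dict([item.meta['primaryid']]))
        # Scrape.get_or_create(...) is commented out; scrape inline instead
        if online:
            item.scrape()
        return records

    item = FakeItem({'primaryid': ['isbn', '9780141439518']})
    print(after_add(item, online=True), item.scraped)
    # [{'isbn': '9780141439518'}] True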


@@ -115,7 +115,7 @@ def run():
     state.tor = tor.Tor()
     state.node = node.server.start()
     state.downloads = downloads.Downloads()
-    state.scraping = downloads.ScrapeThread()
+    #state.scraping = downloads.ScrapeThread()
     state.nodes = nodes.Nodes()
     def publish():
         if not state.tor.is_online():
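
Note: with the assignment commented out, no scrape worker thread is started at boot. Purely for context, a generic polling worker of the kind being switched off could look like the sketch below; this is an illustration, not oml's actual ScrapeThread:

    import threading
    import time

    class PollingScraper(threading.Thread):
        # illustrative daemon worker that drains a queue of item ids

        def __init__(self, next_id, scrape):
            super().__init__(daemon=True)
            self.next_id = next_id      # callable -> item id, or None when idle
            self.scrape = scrape        # callable(item_id)
            self._stop = threading.Event()

        def run(self):
            while not self._stop.is_set():
                item_id = self.next_id()
                if item_id is None:
                    time.sleep(10)      # nothing queued, poll again later
                else:
                    self.scrape(item_id)

        def stop(self):
            self._stop.set()

Disabling a worker like this shifts scraping to the call sites, as in the add_file() hunk above.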


@@ -4,6 +4,8 @@ main = None
 nodes = False
 online = False
 tasks = False
+scraping = False
+downloads = False
 tor = False
 websockets = []
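
Note: the new module-level defaults mean other code can probe state.scraping or state.downloads without an AttributeError, even though run() no longer assigns the scrape thread. A tiny illustration with a stand-in namespace:

    from types import SimpleNamespace

    # stand-in for the oml state module with the new defaults
    state = SimpleNamespace(scraping=False, downloads=False)

    # guards elsewhere can test the attribute safely; with scraping
    # left False, the branch is simply skipped
    if state.scraping:
        state.scraping.stop()
    print(state.scraping)  # False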