disable background scraping

This commit is contained in:
j 2016-01-05 21:46:50 +05:30
commit aa3ab25a30
5 changed files with 16 additions and 8 deletions

View file

@@ -224,7 +224,7 @@ class Item(db.Model):
if key == 'id':
continue
self.meta[key] = m[key]
self.modified = datetime.utcnow()
self.modified = datetime.utcnow()
self.update_sort()
self.update_find()
#self.modified = datetime.utcnow()
@@ -298,8 +298,8 @@ class Item(db.Model):
self.update_icons()
self.modified = datetime.utcnow()
self.save()
if not scrape:
Scrape.get_or_create(self.id)
#if not scrape:
# Scrape.get_or_create(self.id)
for f in self.files.all():
f.move()
user = state.user()
@@ -307,6 +307,7 @@ class Item(db.Model):
Changelog.record(user, 'edititem', self.id, record)
def edit_metadata(self, data):
Scrape.query.filter_by(item_id=self.id).delete()
if 'primaryid' in self.meta:
logger.debug('m: %s', self.meta['primaryid'])
m = Metadata.get_or_create(*self.meta['primaryid'])

View file

@@ -54,13 +54,15 @@ def add_file(id, f, prefix, from_=None):
state.db.session.add(item)
item.users.append(user)
Changelog.record(user, 'additem', item.id, file.info)
if item.meta.get('primaryid'):
Changelog.record(user, 'edititem', item.id, dict([item.meta['primaryid']]))
item.added = datetime.utcnow()
if state.online:
item.scrape()
#Changelog.record(user, 'edititem', item.id, dict([item.meta['primaryid']]))
Changelog.record(user, 'edititem', item.id, item.meta)
item.update_icons()
item.modified = datetime.utcnow()
item.update()
Scrape.get_or_create(item.id)
#Scrape.get_or_create(item.id)
return file
def run_scan():