* use extract_frame.py from pad.ma
* update json request to only get md5sum from frontend * cronjob now imports all new files to db, extracts them, and only after that sends a notification to the frontend
This commit is contained in:
parent
33518260ee
commit
802a274aba
4 changed files with 224 additions and 56 deletions
|
|
@ -40,14 +40,16 @@ class Archive(SQLObject):
|
|||
# SQLObject column declarations for the Archive table.
name = UnicodeCol(length=255, alternateID=True)  # unique archive name (byName lookup)
basePath = UnicodeCol()                          # local filesystem root of the archive
baseUrlFrontend = UnicodeCol(default='')         # frontend base URL; empty string disables notifications

# FIX: the keyword argument was misspelled 'defalut', so SQLObject never
# received a default callable for these columns.
published = DateTimeCol(default=datetime.now)    # last time changes were pushed to the frontend
modified = DateTimeCol(default=datetime.now)     # last local modification
||||
def _get_basePath(self):
    '''
    SQLObject getter override for the basePath column.

    Returns the stored base path normalized to end with a trailing '/',
    so callers can safely concatenate file names onto it.
    '''
    # Read the raw column value through SQLObject's generated accessor
    # (calling self.basePath here would recurse into this getter).
    basePath = self._SO_get_basePath()
    if not basePath.endswith('/'):
        basePath = basePath + "/"
        # Write the normalized value back so the fix-up persists.
        # NOTE(review): original indentation was lost in extraction; this
        # assumes the write-back belongs inside the if — confirm.
        self.basePath = basePath
    return basePath
|
||||
|
||||
|
||||
def notifyFrontend(self, action, md5sum):
|
||||
if self.baseUrlFrontend:
|
||||
dto = socket.getdefaulttimeout()
|
||||
|
|
@ -141,9 +143,9 @@ class Archive(SQLObject):
|
|||
ret = "added entry"
|
||||
f.updateMeta()
|
||||
f.extractAll()
|
||||
self.notifyFrontend('add', f.md5sum)
|
||||
f.modified = datetime.now()
|
||||
return ret.encode('utf-8')
|
||||
|
||||
|
||||
def removeFile(self, md5sum):
|
||||
'''
|
||||
remove file based on md5sum from archive
|
||||
|
|
@ -158,35 +160,7 @@ class Archive(SQLObject):
|
|||
self.notifyFrontend('remove', md5sum)
|
||||
return dict(result="file removed")
|
||||
return dict(result="not in archive")
|
||||
|
||||
#FIXME this fails for old frontends because it notifies the frontend
def importFrontend(self):
    '''
    Import the file list published by the frontend into this archive.

    Fetches '<baseUrlFrontend>/list' as JSON and, for every listed file:
    - if the file exists under basePath, refresh its metadata from disk
      and register it via addFile();
    - otherwise drop its entry via removeFile().

    No-op when no frontend URL is configured.
    '''
    if self.baseUrlFrontend:
        # Temporarily raise the global socket timeout for the (potentially
        # slow) listing request, restoring it afterwards.
        dto = socket.getdefaulttimeout()
        socket.setdefaulttimeout(256)
        url = "%s/%s" % (self.baseUrlFrontend, 'list')
        result = read_url(url)
        files = simplejson.loads(result)['files']
        socket.setdefaulttimeout(dto)
        for f in files:
            meta = files[f]
            fname = join(self.basePath, f)
            if exists(fname):
                # Local file present: take date from the on-disk stats.
                stats = oxdb_import.oxdb_file_stats(fname)
                meta['date'] = stats['date']
                meta['path'] = f
                # Reset extraction-derived fields; they are recomputed
                # later by the extraction step.
                meta['video'] = ''
                meta['audio'] = ''
                meta['length'] = 0
                meta['bpp'] = 0
                # Coerce numeric fields to int via float — presumably the
                # frontend may send them as strings; TODO confirm.
                for key in ('bpp', 'size', 'length', 'date'):
                    meta[key] = int(float(meta[key]))
                meta['date'] = datetime.fromtimestamp(meta['date'])
                print self.addFile(meta), f
            else:
                # Listed by the frontend but gone from disk: remove it.
                print "remove", f
                self.removeFile(meta['md5sum'])
|
||||
|
||||
|
||||
def importFiles(self):
|
||||
stats = {'skipped': 0, 'added': 0, 'remove':0}
|
||||
print self.basePath
|
||||
|
|
@ -215,38 +189,38 @@ class Archive(SQLObject):
|
|||
self.removeFile(oxdb_files[f]['md5sum'])
|
||||
stats['remove'] += 1
|
||||
print stats
|
||||
print "updating information on frontend"
|
||||
self.updateFrontend()
|
||||
return stats
|
||||
|
||||
# Frontend synchronisation strategies:
#   - updateFrontend: send files modified since the last publish
#   - bootstrapFrontend: send all files
#   - syncFrontend: fetch the frontend's list and remove out-of-sync entries
def updateFrontend(self):
    '''
    Notify the frontend about every file modified since the last publish,
    then advance the publish timestamp.

    FIX: the method was defined as 'updateFrontned' although the rest of
    the file calls self.updateFrontend(); it also compared against the
    column property (ArchiveFile.modified) instead of the sqlbuilder
    expression (ArchiveFile.q.modified) used elsewhere in this class.
    '''
    for f in ArchiveFile.select(ArchiveFile.q.modified >= self.published):
        self.notifyFrontend('add', f.md5sum)
    self.published = datetime.now()

# Backward-compatible alias for the original misspelled name.
updateFrontned = updateFrontend
|
||||
|
||||
def bootstrapFrontend(self):
    '''Announce every file in this archive to the frontend (full resend).'''
    for archived in self.files:
        self.notifyFrontend('add', archived.md5sum)
|
||||
|
||||
def cleanupFrontend(self):
|
||||
def syncFrontend(self):
    '''
    Ask the frontend which files it has and tell it to drop any entry
    that no longer exists in this archive.

    FIX: the original parsed the JSON response a second time into
    'md5sums', a value that was immediately overwritten on the next
    line; the dead re-parse has been removed.
    '''
    # Temporarily raise the global socket timeout for the listing call.
    dto = socket.getdefaulttimeout()
    socket.setdefaulttimeout(256)
    data = read_url("%s/list" % self.baseUrlFrontend)
    files = simplejson.loads(data)['files']
    socket.setdefaulttimeout(dto)
    md5sums = [str(f['md5sum']) for f in files.values()]
    for md5sum in md5sums:
        try:
            # Known locally: keep it on the frontend.
            f = ArchiveFile.byMd5sum(md5sum)
        except SQLObjectNotFound:
            # Frontend has it, we do not: tell it to remove the entry.
            self.notifyFrontend('remove', md5sum)
|
||||
|
||||
def fillFrontend(self):
    '''Push every local file the frontend does not know about yet.'''
    # Temporarily raise the global socket timeout around the listing call.
    previous_timeout = socket.getdefaulttimeout()
    socket.setdefaulttimeout(256)
    data = read_url("%s/list" % self.baseUrlFrontend)
    remote_files = simplejson.loads(data)['files']
    socket.setdefaulttimeout(previous_timeout)
    # md5sums already present on the frontend side.
    known = []
    for entry in remote_files.values():
        known.append(str(entry['md5sum']))
    # Announce everything the frontend is missing.
    for missing in ArchiveFile.select(NOT(IN(ArchiveFile.q.md5sum, known))):
        self.notifyFrontend('add', missing.md5sum)
|
||||
|
||||
|
||||
class ArchiveFile(SQLObject):
|
||||
'''
|
||||
|
|
@ -267,11 +241,11 @@ class ArchiveFile(SQLObject):
|
|||
size = IntCol()
|
||||
bpp = FloatCol(default = -1)
|
||||
pixels = IntCol(default = 0)
|
||||
|
||||
date_added = DateTimeCol(default = datetime.now)
|
||||
pubDate = DateTimeCol(default = datetime.now)
|
||||
modDate = DateTimeCol(default = datetime.now)
|
||||
|
||||
|
||||
date_added = DateTimeCol(default=datetime.now)  # when the row was created
# FIX: keyword argument was misspelled 'defalut' on the next two columns,
# so SQLObject never received the datetime.now default callable.
published = DateTimeCol(default=datetime.now)   # last publish to the frontend
modified = DateTimeCol(default=datetime.now)    # last local modification
|
||||
|
||||
height = IntCol(default = -1)
|
||||
width = IntCol(default = -1)
|
||||
frameAspect = UnicodeCol(default = "1.6", length = 100)
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue