import Frontend data

This commit is contained in:
j 2007-07-11 10:46:43 +00:00
parent 8ca0a45bcb
commit e8e5a492e9
2 changed files with 24 additions and 42 deletions

View file

@@ -14,9 +14,12 @@ import time
import math import math
from glob import glob from glob import glob
import shutil import shutil
from scrapeit.utils import read_url
import socket import socket
import simplejson
from scrapeit.utils import read_url
import cache import cache
import oxdb_import import oxdb_import
from oxdb_utils import oxdb_title, oxdb_director, oxdb_id from oxdb_utils import oxdb_title, oxdb_director, oxdb_id
@@ -139,6 +142,24 @@ class Archive(SQLObject):
return dict(result="file removed") return dict(result="file removed")
return dict(result="not in archive") return dict(result="not in archive")
def importFrontend(self):
if self.baseUrlFrontend:
dto = socket.getdefaulttimeout()
socket.setdefaulttimeout(100)
url = "%s/%s" % (self.baseUrlFrontend, 'list')
result = read_url(url)
files = simplejson.loads(result)['files']
socket.setdefaulttimeout(dto)
for f in files:
meta = files[f]
meta['path'] = f
meta = oxdb_import.oxdb_file_metadata(meta)
meta['video'] = ''
meta['audio'] = ''
meta['length'] = 0
meta['bpp'] = 0
print self.addFile(meta), f
def importFiles(self): def importFiles(self):
stats = {'skipped': 0, 'added': 0, 'remove':0} stats = {'skipped': 0, 'added': 0, 'remove':0}
print self.basePath print self.basePath
@@ -155,7 +176,7 @@ class Archive(SQLObject):
else: else:
meta = oxdb_import.oxdb_file_metadata(meta) meta = oxdb_import.oxdb_file_metadata(meta)
#FIXME: check input #FIXME: check input
for key in ['bpp', 'size', 'length', 'date']: for key in ('bpp', 'size', 'length', 'date'):
meta[key] = int(float(meta[key])) meta[key] = int(float(meta[key]))
meta['date'] = datetime.fromtimestamp(meta['date']) meta['date'] = datetime.fromtimestamp(meta['date'])
print self.addFile(meta), f print self.addFile(meta), f

View file

@@ -105,40 +105,6 @@ def oxdb_title(title):
title = ".".join(title) title = ".".join(title)
return title return title
def oxdb_import_files(archive):
stats = {'skipped': 0, 'added': 0, 'remove':0}
oxdb_backend = OXDb()
base = oxdb_backend.getBase()
print base
files = oxdb_spider(base)
oxdb_files = oxdb_backend.files()
md5sum_on_disk = []
for f in files:
meta = oxdb_file_stats(f)
f = f.replace(base, '')
if oxdb_files.has_key(f) and oxdb_files[f]['size'] == meta['size']:
stats['skipped'] += 1
md5sum_on_disk.append(oxdb_files[f]['md5sum'])
else:
meta = oxdb_file_metadata(meta)
#remove base
meta['path'] = f.encode('utf-8')
#ignore files changed in the last 5 minutes
if time.time() - meta['date'] > 300:
print oxdb_backend.addFile(meta), f
stats['added'] += 1
else:
print "to hot, skipping for now", f
md5sum_on_disk.append(meta['md5sum'])
for f in oxdb_files:
if oxdb_files[f]['md5sum'] not in md5sum_on_disk:
print "remove", f
oxdb_backend.removeFile({'md5sum':oxdb_files[f]['md5sum']})
stats['remove'] += 1
print stats
return stats
def oxdb_file_stats(fname): def oxdb_file_stats(fname):
stat = os.stat(fname) stat = os.stat(fname)
size = stat.st_size size = stat.st_size
@@ -166,9 +132,4 @@ def oxdb_file_metadata(meta):
print '"%s"' % title, ' by', director print '"%s"' % title, ' by', director
#imdb = oxdb_backend.byMD5Sum(md5sum) #imdb = oxdb_backend.byMD5Sum(md5sum)
# if invoked on command line, print md5 hashes of specified files.
if __name__ == '__main__':
    #for fname in sys.argv[1:]:
    #    print oxdb_md5sum(fname), fname
    # NOTE(review): oxdb_import_files is defined above with a required
    # `archive` parameter, but is called here with no arguments — this
    # would raise a TypeError as written; confirm the intended argument.
    oxdb_import_files()