migration to sha1sum, include temporary function to move cache to new location
This commit is contained in:
parent 21b921156e
commit 0857b631b4

2 changed files with 64 additions and 33 deletions
@@ -74,11 +74,11 @@ class Archive(SQLObject):
         lastModification = max([os.stat(f).st_mtime for f in glob('%s*/*'% self.basePath)])
         return time.time() - lastModification
 
-    def notifyFrontend(self, action, md5sum):
+    def notifyFrontend(self, action, sha1sum):
         if self.baseUrlFrontend:
             dto = socket.getdefaulttimeout()
             socket.setdefaulttimeout(256)
-            url = "%s/%s?md5sum=%s" % (self.baseUrlFrontend, action, md5sum)
+            url = "%s/%s?sha1sum=%s" % (self.baseUrlFrontend, action, sha1sum)
             #print url
             try:
                 result = getUrl(url)
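Both notifyFrontend and syncFrontend (further down) temporarily raise the global socket default timeout around a single getUrl call and restore it afterwards. A minimal standalone sketch of that save/set/restore pattern, for reuse outside the Archive class; the usage line is hypothetical and getUrl plus the frontend URL are stand-ins:

import socket
from contextlib import contextmanager

@contextmanager
def default_timeout(seconds):
    # save the current global default, raise it for the duration of the block,
    # then restore it -- the same dance notifyFrontend does inline
    old = socket.getdefaulttimeout()
    socket.setdefaulttimeout(seconds)
    try:
        yield
    finally:
        socket.setdefaulttimeout(old)

# hypothetical usage:
# with default_timeout(256):
#     result = getUrl("%s/add?sha1sum=%s" % (baseUrlFrontend, sha1sum))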
@@ -104,7 +104,7 @@ class Archive(SQLObject):
         files = {}
         for f in self.files:
             try:
-                d = dict(md5sum = f.md5sum, size = f.size)
+                d = dict(sha1sum = f.sha1sum, size = f.size)
                 files[f.path] = d
             except SQLObjectNotFound:
                 f.destroySelf()
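The mapping built here (consumed later as self.file_list) keys each relative path to its checksum and size; after this change an entry looks roughly like the following, with illustrative values that are not from the commit:

files = {
    u'Director/Title (2006)/Title.avi': {
        'sha1sum': u'da39a3ee5e6b4b0d3255bfef95601890afd80709',  # 40-char hex digest
        'size': 734003200,
    },
}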
@@ -119,14 +119,14 @@ class Archive(SQLObject):
     def addFile(self, params, movie = None):
         '''
            updates or adds new file to database,
-           params is a dict with at least md5sum, path, date but also needs
+           params is a dict with at least sha1sum, path, date but also needs
            audio, video, length, size, bpp for new files
         '''
         params['path'] = params['path'].replace(self.basePath, u'')
 
         q = ArchiveFile.select(AND(
             ArchiveFile.q.archiveID == self.id,
-            ArchiveFile.q.md5sum == params['md5sum'],
+            ArchiveFile.q.sha1sum == params['sha1sum'],
             ))
         if q.count() > 0:
             '''update existing entry'''
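Per the docstring above, a call that adds a new file would pass a params dict along these lines. All values are hypothetical; in the actual flow the scan code further down fills them from oxdb_file_stats and oxdb_file_metadata:

from datetime import datetime

params = dict(
    sha1sum = u'da39a3ee5e6b4b0d3255bfef95601890afd80709',
    path    = u'/mnt/archive/Director/Title (2006)/Title.avi',
    date    = datetime(2007, 5, 1, 12, 0, 0),
    # required for files not yet in the database:
    audio   = u'mp3', video = u'xvid', length = 5400,
    size    = 734003200, bpp = 0.21,
)
# archive.addFile(params)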
@@ -158,7 +158,7 @@ class Archive(SQLObject):
                 path = params['path'],
                 date = params['date'],
                 oxdb = oxdb,
-                md5sum = params['md5sum'],
+                sha1sum = params['sha1sum'],
                 audio = params['audio'],
                 video = params['video'],
                 length = params['length'],
@@ -173,18 +173,18 @@ class Archive(SQLObject):
             f.modified = datetime.now()
         return ret.encode('utf-8')
 
-    def removeFile(self, md5sum):
+    def removeFile(self, sha1sum):
         '''
-           remove file based on md5sum from archive
+           remove file based on sha1sum from archive
         '''
         q = ArchiveFile.select(AND(
             ArchiveFile.q.archiveID == self.id,
-            ArchiveFile.q.md5sum == md5sum,
+            ArchiveFile.q.sha1sum == sha1sum,
             ))
         if q.count() == 1:
             for i in q:
                 ArchiveFile.delete(i.id)
-                self.notifyFrontend('remove', md5sum)
+                self.notifyFrontend('remove', sha1sum)
             return dict(result="file removed")
         return dict(result="not in archive")
 
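removeFile now takes the SHA-1 digest and answers with a small result dict; a hedged usage sketch, where archive and the digest are placeholders:

result = archive.removeFile(u'da39a3ee5e6b4b0d3255bfef95601890afd80709')
if result['result'] == "file removed":
    pass  # the frontend was also told via notifyFrontend('remove', ...)
else:
    pass  # "not in archive": no matching ArchiveFile row for this archive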
@@ -194,13 +194,13 @@ class Archive(SQLObject):
         files = oxdb_import.oxdb_spider(self.basePath)
 
         oxdb_files = self.file_list
-        md5sum_on_disk = []
+        sha1sum_on_disk = []
         for f in files:
             meta = oxdb_import.oxdb_file_stats(f)
             f = f.replace(self.basePath, '')
             if f in oxdb_files and oxdb_files[f]['size'] == meta['size']:
                 stats['skipped'] += 1
-                md5sum_on_disk.append(oxdb_files[f]['md5sum'])
+                sha1sum_on_disk.append(oxdb_files[f]['sha1sum'])
             else:
                 meta = oxdb_import.oxdb_file_metadata(meta)
                 #FIXME: check input
@@ -209,12 +209,12 @@ class Archive(SQLObject):
                 meta['date'] = datetime.fromtimestamp(meta['date'])
                 print self.addFile(meta), f.encode('utf-8')
                 stats['added'] += 1
-                md5sum_on_disk.append(meta['md5sum'])
-                self.notifyFrontend('add', meta['md5sum'])
+                sha1sum_on_disk.append(meta['sha1sum'])
+                self.notifyFrontend('add', meta['sha1sum'])
         for f in oxdb_files:
-            if oxdb_files[f]['md5sum'] not in md5sum_on_disk:
+            if oxdb_files[f]['sha1sum'] not in sha1sum_on_disk:
                 print "remove", f.encode('utf-8')
-                self.removeFile(oxdb_files[f]['md5sum'])
+                self.removeFile(oxdb_files[f]['sha1sum'])
                 stats['remove'] += 1
         print stats
         print "updating information on frontend"
@@ -229,27 +229,27 @@ class Archive(SQLObject):
     '''
     def updateFrontend(self):
         for f in ArchiveFile.select(ArchiveFile.q.published >= self.published):
-            self.notifyFrontend('add', f.md5sum)
+            self.notifyFrontend('add', f.sha1sum)
         self.published=datetime.now()
 
     def bootstrapFrontend(self):
         for f in self.files:
-            self.notifyFrontend('add', f.md5sum)
+            self.notifyFrontend('add', f.sha1sum)
 
     def syncFrontend(self, addOnly=False):
         dto = socket.getdefaulttimeout()
         socket.setdefaulttimeout(256)
         data = getUrl("%s/list" % self.baseUrlFrontend)
-        md5sums = simplejson.loads(data)['files']
+        sha1sums = simplejson.loads(data)['files']
         socket.setdefaulttimeout(dto)
-        for md5sum in md5sums:
+        for sha1sum in sha1sums:
             try:
-                f = ArchiveFile.byMd5sum(md5sum)
+                f = ArchiveFile.bySha1sum(sha1sum)
             except SQLObjectNotFound:
-                self.notifyFrontend('remove', md5sum)
+                self.notifyFrontend('remove', sha1sum)
         if not addOnly:
-            for f in ArchiveFile.select(NOT(IN(ArchiveFile.q.md5sum, md5sums))):
-                self.notifyFrontend('add', f.md5sum)
+            for f in ArchiveFile.select(NOT(IN(ArchiveFile.q.sha1sum, sha1sums))):
+                self.notifyFrontend('add', f.sha1sum)
 
 class ArchiveFile(SQLObject):
     '''
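syncFrontend expects the frontend's /list endpoint to return a JSON object whose "files" member lists the checksums it currently knows; after this change that is a list of SHA-1 digests. A rough sketch of parsing such a response, with an illustrative payload that is not from the commit:

import simplejson

data = '{"files": ["da39a3ee5e6b4b0d3255bfef95601890afd80709"]}'  # illustrative /list response
sha1sums = simplejson.loads(data)['files']
assert sha1sums == ["da39a3ee5e6b4b0d3255bfef95601890afd80709"]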
@@ -258,7 +258,7 @@ class ArchiveFile(SQLObject):
     ALTER TABLE archive_file CHANGE srt srt LONGTEXT;
     '''
     md5sum = UnicodeCol(length=128, alternateID=True)
-    sha1sum = UnicodeCol(length=40, default='')
+    sha1sum = UnicodeCol(length=40, alternateID=True)
     oxdb = UnicodeCol(length=128)
     path = UnicodeCol()
     date = DateTimeCol()
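The column length of 40 matches a hex-encoded SHA-1 digest, and alternateID=True is what provides the ArchiveFile.bySha1sum() lookup used in syncFrontend, since SQLObject generates a by<ColumnName> classmethod for alternate-ID columns. A one-line check of the digest length:

import hashlib

digest = hashlib.sha1(b'any file contents').hexdigest()
assert len(digest) == 40   # fits UnicodeCol(length=40)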
@@ -427,25 +427,56 @@ class ArchiveFile(SQLObject):
         if f.nameExtra == self.nameExtra or f.nameExtra == 'en':
             self.subtitle_meta_id = f.id
 
-    def _get_mini_movie_file(self):
+    def _get_mini_movie_file_old(self):
         return join(cache.cache_root, 'mini', self.md5sum[:4], "%s.avi" % self.md5sum)
 
-    def _get_frameFolder(self):
+    def _get_frameFolder_old(self):
         f = join(cache.cache_root, 'frame', self.md5sum[:4], self.md5sum)
         oxdb_makedir(f)
         return f
 
-    def _get_stillFolder(self):
+    def _get_stillFolder_old(self):
         f = join(cache.cache_root, 'still', self.md5sum[:4], self.md5sum)
         oxdb_makedir(f)
         return f
 
-    def _get_timelineFile(self):
+    def _get_timelineFile_old(self):
         return join(cache.cache_root, 'timeline', self.md5sum[:4], "%s.png" % self.md5sum)
 
-    def _get_posterStillFile(self):
+    def _get_posterStillFile_old(self):
         return join(cache.cache_root, 'posterStill', self.md5sum[:4], "%s.png" % self.md5sum)
 
+    def _get_cache_root(self):
+        return join(cache.cache_root, self.sha1sum[:2], self.sha1sum)
+
+    def _get_mini_movie_file(self):
+        return join(self.cache_root, "mini.avi")
+
+    def _get_timelineFile(self):
+        return join(self.cache_root, "timeline.png")
+
+    def _get_posterStillFile(self):
+        return join(self.cache_root, "posterStill.png")
+
+    def _get_frameFolder(self):
+        f = join(self.cache_root, "frames")
+        oxdb_makedir(f)
+        return f
+
+    def _get_stillFolder(self):
+        f = join(self.cache_root, "still")
+        oxdb_makedir(f)
+        return f
+
+    def moveCache(self):
+        for f in os.listdir(self.frameFolder_old):
+            os.rename(join(self.frameFolder_old, f), join(self.frameFolder, f))
+        for f in os.listdir(self.stillFolder_old):
+            os.rename(join(self.stillFolder_old, f), join(self.stillFolder, f))
+        os.rename(self.mini_movie_file_old, self.mini_movie_file)
+        os.rename(self.timelineFile_old, self.timelineFile)
+        os.rename(self.posterStillFile_old, self.posterStillFile)
+
     def removeMiniMovie(self):
         if exists(self.mini_movie_file):
             os.remove(self.mini_movie_file)
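The commit message calls moveCache a temporary function for relocating cached artifacts (frames, stills, mini movie, timeline, poster still) from the old md5-based layout into the new per-file cache_root. A one-off migration pass over the archive could look roughly like this; it is a sketch, assuming every ArchiveFile already has its sha1sum set and some or all of its old md5-based cache populated:

# hypothetical one-shot migration script
for af in ArchiveFile.select():
    oxdb_makedir(af.cache_root)          # ensure the new sha1-based directory exists
    try:
        af.moveCache()                   # rename old md5-based paths into cache_root
    except OSError:
        pass                             # e.g. a cache item was never generated for this file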
@@ -120,13 +120,13 @@ def oxdb_file_metadata(meta):
        meta['audio'] = ''
        meta['length'] = 0
        meta['bpp'] = 0
-       meta['md5sum'] = oxdb_md5sum(meta['path'])
+       meta['sha1sum'] = oxdb_sha1sum(meta['path'])
+       #meta['md5sum'] = oxdb_md5sum(meta['path'])
        #FIXME: use midentifiy or other to get more info about file
        return meta
 
    title = oxdb_title(os.path.basename(meta['path']))
    director = os.path.basename(os.path.dirname(meta['path']))
    print '"%s"' % title, ' by', director
    #imdb = oxdb_backend.byMD5Sum(md5sum)
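oxdb_sha1sum itself is not shown in this diff; presumably it hashes the file in blocks rather than reading it into memory at once, since archive files are large. A minimal sketch of such a helper, offered as an assumption about its behavior rather than the project's actual implementation:

import hashlib

def sha1sum_of_file(path, blocksize=1024 * 1024):
    # stream the file through hashlib.sha1 one block at a time (hypothetical helper,
    # not the project's oxdb_sha1sum)
    h = hashlib.sha1()
    f = open(path, 'rb')
    try:
        block = f.read(blocksize)
        while block:
            h.update(block)
            block = f.read(blocksize)
    finally:
        f.close()
    return h.hexdigest()   # 40-character hex string, as stored in ArchiveFile.sha1sum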