checking backup system, it's not that bad after all
This commit is contained in:
parent df8a6d3905
commit 079f143abc
6 changed files with 90 additions and 10 deletions
1 .bzrignore Normal file

@@ -0,0 +1 @@
+oxdbarchive/cache
65 oxdbarchive/backup.py Normal file

@@ -0,0 +1,65 @@
+from oxdb_utils import oxdb_makedir
+from os.path import *
+import cache
+import extract
+
+
+class BackupMovieFile:
+    def __init__(self, parent):
+        self.parent = parent
+
+    def extractFrame(self, position, img_folder):
+        return self.parent.extractFrame(position)
+
+class Backup:
+    def __init__(self, md5sum):
+        self.md5sum = md5sum
+        self.timelineFile = join(cache.cache_root, 'timeline', self.md5sum[:4], "%s.png" % self.md5sum)
+        self.posterStill = join(cache.cache_root, 'posterStill', self.md5sum[:4], "%s.png" % self.md5sum)
+        self.mini_movie_file = join(cache.cache_root, 'mini', self.md5sum[:4], "%s.avi" % self.md5sum)
+        self.movieFile = BackupMovieFile(self)
+        self.frameFolder = join(cache.cache_root, 'frame', self.md5sum[:4], self.md5sum)
+
+    def exists(self):
+        return exists(self.mini_movie_file)
+
+    def timeline(self):
+        return cache.loadTimeline(self)
+
+    def frame(self, pos):
+        return cache.loadFrame(self, pos)
+
+    def posterStill(self, pos):
+        return cache.loadPosterStill(self, pos)
+
+    def flvClip(self, position):
+        return cache.loadFlvClip(self, position)
+
+    def extractFlvClip(self, inpoint, outpoint=-1):
+        flash_folder = self.frameFolder
+        movie_file = self.mini_movie_file
+        position = inpoint.replace(':', '.')
+        flash_movie = join(self.frameFolder, '%s.%s' % (position, 'flv'))
+        width = 128
+        height = int(width / self.frameAspect)
+        height = height - height % 2
+        inpoint = inpoint.replace('.', ':')
+        if outpoint == -1:
+            outpoint = shift_time(5000, inpoint)
+        else:
+            outpoint = outpoint.replace('.', ':')
+        extract.extract_flash(movie_file, flash_movie, inpoint, outpoint, width, height, offset = 0)
+
+    def extractFrame(self, position, img_folder=-1):
+        img_folder = self.frameFolder
+        movie_file = self.mini_movie_file
+        oxdb_makedir(img_folder)
+        position = position.replace('.', ':')
+        extract.extract_frame(movie_file, position, img_folder, offset = 0, redo = False)
+
+    def extractPosterStill(self, position):
+        oxdb_makedir(dirname(self.posterStillFile))
+        mFile = self.absolutePath
+        if os.path.splitext(mFile)[-1] in ('.mov', '.mpg', '.mpeg'):
+            mFile = self.mini_movie_file
+        extract.extract_poster_still(mFile, self.posterStillFile, position)
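For orientation, a minimal usage sketch of the new Backup class, with two caveats visible in the code above: extractFlvClip reads self.frameAspect, which __init__ never sets, and its default outpoint branch calls a shift_time helper this file does not import, so the sketch assigns the aspect and passes an explicit outpoint. The import path, hash, and positions are assumptions, not taken from the commit:

# Usage sketch only, not part of the commit; the hash value is made up.
from oxdbarchive import backup

md5sum = 'd41d8cd98f00b204e9800998ecf8427e'    # hypothetical md5
b = backup.Backup(md5sum)
if b.exists():                    # true iff the mini movie file is cached
    png = b.frame('00.05.00')     # delegates to cache.loadFrame
    b.frameAspect = 4 / 3.0       # extractFlvClip reads this; __init__ never sets it
    b.extractFlvClip('00:05:00', '00:05:05')   # flv clip via extract.extract_flash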
@@ -41,9 +41,9 @@ tg.empty_flash = False
 
 
 # compress the data sends to the web browser
-# [/]
-# gzip_filter.on = True
-# gzip_filter.mime_types = ["application/x-javascript", "text/javascript", "text/html", "text/css", "text/plain"]
+[/]
+gzip_filter.on = True
+gzip_filter.mime_types = ["application/x-javascript", "text/javascript", "text/html", "text/css", "text/plain"]
 
 [/static]
 static_filter.on = True
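This hunk uncomments the CherryPy gzip filter at the site root, so responses with the listed MIME types get compressed. As a quick check, not part of the commit, one could confirm the negotiated encoding with Python 2's urllib2 against a hypothetical local instance:

import urllib2

# Hypothetical host/port; the filter reports itself via Content-Encoding.
req = urllib2.Request('http://localhost:8080/',
                      headers={'Accept-Encoding': 'gzip'})
resp = urllib2.urlopen(req)
print resp.info().getheader('Content-Encoding')   # prints 'gzip' when active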
@@ -6,6 +6,7 @@ from turbogears import controllers, expose, flash, redirect
 from model import *
 from json import *
 import cherrypy
+import backup
 
 # import logging
 # log = logging.getLogger("oxdbarchive.controllers")

@@ -28,8 +29,10 @@ class Root(controllers.RootController):
         try:
             f = ArchiveFile.byMd5sum(md5Hash)
         except:
-            return dict()
+            #return dict()
+            f = backup.Backup(md5Hash)
+            if not f.exists():
+                return dict()
         if action == 'metadata':
             return dict(metadata = f)
         elif action in ('timeline', 'timeline.png'):

@@ -56,4 +59,4 @@ class Root(controllers.RootController):
             cherrypy.response.headerMap['Content-Type'] = "image/png"
             cherrypy.response.headerMap["Expires"] = httpExpires(60*60*24*15)
             return f.posterStill(position)
         return dict()
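In prose: instead of returning an empty dict when the ArchiveFile lookup raises, the controller now tries the backup cache and only gives up if that misses too. The pattern, condensed into a sketch (function name hypothetical; the commit's bare except kept as-is):

# Lookup-with-fallback pattern from the hunk above; lookupFile is hypothetical.
def lookupFile(md5Hash):
    try:
        f = ArchiveFile.byMd5sum(md5Hash)   # primary: database record
    except:                                 # bare except, as in the commit
        f = backup.Backup(md5Hash)          # fallback: backup cache
        if not f.exists():
            return None                     # miss in both stores
    return f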
@@ -7,6 +7,7 @@ from turbogears.database import PackageHub
 import turbogears
 import re
 from urllib import quote, quote_plus
+import urllib2
 import os
 from os.path import abspath, join, dirname, exists
 from datetime import datetime

@@ -34,6 +35,7 @@ def debug(string):
 hub = PackageHub('oxdbarchive')
 __connection__ = hub
 
+
 class Archive(SQLObject):
     name = UnicodeCol(length=255, alternateID=True)
     basePath = UnicodeCol()

@@ -52,7 +54,16 @@ class Archive(SQLObject):
         socket.setdefaulttimeout(256)
         url = "%s/%s?md5sum=%s" % (self.baseUrlFrontend, action, md5sum)
         #print url
-        result = read_url(url)
+        try:
+            result = read_url(url)
+        except urllib2.HTTPError, e:
+            print e.code
+            print e.read()
+            raise
+        except:
+            import traceback
+            traceback.print_exc()
+            raise
         #print "Frontend:", result['result']
         socket.setdefaulttimeout(dto)
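The frontend call now separates HTTP-level failures from everything else before re-raising. A self-contained Python 2 sketch of the same pattern directly against urllib2, assuming read_url is a thin urlopen wrapper; the URL is hypothetical:

import urllib2

url = 'http://frontend.example/update?md5sum=abc123'   # hypothetical
try:
    result = urllib2.urlopen(url).read()
except urllib2.HTTPError, e:
    print e.code     # server answered with an error status...
    print e.read()   # ...log its body before re-raising
    raise
except:
    import traceback
    traceback.print_exc()   # anything else: DNS failure, timeout, ...
    raise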
@@ -102,7 +113,7 @@ class Archive(SQLObject):
         #just a new md5? happens for srt files quite often
         qq = ArchiveFile.select(AND(
             ArchiveFile.q.archiveID == self.id,
-            ArchiveFile.q.path == params['path'],
+            ArchiveFile.q.path == params['path'].encode('utf-8'),
         ))
         f = None
         if qq.count() == 1:
@@ -48,8 +48,8 @@ def getBestMask(filename):
         output = os.path.join(workdir, c.replace(',', '-'))
         input_file = os.path.join(output,filename)
         change_count = getColorChangeCount(input_file)
-        if change_count:
+        if not change_count: change_count=100
         outputs[change_count] = dict(
             output=output,
             input_file=input_file,
         )
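The getBestMask tweak apparently stops discarding crops whose getColorChangeCount comes back falsy (0 or None); they are now filed under a fixed sentinel count of 100 so the later best-mask selection can still consider them. A sketch of the effect with hypothetical counts:

# Hypothetical illustration of the sentinel; not part of the commit.
outputs = {}
for name, change_count in [('crop-a', 37), ('crop-b', 0)]:
    if not change_count: change_count = 100   # falsy count -> sentinel
    outputs[change_count] = dict(output=name)
# Before: 'crop-b' never entered outputs. After: it is kept under key 100.
# Note several falsy crops would overwrite each other under that one key,
# just as equal non-zero counts already did.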