migrate to oxutils

j 2008-06-01 13:48:01 +02:00
parent 3f9407c944
commit cc27cdc29d
2 changed files with 3 additions and 7 deletions


@@ -25,10 +25,6 @@ def extractNew():
     for f in ArchiveFile.select(ArchiveFile.q.extracted == False):
         f.extractAll()
-
-def cleanScrapeitCache():
-    #remove files older than 30 days from scrapeit cache
-    os.system("""find /var/cache/scrapeit/ -type f -ctime +30 -exec rm '{}' \;""")
 
 def cleanClipMovieCache():
     cache = os.path.abspath('oxdb/cache/mini/')
     for d, p, files in os.walk(cache):
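
For reference, the dropped cleanScrapeitCache() shelled out to find(1) to delete cache files older than 30 days. A pure-Python sketch of the same cleanup (not part of this commit; the path and 30-day ctime cutoff are taken from the removed line) would be:

import os
import time

def cleanScrapeitCache(cache='/var/cache/scrapeit/', days=30):
    # mirror `find /var/cache/scrapeit/ -type f -ctime +30 -exec rm '{}' \;`:
    # walk the cache and unlink files whose ctime is older than the cutoff
    cutoff = time.time() - days * 24 * 60 * 60
    for dirname, dirnames, filenames in os.walk(cache):
        for filename in filenames:
            path = os.path.join(dirname, filename)
            if os.path.getctime(path) < cutoff:
                os.remove(path)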


@@ -18,7 +18,7 @@ import shutil
 import socket
 import simplejson
-from scrapeit.utils import read_url
+from oxutils.net import getUrl
 import subtitles
 import cache
@@ -56,7 +56,7 @@ class Archive(SQLObject):
         url = "%s/%s?md5sum=%s" % (self.baseUrlFrontend, action, md5sum)
         #print url
         try:
-            result = read_url(url)
+            result = getUrl(url)
         except urllib2.HTTPError, e:
             print e.code
             print e.read()
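
The swap assumes oxutils.net.getUrl matches read_url's contract: fetch the URL, return the response body, and let urllib2.HTTPError propagate so the except clause above still fires. A hypothetical minimal stand-in with that contract (for illustration only; the real oxutils implementation may add caching or headers):

import urllib2

def getUrl(url):
    # fetch url and return the body; urllib2.urlopen raises
    # urllib2.HTTPError on 4xx/5xx, which callers catch as before
    return urllib2.urlopen(url).read()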
@@ -210,7 +210,7 @@ class Archive(SQLObject):
     def syncFrontend(self):
         dto = socket.getdefaulttimeout()
         socket.setdefaulttimeout(256)
-        data = read_url("%s/list" % self.baseUrlFrontend)
+        data = getUrl("%s/list" % self.baseUrlFrontend)
         md5sums = simplejson.loads(data)['files']
         socket.setdefaulttimeout(dto)
         for md5sum in md5sums:
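
syncFrontend() raises the process-wide socket timeout to 256 seconds for the potentially slow /list download, then restores the saved default. One caveat with the pattern as written: if getUrl raises, the old timeout is never restored. A sketch of the same logic hardened with try/finally (names taken from the diff; not part of this commit):

import socket
from oxutils.net import getUrl

def fetchWithLongTimeout(url, timeout=256):
    dto = socket.getdefaulttimeout()
    socket.setdefaulttimeout(timeout)
    try:
        return getUrl(url)
    finally:
        # restore the previous process-wide default even on error
        socket.setdefaulttimeout(dto)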