migrate to oxutils
parent 3f9407c944
commit cc27cdc29d
2 changed files with 3 additions and 7 deletions
@@ -25,10 +25,6 @@ def extractNew():
     for f in ArchiveFile.select(ArchiveFile.q.extracted == False):
         f.extractAll()
 
-def cleanScrapeitCache():
-    #remove files older than 30 days from scrapeit cache
-    os.system("""find /var/cache/scrapeit/ -type f -ctime +30 -exec rm '{}' \;""")
-
 def cleanClipMovieCache():
     cache = os.path.abspath('oxdb/cache/mini/')
     for d, p, files in os.walk(cache):
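The removed cleanScrapeitCache() helper shelled out to find to purge files older than 30 days from /var/cache/scrapeit/. If that cleanup is still wanted somewhere else, a pure-Python version of the same behaviour could look like the sketch below; the directory and 30-day cutoff come from the deleted line, while the function name and the use of os.walk with mtime (find -ctime checks change time) are assumptions for illustration only.

import os
import time

def clean_old_cache_files(cache_dir='/var/cache/scrapeit/', max_age_days=30):
    # hypothetical stand-in for the deleted cleanScrapeitCache():
    # remove regular files older than max_age_days, roughly what the old
    # `find ... -ctime +30 -exec rm` call did (mtime used here instead of ctime)
    cutoff = time.time() - max_age_days * 24 * 60 * 60
    for root, dirs, files in os.walk(cache_dir):
        for name in files:
            path = os.path.join(root, name)
            try:
                if os.path.getmtime(path) < cutoff:
                    os.remove(path)
            except OSError:
                # file may have disappeared between walk and remove
                pass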
@@ -18,7 +18,7 @@ import shutil
 import socket
 
 import simplejson
-from scrapeit.utils import read_url
+from oxutils.net import getUrl
 import subtitles
 
 import cache
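With this hunk the module stops importing read_url from scrapeit.utils and pulls getUrl from oxutils.net instead; the call sites are updated one by one in the hunks below. If other code still referred to the old name during the transition, a thin alias could keep it working. This is only a sketch of that idea, not part of the commit, and it assumes getUrl(url) returns the response body the same way read_url(url) did.

from oxutils.net import getUrl

# hypothetical compatibility alias, assuming getUrl(url) returns the
# response body as a string just like scrapeit.utils.read_url(url) did
read_url = getUrl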
@@ -56,7 +56,7 @@ class Archive(SQLObject):
         url = "%s/%s?md5sum=%s" % (self.baseUrlFrontend, action, md5sum)
         #print url
         try:
-            result = read_url(url)
+            result = getUrl(url)
         except urllib2.HTTPError, e:
             print e.code
             print e.read()
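Here the frontend request keeps its existing Python 2 urllib2.HTTPError handling and only swaps read_url for getUrl. A minimal standalone version of that pattern, assuming getUrl raises urllib2.HTTPError on error statuses the way the surrounding code expects, might look like this (hypothetical helper, written in the repo's Python 2 style):

import urllib2
from oxutils.net import getUrl

def fetch_frontend(base_url, action, md5sum):
    # hypothetical helper mirroring the pattern in Archive: build the
    # frontend URL, fetch it with getUrl, and report HTTP errors
    url = "%s/%s?md5sum=%s" % (base_url, action, md5sum)
    try:
        return getUrl(url)
    except urllib2.HTTPError, e:
        print e.code
        print e.read()
        return None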
@@ -210,7 +210,7 @@ class Archive(SQLObject):
     def syncFrontend(self):
         dto = socket.getdefaulttimeout()
         socket.setdefaulttimeout(256)
-        data = read_url("%s/list" % self.baseUrlFrontend)
+        data = getUrl("%s/list" % self.baseUrlFrontend)
         md5sums = simplejson.loads(data)['files']
         socket.setdefaulttimeout(dto)
         for md5sum in md5sums:
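syncFrontend temporarily raises the global socket timeout to 256 seconds for the /list request and then restores the previous value. A try/finally around the fetch (not what the commit does, just a sketch of the same idea under the assumption that getUrl may raise) would guarantee the default timeout is restored even when the request fails:

import socket
import simplejson
from oxutils.net import getUrl

def fetch_file_list(base_url, timeout=256):
    # hypothetical variant of the syncFrontend fetch: raise the global
    # socket timeout for the long-running /list request and restore the
    # previous value afterwards, even if getUrl raises
    dto = socket.getdefaulttimeout()
    socket.setdefaulttimeout(timeout)
    try:
        data = getUrl("%s/list" % base_url)
    finally:
        socket.setdefaulttimeout(dto)
    return simplejson.loads(data)['files']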