Compare commits
5 commits
47347843d0 ... 2026b64faf
| Author | SHA1 | Date |
|---|---|---|
| | 2026b64faf | |
| | 0728847ffa | |
| | 8ecb14795f | |
| | 75b12dfb86 | |
| | 8675edf19f | |
4 changed files with 46 additions and 4 deletions
```diff
@@ -227,6 +227,8 @@ def signin(url):
         url = 'https://%s/api/' % url
     else:
         site = url.split('/')[2]
+    if not url.endswith('/'):
+        url += '/'
     api = API(url)
     update = False
     try:
```
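The two added lines make signin() tolerant of API URLs given without a trailing slash. A minimal sketch of that normalization on its own (the helper name and example URL are illustrative, not from ox):

```python
def ensure_trailing_slash(url):
    # Same check the diff adds inside signin(): append '/' only when it is
    # missing, so the API base URL always ends with a slash.
    if not url.endswith('/'):
        url += '/'
    return url

assert ensure_trailing_slash('https://example.org/api') == 'https://example.org/api/'
assert ensure_trailing_slash('https://example.org/api/') == 'https://example.org/api/'
```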
ox/cache.py (42 changes)
```diff
@@ -456,7 +456,47 @@ class RedisCache(KVCache):
         self.backend = redis.from_url(self.url)
 
 
-if cache_path().startswith('fs:'):
+class FallbackCache(KVCache):
+    caches = []
+
+    def __init__(self):
+        fallback = cache_path()
+        for path in fallback.split('|'):
+            os.environ['oxCACHE'] = path
+            if path.startswith('redis:'):
+                store = RedisCache()
+            elif path.startswith('memcache:'):
+                store = MemCache()
+            self.caches.append(store)
+        os.environ['oxCACHE'] = fallback
+
+    def get(self, url, data, headers=None, timeout=-1, value="data"):
+        if timeout == 0:
+            return None
+
+        info_key, data_key = self._keys(url, data, headers)
+        for cache in self.caches:
+            try:
+                info = cache.backend.get(info_key)
+            except:
+                info = None
+            if info:
+                return cache.get(url, data, headers, timeout, value)
+        return None
+
+    def set(self, url, post_data, data, headers):
+        self.caches[0].set(url, post_data, data, headers)
+        for cache in self.caches[1:]:
+            cache.delete(url, post_data, headers)
+
+    def delete(self, url, data=None, headers=None):
+        for cache in self.caches:
+            cache.delete(url, data, headers)
+
+
+if '|' in cache_path():
+    store = FallbackCache()
+elif cache_path().startswith('fs:'):
     store = FileCache()
 elif cache_path().startswith('redis:'):
     store = RedisCache()
```
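For context, a minimal self-contained sketch of the read-through pattern the new FallbackCache implements: read from the first cache that has an entry, write only to the primary, and invalidate the entry in the others. The DictCache class and the sample key below are hypothetical stand-ins, not part of ox:

```python
class DictCache:
    """Hypothetical stand-in for RedisCache/MemCache in this sketch."""
    def __init__(self):
        self.data = {}

    def get(self, key):
        return self.data.get(key)

    def set(self, key, value):
        self.data[key] = value

    def delete(self, key):
        self.data.pop(key, None)


class Fallback:
    """Same shape as the new FallbackCache: read from the first cache
    that has the key, write only to the primary, invalidate the rest."""
    def __init__(self, caches):
        self.caches = caches

    def get(self, key):
        for cache in self.caches:
            value = cache.get(key)
            if value is not None:
                return value
        return None

    def set(self, key, value):
        self.caches[0].set(key, value)
        for cache in self.caches[1:]:
            cache.delete(key)

    def delete(self, key):
        for cache in self.caches:
            cache.delete(key)


primary, secondary = DictCache(), DictCache()
secondary.set('k', 'stale')
store = Fallback([primary, secondary])
print(store.get('k'))    # 'stale' -- served from the fallback cache
store.set('k', 'fresh')  # written to primary, deleted from secondary
print(store.get('k'))    # 'fresh'
```

Deleting from the secondary caches on set() mirrors the set()/delete() logic in the diff and keeps older caches from answering with stale data once the primary holds a newer copy.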
```diff
@@ -187,7 +187,7 @@ class Imdb(SiteParser):
         ], type='int'),
         'keyword': {
             'page': 'keywords',
-            're': '<a href="/keyword/.*?>(.*?)</a>',
+            're': 'data-item-keyword="(.*?)"',
             'type': 'list'
         },
         'language': zebra_list('Language', more=['<a.*?>(.*?)</a>']),
```
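The keyword regex now reads the keyword from a data-item-keyword attribute instead of the old /keyword/ link markup. A quick check of the new pattern against made-up markup (the HTML snippet is illustrative, not copied from IMDb):

```python
import re

# Illustrative markup only; the attribute name comes from the new regex
html = '<td data-item-keyword="time-travel">...</td><td data-item-keyword="dystopia">...</td>'
print(re.findall('data-item-keyword="(.*?)"', html))
# ['time-travel', 'dystopia']
```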
```diff
@@ -224,7 +224,7 @@ class Imdb(SiteParser):
         'releasedate': {
             'page': 'releaseinfo',
             're': [
-                '<td class="release_date">(.*?)</td>',
+                '<td class="release-date-item__date".*?>(.*?)</td>',
                 strip_tags,
             ],
             'type': 'list'
```
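Same kind of update for the release dates: the cell class changed from release_date to release-date-item__date, and the new pattern tolerates extra attributes before the closing bracket, with strip_tags cleaning whatever is captured. An illustrative check with a made-up table row:

```python
import re

# Made-up row in the new markup style
html = '<td class="release-date-item__date" align="right">24 October 2019</td>'
print(re.findall('<td class="release-date-item__date".*?>(.*?)</td>', html))
# ['24 October 2019']
```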
@ -7,7 +7,7 @@ from ox.net import read_url
|
|||
|
||||
def get_poster_url(id):
|
||||
url = 'http://piratecinema.org/posters/'
|
||||
html = read_url(url, unicode=True)
|
||||
html = read_url(url).decode('utf-8')
|
||||
results = re.compile('src="(.+)" title=".+\((\d{7})\)"').findall(html)
|
||||
for result in results:
|
||||
if result[1] == id:
|
||||
|
|
|
|||
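The piratecinema change swaps read_url(url, unicode=True) for decoding the returned bytes explicitly. A small sketch of what that line now does, with a stub standing in for ox.net.read_url (the stub and the sample markup are hypothetical):

```python
import re

def read_url(url):
    # Hypothetical stub: returns raw bytes, as the diff implies the real
    # ox.net.read_url does when called without unicode=True.
    return b'<img src="/posters/0133093.jpg" title="The Matrix (0133093)">'

html = read_url('http://piratecinema.org/posters/').decode('utf-8')
results = re.findall(r'src="(.+)" title=".+\((\d{7})\)"', html)
print(results)  # [('/posters/0133093.jpg', '0133093')]
```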