- expand urls to allow relative urls in html / icon / download urls
- new function to clean up archives, removing items that are no longer listed
parent 0a8d8b9fc0
commit 2a6ec2987c
2 changed files with 32 additions and 2 deletions
@@ -79,6 +79,17 @@ class ArchiveItem(SQLObject):
     def _get_year(self):
         return self.relDate.strftime('%Y')
 
+
+    #expand urls in case they are relative to the archive
+    def _get_archiveUrl(self):
+        return self.archive.full_url(self._SO_get_archiveUrl())
+
+    def _get_downloadUrl(self):
+        return self.archive.full_url(self._SO_get_downloadUrl())
+
+    def _get_icon(self):
+        return self.archive.full_url(self._SO_get_icon())
+
     def _get_json(self):
         result = jsonify_sqlobject(self)
         result['relDate'] = self.relDate.strftime('%s')
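The getters added above lean on SQLObject's accessor hooks: defining _get_<column> on a class replaces the generated getter, while _SO_get_<column> still returns the raw value stored in the database, which is what lets each override wrap that value in archive.full_url(). A minimal sketch of the pattern, using a hypothetical class and column that are not part of this repository:

# Sketch of the SQLObject accessor-override pattern; 'Bookmark' and
# 'iconUrl' are made-up names, not from this code base.
from sqlobject import SQLObject, UnicodeCol

class Bookmark(SQLObject):
    iconUrl = UnicodeCol()

    def _get_iconUrl(self):
        # _SO_get_iconUrl() returns the value as stored in the database;
        # the override can rewrite it before callers see it.
        return self._SO_get_iconUrl().strip()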
@@ -105,7 +116,7 @@ class ArchiveItem(SQLObject):
         for key in data:
             setattr(self, key, data[key])
         self.updateHashID()
 
     def updateHashID(self):
         salt = '%s/%s/%s' % (self.archive.archiveName, self.author, self.title)
         self.hashID = md5.new(salt).hexdigest()
@@ -134,6 +145,14 @@ class Archive(SQLObject):
     def data_url(self, id):
         return self._query_url({'id': id})
 
+    def full_url(self, url):
+        if url.find('://') > 0:
+            return url
+        if url.startswith('/'):
+            url = "%s/%s" % (self.archiveUrl.split('/')[0], url)
+        else:
+            url = "%s/%s" % (self.archiveUrl, url)
+
     def update(self):
         result = simplejson.loads(read_url(self.update_url))
         items = result.get('items', [])
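The new full_url() passes through anything that already contains '://' and otherwise joins the value with the archive's base URL. For comparison, a minimal sketch of the same kind of expansion built on urlparse.urljoin from the Python 2 standard library, which handles absolute URLs, '/rooted' paths and plain relative paths and returns a value on every branch; the base URL below is a made-up example, not data from this project:

# Rough equivalent of the URL expansion using the stdlib; the base URL
# is a made-up example.
import urlparse

def expand_url(base, url):
    # urljoin leaves absolute URLs untouched and resolves relative ones
    # (including paths starting with '/') against the base.
    return urlparse.urljoin(base, url)

print expand_url('http://example.com/archive/', 'icons/item.png')
# -> http://example.com/archive/icons/item.png
print expand_url('http://example.com/archive/', '/icons/item.png')
# -> http://example.com/icons/item.png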
@@ -147,6 +166,17 @@ class Archive(SQLObject):
             else:
                 q[0].update(data)
 
+    '''
+    get list of all items from archive and remove those from ArchiveItem that
+    are no longer in the list
+    '''
+    def cleanUp(self):
+        url = self._query_url({'modDate': -1})
+        result = simplejson.loads(read_url(url))
+        archiveItems = result.get('items', [])
+        archivedItems = [i.archiveItemId for i in ArchiveItem.select(ArchiveItem.q.archiveID == self.id)]
+        removeItems = filter(lambda i: i not in archiveItems, archivedItems)
+        for i in removeItems: ArchiveItem.delete(i)
 
 class SortName(SQLObject):
     name =UnicodeCol(length=1000, alternateID=True)
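cleanUp() fetches the archive's current item list and deletes every locally stored ArchiveItem whose archiveItemId no longer appears in it. A toy illustration of that pruning step, with made-up ids; note that in Python 2 filter() returns a plain list here:

# Toy illustration of the pruning done by cleanUp(); the ids are made up.
remote_ids = [101, 102, 103]             # ids the archive still lists
local_ids = [101, 102, 103, 104, 105]    # ids currently stored locally
stale = filter(lambda i: i not in remote_ids, local_ids)
print stale   # [104, 105] -- the items cleanUp() would delete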
@@ -26,6 +26,7 @@ def jsonImportArchiveItem(archive, archiveItemId, json_array):
     json_array = jsonLoadArchiveItem(json_array)
     salt = '%s/%s/%s' % (archive.archiveName, json_array['author'], json_array['title'])
     hashID = md5.new(salt).hexdigest()
+
     i = model.ArchiveItem(
         archiveID=archive.id,
         hashId = hashID,
@@ -46,4 +47,3 @@ def jsonImportArchiveItem(archive, archiveItemId, json_array):
         itemType=json_array['itemType'],
         icon= json_array['icon']
     )
-
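The import code derives each item's hashID from an archiveName/author/title salt, the same scheme ArchiveItem.updateHashID() uses above. The md5 module it calls is the old pre-hashlib interface; hashlib.md5 produces the identical digest. A sketch with made-up salt values:

# Same digest via hashlib (md5.new is the older interface used in the diff);
# the salt values are made-up examples.
import hashlib

salt = '%s/%s/%s' % ('someArchive', 'Some Author', 'Some Title')
hashID = hashlib.md5(salt).hexdigest()   # 32-character hex string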