another day

parent 04b0a2bae9
commit f7f3753cbf
76 changed files with 683 additions and 297 deletions
@@ -29,14 +29,17 @@ def queryArchive(query, orderBy="score", offset = 0, count = 100):
     orderBy = orderBy.encode('utf-8')
     print orderBy
     if orderBy not in ('score', 'size', 'title', 'description'):
-        orderBy = 'score'
+        orderBy = 'score DESC, title'
+    if orderBy == 'size':
+        orderBy = "size DESC"
-    match = "MATCH (title, description, text) AGAINST ('%s')" % query
-    sql = """SELECT id, %s AS score, title, size, description FROM archive_item
+    match = '''MATCH (title, description, text) AGAINST ('%s')''' % query
+    match_b = '''MATCH (title, description, text) AGAINST ('%s' IN BOOLEAN MODE)''' % query
+
+    sql = """SELECT id, ((100000/LENGTH(text)) * %s) AS score, title, size, description FROM archive_item
 WHERE %s ORDER BY %s""" % \
-    (match, match, orderBy) #, offset, count)
+    (match_b, match_b, orderBy) #, offset, count)
     result = []
+    max_score= None
     print sql
     matches = ArchiveItem._connection.queryAll(sql)
     if len(matches) > offset:

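The new query still pastes the user-supplied string into the MATCH ... AGAINST clause with plain % interpolation, so a quote in the search text breaks the SQL and is an injection vector. A minimal sketch of the same statement built with SQLObject's sqlrepr quoting helper (buildSearchSql is a hypothetical wrapper, not part of this commit):

    from sqlobject.sqlbuilder import sqlrepr

    def buildSearchSql(query, orderBy='score DESC, title'):
        # sqlrepr returns the value quoted for the backend, e.g. "'foo bar'"
        quoted = sqlrepr(query.encode('utf-8'), 'mysql')
        match_b = "MATCH (title, description, text) AGAINST (%s IN BOOLEAN MODE)" % quoted
        return ("SELECT id, ((100000/LENGTH(text)) * %s) AS score, "
                "title, size, description FROM archive_item "
                "WHERE %s ORDER BY %s" % (match_b, match_b, orderBy))
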
@@ -45,7 +48,9 @@ WHERE %s ORDER BY %s""" % \
     matches = matches[:count]
     for m in matches:
         item = ArchiveItem.get(m[0])
-        item.score = m[1]
+        if not max_score:
+            max_score = m[1] / 100
+        item.score = m[1] / max_score
         result.append(item)
     return result

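The rewritten loop rescales every hit against the first, highest-scoring row: max_score is the top raw score divided by 100, so item.score lands on a 0-100 scale (a raw 4.2 against a top score of 8.4 comes out as 50). As committed it raises ZeroDivisionError when the top score is 0; a small sketch with that edge handled (normaliseScores and raw_scores are illustrative names, not code from this commit):

    def normaliseScores(raw_scores):
        # raw_scores stands in for the m[1] values, best match first
        if not raw_scores or raw_scores[0] <= 0:
            return [0] * len(raw_scores)
        top = raw_scores[0] / 100.0
        return [s / top for s in raw_scores]  # 100.0 means "as good as the best hit"
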
@@ -67,8 +72,10 @@ class ArchiveItem(SQLObject):
     downloadUrl = UnicodeCol() # -> url (link to item)
     storeUrl = UnicodeCol() # -> url (link to store)
     size = IntCol() #bytes
-    rights = IntCol(default = 5) #-> int: 0 (free) - 5 (unfree)
-    itemType = UnicodeCol() #string (Text, Pictures, Music, Movies, Software)
+    rightsLevel = IntCol(default = 5) #-> int: 0 (free) - 5 (unfree)
+    rightsText = UnicodeCol(default = '')
+    kind = UnicodeCol() #string (Text, Pictures, Music, Movies, Software)
     fileType = UnicodeCol() #fileType (pdf, txt etc)
+    genre = UnicodeCol(default = '')
 
     archive = ForeignKey('Archive')

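This hunk renames two columns (rights to rightsLevel, itemType to kind) and adds rightsText and genre. SQLObject's createTable() only creates new tables, it does not alter existing ones, so a live archive_item table needs hand-run DDL along these lines (column types and the default camelCase-to-underscore name mapping are assumptions):

    conn = ArchiveItem._connection
    conn.query("ALTER TABLE archive_item CHANGE rights rights_level INT")
    conn.query("ALTER TABLE archive_item CHANGE item_type kind TEXT")
    conn.query("ALTER TABLE archive_item ADD COLUMN rights_text TEXT")
    conn.query("ALTER TABLE archive_item ADD COLUMN genre TEXT")
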
@@ -80,25 +87,48 @@ class ArchiveItem(SQLObject):
     #Fulltext search
     #ALTER TABLE archive_item ADD FULLTEXT (title, description, text);
 
-    def _get_filetype(self):
-        return self.downloadUrl.split('.')[-1].upper()
-
+    def _get_sizeFormated(self):
+        return utils.formatFileSize(self.size)
+
     def getPreview(self, sort):
         if sort == 'size':
-            return utils.formatFileSize(self.size)
+            return self.sizeFormated
         if sort == 'relevance':
             return "%d" % self.score
         return self.relDateFormated
 
+    def _set_author(self, value):
+        self._SO_set_author(value)
+        if not self.authorSort:
+            self.authorSort = value
+
+    def _set_title(self, value):
+        self._SO_set_title(value)
+        if not self.titleSort:
+            self.titleSort = value
+
+    def _get_year(self):
+        return self.relDate.strftime('%Y')
+
+    def rightsLevelClass(self, level):
+        if level == self.rightsLevel:
+            return "rightsLevelActive"
+        return "rightsLevelInactive"
+
     def _get_relDateFormated(self):
-        if self.itemType in ('Movie', 'Book'):
+        if self.kind in ('Movie', 'Book'):
             return self.year
         else:
             return self.relDate.strftime('%Y-%m-%d')
 
+    def domain(self, url):
+        d = url.split('/')
+        if len(d) > 2:
+            return d[2].split('?')[0]
+        return url
+
     #expand urls in case they are relative to the archive
     def _get_archiveUrl(self):
         return self.archive.full_url(self._SO_get_archiveUrl())

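The _get_* and _set_* names here are SQLObject's property hooks: defining _get_<name> exposes a computed attribute, and overriding _set_<column> intercepts writes to a real column, with _SO_set_<column> performing the underlying store. A self-contained sketch of the pattern the new setters use (the Book class is illustrative):

    from sqlobject import SQLObject, UnicodeCol

    class Book(SQLObject):
        title = UnicodeCol()
        titleSort = UnicodeCol(default='')

        def _get_shoutedTitle(self):
            # read-only computed attribute: book.shoutedTitle
            return self.title.upper()

        def _set_title(self, value):
            # runs on every "book.title = ..." assignment
            self._SO_set_title(value)     # the actual column write
            if not self.titleSort:        # initialise the sort key once
                self.titleSort = value
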
@@ -113,6 +143,7 @@ class ArchiveItem(SQLObject):
         result = jsonify_sqlobject(self)
         result['relDate'] = self.relDate.strftime('%s')
+        result['pubDate'] = self.pubDate.strftime('%s')
         result['modDate'] = self.relDate.strftime('%s')
         return result
         '''
         return dict(

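strftime('%s') relies on a glibc extension rather than a standard format code (and note that modDate is filled from self.relDate here). A portable way to produce the same local-time epoch strings, with timestamp as a hypothetical helper:

    import time

    def timestamp(dt):
        # equivalent to dt.strftime('%s') on Linux: local-time epoch seconds
        return "%d" % time.mktime(dt.timetuple())

    # e.g. result['relDate'] = timestamp(self.relDate)
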
@@ -134,24 +165,25 @@ class ArchiveItem(SQLObject):
     def update(self, data):
         for key in data:
             setattr(self, key, data[key])
-        self.updateHashID()
+        self.setHashId()
 
-    def updateHashID(self):
+    def setHashId(self):
         salt = u'%s/%s' % (self.archive.archiveName, self.archiveItemId)
         self.hashID = md5.new(salt.encode('utf-8')).hexdigest()
 
 
 class Archive(SQLObject):
-    archiveName = UnicodeCol(alternateID = True, length = 1000)
+    archiveId = UnicodeCol(alternateID = True, length = 1000)
+    archiveName = UnicodeCol()
     archiveUrl = UnicodeCol()
     archiveType = UnicodeCol(default=u'')
-    ttl = IntCol(default = "15")
+    ttl = IntCol(default = "900") #seconds
     pubDate = DateTimeCol(default=datetime.now)
     modDate = DateTimeCol(default=datetime.now)
     created = DateTimeCol(default=datetime.now)
     initialized = BoolCol(default = False)
     css = UnicodeCol(default='')
     js = UnicodeCol(default='')
     icon = UnicodeCol() # -> url (128x128)
 
     hashId = UnicodeCol(alternateID = True, length=128)

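setHashId derives a stable 32-character id from the archive name and the item's id within that archive. The md5 module used here was deprecated in Python 2.5 in favour of hashlib; an equivalent sketch (makeHashId is an illustrative standalone version):

    import hashlib

    def makeHashId(archiveName, archiveItemId):
        salt = u'%s/%s' % (archiveName, archiveItemId)
        return hashlib.md5(salt.encode('utf-8')).hexdigest()

    # same digest as md5.new(...).hexdigest() in the committed code
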
@@ -171,13 +203,15 @@ class Archive(SQLObject):
     def _get_update_url(self):
         return self._query_url({'modDate': self.modDateTimestamp})
 
-    def _get_files_url(self):
-        return self._query_url({'files': '1'})
+    def _get_metadata_url(self):
+        return self._query_url({'metadata': '1'})
+
+    def data_url(self, id):
+        return self._query_url({'id': id})
 
     def full_url(self, url):
         if not url:
             return ''
         if url.find('://') > 0:
             return url
         if url.startswith('/'):

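_query_url itself is outside this hunk; judging by the callers it appends query parameters to the archive's base URL, roughly like this sketch (an assumption, not code from this commit):

    import urllib

    def _query_url(self, params):
        qs = urllib.urlencode(params)   # {'metadata': '1'} -> 'metadata=1'
        sep = '&' if '?' in self.archiveUrl else '?'
        return self.archiveUrl + sep + qs
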
@@ -187,37 +221,63 @@ class Archive(SQLObject):
             url = "%s/%s" % (self.archiveUrl, url)
         return url
 
     def _get_iconUrl(self):
         if self.icon:
             return "/icon/%s.png" % self.hashId
         else:
             return "/static/images/iconCollection.png"
 
     def update(self):
-        result = simplejson.loads(read_url(self.files_url))
-        if result and result.has_key('css'):
-            self.css = read_url(self.full_url(result['css']))
-        else:
-            self.css = ''
-        if result and result.has_key('js'):
-            self.js = read_url(self.full_url(result['js']))
+        result = simplejson.loads(read_url(self.metadata_url))
+        if result:
+            if result.has_key('name'):
+                self.archiveName = result['name']
+            if result.has_key('id'):
+                self.archiveId = result['id']
+            if result.has_key('ttl'):
+                self.ttl = int(result['ttl'])
+            if result.has_key('icon'):
+                self.icon = result['icon']
+            if result.has_key('css'):
+                try:
+                    data = read_url(self.full_url(result['css']))
+                    self.css = data
+                except:
+                    self.css = ''
+            if result.has_key('js'):
+                try:
+                    data = read_url(self.full_url(result['js']))
+                    self.js = data
+                except:
+                    self.js = ''
+        else:
+            self.icon = ''
+            self.js = ''
+            self.css = ''
         result = simplejson.loads(read_url(self.update_url))
         items = result.get('items', [])
+        print "importing", len(items), "items"
         for id in items:
             try:
-                data = read_url(self.data_url(id))
-                data = jsonLoadArchiveItem(data)
-                print data['title'].encode('utf-8')
+                self.updateItem(id)
             except:
                 print "failed to load ", id, "from ", self.data_url(id)
                 continue
-            q = ArchiveItem.select(AND(
-                ArchiveItem.q.archiveItemId == id,
-                ArchiveItem.q.archiveID == self.id))
-            if q.count() == 0:
-                jsonImportArchiveItem(self, id, data)
-            else:
-                q[0].update(data)
+        self.initialized = True
         self.modDate = datetime.now()
 
+    def updateItem(self, id):
+        data = read_url(self.data_url(id))
+        data = jsonLoadArchiveItem(data)
+        print data['title'].encode('utf-8')
+        q = ArchiveItem.select(AND(
+            ArchiveItem.q.archiveItemId == id,
+            ArchiveItem.q.archiveID == self.id))
+        if q.count() == 0:
+            jsonImportArchiveItem(self, id, data)
+        else:
+            q[0].update(data)
+
     '''
     get list of all items from archive and remove those from ArchiveItem that
     are no longer in the list

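Taken together, the new update() is a three-step sync: metadata_url yields the archive's name, id, ttl, icon, css and js; update_url lists changed item ids; data_url(id) returns one item's JSON, which updateItem() inserts or merges. In outline (a condensed sketch, not a drop-in replacement; read_url is the module's own fetch helper):

    import simplejson
    from datetime import datetime

    def sync(archive):
        meta = simplejson.loads(read_url(archive.metadata_url))
        # ... apply name/id/ttl/icon/css/js as in update() above ...
        changed = simplejson.loads(read_url(archive.update_url))
        for item_id in changed.get('items', []):
            archive.updateItem(item_id)   # fetch data_url(id), insert or update
        archive.initialized = True
        archive.modDate = datetime.now()

Moving the per-item fetch into updateItem() also means the bare except in the loop now guards a single call, so one bad item no longer aborts the whole import.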