From 2644f0fccfcf9defe1a8bb2e9f60331934332fa1 Mon Sep 17 00:00:00 2001 From: j <0x006A@0x2620.org> Date: Wed, 4 Apr 2007 14:07:16 +0000 Subject: [PATCH] - storeUrl - sort / search - show sort by field --- oilarchive/controllers.py | 52 +++++------- oilarchive/cronjobs.py | 9 ++- oilarchive/model.py | 34 ++++++-- oilarchive/oilspider.py | 3 + oilarchive/static/css/archive.css | 68 ++++++++++++++++ oilarchive/static/css/style.css | 31 +++++++ oilarchive/static/js/archive.js | 25 ++++++ oilarchive/templates/iconview.kid | 130 +++++++++++++++++++++++++++++- oilarchive/templates/view.kid | 32 ++++++-- oilarchive/utils.py | 16 ++++ 10 files changed, 349 insertions(+), 51 deletions(-) diff --git a/oilarchive/controllers.py b/oilarchive/controllers.py index bd61745..96e706b 100644 --- a/oilarchive/controllers.py +++ b/oilarchive/controllers.py @@ -18,6 +18,9 @@ import oilcache from forms import forms from sortname import sortname +def httpExpires(sec): + return cherrypy.lib.httptools.HTTPDate(time.gmtime(time.mktime(time.gmtime()) + sec)) + class View: @expose(template=".templates.view") def view(self, item): @@ -25,10 +28,12 @@ class View: def icon(self, item): response.headerMap['Content-Type'] = "image/png" + cherrypy.response.headerMap["Expires"] = httpExpires(60*60*24*30) return oilcache.loadIcon(item) def icon_reflection(self, item): response.headerMap['Content-Type'] = "image/png" + cherrypy.response.headerMap["Expires"] = httpExpires(60*60*24*30) return oilcache.loadIconReflection(item) @expose() @@ -108,10 +113,7 @@ class ArchiveJavascript: name = name.split('.')[0] archive = Archive.byHashId(name) response.headerMap['Content-Type'] = "application/x-javascript" - secs = 60*60*24*30 - secs = 60 - expires = cherrypy.lib.httptools.HTTPDate(time.gmtime(time.mktime(time.gmtime()) + secs)) - cherrypy.response.headerMap["Expires"] = expires + cherrypy.response.headerMap["Expires"] = httpExpires(60) #(60*60*24*30) return archive.js class ArchiveStyleSheet: @@ -120,10 +122,7 @@ class ArchiveStyleSheet: name = name.split('.')[0] archive = Archive.byHashId(name) response.headerMap['Content-Type'] = "text/css" - secs = 60*60*24*30 - secs = 60 - expires = cherrypy.lib.httptools.HTTPDate(time.gmtime(time.mktime(time.gmtime()) + secs)) - cherrypy.response.headerMap["Expires"] = expires + cherrypy.response.headerMap["Expires"] = httpExpires(60) #(60*60*24*30) return archive.css class Root(controllers.RootController): @@ -168,42 +167,27 @@ class Root(controllers.RootController): return dict(q = '', f = 'all', s = 'title', o = 0, n = 60, l = 'all', v = 'icon', length = 0) _sort_map = { - 'id': 'imdb', - 'director': 'director_html', - 'writer': 'writer_html', - 'language': 'language_html', - 'releasedate': 'release_date', - 'cast': 'cast_html', - 'genre': 'genre_html', - 'keywords': 'keywords_html', - 'connections': 'connections_sort', + 'id': 'hashId', + 'author': 'author_sort', + 'releasedate': 'rel_date', 'title': 'title_sort', - 'country': 'country_html', - 'producer': 'producer_html', - 'summary': 'plot', - 'trivia': 'plot', - 'date': 'latest_file_date', - 'year': 'release_date', + 'date': 'rel_date', } def get_sort(self, s): s = str(self._sort_map.get(s, s)) - if s in ('release_date', 'size', 'pub_date'): + if s in ('rel_date', 'size', 'pub_date'): s = '-%s' % s return s _field_map = { 'title': ArchiveItem.q.title, 'author': ArchiveItem.q.author, + 'genre': ArchiveItem.q.genre, } _search_map = { - 'summary': 'plot', - 'trivia': 'plot', - 'releasedate': 'release_date', - 'script': 'year', - 'title': 
'year', - 'director': 'year' + 'releasedate': 'rel_date', } @expose(template=".templates.iconview") @@ -233,9 +217,13 @@ class Root(controllers.RootController): if v == 'quote': tg_template = ".templates.quoteview" - orderBy = [self.get_sort(s), 'title_sort', 'title'] + orderBy = [self.get_sort(s), 'title_sort', 'rel_date'] if q: - items = queryArchive(q) + if f=='all': + items = queryArchive(q, s) + elif f in ('title', 'author', 'genre'): + q = q.encode('utf-8') + items = ArchiveItem.select(LIKE(self._field_map[f], '%'+q+'%') , orderBy=orderBy) else: items = ArchiveItem.select(orderBy = orderBy) sort = s diff --git a/oilarchive/cronjobs.py b/oilarchive/cronjobs.py index 3dfea62..2f9c16b 100644 --- a/oilarchive/cronjobs.py +++ b/oilarchive/cronjobs.py @@ -12,7 +12,7 @@ from sortname import sortname update authorSort for better(tm) sorting ''' def updateSortAuthorNames(): - for i in ArchiveItems.select(): + for i in ArchiveItem.select(): i.authorSort = sortname(i.author) ''' @@ -20,10 +20,11 @@ def updateSortAuthorNames(): ''' def spiderArchives(): for archive in Archive.select(Archive.q.initialized == True): - if archive.pubDate - datetime.now() < timedelta(minutes = archive.ttl): - print archive.archiveName + if archive.modDate - datetime.now() < timedelta(minutes = archive.ttl): + print "updating", archive.archiveName archive.update() - + else: + print "skipping", archive.archiveName def runCron(): spiderArchives() diff --git a/oilarchive/model.py b/oilarchive/model.py index f9993f8..5cb222a 100644 --- a/oilarchive/model.py +++ b/oilarchive/model.py @@ -18,6 +18,7 @@ from scrapeit.utils import read_url import simplejson from oilspider import jsonLoadArchiveItem, jsonImportArchiveItem +import utils hub = PackageHub("oilarchive") __connection__ = hub @@ -25,11 +26,18 @@ __connection__ = hub def queryArchive(query, orderBy="score", offset = 0, count = 100): query = MySQLdb.escape_string(query) + orderBy = orderBy.encode('utf-8') + print orderBy + if orderBy not in ('score', 'size', 'title', 'description'): + orderBy = 'score' + if orderBy == 'size': + orderBy = "size DESC" match = "MATCH (title, description, text) AGAINST ('%s')" % query - sql = """SELECT id, %s AS score FROM archive_item + sql = """SELECT id, %s AS score, title, size, description FROM archive_item WHERE %s ORDER BY %s""" % \ (match, match, orderBy) #, offset, count) result = [] + print sql matches = ArchiveItem._connection.queryAll(sql) if len(matches) > offset: matches = matches[offset:] @@ -57,6 +65,7 @@ class ArchiveItem(SQLObject): modDate = DateTimeCol() #timestamp (item published) archiveUrl = UnicodeCol() # -> url (link to archive page) downloadUrl = UnicodeCol() # -> url (link to item) + storeUrl = UnicodeCol() # -> url (link to store) size = IntCol() #bytes rights = IntCol(default = 5) #-> int: 0 (free) - 5 (unfree) itemType = UnicodeCol() #string (Text, Pictures, Music, Movies, Software) @@ -71,7 +80,11 @@ class ArchiveItem(SQLObject): #Fulltext search #ALTER TABLE archive_item ADD FULLTEXT (title, description, text); - + def getPreview(self, sort): + if sort == 'size': + return utils.formatFileSize(self.size) + return self.relDateFormated + def _set_author(self, value): self._SO_set_author(value) if not self.authorSort: @@ -80,6 +93,11 @@ class ArchiveItem(SQLObject): def _get_year(self): return self.relDate.strftime('%Y') + def _get_relDateFormated(self): + if self.itemType in ('Movie', 'Book'): + return self.year + else: + return self.relDate.strftime('%Y-%m-%d') #expand urls in case they are relative to the 
archive def _get_archiveUrl(self): @@ -140,9 +158,9 @@ class Archive(SQLObject): def setHashId(self): self.hashId = md5.new("%s" % self.id).hexdigest() - def _get_pubDateTimestamp(self): + def _get_modDateTimestamp(self): if self.initialized: - return int(time.mktime(self.pubDate.timetuple())) + return int(time.mktime(self.modDate.timetuple())) return -1 def _query_url(self, query): @@ -151,7 +169,7 @@ class Archive(SQLObject): return url def _get_update_url(self): - return self._query_url({'modDate': self.pubDateTimestamp}) + return self._query_url({'modDate': self.modDateTimestamp}) def _get_files_url(self): return self._query_url({'files': '1'}) @@ -181,11 +199,12 @@ class Archive(SQLObject): self.js = '' result = simplejson.loads(read_url(self.update_url)) items = result.get('items', []) - print len(items) + print "importing", len(items), "items" for id in items: try: data = read_url(self.data_url(id)) data = jsonLoadArchiveItem(data) + print data['title'].encode('utf-8') except: print "failed to load ", id, "from ", self.data_url(id) continue @@ -196,7 +215,8 @@ class Archive(SQLObject): jsonImportArchiveItem(self, id, data) else: q[0].update(data) - self.initialized = True + self.initialized = True + self.modDate = datetime.now() ''' get list of all items from archive and remove those from ArchiveItem that diff --git a/oilarchive/oilspider.py b/oilarchive/oilspider.py index 4dec838..7af54cb 100644 --- a/oilarchive/oilspider.py +++ b/oilarchive/oilspider.py @@ -17,6 +17,8 @@ def jsonLoadArchiveItem(data): json_array['archiveUrl'] = json_array.pop('archiveURL') if json_array.has_key('downloadURL'): json_array['downloadUrl'] = json_array.pop('downloadURL') + if json_array.has_key('storeURL'): + json_array['storeUrl'] = json_array.pop('storeURL') for key in ('relDate', 'pubDate', 'modDate'): json_array[key] = datetime.utcfromtimestamp(float(json_array[key])) for key in ('rights', 'size'): @@ -43,6 +45,7 @@ def jsonImportArchiveItem(archive, archiveItemId, json_array): modDate=json_array['modDate'], archiveUrl=json_array['archiveUrl'], downloadUrl=json_array['downloadUrl'], + storeUrl=json_array['storeUrl'], html=json_array['html'], genre=json_array['genre'], title=json_array['title'], diff --git a/oilarchive/static/css/archive.css b/oilarchive/static/css/archive.css index 1cc554f..94f5503 100644 --- a/oilarchive/static/css/archive.css +++ b/oilarchive/static/css/archive.css @@ -126,4 +126,72 @@ input { .item .textIconLarge { color: rgb(0, 0, 0); +} + +table { + border-collapse: collapse; + border-spacing: 0px; +} + +td { + padding: 0px; +} + +#itemPageIcon { + width: 128px; + padding-left: 8px; + padding-right: 8px; +} + +#itemPageText { + padding-left: 8px; + padding-right: 8px; +} + +#itemPageTextLeftTop { + width: 8px; + height: 8px; + background: url(/static/images/itemPageTextLeftTop.png) +} + +#itemPageTextCenterTop { + height: 8px; + background: url(/static/images/itemPageTextCenterTop.png); +} + +#itemPageTextRightTop { + width: 8px; + height: 8px; + background: url(/static/images/itemPageTextRightTop.png) +} + +#itemPageTextLeftMiddle { + width: 8px; + background: url(/static/images/itemPageTextLeftMiddle.png) +} + +#itemPageTextCenterMiddle { + background: url(/static/images/itemPageTextCenterMiddle.png); +} + +#itemPageTextRightMiddle { + width: 8px; + background: url(/static/images/itemPageTextRightMiddle.png) +} + +#itemPageTextLeftBottom { + width: 8px; + height: 8px; + background: url(/static/images/itemPageTextLeftBottom.png) +} + +#itemPageTextCenterBottom { + height: 
8px; + background: url(/static/images/itemPageTextCenterBottom.png); +} + +#itemPageTextRightBottom { + width: 8px; + height: 8px; + background: url(/static/images/itemPageTextRightBottom.png) } \ No newline at end of file diff --git a/oilarchive/static/css/style.css b/oilarchive/static/css/style.css index e69de29..6979968 100644 --- a/oilarchive/static/css/style.css +++ b/oilarchive/static/css/style.css @@ -0,0 +1,31 @@ +#head { + position: fixed; + top: 0px; + width: 100%; + height: 64px; + background: rgb(64, 64, 64); + text-align: center; + z-index: 1; +} + +#headList { + position: relative; + margin-left: auto; + margin-right: auto; + margin-top: 8px; + width: 808px; + height: 48px; +} +.headTop { + position: absolute; + left: 0px; + top: 0px; + width: 128px; +} + +.headBottom { + position: absolute; + left: 0px; + bottom: 0px; + width: 128px; +} diff --git a/oilarchive/static/js/archive.js b/oilarchive/static/js/archive.js index cf8376d..3322845 100644 --- a/oilarchive/static/js/archive.js +++ b/oilarchive/static/js/archive.js @@ -1,3 +1,28 @@ +function changeList() { + submitFind(); +} + +function changeView() { + submitFind(); +} + +function changeSort() { + submitFind(); +} + +function changeFind() { + +} + +function submitFind() { + var l = document.getElementById('selectList').value; + var v = document.getElementById('selectView').value; + var s = document.getElementById('selectSort').value; + var f = document.getElementById('selectFind').value; + var q = document.getElementById('inputFind').value; + document.location.href = '/search?l=' + l + '&v=' + v + '&s=' + s + '&f=' + f + '&q=' + q; +} + function mouseOver(id, view) { if (view == 'IconLarge' || view == 'IconSmall') document.getElementById(id).style.background = 'url(/static/images/item' + view + 'MouseOver.png)'; diff --git a/oilarchive/templates/iconview.kid b/oilarchive/templates/iconview.kid index 205fefc..96a4a24 100644 --- a/oilarchive/templates/iconview.kid +++ b/oilarchive/templates/iconview.kid @@ -1,10 +1,136 @@ + Oil of the 21st Century Archive + +
[Remainder of the iconview.kid hunks lost in extraction: the kid template markup was stripped, leaving only diff markers. Recoverable changes: two later hunks (@@ -18,7 +144,7 @@ and @@ -26,7 +152,7 @@) touch the item markup around ${item.title}, and the item preview line changes from ${item.relDate} to ${item.getPreview(sort)}.]

diff --git a/oilarchive/templates/view.kid b/oilarchive/templates/view.kid
index c56a5db..92a05f6 100644
--- a/oilarchive/templates/view.kid
+++ b/oilarchive/templates/view.kid
@@ -4,17 +4,37 @@
[view.kid hunk body lost in extraction: the kid template markup was stripped. Recoverable changes: the old layout around ${item.author}, ${item.title} and ${XML(item.html)} is removed (the hunk context includes the page title "Oil21 - ${item.title}"), and the item page is rebuilt as a table with ${XML(item.html)} rendered in one of the new cells.]
diff --git a/oilarchive/utils.py b/oilarchive/utils.py index 24df6e8..c752fd0 100644 --- a/oilarchive/utils.py +++ b/oilarchive/utils.py @@ -29,3 +29,19 @@ def highlightText(text, term): else: output = text return output + +''' + Format the value like a 'human-readable' file size (i.e. 13 KB, 4.1 MB, 102 + bytes, etc). +''' +def formatFileSize(bytes): + bytes = float(bytes) + if bytes < 1024: + return "%d byte%s" % (bytes, bytes != 1 and 's' or '') + if bytes < 1024 * 1024: + return "%d KB" % (bytes / 1024) + if bytes < 1024 * 1024 * 1024: + return "%.1f MB" % (bytes / (1024 * 1024)) + if bytes < 1024 * 1024 * 1024 * 1024: + return "%.2f GB" % (bytes / (1024 * 1024 * 1024)) + return "%.3f TB" % (bytes / (1024 * 1024 * 1024 * 1024))
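
Note (not part of the patch): a minimal sketch of what the new utils.formatFileSize
helper returns at the thresholds defined above. The sample values and the standalone
script are illustrative assumptions only; the import path follows the file touched in
this patch.

    # Sketch: expected output of oilarchive.utils.formatFileSize
    # (Python 2 print statements, matching the rest of the codebase);
    # values chosen to hit each branch of the helper.
    from oilarchive.utils import formatFileSize

    print formatFileSize(1)              # "1 byte"
    print formatFileSize(102)            # "102 bytes"
    print formatFileSize(13 * 1024)      # "13 KB"
    print formatFileSize(4404019)        # "4.2 MB"
    print formatFileSize(2 * 1024 ** 3)  # "2.00 GB"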