import json/simplejson in one place and use that

j 2010-07-28 15:08:06 +02:00
parent 8569759865
commit eae4803cd3
6 changed files with 19 additions and 18 deletions
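
The change is the same in every file: instead of importing simplejson directly, each module now imports a shared json object from ox.utils, so the choice of JSON implementation lives in one place. The contents of that module are not part of this diff; a minimal sketch of what such a single-point import typically looks like (the ox/utils.py path is assumed from the import lines below):

    # ox/utils.py -- assumed contents, not shown in this commit:
    # pick the JSON implementation once, so every other module can just do
    # "from ox.utils import json" and stop caring which library is installed.
    try:
        import simplejson as json   # use simplejson when available (Python 2.x era)
    except ImportError:
        import json                 # otherwise fall back to the standard library

Callers keep using json.loads() and json.dumps() exactly as before; only the import line changes, which is what every hunk below does.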

View file

@@ -11,7 +11,7 @@ import urllib2
 import sqlite3
 import chardet
-import simplejson
+from ox.utils import json
 import net
 from net import DEFAULT_HEADERS, getEncoding
@@ -45,7 +45,7 @@ def exists(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
 def getHeaders(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
     url_headers = _readUrlCache(url, data, headers, timeout, "headers")
     if url_headers:
-        url_headers = simplejson.loads(url_headers)
+        url_headers = json.loads(url_headers)
     else:
         url_headers = net.getHeaders(url, data, headers)
         _saveUrlCache(url, data, -1, url_headers)
@@ -182,7 +182,7 @@ def _saveUrlCache(url, post_data, data, headers):
         only_headers = 1
         data = ""
     created = time.mktime(time.localtime())
-    t = (url_hash, domain, url, post_data, simplejson.dumps(headers), created, sqlite3.Binary(data), only_headers)
+    t = (url_hash, domain, url, post_data, json.dumps(headers), created, sqlite3.Binary(data), only_headers)
     c.execute(u"""INSERT OR REPLACE INTO cache values (?, ?, ?, ?, ?, ?, ?, ?)""", t)
     # Save (commit) the changes and clean up

View file

@@ -8,7 +8,7 @@ import sys
 import struct
 import subprocess
-import simplejson
+from ox.utils import json
 
 __all__ = ['sha1sum', 'oshash', 'avinfo']
@@ -63,5 +63,5 @@ def avinfo(filename):
     if os.path.getsize(filename):
         p = subprocess.Popen(['ffmpeg2theora', '--info', filename], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
         info, error = p.communicate()
-        return simplejson.loads(info)
+        return json.loads(info)
     return {'path': filename, 'size': 0}

View file

@@ -2,7 +2,8 @@
 # vi:si:et:sw=4:sts=4:ts=4
 # GPL 2009
 import os
-import simplejson
+
+from ox.utils import json
 
 def get(key):
@@ -12,7 +13,7 @@ def get(key):
         f = open(user_auth, "r")
         data = f.read()
         f.close()
-        auth = simplejson.loads(data)
+        auth = json.loads(data)
     if key in auth:
         return auth[key]
     print "please add key %s to json file '%s'" % (key, user_auth)

View file

@@ -174,14 +174,14 @@ class ItunesMovie:
         return data
 
 if __name__ == '__main__':
-    import simplejson
+    from ox.utils import json
     data = ItunesAlbum(title = 'So Red the Rose', artist = 'Arcadia').getData()
-    print simplejson.dumps(data, sort_keys = True, indent = 4)
+    print json.dumps(data, sort_keys = True, indent = 4)
     data = ItunesMovie(title = 'The Matrix', director = 'Wachowski').getData()
-    print simplejson.dumps(data, sort_keys = True, indent = 4)
+    print json.dumps(data, sort_keys = True, indent = 4)
     for v in data['relatedMovies']:
         data = ItunesMovie(id = v['id']).getData()
-        print simplejson.dumps(data, sort_keys = True, indent = 4)
+        print json.dumps(data, sort_keys = True, indent = 4)
     data = ItunesMovie(id='272960052').getData()
-    print simplejson.dumps(data, sort_keys = True, indent = 4)
+    print json.dumps(data, sort_keys = True, indent = 4)

View file

@@ -128,7 +128,7 @@ def archiveIssues():
     '''
     p = {}
     import os
-    import simplejson
+    from ox.utils import json
     import time
     archivePath = '/Volumes/Rolux Home/Desktop/Data/spiegel.de/Der Spiegel'
     localtime = time.localtime()
@@ -148,7 +148,7 @@ def archiveIssues():
         os.makedirs(dirname)
     filename = '%s/Der Spiegel %d %02d.json' % (dirname, y, w)
     if not os.path.exists(filename):
-        data = simplejson.dumps(issue, ensure_ascii = False)
+        data = json.dumps(issue, ensure_ascii = False)
         f = open(filename, 'w')
         f.write(data)
         f.close()
@@ -193,7 +193,7 @@ def archiveNews():
     this is just an example of an archiving application
     '''
     import os
-    import simplejson
+    from ox.utils import json
    import time
 
     count = {}
@@ -229,7 +229,7 @@ def archiveNews():
         else:
             filename = dirname + '/' + new['url'] + '.json'
         if not os.path.exists(filename) or True:
-            data = simplejson.dumps(new, ensure_ascii = False)
+            data = json.dumps(new, ensure_ascii = False)
             f = open(filename, 'w')
             f.write(data)
             f.close()

View file

@@ -2,7 +2,7 @@
 # vi:si:et:sw=4:sts=4:ts=4
 from urllib import urlencode
-import simplejson
+from ox.utils import json
 from ox.cache import readUrl, readUrlUnicode
 from ox import findRe, decodeHtml
@@ -109,7 +109,7 @@ def find(query, max_results=10):
     data = readUrl(url)
     if not data:
         data = readUrl(url, timeout=0)
-    result = simplejson.loads(data)
+    result = json.loads(data)
     results = []
     if result and 'query' in result:
         for r in result['query']['search']: