truncateString

Rolux 2008-07-06 17:37:01 +02:00
commit e262b51a62
9 changed files with 23 additions and 21 deletions

View file

@@ -1,13 +1,14 @@
 # -*- coding: utf-8 -*-
 # vi:si:et:sw=4:sts=4:ts=4
-# 2008
-from hashes import *
-from html import *
-from text import *
+# GPL 2008
+from file import *
 from format import *
-import net
+from html import *
+from iso import *
+from text import *
 import cache
+import net
 #only works if BitTornado is installed
 try:
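
After this change the package's top level re-exports the public names of file, format, html, iso and text, and keeps cache and net as plain submodules. A minimal usage sketch follows; the package's own name is not visible in this view, so `oxlib` below is purely illustrative:

    # Illustrative only: 'oxlib' stands in for the actual package name.
    import oxlib

    oxlib.cache.getUrl          # cache stays a submodule via `import cache`
    oxlib.net.DEFAULT_HEADERS   # net is likewise kept as a submodule via `import net`
    oxlib.langEnglishName       # pulled up to the package level by `from iso import *`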

View file

@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 # vi:si:et:sw=4:sts=4:ts=4
-# 2008
+# GPL 2008
 import gzip
 import StringIO
 import os
@@ -16,9 +16,9 @@ import net
 from net import DEFAULT_HEADERS, getEncoding
-_cache_timeout = 30*24*60*60 # default is 30 days
+cache_timeout = 30*24*60*60 # default is 30 days
-def status(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
+def status(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
     '''
     >>> status('http://google.com')
     200
@@ -28,7 +28,7 @@ def status(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
     headers = getHeaders(url, data, headers)
     return int(headers['status'])
-def exists(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
+def exists(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
     '''
     >>> exists('http://google.com')
     True
@@ -40,7 +40,7 @@ def exists(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
         return True
     return False
-def getHeaders(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
+def getHeaders(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
     url_cache_file = "%s.headers" % _getUrlCacheFile(url, data, headers)
     url_headers = _loadUrlCache(url_cache_file, timeout)
     if url_headers:
@@ -50,7 +50,7 @@ def getHeaders(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
         _saveUrlHeaders(url_cache_file, url_headers)
     return url_headers
-def getUrl(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
+def getUrl(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
     url_cache_file = _getUrlCacheFile(url, data, headers)
     result = _loadUrlCache(url_cache_file, timeout)
     if not result:
@@ -65,7 +65,7 @@ def getUrl(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
         _saveUrlCache(url_cache_file, result, url_headers)
     return result
-def getUrlUnicode(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout, _getUrl=getUrl):
+def getUrlUnicode(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout, _getUrl=getUrl):
     data = _getUrl(url, data, headers, timeout)
     encoding = getEncoding(data)
     if not encoding:
@@ -84,7 +84,7 @@ def _getUrlCacheFile(url, data=None, headers=DEFAULT_HEADERS):
     domain = ".".join(urlparse.urlparse(url)[1].split('.')[-2:])
     return os.path.join(_getCacheBase(), domain, url_hash[:2], url_hash[2:4], url_hash[4:6], url_hash)
-def _loadUrlCache(url_cache_file, timeout=_cache_timeout):
+def _loadUrlCache(url_cache_file, timeout=cache_timeout):
     if timeout == 0:
         return None
     if os.path.exists(url_cache_file):
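
The rename from _cache_timeout to cache_timeout only changes the name of the module-level default; the keyword argument and its behaviour are unchanged. A small sketch of calling the cached fetchers after this change, assuming it runs inside the package where `import cache` resolves (as it does in __init__.py), with an illustrative URL:

    # Sketch only; function names and the timeout semantics come from the hunks above.
    import cache

    page = cache.getUrl('http://example.com/')                 # default: reuse entries up to cache_timeout (30 days) old
    fresh = cache.getUrl('http://example.com/', timeout=0)     # timeout=0 makes _loadUrlCache return None, so the URL is re-fetched
    there = cache.exists('http://example.com/', timeout=3600)  # only accept cache entries younger than an hour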

View file

@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 # vi:si:et:sw=4:sts=4:ts=4
-# GPL written 2008 by j@pad.ma
+# GPL 2008
 import sha
 import os

View file

@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 # vi:si:et:sw=4:sts=4:ts=4
-# GPL written 2008 by j@pad.ma
+# GPL 2008
 import re
 import string
 from htmlentitydefs import name2codepoint

View file

@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 # vi:si:et:sw=4:sts=4:ts=4
+# GPL 2008
 _iso639_languages = [
     ("Unknown", "", "", "und"),
@@ -227,7 +227,7 @@ def langCode2To3(code):
 def langCode3To2(code):
     langTo2Code(codeToLang(code))
-def englishName(lang):
+def langEnglishName(lang):
     lang = lang.lower()
     for l in _iso639_languages:
         if l[1].lower() == lang:
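
Since englishName becomes langEnglishName here, any caller of the old name has to be updated; the lookup logic itself is untouched. A minimal sketch, with an illustrative argument, assuming it runs next to iso.py so the Python 2 relative import resolves:

    # Sketch only; the new name comes from the hunk above.
    from iso import langEnglishName    # was: from iso import englishName

    name = langEnglishName('Deutsch')  # the argument is lowercased and compared against field 1 of _iso639_languages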

View file

@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 # vi:si:et:sw=4:sts=4:ts=4
+# GPL 2008
 import gzip
 import StringIO
 import urllib

View file

@@ -1,6 +1,6 @@
-# -*- Mode: Python; -*-
 # -*- coding: utf-8 -*-
 # vi:si:et:sw=4:sts=4:ts=4
+# GPL 2008
 import re
 _articles = ('the', 'la', 'a', 'die', 'der', 'le', 'el',

View file

@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 # vi:si:et:sw=4:sts=4:ts=4
-# GPL written 2008 by j@pad.ma
+# GPL 2008
 import math
 import re

View file

@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 # vi:si:et:sw=4:sts=4:ts=4
-# Written 2007 by j@mailb.org
+# GPL 2007
 from threading import Event
 import sha