make ox.torrent work in Python 2 and 3

j 2014-10-01 11:21:11 +02:00
parent 8bfbaef598
commit 53fbc2e1fb
4 changed files with 235 additions and 127 deletions

@@ -5,15 +5,19 @@
 from threading import Event
 from hashlib import sha1
 import os
-from .bencode import bencode, bdecode
+from six import PY2
+if PY2:
+    from .bencode import bencode, bdecode
+else:
+    from .bencode3 import bencode, bdecode
 
 __all__ = ['create_torrent', 'get_info_hash', 'get_torrent_info', 'get_files', 'get_torrent_size']
 
 def create_torrent(file, url, params = {}, flag = Event(),
                    progress = lambda x: None, progress_percent = 1):
     "Creates a torrent for a given file, using url as tracker url"
-    from makemetafile import make_meta_file
+    from .makemetafile import make_meta_file
     return make_meta_file(file, url, params, flag, progress, progress_percent)
 
 def get_info_hash(torrentFile):
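As an editorial aside, a minimal sketch of how the wrapper in this hunk is called. The import path assumes this first hunk is the package's __init__.py, and the file path and tracker URL are placeholders, not taken from the commit:

from ox.torrent import create_torrent

# Forwards to make_meta_file(); unless params['target'] is set, the output
# path is derived from the input file (see makemetafile.py below).
create_torrent('/path/to/video.avi', 'http://tracker.example.org:6969/announce')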

ox/torrent/bencode3.py (new file, 151 lines)

@@ -0,0 +1,151 @@
##
#
# bencode.py python3 compatible bencode / bdecode
#
##

def _decode_int(data):
    """
    decode integer from bytearray
    return int, remaining data
    """
    data = data[1:]
    end = data.index(b'e')
    return int(data[:end], 10), data[end+1:]

def _decode_str(data):
    """
    decode string from bytearray
    return string, remaining data
    """
    start = data.index(b':')
    l = int(data[:start].decode(), 10)
    if l <= 0:
        raise Exception('invalid string size: %d' % l)
    start += 1
    ret = bytes(data[start:start+l])
    data = data[start+l:]
    return ret, data

def _decode_list(data):
    """
    decode list from bytearray
    return list, remaining data
    """
    ls = []
    data = data[1:]
    while data[0] != ord(b'e'):
        elem, data = _decode(data)
        ls.append(elem)
    return ls, data[1:]

def _decode_dict(data):
    """
    decode dict from bytearray
    return dict, remaining data
    """
    d = {}
    data = data[1:]
    while data[0] != ord(b'e'):
        k, data = _decode_str(data)
        v, data = _decode(data)
        d[k.decode()] = v
    return d, data[1:]

def _decode(data):
    """
    decode a bytearray
    return deserialized object, remaining data
    """
    ch = chr(data[0])
    if ch == 'l':
        return _decode_list(data)
    elif ch == 'i':
        return _decode_int(data)
    elif ch == 'd':
        return _decode_dict(data)
    elif ch.isdigit():
        return _decode_str(data)
    else:
        raise Exception('could not deserialize data: %s' % data)

def bdecode(data):
    """
    decode a bytearray
    return deserialized object
    """
    obj, data = _decode(data)
    if len(data) > 0:
        raise Exception('failed to deserialize, extra data: %s' % data)
    return obj

def _encode_str(s, buff):
    """
    encode string to a buffer
    """
    s = bytearray(s)
    l = len(s)
    buff.append(bytearray(str(l) + ':', 'utf-8'))
    buff.append(s)

def _encode_int(i, buff):
    """
    encode integer to a buffer
    """
    buff.append(b'i')
    buff.append(bytearray(str(i), 'ascii'))
    buff.append(b'e')

def _encode_list(l, buff):
    """
    encode list of elements to a buffer
    """
    buff.append(b'l')
    for i in l:
        _encode(i, buff)
    buff.append(b'e')

def _encode_dict(d, buff):
    """
    encode dict
    """
    buff.append(b'd')
    l = list(d.keys())
    l.sort()
    for k in l:
        _encode(str(k), buff)
        _encode(d[k], buff)
    buff.append(b'e')

def _encode(obj, buff):
    """
    encode element obj to a buffer buff
    """
    if isinstance(obj, str):
        _encode_str(bytearray(obj, 'utf-8'), buff)
    elif isinstance(obj, bytes):
        _encode_str(bytearray(obj), buff)
    elif isinstance(obj, bytearray):
        _encode_str(obj, buff)
    elif str(obj).isdigit():
        _encode_int(obj, buff)
    elif isinstance(obj, list):
        _encode_list(obj, buff)
    elif hasattr(obj, 'keys') and hasattr(obj, 'values'):
        _encode_dict(obj, buff)
    elif str(obj) in ['True', 'False']:
        _encode_int(int(obj and '1' or '0'), buff)
    else:
        raise Exception('non serializable object: %s' % obj)

def bencode(obj):
    """
    bencode element, return bytearray
    """
    buff = []
    _encode(obj, buff)
    ret = bytearray()
    for ba in buff:
        ret += ba
    return bytes(ret)
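A quick round-trip sketch of the new module (editorial, not part of the commit; Python 3 only). Note the asymmetry visible in the code above: bdecode() returns dict keys as str but string values as bytes.

from ox.torrent.bencode3 import bencode, bdecode

raw = bencode({'spam': 'eggs', 'count': 3})
print(raw)           # b'd5:counti3e4:spam4:eggse' (dict keys are sorted)
print(bdecode(raw))  # {'count': 3, 'spam': b'eggs'}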

@@ -1,100 +0,0 @@
# Written by Bram Cohen
# see LICENSE.txt for license information

from types import StringType, LongType, IntType, ListType, DictType
from re import compile

reg = compile(r'^[^/\\.~][^/\\]*$')

ints = (LongType, IntType)

def check_info(info):
    if type(info) != DictType:
        raise ValueError, 'bad metainfo - not a dictionary'
    pieces = info.get('pieces')
    if type(pieces) != StringType or len(pieces) % 20 != 0:
        raise ValueError, 'bad metainfo - bad pieces key'
    piecelength = info.get('piece length')
    if type(piecelength) not in ints or piecelength <= 0:
        raise ValueError, 'bad metainfo - illegal piece length'
    name = info.get('name')
    if type(name) != StringType:
        raise ValueError, 'bad metainfo - bad name'
    if not reg.match(name):
        raise ValueError, 'name %s disallowed for security reasons' % name
    if info.has_key('files') == info.has_key('length'):
        raise ValueError, 'single/multiple file mix'
    if info.has_key('length'):
        length = info.get('length')
        if type(length) not in ints or length < 0:
            raise ValueError, 'bad metainfo - bad length'
    else:
        files = info.get('files')
        if type(files) != ListType:
            raise ValueError
        for f in files:
            if type(f) != DictType:
                raise ValueError, 'bad metainfo - bad file value'
            length = f.get('length')
            if type(length) not in ints or length < 0:
                raise ValueError, 'bad metainfo - bad length'
            path = f.get('path')
            if type(path) != ListType or path == []:
                raise ValueError, 'bad metainfo - bad path'
            for p in path:
                if type(p) != StringType:
                    raise ValueError, 'bad metainfo - bad path dir'
                if not reg.match(p):
                    raise ValueError, 'path %s disallowed for security reasons' % p
        for i in xrange(len(files)):
            for j in xrange(i):
                if files[i]['path'] == files[j]['path']:
                    raise ValueError, 'bad metainfo - duplicate path'

def check_message(message):
    if type(message) != DictType:
        raise ValueError
    check_info(message.get('info'))
    if type(message.get('announce')) != StringType:
        raise ValueError

def check_peers(message):
    if type(message) != DictType:
        raise ValueError
    if message.has_key('failure reason'):
        if type(message['failure reason']) != StringType:
            raise ValueError
        return
    peers = message.get('peers')
    if type(peers) == ListType:
        for p in peers:
            if type(p) != DictType:
                raise ValueError
            if type(p.get('ip')) != StringType:
                raise ValueError
            port = p.get('port')
            if type(port) not in ints or p <= 0:
                raise ValueError
            if p.has_key('peer id'):
                id = p['peer id']
                if type(id) != StringType or len(id) != 20:
                    raise ValueError
    elif type(peers) != StringType or len(peers) % 6 != 0:
        raise ValueError
    interval = message.get('interval', 1)
    if type(interval) not in ints or interval <= 0:
        raise ValueError
    minint = message.get('min interval', 1)
    if type(minint) not in ints or minint <= 0:
        raise ValueError
    if type(message.get('tracker id', '')) != StringType:
        raise ValueError
    npeers = message.get('num peers', 0)
    if type(npeers) not in ints or npeers < 0:
        raise ValueError
    dpeers = message.get('done peers', 0)
    if type(dpeers) not in ints or dpeers < 0:
        raise ValueError
    last = message.get('last', 0)
    if type(last) not in ints or last < 0:
        raise ValueError

@@ -6,9 +6,13 @@ from os.path import getsize, split, join, abspath, isdir
 from os import listdir
 from hashlib import sha1 as sha
 from copy import copy
-from string import strip
-from bencode import bencode
-from btformats import check_info
+import re
+
+from six import PY2
+if PY2:
+    from .bencode import bencode
+else:
+    from .bencode3 import bencode
 from threading import Event
 from time import time
 from traceback import print_exc
@@ -58,13 +62,62 @@ def print_announcelist_details():
     print (' httpseeds = optional list of http-seed URLs, in the format:')
     print (' url[|url...]')
 
+reg = re.compile(r'^[^/\\.~][^/\\]*$')
+
+def is_number(value):
+    return isinstance(value, int) or isinstance(value, float)
+
+def check_info(info):
+    if not isinstance(info, dict):
+        raise ValueError('bad metainfo - not a dictionary')
+    pieces = info.get('pieces')
+    if not isinstance(pieces, bytes) or len(pieces) % 20 != 0:
+        raise ValueError('bad metainfo - bad pieces key')
+    piecelength = info.get('piece length')
+    if not is_number(piecelength) or piecelength <= 0:
+        raise ValueError('bad metainfo - illegal piece length')
+    name = info.get('name')
+    if not isinstance(name, bytes):
+        raise ValueError('bad metainfo - bad name')
+    if not reg.match(name.decode('utf-8')):
+        raise ValueError('name %s disallowed for security reasons' % name)
+    if ('files' in info) == ('length' in info):
+        raise ValueError('single/multiple file mix')
+    if 'length' in info:
+        length = info.get('length')
+        if not is_number(length) or length < 0:
+            raise ValueError('bad metainfo - bad length')
+    else:
+        files = info.get('files')
+        if not isinstance(files, list):
+            raise ValueError
+        for f in files:
+            if not isinstance(f, dict):
+                raise ValueError('bad metainfo - bad file value')
+            length = f.get('length')
+            if not is_number(length) or length < 0:
+                raise ValueError('bad metainfo - bad length')
+            path = f.get('path')
+            if not isinstance(path, list) or path == []:
+                raise ValueError('bad metainfo - bad path')
+            for p in path:
+                if not isinstance(p, bytes):
+                    raise ValueError('bad metainfo - bad path dir')
+                if not reg.match(p.decode('utf-8')):
+                    raise ValueError('path %s disallowed for security reasons' % p)
+        for i in range(len(files)):
+            for j in range(i):
+                if files[i]['path'] == files[j]['path']:
+                    raise ValueError('bad metainfo - duplicate path')
+
 def make_meta_file(file, url, params = {}, flag = Event(),
                    progress = lambda x: None, progress_percent = 1):
-    if params.has_key('piece_size_pow2'):
+    if 'piece_size_pow2' in params:
         piece_len_exp = params['piece_size_pow2']
     else:
         piece_len_exp = default_piece_len_exp
-    if params.has_key('target') and params['target'] != '':
+    if 'target' in params and params['target'] != '':
         f = params['target']
     else:
         a, b = split(file)
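As an editorial aside, here is a minimal single-file info dict that the check_info() added above accepts; all field values are made-up placeholders:

info = {
    'pieces': b'\x00' * 20,    # one 20-byte SHA1 digest, so len(pieces) % 20 == 0
    'piece length': 2 ** 18,   # positive number
    'name': b'example.avi',    # bytes; must pass the path-safety regex
    'length': 123456,          # single-file mode: 'length' present, 'files' absent
}
check_info(info)               # raises ValueError on malformed metainfo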
@@ -75,7 +128,7 @@ def make_meta_file(file, url, params = {}, flag = Event(),
     if piece_len_exp == 0:  # automatic
         size = calcsize(file)
-        if size > 8L*1024*1024*1024:   # > 8 gig =
+        if size > 8*1024*1024*1024:    # > 8 gig =
             piece_len_exp = 21         # 2 meg pieces
         elif size > 2*1024*1024*1024:  # > 2 gig =
             piece_len_exp = 20         # 1 meg pieces
@@ -92,7 +145,7 @@ def make_meta_file(file, url, params = {}, flag = Event(),
     piece_length = 2 ** piece_len_exp
 
     encoding = None
-    if params.has_key('filesystem_encoding'):
+    if 'filesystem_encoding' in params:
         encoding = params['filesystem_encoding']
     if not encoding:
         encoding = ENCODING
@@ -104,28 +157,28 @@ def make_meta_file(file, url, params = {}, flag = Event(),
         return
     check_info(info)
     h = open(f, 'wb')
-    data = {'info': info, 'announce': strip(url), 'creation date': long(time())}
-    if params.has_key('comment') and params['comment']:
+    data = {'info': info, 'announce': url.strip(), 'creation date': int(time())}
+    if 'comment' in params and params['comment']:
         data['comment'] = params['comment']
-    if params.has_key('real_announce_list'):    # shortcut for progs calling in from outside
+    if 'real_announce_list' in params:    # shortcut for progs calling in from outside
         data['announce-list'] = params['real_announce_list']
-    elif params.has_key('announce_list') and params['announce_list']:
+    elif 'announce_list' in params and params['announce_list']:
         l = []
         for tier in params['announce_list'].split('|'):
             l.append(tier.split(','))
         data['announce-list'] = l
-    if params.has_key('real_httpseeds'):    # shortcut for progs calling in from outside
+    if 'real_httpseeds' in params:    # shortcut for progs calling in from outside
         data['httpseeds'] = params['real_httpseeds']
-    elif params.has_key('httpseeds') and params['httpseeds']:
+    elif 'httpseeds' in params and params['httpseeds']:
         data['httpseeds'] = params['httpseeds'].split('|')
-    if params.has_key('url-list') and params['url-list']:
+    if 'url-list' in params and params['url-list']:
         data['url-list'] = params['url-list'].split('|')
-    if params.has_key('playtime') and params['playtime']:
+    if 'playtime' in params and params['playtime']:
         data['info']['playtime'] = params['playtime']
     h.write(bencode(data))
@@ -134,7 +187,7 @@ def make_meta_file(file, url, params = {}, flag = Event(),
 def calcsize(file):
     if not isdir(file):
         return getsize(file)
-    total = 0L
+    total = 0
     for s in subfiles(abspath(file)):
         total += getsize(s[1])
     return total
@@ -151,8 +204,8 @@ def uniconvertl(l, e):
 def uniconvert(s, e):
     try:
-        if s.__class__.__name__ != 'unicode':
-            s = unicode(s,e)
+        if isinstance(s, bytes):
+            s = s.decode(e)
     except UnicodeError:
         raise UnicodeError('bad filename: '+s)
     return s.encode('utf-8')
@@ -164,15 +217,15 @@ def makeinfo(file, piece_length, encoding, flag, progress, progress_percent=1):
         subs.sort()
         pieces = []
         sh = sha()
-        done = 0L
+        done = 0
         fs = []
         totalsize = 0.0
-        totalhashed = 0L
+        totalhashed = 0
         for p, f in subs:
             totalsize += getsize(f)
 
         for p, f in subs:
-            pos = 0L
+            pos = 0
             size = getsize(f)
             fs.append({'length': size, 'path': uniconvertl(p, encoding)})
             h = open(f, 'rb')
@@ -196,13 +249,13 @@ def makeinfo(file, piece_length, encoding, flag, progress, progress_percent=1):
             h.close()
         if done > 0:
             pieces.append(sh.digest())
-        return {'pieces': ''.join(pieces),
+        return {'pieces': b''.join(pieces),
                 'piece length': piece_length, 'files': fs,
                 'name': uniconvert(split(file)[1], encoding) }
     else:
         size = getsize(file)
         pieces = []
-        p = 0L
+        p = 0
         h = open(file, 'rb')
         while p < size:
             x = h.read(min(piece_length, size - p))
@@ -217,7 +270,7 @@ def makeinfo(file, piece_length, encoding, flag, progress, progress_percent=1):
             else:
                 progress(min(piece_length, size - p))
         h.close()
-        return {'pieces': ''.join(pieces),
+        return {'pieces': b''.join(pieces),
                 'piece length': piece_length, 'length': size,
                 'name': uniconvert(split(file)[1], encoding) }
@@ -240,7 +293,7 @@ def completedir(dir, url, params = {}, flag = Event(),
     files = listdir(dir)
     files.sort()
     ext = '.torrent'
-    if params.has_key('target'):
+    if 'target' in params:
         target = params['target']
     else:
         target = ''
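Finally, a usage sketch for the rewritten module (editorial; the paths, tracker URL, and piece-size choice are assumptions, not part of the commit). make_meta_file() hashes a single file, while completedir() in the last hunk applies the same process to every file in a directory.

from threading import Event
from ox.torrent.makemetafile import make_meta_file

# Hash the file into 2**18-byte (256 KiB) pieces and write the .torrent;
# with no 'target' param, the output path is derived from the input file.
make_meta_file('/data/show.avi', 'http://tracker.example.org:6969/announce',
               params={'piece_size_pow2': 18, 'comment': 'test torrent'},
               flag=Event(), progress=lambda done: None)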