# NOTE(review): removed repository web-viewer navigation residue
# ("Blame | Last modification | View Log | Download") — it was viewer
# chrome captured with the source, not part of the module.
# Written by Bram Cohen# multitracker extensions by John Hoffman# see LICENSE.txt for license informationfrom os.path import getsize, split, join, abspath, isdirfrom os import listdirfrom sha import shafrom copy import copyfrom string import stripfrom BitTornado.bencode import bencodefrom btformats import check_infofrom threading import Eventfrom time import timefrom traceback import print_exctry:from sys import getfilesystemencodingENCODING = getfilesystemencoding()except:from sys import getdefaultencodingENCODING = getdefaultencoding()defaults = [('announce_list', '','a list of announce URLs - explained below'),('httpseeds', '','a list of http seed URLs - explained below'),('piece_size_pow2', 0,"which power of 2 to set the piece size to (0 = automatic)"),('comment', '',"optional human-readable comment to put in .torrent"),('filesystem_encoding', '',"optional specification for filesystem encoding " +"(set automatically in recent Python versions)"),('target', '',"optional target file for the torrent")]default_piece_len_exp = 18ignore = ['core', 'CVS']def print_announcelist_details():print (' announce_list = optional list of redundant/backup tracker URLs, in the format:')print (' url[,url...][|url[,url...]...]')print (' where URLs separated by commas are all tried first')print (' before the next group of URLs separated by the pipe is checked.')print (" If none is given, it is assumed you don't want one in the metafile.")print (' If announce_list is given, clients which support it')print (' will ignore the <announce> value.')print (' Examples:')print (' http://tracker1.com|http://tracker2.com|http://tracker3.com')print (' (tries trackers 1-3 in order)')print (' http://tracker1.com,http://tracker2.com,http://tracker3.com')print (' (tries trackers 1-3 in a randomly selected order)')print (' http://tracker1.com|http://backup1.com,http://backup2.com')print (' (tries tracker 1 first, then tries between the 2 backups randomly)')print ('')print (' httpseeds = optional list 
of http-seed URLs, in the format:')print (' url[|url...]')def make_meta_file(file, url, params = {}, flag = Event(),progress = lambda x: None, progress_percent = 1):if params.has_key('piece_size_pow2'):piece_len_exp = params['piece_size_pow2']else:piece_len_exp = default_piece_len_expif params.has_key('target') and params['target'] != '':f = params['target']else:a, b = split(file)if b == '':f = a + '.torrent'else:f = join(a, b + '.torrent')if piece_len_exp == 0: # automaticsize = calcsize(file)if size > 8L*1024*1024*1024: # > 8 gig =piece_len_exp = 21 # 2 meg pieceselif size > 2*1024*1024*1024: # > 2 gig =piece_len_exp = 20 # 1 meg pieceselif size > 512*1024*1024: # > 512M =piece_len_exp = 19 # 512K pieceselif size > 64*1024*1024: # > 64M =piece_len_exp = 18 # 256K pieceselif size > 16*1024*1024: # > 16M =piece_len_exp = 17 # 128K pieceselif size > 4*1024*1024: # > 4M =piece_len_exp = 16 # 64K pieceselse: # < 4M =piece_len_exp = 15 # 32K piecespiece_length = 2 ** piece_len_expencoding = Noneif params.has_key('filesystem_encoding'):encoding = params['filesystem_encoding']if not encoding:encoding = ENCODINGif not encoding:encoding = 'ascii'info = makeinfo(file, piece_length, encoding, flag, progress, progress_percent)if flag.isSet():returncheck_info(info)h = open(f, 'wb')data = {'info': info, 'announce': strip(url), 'creation date': long(time())}if params.has_key('comment') and params['comment']:data['comment'] = params['comment']if params.has_key('real_announce_list'): # shortcut for progs calling in from outsidedata['announce-list'] = params['real_announce_list']elif params.has_key('announce_list') and params['announce_list']:l = []for tier in params['announce_list'].split('|'):l.append(tier.split(','))data['announce-list'] = lif params.has_key('real_httpseeds'): # shortcut for progs calling in from outsidedata['httpseeds'] = params['real_httpseeds']elif params.has_key('httpseeds') and params['httpseeds']:data['httpseeds'] = 
params['httpseeds'].split('|')h.write(bencode(data))h.close()def calcsize(file):if not isdir(file):return getsize(file)total = 0Lfor s in subfiles(abspath(file)):total += getsize(s[1])return totaldef uniconvertl(l, e):r = []try:for s in l:r.append(uniconvert(s, e))except UnicodeError:raise UnicodeError('bad filename: '+join(l))return rdef uniconvert(s, e):try:s = unicode(s,e)except UnicodeError:raise UnicodeError('bad filename: '+s)return s.encode('utf-8')def makeinfo(file, piece_length, encoding, flag, progress, progress_percent=1):file = abspath(file)if isdir(file):subs = subfiles(file)subs.sort()pieces = []sh = sha()done = 0Lfs = []totalsize = 0.0totalhashed = 0Lfor p, f in subs:totalsize += getsize(f)for p, f in subs:pos = 0Lsize = getsize(f)fs.append({'length': size, 'path': uniconvertl(p, encoding)})h = open(f, 'rb')while pos < size:a = min(size - pos, piece_length - done)sh.update(h.read(a))if flag.isSet():returndone += apos += atotalhashed += aif done == piece_length:pieces.append(sh.digest())done = 0sh = sha()if progress_percent:progress(totalhashed / totalsize)else:progress(a)h.close()if done > 0:pieces.append(sh.digest())return {'pieces': ''.join(pieces),'piece length': piece_length, 'files': fs,'name': uniconvert(split(file)[1], encoding) }else:size = getsize(file)pieces = []p = 0Lh = open(file, 'rb')while p < size:x = h.read(min(piece_length, size - p))if flag.isSet():returnpieces.append(sha(x).digest())p += piece_lengthif p > size:p = sizeif progress_percent:progress(float(p) / size)else:progress(min(piece_length, size - p))h.close()return {'pieces': ''.join(pieces),'piece length': piece_length, 'length': size,'name': uniconvert(split(file)[1], encoding) }def subfiles(d):r = []stack = [([], d)]while len(stack) > 0:p, n = stack.pop()if isdir(n):for s in listdir(n):if s not in ignore and s[:1] != '.':stack.append((copy(p) + [s], join(n, s)))else:r.append((p, n))return rdef completedir(dir, url, params = {}, flag = Event(),vc = lambda x: None, fc = 
lambda x: None):files = listdir(dir)files.sort()ext = '.torrent'if params.has_key('target'):target = params['target']else:target = ''togen = []for f in files:if f[-len(ext):] != ext and (f + ext) not in files:togen.append(join(dir, f))total = 0for i in togen:total += calcsize(i)subtotal = [0]def callback(x, subtotal = subtotal, total = total, vc = vc):subtotal[0] += xvc(float(subtotal[0]) / total)for i in togen:fc(i)try:t = split(i)[-1]if t not in ignore and t[0] != '.':if target != '':params['target'] = join(target,t+ext)make_meta_file(i, url, params, flag, progress = callback, progress_percent = 0)except ValueError:print_exc()