# Written by Bram Cohen
# multitracker extensions by John Hoffman
# see LICENSE.txt for license information
#
# Generates BitTorrent metainfo (.torrent) files: make_meta_file() builds a
# metafile for a single file or directory tree, and completedir() builds one
# metafile per entry in a directory.

from os.path import getsize, split, join, abspath, isdir
from os import listdir
from sha import sha
from copy import copy
from string import strip
from BitTornado.bencode import bencode
from btformats import check_info
from threading import Event
from time import time
from traceback import print_exc
try:
    from sys import getfilesystemencoding
    ENCODING = getfilesystemencoding()
except:
    from sys import getdefaultencoding
    ENCODING = getdefaultencoding()
 
defaults = [
    ('announce_list', '',
        'a list of announce URLs - explained below'),
    ('httpseeds', '',
        'a list of http seed URLs - explained below'),
    ('piece_size_pow2', 0,
        "which power of 2 to set the piece size to (0 = automatic)"),
    ('comment', '',
        "optional human-readable comment to put in .torrent"),
    ('filesystem_encoding', '',
        "optional specification for filesystem encoding " +
        "(set automatically in recent Python versions)"),
    ('target', '',
        "optional target file for the torrent")
    ]

default_piece_len_exp = 18

ignore = ['core', 'CVS']
 
def print_announcelist_details():
    print ('    announce_list = optional list of redundant/backup tracker URLs, in the format:')
    print ('           url[,url...][|url[,url...]...]')
    print ('                where URLs separated by commas are all tried first')
    print ('                before the next group of URLs separated by the pipe is checked.')
    print ("                If none is given, it is assumed you don't want one in the metafile.")
    print ('                If announce_list is given, clients which support it')
    print ('                will ignore the <announce> value.')
    print ('           Examples:')
    print ('                http://tracker1.com|http://tracker2.com|http://tracker3.com')
    print ('                     (tries trackers 1-3 in order)')
    print ('                http://tracker1.com,http://tracker2.com,http://tracker3.com')
    print ('                     (tries trackers 1-3 in a randomly selected order)')
    print ('                http://tracker1.com|http://backup1.com,http://backup2.com')
    print ('                     (tries tracker 1 first, then tries between the 2 backups randomly)')
    print ('')
    print ('    httpseeds = optional list of http-seed URLs, in the format:')
    print ('            url[|url...]')
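
# Illustrative example (not part of the original module): an announce_list
# value of
#     'http://tracker1.com|http://backup1.com,http://backup2.com'
# is split on '|' into tiers and on ',' within each tier by make_meta_file(),
# producing
#     [['http://tracker1.com'], ['http://backup1.com', 'http://backup2.com']]
# which is stored under the 'announce-list' key of the metafile.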
 
# Build a .torrent metafile for 'file' (a single file or a directory tree)
# announcing to 'url', and write it to params['target'] or '<file>.torrent'.
def make_meta_file(file, url, params = {}, flag = Event(),
                   progress = lambda x: None, progress_percent = 1):
    if params.has_key('piece_size_pow2'):
        piece_len_exp = params['piece_size_pow2']
    else:
        piece_len_exp = default_piece_len_exp
    if params.has_key('target') and params['target'] != '':
        f = params['target']
    else:
        a, b = split(file)
        if b == '':
            f = a + '.torrent'
        else:
            f = join(a, b + '.torrent')

    if piece_len_exp == 0:  # automatic
        size = calcsize(file)
        if   size > 8L*1024*1024*1024:   # > 8 gig =
            piece_len_exp = 21          #   2 meg pieces
        elif size > 2*1024*1024*1024:   # > 2 gig =
            piece_len_exp = 20          #   1 meg pieces
        elif size > 512*1024*1024:      # > 512M =
            piece_len_exp = 19          #   512K pieces
        elif size > 64*1024*1024:       # > 64M =
            piece_len_exp = 18          #   256K pieces
        elif size > 16*1024*1024:       # > 16M =
            piece_len_exp = 17          #   128K pieces
        elif size > 4*1024*1024:        # > 4M =
            piece_len_exp = 16          #   64K pieces
        else:                           # < 4M =
            piece_len_exp = 15          #   32K pieces
    piece_length = 2 ** piece_len_exp

    encoding = None
    if params.has_key('filesystem_encoding'):
        encoding = params['filesystem_encoding']
    if not encoding:
        encoding = ENCODING
    if not encoding:
        encoding = 'ascii'

    info = makeinfo(file, piece_length, encoding, flag, progress, progress_percent)
    if flag.isSet():
        return
    check_info(info)
    h = open(f, 'wb')
    data = {'info': info, 'announce': strip(url), 'creation date': long(time())}

    if params.has_key('comment') and params['comment']:
        data['comment'] = params['comment']

    if params.has_key('real_announce_list'):    # shortcut for progs calling in from outside
        data['announce-list'] = params['real_announce_list']
    elif params.has_key('announce_list') and params['announce_list']:
        l = []
        for tier in params['announce_list'].split('|'):
            l.append(tier.split(','))
        data['announce-list'] = l

    if params.has_key('real_httpseeds'):    # shortcut for progs calling in from outside
        data['httpseeds'] = params['real_httpseeds']
    elif params.has_key('httpseeds') and params['httpseeds']:
        data['httpseeds'] = params['httpseeds'].split('|')

    h.write(bencode(data))
    h.close()
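
# Illustrative usage sketch (not part of the original module); the file path
# and tracker URL below are hypothetical:
#
#     make_meta_file('/data/example.iso', 'http://tracker.example.com/announce',
#                    {'comment': 'example torrent', 'piece_size_pow2': 0})
#
# This writes '/data/example.iso.torrent' next to the input and picks the
# piece size automatically because piece_size_pow2 is 0.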
 
def calcsize(file):
    if not isdir(file):
        return getsize(file)
    total = 0L
    for s in subfiles(abspath(file)):
        total += getsize(s[1])
    return total


def uniconvertl(l, e):
    r = []
    try:
        for s in l:
            r.append(uniconvert(s, e))
    except UnicodeError:
        raise UnicodeError('bad filename: '+join(l))
    return r

def uniconvert(s, e):
    try:
        s = unicode(s,e)
    except UnicodeError:
        raise UnicodeError('bad filename: '+s)
    return s.encode('utf-8')
 
# Hash 'file' (or every file beneath it, if it is a directory) into fixed-size
# pieces and return the bencodable 'info' dictionary for the metafile.
def makeinfo(file, piece_length, encoding, flag, progress, progress_percent=1):
    file = abspath(file)
    if isdir(file):
        subs = subfiles(file)
        subs.sort()
        pieces = []
        sh = sha()
        done = 0L
        fs = []
        totalsize = 0.0
        totalhashed = 0L
        for p, f in subs:
            totalsize += getsize(f)

        for p, f in subs:
            pos = 0L
            size = getsize(f)
            fs.append({'length': size, 'path': uniconvertl(p, encoding)})
            h = open(f, 'rb')
            while pos < size:
                a = min(size - pos, piece_length - done)
                sh.update(h.read(a))
                if flag.isSet():
                    return
                done += a
                pos += a
                totalhashed += a

                if done == piece_length:
                    pieces.append(sh.digest())
                    done = 0
                    sh = sha()
                if progress_percent:
                    progress(totalhashed / totalsize)
                else:
                    progress(a)
            h.close()
        if done > 0:
            pieces.append(sh.digest())
        return {'pieces': ''.join(pieces),
            'piece length': piece_length, 'files': fs,
            'name': uniconvert(split(file)[1], encoding) }
    else:
        size = getsize(file)
        pieces = []
        p = 0L
        h = open(file, 'rb')
        while p < size:
            x = h.read(min(piece_length, size - p))
            if flag.isSet():
                return
            pieces.append(sha(x).digest())
            p += piece_length
            if p > size:
                p = size
            if progress_percent:
                progress(float(p) / size)
            else:
                progress(min(piece_length, size - p))
        h.close()
        return {'pieces': ''.join(pieces),
            'piece length': piece_length, 'length': size,
            'name': uniconvert(split(file)[1], encoding) }
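
# Worked example (illustrative): with default_piece_len_exp (18) the piece
# length is 2**18 = 262144 bytes, so a 10485760-byte (10 MB) file hashes to
# exactly 40 pieces and the 'pieces' value is 40 * 20 = 800 bytes of
# concatenated SHA-1 digests.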
 
def subfiles(d):
    r = []
    stack = [([], d)]
    while len(stack) > 0:
        p, n = stack.pop()
        if isdir(n):
            for s in listdir(n):
                if s not in ignore and s[:1] != '.':
                    stack.append((copy(p) + [s], join(n, s)))
        else:
            r.append((p, n))
    return r
 
# Create one .torrent per entry of 'dir', reporting per-file progress through
# fc() and overall progress through vc().
def completedir(dir, url, params = {}, flag = Event(),
                vc = lambda x: None, fc = lambda x: None):
    files = listdir(dir)
    files.sort()
    ext = '.torrent'
    if params.has_key('target'):
        target = params['target']
    else:
        target = ''

    togen = []
    for f in files:
        if f[-len(ext):] != ext and (f + ext) not in files:
            togen.append(join(dir, f))

    total = 0
    for i in togen:
        total += calcsize(i)

    subtotal = [0]
    def callback(x, subtotal = subtotal, total = total, vc = vc):
        subtotal[0] += x
        vc(float(subtotal[0]) / total)
    for i in togen:
        fc(i)
        try:
            t = split(i)[-1]
            if t not in ignore and t[0] != '.':
                if target != '':
                    params['target'] = join(target,t+ext)
                make_meta_file(i, url, params, flag, progress = callback, progress_percent = 0)
        except ValueError:
            print_exc()
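
# Illustrative usage sketch (not part of the original module); the directory
# and tracker URL are hypothetical:
#
#     completedir('/data/releases', 'http://tracker.example.com/announce')
#
# This builds one .torrent per entry in /data/releases, skipping dotfiles,
# names listed in `ignore`, existing .torrent files, and files that already
# have a matching .torrent alongside them.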