# Written by Bram Cohen
# see LICENSE.txt for license information

from zurllib import urlopen
from urlparse import urlparse
from BT1.btformats import check_message
from BT1.Choker import Choker
from BT1.Storage import Storage
from BT1.StorageWrapper import StorageWrapper
from BT1.FileSelector import FileSelector
from BT1.Uploader import Upload
from BT1.Downloader import Downloader
from BT1.HTTPDownloader import HTTPDownloader
from BT1.Connecter import Connecter
from RateLimiter import RateLimiter
from BT1.Encrypter import Encoder
from RawServer import RawServer, autodetect_ipv6, autodetect_socket_style
from BT1.Rerequester import Rerequester
from BT1.DownloaderFeedback import DownloaderFeedback
from RateMeasure import RateMeasure
from CurrentRateMeasure import Measure
from BT1.PiecePicker import PiecePicker
from BT1.Statistics import Statistics
from ConfigDir import ConfigDir
from bencode import bencode, bdecode
from natpunch import UPnP_test
from sha import sha
from os import path, makedirs, listdir
from parseargs import parseargs, formatDefinitions, defaultargs
from socket import error as socketerror
from random import seed
from threading import Thread, Event
from clock import clock
from __init__ import createPeerID

try:
    True
except:
    True = 1
    False = 0

defaults = [
    ('max_uploads', 7,
        "the maximum number of uploads to allow at once."),
    ('keepalive_interval', 120.0,
        'number of seconds to pause between sending keepalives'),
    ('download_slice_size', 2 ** 14,
        "How many bytes to query for per request."),
    ('upload_unit_size', 1460,
        "when limiting upload rate, how many bytes to send at a time"),
    ('request_backlog', 10,
        "maximum number of requests to keep in a single pipe at once."),
    ('max_message_length', 2 ** 23,
        "maximum length prefix encoding you'll accept over the wire - larger values get the connection dropped."),
    ('ip', '',
        "ip to report you have to the tracker."),
    ('minport', 10000, 'minimum port to listen on, counts up if unavailable'),
    ('maxport', 60000, 'maximum port to listen on'),
    ('random_port', 1, 'whether to choose randomly inside the port range ' +
        'instead of counting up linearly'),
    ('responsefile', '',
        'file the server response was stored in, alternative to url'),
    ('url', '',
        'url to get file from, alternative to responsefile'),
    ('selector_enabled', 1,
        'whether to enable the file selector and fast resume function'),
    ('expire_cache_data', 10,
        'the number of days after which you wish to expire old cache data ' +
        '(0 = disabled)'),
    ('priority', '',
        'a list of file priorities separated by commas, must be one per file, ' +
        '0 = highest, 1 = normal, 2 = lowest, -1 = download disabled'),
    ('saveas', '',
        'local file name to save the file as, null indicates query user'),
    ('timeout', 300.0,
        'time to wait between closing sockets which nothing has been received on'),
    ('timeout_check_interval', 60.0,
        'time to wait between checking if any connections have timed out'),
    ('max_slice_length', 2 ** 17,
        "maximum length slice to send to peers, larger requests are ignored"),
    ('max_rate_period', 20.0,
        "maximum amount of time to guess the current rate estimate represents"),
    ('bind', '',
        'comma-separated list of ips/hostnames to bind to locally'),
#    ('ipv6_enabled', autodetect_ipv6(),
    ('ipv6_enabled', 0,
         'allow the client to connect to peers via IPv6'),
    ('ipv6_binds_v4', autodetect_socket_style(),
        "set if an IPv6 server socket won't also field IPv4 connections"),
    ('upnp_nat_access', 1,
        'attempt to autoconfigure a UPnP router to forward a server port ' +
        '(0 = disabled, 1 = mode 1 [fast], 2 = mode 2 [slow])'),
    ('upload_rate_fudge', 5.0,
        'time equivalent of writing to kernel-level TCP buffer, for rate adjustment'),
    ('tcp_ack_fudge', 0.03,
        'how much TCP ACK download overhead to add to upload rate calculations ' +
        '(0 = disabled)'),
    ('display_interval', .5,
        'time between updates of displayed information'),
    ('rerequest_interval', 5 * 60,
        'time to wait between requesting more peers'),
    ('min_peers', 20,
        'minimum number of peers to not do rerequesting'),
    ('http_timeout', 60,
        'number of seconds to wait before assuming that an http connection has timed out'),
    ('max_initiate', 40,
        'number of peers at which to stop initiating new connections'),
    ('check_hashes', 1,
        'whether to check hashes on disk'),
    ('max_upload_rate', 0,
        'maximum kB/s to upload at (0 = no limit, -1 = automatic)'),
    ('max_download_rate', 0,
        'maximum kB/s to download at (0 = no limit)'),
    ('alloc_type', 'normal',
        'allocation type (may be normal, background, pre-allocate or sparse)'),
    ('alloc_rate', 2.0,
        'rate (in MiB/s) to allocate space at using background allocation'),
    ('buffer_reads', 1,
        'whether to buffer disk reads'),
    ('write_buffer_size', 4,
        'the maximum amount of space to use for buffering disk writes ' +
        '(in megabytes, 0 = disabled)'),
    ('breakup_seed_bitfield', 1,
        'sends an incomplete bitfield and then fills with have messages, '
        'in order to get around stupid ISP manipulation'),
    ('snub_time', 30.0,
        "seconds to wait for data to come in over a connection before assuming it's semi-permanently choked"),
    ('spew', 0,
        "whether to display diagnostic info to stdout"),
    ('rarest_first_cutoff', 2,
        "number of downloads at which to switch from random to rarest first"),
    ('rarest_first_priority_cutoff', 5,
        'the number of peers which need to have a piece before other partials take priority over rarest first'),
    ('min_uploads', 4,
        "the number of uploads to fill out to with extra optimistic unchokes"),
    ('max_files_open', 50,
        'the maximum number of files to keep open at a time, 0 means no limit'),
    ('round_robin_period', 30,
        "the number of seconds between the client's switching upload targets"),
    ('super_seeder', 0,
        "whether to use special upload-efficiency-maximizing routines (only for dedicated seeds)"),
    ('security', 1,
        "whether to enable extra security features intended to prevent abuse"),
    ('max_connections', 0,
        "the absolute maximum number of peers to connect with (0 = no limit)"),
    ('auto_kick', 1,
        "whether to allow the client to automatically kick/ban peers that send bad data"),
    ('double_check', 1,
        "whether to double-check data being written to the disk for errors (may increase CPU load)"),
    ('triple_check', 0,
        "whether to thoroughly check data being written to the disk (may slow disk access)"),
    ('lock_files', 1,
        "whether to lock files the client is working with"),
    ('lock_while_reading', 0,
        "whether to lock access to files being read"),
    ('auto_flush', 0,
        "minutes between automatic flushes to disk (0 = disabled)"),
    ('dedicated_seed_id', '',
        "code to send to tracker identifying as a dedicated seed"),
    ]

argslistheader = 'Arguments are:\n\n'
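
# Each tuple in 'defaults' above is (option name, default value, description).
# parse_params() below feeds this list to parseargs(), which is how the options
# become command-line flags; so, illustratively, a front end could override the
# 'max_upload_rate' entry with something like:
#
#     --max_upload_rate 30
#
# (This note is descriptive only; the exact flag syntax is whatever parseargs
# implements.)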


def _failfunc(x):
    print x

# old-style downloader
def download(params, filefunc, statusfunc, finfunc, errorfunc, doneflag, cols,
             pathFunc = None, presets = {}, exchandler = None,
             failed = _failfunc, paramfunc = None):

    try:
        config = parse_params(params, presets)
    except ValueError, e:
        failed('error: ' + str(e) + '\nrun with no args for parameter explanations')
        return
    if not config:
        errorfunc(get_usage())
        return

    myid = createPeerID()
    seed(myid)

    rawserver = RawServer(doneflag, config['timeout_check_interval'],
                          config['timeout'], ipv6_enable = config['ipv6_enabled'],
                          failfunc = failed, errorfunc = exchandler)

    upnp_type = UPnP_test(config['upnp_nat_access'])
    try:
        listen_port = rawserver.find_and_bind(config['minport'], config['maxport'],
                        config['bind'], ipv6_socket_style = config['ipv6_binds_v4'],
                        upnp = upnp_type, randomizer = config['random_port'])
    except socketerror, e:
        failed("Couldn't listen - " + str(e))
        return

    response = get_response(config['responsefile'], config['url'], failed)
    if not response:
        return

    infohash = sha(bencode(response['info'])).digest()

    d = BT1Download(statusfunc, finfunc, errorfunc, exchandler, doneflag,
                    config, response, infohash, myid, rawserver, listen_port)

    if not d.saveAs(filefunc):
        return

    if pathFunc:
        pathFunc(d.getFilename())

    hashcheck = d.initFiles(old_style = True)
    if not hashcheck:
        return
    if not hashcheck():
        return
    if not d.startEngine():
        return
    d.startRerequester()
    d.autoStats()

    statusfunc(activity = 'connecting to peers')

    if paramfunc:
        paramfunc({ 'max_upload_rate' : d.setUploadRate,  # change_max_upload_rate(<int KiB/sec>)
                    'max_uploads': d.setConns, # change_max_uploads(<int max uploads>)
                    'listen_port' : listen_port, # int
                    'peer_id' : myid, # string
                    'info_hash' : infohash, # string
                    'start_connection' : d._startConnection, # start_connection((<string ip>, <int port>), <peer id>)
                    })

    rawserver.listen_forever(d.getPortHandler())

    d.shutdown()


def parse_params(params, presets = {}):
    if len(params) == 0:
        return None
    config, args = parseargs(params, defaults, 0, 1, presets = presets)
    if args:
        if config['responsefile'] or config['url']:
            raise ValueError, 'must have responsefile or url as arg or parameter, not both'
        if path.isfile(args[0]):
            config['responsefile'] = args[0]
        else:
            try:
                urlparse(args[0])
            except:
                raise ValueError, 'bad filename or url'
            config['url'] = args[0]
    elif (config['responsefile'] == '') == (config['url'] == ''):
        raise ValueError, 'need responsefile or url, must have one, cannot have both'
    return config


def get_usage(defaults = defaults, cols = 100, presets = {}):
    return (argslistheader + formatDefinitions(defaults, cols, presets))
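
# Illustrative sketch (not used by this module): a front end would typically run
# its argument list through parse_params() and fall back to get_usage() when no
# arguments are given, roughly:
#
#     try:
#         config = parse_params(['--max_upload_rate', '30', 'example.torrent'])
#     except ValueError, e:
#         print 'error: ' + str(e)
#     else:
#         if not config:
#             print get_usage()
#
# 'example.torrent' is a placeholder; the single positional argument may be
# either a responsefile on disk or a url, as parse_params() decides above.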


def get_response(file, url, errorfunc):
    try:
        if file:
            h = open(file, 'rb')
            try:
                line = h.read(10)   # quick test to see if responsefile contains a dict
                front,garbage = line.split(':',1)
                assert front[0] == 'd'
                int(front[1:])
            except:
                errorfunc(file+' is not a valid responsefile')
                return None
            try:
                h.seek(0)
            except:
                try:
                    h.close()
                except:
                    pass
                h = open(file, 'rb')
        else:
            try:
                h = urlopen(url)
            except:
                errorfunc(url+' bad url')
                return None
        response = h.read()

    except IOError, e:
        errorfunc('problem getting response info - ' + str(e))
        return None
    try:
        h.close()
    except:
        pass
    try:
        try:
            response = bdecode(response)
        except:
            errorfunc("warning: bad data in responsefile")
            response = bdecode(response, sloppy=1)
        check_message(response)
    except ValueError, e:
        errorfunc("got bad file info - " + str(e))
        return None

    return response


class BT1Download:
    def __init__(self, statusfunc, finfunc, errorfunc, excfunc, doneflag,
                 config, response, infohash, id, rawserver, port,
                 appdataobj = None):
        self.statusfunc = statusfunc
        self.finfunc = finfunc
        self.errorfunc = errorfunc
        self.excfunc = excfunc
        self.doneflag = doneflag
        self.config = config
        self.response = response
        self.infohash = infohash
        self.myid = id
        self.rawserver = rawserver
        self.port = port

        self.info = self.response['info']
        self.pieces = [self.info['pieces'][x:x+20]
                       for x in xrange(0, len(self.info['pieces']), 20)]
        self.len_pieces = len(self.pieces)
        self.argslistheader = argslistheader
        self.unpauseflag = Event()
        self.unpauseflag.set()
        self.downloader = None
        self.storagewrapper = None
        self.fileselector = None
        self.super_seeding_active = False
        self.filedatflag = Event()
        self.spewflag = Event()
        self.superseedflag = Event()
        self.whenpaused = None
        self.finflag = Event()
        self.rerequest = None
        self.tcp_ack_fudge = config['tcp_ack_fudge']

        self.selector_enabled = config['selector_enabled']
        if appdataobj:
            self.appdataobj = appdataobj
        elif self.selector_enabled:
            self.appdataobj = ConfigDir()
            self.appdataobj.deleteOldCacheData( config['expire_cache_data'],
                                                [self.infohash] )

        self.excflag = self.rawserver.get_exception_flag()
        self.failed = False
        self.checking = False
        self.started = False

        self.picker = PiecePicker(self.len_pieces, config['rarest_first_cutoff'],
                             config['rarest_first_priority_cutoff'])
        self.choker = Choker(config, rawserver.add_task,
                             self.picker, self.finflag.isSet)


    def checkSaveLocation(self, loc):
        if self.info.has_key('length'):
            return path.exists(loc)
        for x in self.info['files']:
            if path.exists(path.join(loc, x['path'][0])):
                return True
        return False

    def saveAs(self, filefunc, pathfunc = None):
        try:
            def make(f, forcedir = False):
                if not forcedir:
                    f = path.split(f)[0]
                if f != '' and not path.exists(f):
                    makedirs(f)

            if self.info.has_key('length'):
                file_length = self.info['length']
                file = filefunc(self.info['name'], file_length,
                                self.config['saveas'], False)
                if file is None:
                    return None
                make(file)
                files = [(file, file_length)]
            else:
                file_length = 0L
                for x in self.info['files']:
                    file_length += x['length']
                file = filefunc(self.info['name'], file_length,
                                self.config['saveas'], True)
                if file is None:
                    return None

                # if this path exists, and no files from the info dict exist, we assume it's a new download and
                # the user wants to create a new directory with the default name
                existing = 0
                if path.exists(file):
                    if not path.isdir(file):
                        self.errorfunc(file + ' is not a dir')
                        return None
                    if len(listdir(file)) > 0:  # if it's not empty
                        for x in self.info['files']:
                            if path.exists(path.join(file, x['path'][0])):
                                existing = 1
                        if not existing:
                            file = path.join(file, self.info['name'])
                            if path.exists(file) and not path.isdir(file):
                                if file[-8:] == '.torrent':
                                    file = file[:-8]
                                if path.exists(file) and not path.isdir(file):
                                    self.errorfunc("Can't create dir - " + self.info['name'])
                                    return None
                make(file, True)

                # alert the UI to any possible change in path
                if pathfunc != None:
                    pathfunc(file)

                files = []
                for x in self.info['files']:
                    n = file
                    for i in x['path']:
                        n = path.join(n, i)
                    files.append((n, x['length']))
                    make(n)
        except OSError, e:
            self.errorfunc("Couldn't allocate dir - " + str(e))
            return None

        self.filename = file
        self.files = files
        self.datalength = file_length

        return file


    def getFilename(self):
        return self.filename


    def _finished(self):
        self.finflag.set()
        try:
            self.storage.set_readonly()
        except (IOError, OSError), e:
            self.errorfunc('trouble setting readonly at end - ' + str(e))
        if self.superseedflag.isSet():
            self._set_super_seed()
        self.choker.set_round_robin_period(
            max( self.config['round_robin_period'],
                 self.config['round_robin_period'] *
                                     self.info['piece length'] / 200000 ) )
        self.rerequest_complete()
        self.finfunc()

    def _data_flunked(self, amount, index):
        self.ratemeasure_datarejected(amount)
        if not self.doneflag.isSet():
            self.errorfunc('piece %d failed hash check, re-downloading it' % index)

    def _failed(self, reason):
        self.failed = True
        self.doneflag.set()
        if reason is not None:
            self.errorfunc(reason)


    def initFiles(self, old_style = False, statusfunc = None):
        if self.doneflag.isSet():
            return None
        if not statusfunc:
            statusfunc = self.statusfunc

        disabled_files = None
        if self.selector_enabled:
            self.priority = self.config['priority']
            if self.priority:
                try:
                    self.priority = self.priority.split(',')
                    assert len(self.priority) == len(self.files)
                    self.priority = [int(p) for p in self.priority]
                    for p in self.priority:
                        assert p >= -1
                        assert p <= 2
                except:
                    self.errorfunc('bad priority list given, ignored')
                    self.priority = None

            data = self.appdataobj.getTorrentData(self.infohash)
            try:
                d = data['resume data']['priority']
                assert len(d) == len(self.files)
                disabled_files = [x == -1 for x in d]
            except:
                try:
                    disabled_files = [x == -1 for x in self.priority]
                except:
                    pass

        try:
            try:
                self.storage = Storage(self.files, self.info['piece length'],
                                       self.doneflag, self.config, disabled_files)
            except IOError, e:
                self.errorfunc('trouble accessing files - ' + str(e))
                return None
            if self.doneflag.isSet():
                return None

            self.storagewrapper = StorageWrapper(self.storage, self.config['download_slice_size'],
                self.pieces, self.info['piece length'], self._finished, self._failed,
                statusfunc, self.doneflag, self.config['check_hashes'],
                self._data_flunked, self.rawserver.add_task,
                self.config, self.unpauseflag)

        except ValueError, e:
            self._failed('bad data - ' + str(e))
        except IOError, e:
            self._failed('IOError - ' + str(e))
        if self.doneflag.isSet():
            return None

        if self.selector_enabled:
            self.fileselector = FileSelector(self.files, self.info['piece length'],
                                             self.appdataobj.getPieceDir(self.infohash),
                                             self.storage, self.storagewrapper,
                                             self.rawserver.add_task,
                                             self._failed)
            if data:
                data = data.get('resume data')
                if data:
                    self.fileselector.unpickle(data)

        self.checking = True
        if old_style:
            return self.storagewrapper.old_style_init()
        return self.storagewrapper.initialize


    def getCachedTorrentData(self):
        return self.appdataobj.getTorrentData(self.infohash)


    def _make_upload(self, connection, ratelimiter, totalup):
        return Upload(connection, ratelimiter, totalup,
                      self.choker, self.storagewrapper, self.picker,
                      self.config)

    def _kick_peer(self, connection):
        def k(connection = connection):
            connection.close()
        self.rawserver.add_task(k,0)

    def _ban_peer(self, ip):
        self.encoder_ban(ip)

    def _received_raw_data(self, x):
        if self.tcp_ack_fudge:
            x = int(x*self.tcp_ack_fudge)
            self.ratelimiter.adjust_sent(x)
#            self.upmeasure.update_rate(x)

    def _received_data(self, x):
        self.downmeasure.update_rate(x)
        self.ratemeasure.data_came_in(x)

    def _received_http_data(self, x):
        self.downmeasure.update_rate(x)
        self.ratemeasure.data_came_in(x)
        self.downloader.external_data_received(x)

    def _cancelfunc(self, pieces):
        self.downloader.cancel_piece_download(pieces)
        self.httpdownloader.cancel_piece_download(pieces)
    def _reqmorefunc(self, pieces):
        self.downloader.requeue_piece_download(pieces)

    def startEngine(self, ratelimiter = None, statusfunc = None):
        if self.doneflag.isSet():
            return False
        if not statusfunc:
            statusfunc = self.statusfunc

        self.checking = False

        for i in xrange(self.len_pieces):
            if self.storagewrapper.do_I_have(i):
                self.picker.complete(i)
        self.upmeasure = Measure(self.config['max_rate_period'],
                            self.config['upload_rate_fudge'])
        self.downmeasure = Measure(self.config['max_rate_period'])

        if ratelimiter:
            self.ratelimiter = ratelimiter
        else:
            self.ratelimiter = RateLimiter(self.rawserver.add_task,
                                           self.config['upload_unit_size'],
                                           self.setConns)
            self.ratelimiter.set_upload_rate(self.config['max_upload_rate'])

        self.ratemeasure = RateMeasure()
        self.ratemeasure_datarejected = self.ratemeasure.data_rejected

        self.downloader = Downloader(self.storagewrapper, self.picker,
            self.config['request_backlog'], self.config['max_rate_period'],
            self.len_pieces, self.config['download_slice_size'],
            self._received_data, self.config['snub_time'], self.config['auto_kick'],
            self._kick_peer, self._ban_peer)
        self.downloader.set_download_rate(self.config['max_download_rate'])
        self.connecter = Connecter(self._make_upload, self.downloader, self.choker,
                            self.len_pieces, self.upmeasure, self.config,
                            self.ratelimiter, self.rawserver.add_task)
        self.encoder = Encoder(self.connecter, self.rawserver,
            self.myid, self.config['max_message_length'], self.rawserver.add_task,
            self.config['keepalive_interval'], self.infohash,
            self._received_raw_data, self.config)
        self.encoder_ban = self.encoder.ban

        self.httpdownloader = HTTPDownloader(self.storagewrapper, self.picker,
            self.rawserver, self.finflag, self.errorfunc, self.downloader,
            self.config['max_rate_period'], self.infohash, self._received_http_data,
            self.connecter.got_piece)
        if self.response.has_key('httpseeds') and not self.finflag.isSet():
            for u in self.response['httpseeds']:
                self.httpdownloader.make_download(u)

        if self.selector_enabled:
            self.fileselector.tie_in(self.picker, self._cancelfunc,
                    self._reqmorefunc, self.rerequest_ondownloadmore)
            if self.priority:
                self.fileselector.set_priorities_now(self.priority)
            self.appdataobj.deleteTorrentData(self.infohash)
                                # erase old data once you've started modifying it
        self.started = True
        return True


    def rerequest_complete(self):
        if self.rerequest:
            self.rerequest.announce(1)

    def rerequest_stopped(self):
        if self.rerequest:
            self.rerequest.announce(2)

    def rerequest_lastfailed(self):
        if self.rerequest:
            return self.rerequest.last_failed
        return False

    def rerequest_ondownloadmore(self):
        if self.rerequest:
            self.rerequest.hit()

    def startRerequester(self, seededfunc = None, force_rapid_update = False):
        if self.response.has_key('announce-list'):
            trackerlist = self.response['announce-list']
        else:
            trackerlist = [[self.response['announce']]]

        self.rerequest = Rerequester(trackerlist, self.config['rerequest_interval'],
            self.rawserver.add_task, self.connecter.how_many_connections,
            self.config['min_peers'], self.encoder.start_connections,
            self.rawserver.add_task, self.storagewrapper.get_amount_left,
            self.upmeasure.get_total, self.downmeasure.get_total, self.port, self.config['ip'],
            self.myid, self.infohash, self.config['http_timeout'],
            self.errorfunc, self.excfunc, self.config['max_initiate'],
            self.doneflag, self.upmeasure.get_rate, self.downmeasure.get_rate,
            self.unpauseflag, self.config['dedicated_seed_id'],
            seededfunc, force_rapid_update )

        self.rerequest.start()


    def _init_stats(self):
        self.statistics = Statistics(self.upmeasure, self.downmeasure,
                    self.connecter, self.httpdownloader, self.ratelimiter,
                    self.rerequest_lastfailed, self.filedatflag)
        if self.info.has_key('files'):
            self.statistics.set_dirstats(self.files, self.info['piece length'])
        if self.config['spew']:
            self.spewflag.set()

    def autoStats(self, displayfunc = None):
        if not displayfunc:
            displayfunc = self.statusfunc

        self._init_stats()
        DownloaderFeedback(self.choker, self.httpdownloader, self.rawserver.add_task,
            self.upmeasure.get_rate, self.downmeasure.get_rate,
            self.ratemeasure, self.storagewrapper.get_stats,
            self.datalength, self.finflag, self.spewflag, self.statistics,
            displayfunc, self.config['display_interval'])

    def startStats(self):
        self._init_stats()
        d = DownloaderFeedback(self.choker, self.httpdownloader, self.rawserver.add_task,
            self.upmeasure.get_rate, self.downmeasure.get_rate,
            self.ratemeasure, self.storagewrapper.get_stats,
            self.datalength, self.finflag, self.spewflag, self.statistics)
        return d.gather


    def getPortHandler(self):
        return self.encoder


    def shutdown(self, torrentdata = {}):
        if self.checking or self.started:
            self.storagewrapper.sync()
            self.storage.close()
            self.rerequest_stopped()
        if self.fileselector and self.started:
            if not self.failed:
                self.fileselector.finish()
                torrentdata['resume data'] = self.fileselector.pickle()
            try:
                self.appdataobj.writeTorrentData(self.infohash,torrentdata)
            except:
                self.appdataobj.deleteTorrentData(self.infohash) # clear it
        return not self.failed and not self.excflag.isSet()
        # if returns false, you may wish to auto-restart the torrent


    def setUploadRate(self, rate):
        try:
            def s(self = self, rate = rate):
                self.config['max_upload_rate'] = rate
                self.ratelimiter.set_upload_rate(rate)
            self.rawserver.add_task(s)
        except AttributeError:
            pass

    def setConns(self, conns, conns2 = None):
        if not conns2:
            conns2 = conns
        try:
            def s(self = self, conns = conns, conns2 = conns2):
                self.config['min_uploads'] = conns
                self.config['max_uploads'] = conns2
                if (conns > 30):
                    self.config['max_initiate'] = conns + 10
            self.rawserver.add_task(s)
        except AttributeError:
            pass

    def setDownloadRate(self, rate):
        try:
            def s(self = self, rate = rate):
                self.config['max_download_rate'] = rate
                self.downloader.set_download_rate(rate)
            self.rawserver.add_task(s)
        except AttributeError:
            pass

    def startConnection(self, ip, port, id):
        self.encoder._start_connection((ip, port), id)

    def _startConnection(self, ipandport, id):
        self.encoder._start_connection(ipandport, id)

    def setInitiate(self, initiate):
        try:
            def s(self = self, initiate = initiate):
                self.config['max_initiate'] = initiate
            self.rawserver.add_task(s)
        except AttributeError:
            pass

    def getConfig(self):
        return self.config

    def getDefaults(self):
        return defaultargs(defaults)

    def getUsageText(self):
        return self.argslistheader

    def reannounce(self, special = None):
        try:
            def r(self = self, special = special):
                if special is None:
                    self.rerequest.announce()
                else:
                    self.rerequest.announce(specialurl = special)
            self.rawserver.add_task(r)
        except AttributeError:
            pass

    def getResponse(self):
        try:
            return self.response
        except:
            return None

#    def Pause(self):
#        try:
#            if self.storagewrapper:
#                self.rawserver.add_task(self._pausemaker, 0)
#        except:
#            return False
#        self.unpauseflag.clear()
#        return True
#
#    def _pausemaker(self):
#        self.whenpaused = clock()
#        self.unpauseflag.wait()   # sticks a monkey wrench in the main thread
#
#    def Unpause(self):
#        self.unpauseflag.set()
#        if self.whenpaused and clock()-self.whenpaused > 60:
#            def r(self = self):
#                self.rerequest.announce(3)      # rerequest automatically if paused for >60 seconds
#            self.rawserver.add_task(r)

    def Pause(self):
        if not self.storagewrapper:
            return False
        self.unpauseflag.clear()
        self.rawserver.add_task(self.onPause)
        return True

    def onPause(self):
        self.whenpaused = clock()
        if not self.downloader:
            return
        self.downloader.pause(True)
        self.encoder.pause(True)
        self.choker.pause(True)

    def Unpause(self):
        self.unpauseflag.set()
        self.rawserver.add_task(self.onUnpause)

    def onUnpause(self):
        if not self.downloader:
            return
        self.downloader.pause(False)
        self.encoder.pause(False)
        self.choker.pause(False)
        if self.rerequest and self.whenpaused and clock()-self.whenpaused > 60:
            self.rerequest.announce(3)      # rerequest automatically if paused for >60 seconds

    def set_super_seed(self):
        try:
            self.superseedflag.set()
            def s(self = self):
                if self.finflag.isSet():
                    self._set_super_seed()
            self.rawserver.add_task(s)
        except AttributeError:
            pass

    def _set_super_seed(self):
        if not self.super_seeding_active:
            self.super_seeding_active = True
            self.errorfunc('        ** SUPER-SEED OPERATION ACTIVE **\n' +
                           '  please set Max uploads so each peer gets 6-8 kB/s')
            def s(self = self):
                self.downloader.set_super_seed()
                self.choker.set_super_seed()
            self.rawserver.add_task(s)
            if self.finflag.isSet():        # mode started when already finished
                def r(self = self):
                    self.rerequest.announce(3)  # so after kicking everyone off, reannounce
                self.rawserver.add_task(r)

    def am_I_finished(self):
        return self.finflag.isSet()

    def get_transfer_stats(self):
        return self.upmeasure.get_total(), self.downmeasure.get_total()
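
# Illustrative sketch only: the BT1Download class is driven the same way the
# old-style download() function above drives it, roughly:
#
#     d = BT1Download(statusfunc, finfunc, errorfunc, excfunc, doneflag,
#                     config, response, infohash, peer_id, rawserver, listen_port)
#     if d.saveAs(filefunc):
#         hashcheck = d.initFiles(old_style = True)
#         if hashcheck and hashcheck() and d.startEngine():
#             d.startRerequester()
#             d.autoStats()
#             rawserver.listen_forever(d.getPortHandler())
#     d.shutdown()
#
# The callbacks (statusfunc, finfunc, filefunc, ...), the decoded response and
# the rawserver/listen_port are assumed to be set up by the caller, exactly as
# in download() above.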