Subversion Repositories svnkaklik

Rev

Details | Last modification | View Log

Rev Author Line No. Line
36 kaklik 1
# Written by Bram Cohen
2
# see LICENSE.txt for license information
3
 
4
from BitTornado.parseargs import parseargs, formatDefinitions
5
from BitTornado.RawServer import RawServer, autodetect_ipv6, autodetect_socket_style
6
from BitTornado.HTTPHandler import HTTPHandler, months, weekdays
7
from BitTornado.parsedir import parsedir
8
from NatCheck import NatCheck
9
from T2T import T2TList
10
from BitTornado.subnetparse import IP_List, ipv6_to_ipv4, to_ipv4, is_valid_ip, is_ipv4
11
from BitTornado.iprangeparse import IP_List as IP_Range_List
12
from BitTornado.torrentlistparse import parsetorrentlist
13
from threading import Event, Thread
14
from BitTornado.bencode import bencode, bdecode, Bencached
15
from BitTornado.zurllib import urlopen, quote, unquote
16
from Filter import Filter
17
from urlparse import urlparse
18
from os import rename, getpid
19
from os.path import exists, isfile
20
from cStringIO import StringIO
21
from traceback import print_exc
22
from time import time, gmtime, strftime, localtime
23
from BitTornado.clock import clock
24
from random import shuffle, seed, randrange
25
from sha import sha
26
from types import StringType, IntType, LongType, ListType, DictType
27
from binascii import b2a_hex, a2b_hex, a2b_base64
28
from string import lower
29
import sys, os
30
import signal
31
import re
32
import BitTornado.__init__
33
from BitTornado.__init__ import version, createPeerID
34
try:
35
    True
36
except:
37
    True = 1
38
    False = 0
39
 
40
# Option table consumed by BitTornado.parseargs.parseargs():
# each entry is (option name, default value, help text).
defaults = [
    ('port', 80, "Port to listen on."),
    ('dfile', None, 'file to store recent downloader info in'),
    ('bind', '', 'comma-separated list of ips/hostnames to bind to locally'),
#    ('ipv6_enabled', autodetect_ipv6(),
    ('ipv6_enabled', 0,
         'allow the client to connect to peers via IPv6'),
    ('ipv6_binds_v4', autodetect_socket_style(),
        'set if an IPv6 server socket will also field IPv4 connections'),
    ('socket_timeout', 15, 'timeout for closing connections'),
    ('save_dfile_interval', 5 * 60, 'seconds between saving dfile'),
    ('timeout_downloaders_interval', 45 * 60, 'seconds between expiring downloaders'),
    ('reannounce_interval', 30 * 60, 'seconds downloaders should wait between reannouncements'),
    ('response_size', 50, 'number of peers to send in an info message'),
    ('timeout_check_interval', 5,
        'time to wait between checking if any connections have timed out'),
    ('nat_check', 3,
        "how many times to check if a downloader is behind a NAT (0 = don't check)"),
    ('log_nat_checks', 0,
        "whether to add entries to the log for nat-check results"),
    ('min_time_between_log_flushes', 3.0,
        'minimum time it must have been since the last flush to do another one'),
    ('min_time_between_cache_refreshes', 600.0,
        'minimum time in seconds before a cache is considered stale and is flushed'),
    ('allowed_dir', '', 'only allow downloads for .torrents in this dir'),
    ('allowed_list', '', 'only allow downloads for hashes in this list (hex format, one per line)'),
    ('allowed_controls', 0, 'allow special keys in torrents in the allowed_dir to affect tracker access'),
    ('multitracker_enabled', 0, 'whether to enable multitracker operation'),
    ('multitracker_allowed', 'autodetect', 'whether to allow incoming tracker announces (can be none, autodetect or all)'),
    ('multitracker_reannounce_interval', 2 * 60, 'seconds between outgoing tracker announces'),
    ('multitracker_maxpeers', 20, 'number of peers to get in a tracker announce'),
    ('aggregate_forward', '', 'format: <url>[,<password>] - if set, forwards all non-multitracker to this url with this optional password'),
    ('aggregator', '0', 'whether to act as a data aggregator rather than a tracker.  If enabled, may be 1, or <password>; ' +
             'if password is set, then an incoming password is required for access'),
    ('hupmonitor', 0, 'whether to reopen the log file upon receipt of HUP signal'),
    ('http_timeout', 60,
        'number of seconds to wait before assuming that an http connection has timed out'),
    ('parse_dir_interval', 60, 'seconds between reloading of allowed_dir or allowed_file ' +
             'and allowed_ips and banned_ips lists'),
    ('show_infopage', 1, "whether to display an info page when the tracker's root dir is loaded"),
    ('infopage_redirect', '', 'a URL to redirect the info page to'),
    ('show_names', 1, 'whether to display names from allowed dir'),
    ('favicon', '', 'file containing x-icon data to return when browser requests favicon.ico'),
    ('allowed_ips', '', 'only allow connections from IPs specified in the given file; '+
             'file contains subnet data in the format: aa.bb.cc.dd/len'),
    ('banned_ips', '', "don't allow connections from IPs specified in the given file; "+
             'file contains IP range data in the format: xxx:xxx:ip1-ip2'),
    ('only_local_override_ip', 2, "ignore the ip GET parameter from machines which aren't on local network IPs " +
             "(0 = never, 1 = always, 2 = ignore if NAT checking is not enabled)"),
    ('logfile', '', 'file to write the tracker logs, use - for stdout (default)'),
    ('allow_get', 0, 'use with allowed_dir; adds a /file?hash={hash} url that allows users to download the torrent file'),
    ('keep_dead', 0, 'keep dead torrents after they expire (so they still show up on your /scrape and web page)'),
    ('scrape_allowed', 'full', 'scrape access allowed (can be none, specific or full)'),
    ('dedicated_seed_id', '', 'allows tracker to monitor dedicated seed(s) and flag torrents as seeded'),
  ]
95
 
96
def statefiletemplate(x):
    """Validate the structure of a decoded tracker state file.

    Raises ValueError if *x* is not a dict, or if any of the known
    sections ('peers', 'completed', 'allowed', 'allowed_dir_files')
    has the wrong shape; unrecognized top-level keys are ignored.
    """
    if type(x) != DictType:
        raise ValueError
    for cname, cinfo in x.items():
        if cname == 'peers':
            for y in cinfo.values():      # The 'peers' key is a dictionary of SHA hashes (torrent ids)
                 if type(y) != DictType:   # ... for the active torrents, and each is a dictionary
                     raise ValueError
                 for id, info in y.items(): # ... of client ids interested in that torrent
                     if (len(id) != 20):
                         raise ValueError
                     if type(info) != DictType:  # ... each of which is also a dictionary
                         raise ValueError # ... which has an IP, a Port, and a Bytes Left count for that client for that torrent
                     if type(info.get('ip', '')) != StringType:
                         raise ValueError
                     port = info.get('port')
                     if type(port) not in (IntType,LongType) or port < 0:
                         raise ValueError
                     left = info.get('left')
                     if type(left) not in (IntType,LongType) or left < 0:
                         raise ValueError
        elif cname == 'completed':
            if (type(cinfo) != DictType): # The 'completed' key is a dictionary of SHA hashes (torrent ids)
                raise ValueError          # ... for keeping track of the total completions per torrent
            for y in cinfo.values():      # ... each torrent has an integer value
                if type(y) not in (IntType,LongType):
                    raise ValueError      # ... for the number of reported completions for that torrent
        elif cname == 'allowed':
            if (type(cinfo) != DictType): # a list of info_hashes and included data
                raise ValueError
            if x.has_key('allowed_dir_files'):
                adlist = [z[1] for z in x['allowed_dir_files'].values()]
                for y in cinfo.keys():        # and each should have a corresponding key here
                    if not y in adlist:
                        raise ValueError
        elif cname == 'allowed_dir_files':
            if (type(cinfo) != DictType): # a list of files, their attributes and info hashes
                raise ValueError
            dirkeys = {}
            for y in cinfo.values():      # each entry should have a corresponding info_hash
                if not y[1]:
                    continue
                if not x['allowed'].has_key(y[1]):
                    raise ValueError
                if dirkeys.has_key(y[1]): # and each should have a unique info_hash
                    raise ValueError
                dirkeys[y[1]] = 1
143
 
144
 
145
# Canned plain-text body for 404 responses when a torrent is unknown.
alas = 'your file may exist elsewhere in the universe\nbut alas, not here\n'

# Table of intranet/loopback address ranges; used to decide whether a
# client-supplied 'ip' parameter can be trusted over the socket address.
local_IPs = IP_List()
local_IPs.set_intranet_addresses()
149
 
150
 
151
def isotime(secs = None):
    """Format *secs* (seconds since the epoch; defaults to the current
    time) as a 'YYYY-MM-DD HH:MM UTC' timestamp string."""
    when = secs
    if when is None:
        when = time()
    return strftime('%Y-%m-%d %H:%M UTC', gmtime(when))
155
 
156
# Matches the trailing "for <client-ip>" clause some proxies append to the
# Via header.  Raw string so the \Z (end-of-string) anchor is passed to the
# regex engine literally instead of being treated as an (invalid) escape.
http_via_filter = re.compile(r' for ([0-9.]+)\Z')
157
 
158
def _get_forwarded_ip(headers):
    """Extract a candidate client IP from proxy headers, checked in
    priority order: x-forwarded-for, client-ip, via, from.  May return
    None (missing 'from' header) or an unvalidated string; callers are
    expected to validate via get_forwarded_ip()."""
    forwarded = headers.get('x-forwarded-for')
    if forwarded:
        # A two-element list means "client, proxy"; prefer the client
        # half when it is a valid, non-intranet address.
        try:
            first, second = forwarded.split(',')
        except:
            return forwarded
        if is_valid_ip(first) and not local_IPs.includes(first):
            return first
        return second
    client = headers.get('client-ip')
    if client:
        return client
    via = headers.get('via')
    if via:
        match = http_via_filter.search(via)
        try:
            return match.group(1)
        except:
            pass
    # Last resort: the (rarely set) 'from' header, or None.
    return headers.get('from')
183
 
184
def get_forwarded_ip(headers):
    """Return the validated proxied-for client IP from *headers*, or
    None when no header yields a valid, non-intranet address."""
    candidate = _get_forwarded_ip(headers)
    if is_valid_ip(candidate) and not local_IPs.includes(candidate):
        return candidate
    return None
189
 
190
def compact_peer_info(ip, port):
    """Encode a dotted-quad IP and a port number into the 6-byte
    'compact' peer format: four address bytes followed by the port in
    big-endian order.  Returns the empty string when *ip* is not a
    dotted-quad address (e.g. a hostname)."""
    try:
        octets = [chr(int(part)) for part in ip.split('.')]
        packed = ''.join(octets) + chr(port >> 8 & 0xFF) + chr(port & 0xFF)
        if len(packed) != 6:
            raise ValueError
    except:
        packed = ''  # not a valid IP, must be a domain name
    return packed
199
 
200
class Tracker:
201
    def __init__(self, config, rawserver):
        """Initialize the tracker from a parsed *config* dict, restoring
        any saved state from the dfile and scheduling periodic tasks on
        *rawserver* (state saving, downloader expiry, cache clock)."""
        self.config = config
        self.response_size = config['response_size']
        self.dfile = config['dfile']
        self.natcheck = config['nat_check']
        favicon = config['favicon']
        self.parse_dir_interval = config['parse_dir_interval']
        self.favicon = None
        if favicon:
            try:
                h = open(favicon,'r')
                self.favicon = h.read()
                h.close()
            except:
                print "**warning** specified favicon file -- %s -- does not exist." % favicon
        self.rawserver = rawserver
        self.cached = {}    # format: infohash: [[time1, l1, s1], [time2, l2, s2], [time3, l3, s3]]
        self.cached_t = {}  # format: infohash: [time, cache]
        self.times = {}
        self.state = {}
        self.seedcount = {}

        # Optional IP allow/deny lists, loaded from files on demand.
        self.allowed_IPs = None
        self.banned_IPs = None
        if config['allowed_ips'] or config['banned_ips']:
            self.allowed_ip_mtime = 0
            self.banned_ip_mtime = 0
            self.read_ip_lists()

        # only_local_override_ip == 2 means "auto": trust the client's
        # 'ip' parameter only when NAT checking is disabled.
        self.only_local_override_ip = config['only_local_override_ip']
        if self.only_local_override_ip == 2:
            self.only_local_override_ip = not config['nat_check']

        # Restore saved peer state from the dfile, if present and valid.
        if exists(self.dfile):
            try:
                h = open(self.dfile, 'rb')
                ds = h.read()
                h.close()
                tempstate = bdecode(ds)
                if not tempstate.has_key('peers'):
                    tempstate = {'peers': tempstate}
                statefiletemplate(tempstate)
                self.state = tempstate
            except:
                print '**warning** statefile '+self.dfile+' corrupt; resetting'
        self.downloads = self.state.setdefault('peers', {})
        self.completed = self.state.setdefault('completed', {})

        self.becache = {}   # format: infohash: [[l1, s1], [l2, s2], [l3, s3]]
        # Rebuild per-torrent seed counts and the becache from restored
        # peers, dropping peers now excluded by the IP allow/deny lists.
        for infohash, ds in self.downloads.items():
            self.seedcount[infohash] = 0
            for x,y in ds.items():
                ip = y['ip']
                if ( (self.allowed_IPs and not self.allowed_IPs.includes(ip))
                     or (self.banned_IPs and self.banned_IPs.includes(ip)) ):
                    del ds[x]
                    continue
                if not y['left']:
                    self.seedcount[infohash] += 1
                if y.get('nat',-1):
                    continue
                gip = y.get('given_ip')
                if is_valid_ip(gip) and (
                    not self.only_local_override_ip or local_IPs.includes(ip) ):
                    ip = gip
                self.natcheckOK(infohash,x,ip,y['port'],y['left'])

        # Reset announce timestamps so every restored peer re-announces.
        for x in self.downloads.keys():
            self.times[x] = {}
            for y in self.downloads[x].keys():
                self.times[x][y] = 0

        self.trackerid = createPeerID('-T-')
        seed(self.trackerid)

        self.reannounce_interval = config['reannounce_interval']
        self.save_dfile_interval = config['save_dfile_interval']
        self.show_names = config['show_names']
        rawserver.add_task(self.save_state, self.save_dfile_interval)
        self.prevtime = clock()
        self.timeout_downloaders_interval = config['timeout_downloaders_interval']
        rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval)
        self.logfile = None
        self.log = None
        # Redirect stdout into the log file unless logging to '-' (stdout).
        if (config['logfile']) and (config['logfile'] != '-'):
            try:
                self.logfile = config['logfile']
                self.log = open(self.logfile,'a')
                sys.stdout = self.log
                print "# Log Started: ", isotime()
            except:
                print "**warning** could not redirect stdout to log file: ", sys.exc_info()[0]

        # Reopen the log on SIGHUP so external log rotation works.
        if config['hupmonitor']:
            def huphandler(signum, frame, self = self):
                try:
                    self.log.close ()
                    self.log = open(self.logfile,'a')
                    sys.stdout = self.log
                    print "# Log reopened: ", isotime()
                except:
                    print "**warning** could not reopen logfile"

            signal.signal(signal.SIGHUP, huphandler)

        self.allow_get = config['allow_get']

        self.t2tlist = T2TList(config['multitracker_enabled'], self.trackerid,
                               config['multitracker_reannounce_interval'],
                               config['multitracker_maxpeers'], config['http_timeout'],
                               self.rawserver)

        # Three mutually-exclusive access-control modes: an explicit hash
        # list, a directory of .torrent files, or open tracking.
        if config['allowed_list']:
            if config['allowed_dir']:
                print '**warning** allowed_dir and allowed_list options cannot be used together'
                print '**warning** disregarding allowed_dir'
                config['allowed_dir'] = ''
            self.allowed = self.state.setdefault('allowed_list',{})
            self.allowed_list_mtime = 0
            self.parse_allowed()
            self.remove_from_state('allowed','allowed_dir_files')
            if config['multitracker_allowed'] == 'autodetect':
                config['multitracker_allowed'] = 'none'
            config['allowed_controls'] = 0

        elif config['allowed_dir']:
            self.allowed = self.state.setdefault('allowed',{})
            self.allowed_dir_files = self.state.setdefault('allowed_dir_files',{})
            self.allowed_dir_blocked = {}
            self.parse_allowed()
            self.remove_from_state('allowed_list')

        else:
            self.allowed = None
            self.remove_from_state('allowed','allowed_dir_files', 'allowed_list')
            if config['multitracker_allowed'] == 'autodetect':
                config['multitracker_allowed'] = 'none'
            config['allowed_controls'] = 0

        # Detect urllib implementations that fail to map '+' to space.
        self.uq_broken = unquote('+') != ' '
        self.keep_dead = config['keep_dead']
        self.Filter = Filter(rawserver.add_task)

        # aggregator config: '0' = off, '1' = on without password,
        # any other value doubles as the required password.
        aggregator = config['aggregator']
        if aggregator == '0':
            self.is_aggregator = False
            self.aggregator_key = None
        else:
            self.is_aggregator = True
            if aggregator == '1':
                self.aggregator_key = None
            else:
                self.aggregator_key = aggregator
            self.natcheck = False

        # aggregate_forward format: '<url>' or '<url>,<password>'.
        send = config['aggregate_forward']
        if not send:
            self.aggregate_forward = None
        else:
            try:
                self.aggregate_forward, self.aggregate_password = send.split(',')
            except:
                self.aggregate_forward = send
                self.aggregate_password = None

        self.dedicated_seed_id = config['dedicated_seed_id']
        self.is_seeded = {}

        # Start the coarse once-per-second cache clock.
        self.cachetime = 0
        self.cachetimeupdate()
371
 
372
    def cachetimeupdate(self):
        # Advance the coarse one-second cache clock and reschedule
        # itself; cheaper than reading the real clock per request.
        self.cachetime += 1     # raw clock, but more efficient for cache
        self.rawserver.add_task(self.cachetimeupdate,1)
375
 
376
    def aggregate_senddata(self, query):
        """Forward an announce *query* string to the configured
        aggregator URL on a background (non-daemon) thread so that the
        server loop is never blocked by the outgoing request."""
        target_url = '%s?%s' % (self.aggregate_forward, query)
        if self.aggregate_password is not None:
            target_url = target_url + '&password=' + self.aggregate_password
        worker = Thread(target = self._aggregate_senddata, args = [target_url])
        worker.setDaemon(False)
        worker.start()
383
 
384
    def _aggregate_senddata(self, url):
        """Fire-and-forget HTTP GET: fetch *url*, discard the response
        body, and swallow every error (best-effort forwarding only)."""
        try:
            handle = urlopen(url)
            handle.read()
            handle.close()
        except:
            pass
391
 
392
 
393
    def get_infopage(self):
        """Render the tracker's HTML status page.

        Returns an HTTP response tuple (code, message, headers, body).
        Honors the show_infopage and infopage_redirect config options;
        any unexpected error is caught and reported as a 500.
        """
        try:
            if not self.config['show_infopage']:
                return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
            red = self.config['infopage_redirect']
            if red:
                return (302, 'Found', {'Content-Type': 'text/html', 'Location': red},
                        '<A HREF="'+red+'">Click Here</A>')

            s = StringIO()
            s.write('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n' \
                '<html><head><title>BitTorrent download info</title>\n')
            if self.favicon is not None:
                s.write('<link rel="shortcut icon" href="/favicon.ico">\n')
            s.write('</head>\n<body>\n' \
                '<h3>BitTorrent download info</h3>\n'\
                '<ul>\n'
                '<li><strong>tracker version:</strong> %s</li>\n' \
                '<li><strong>server time:</strong> %s</li>\n' \
                '</ul>\n' % (version, isotime()))
            # Decide which torrents to list and whether names are available.
            if self.config['allowed_dir']:
                if self.show_names:
                    names = [ (self.allowed[hash]['name'],hash)
                              for hash in self.allowed.keys() ]
                else:
                    names = [ (None,hash)
                              for hash in self.allowed.keys() ]
            else:
                names = [ (None,hash) for hash in self.downloads.keys() ]
            if not names:
                s.write('<p>not tracking any files yet...</p>\n')
            else:
                names.sort()
                # Running totals across all listed torrents.
                tn = 0  # Total completed downloads
                tc = 0  # Total connected seeds
                td = 0  # Total connected leechers
                tt = 0  # Total transferred
                ts = 0  # Total size
                nf = 0  # Number of files displayed
                if self.config['allowed_dir'] and self.show_names:
                    s.write('<table summary="files" border="1">\n' \
                        '<tr><th>info hash</th><th>torrent name</th><th align="right">size</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th><th align="right">transferred</th></tr>\n')
                else:
                    s.write('<table summary="files">\n' \
                        '<tr><th>info hash</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th></tr>\n')
                for name,hash in names:
                    l = self.downloads[hash]
                    n = self.completed.get(hash, 0)
                    tn = tn + n
                    c = self.seedcount[hash]
                    tc = tc + c
                    d = len(l) - c
                    td = td + d
                    if self.config['allowed_dir'] and self.show_names:
                        if self.allowed.has_key(hash):
                            nf = nf + 1
                            sz = self.allowed[hash]['length']  # size
                            ts = ts + sz
                            szt = sz * n   # Transferred for this torrent
                            tt = tt + szt
                            # Link to the /file download URL when allow_get is on.
                            if self.allow_get == 1:
                                linkname = '<a href="/file?info_hash=' + quote(hash) + '">' + name + '</a>'
                            else:
                                linkname = name
                            s.write('<tr><td><code>%s</code></td><td>%s</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n' \
                                % (b2a_hex(hash), linkname, size_format(sz), c, d, n, size_format(szt)))
                    else:
                        s.write('<tr><td><code>%s</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td></tr>\n' \
                            % (b2a_hex(hash), c, d, n))
                # All-time completion total, including expired torrents.
                ttn = 0
                for i in self.completed.values():
                    ttn = ttn + i
                if self.config['allowed_dir'] and self.show_names:
                    s.write('<tr><td align="right" colspan="2">%i files</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td><td align="right">%s</td></tr>\n'
                            % (nf, size_format(ts), tc, td, tn, ttn, size_format(tt)))
                else:
                    s.write('<tr><td align="right">%i files</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td></tr>\n'
                            % (nf, tc, td, tn, ttn))
                s.write('</table>\n' \
                    '<ul>\n' \
                    '<li><em>info hash:</em> SHA1 hash of the "info" section of the metainfo (*.torrent)</li>\n' \
                    '<li><em>complete:</em> number of connected clients with the complete file</li>\n' \
                    '<li><em>downloading:</em> number of connected clients still downloading</li>\n' \
                    '<li><em>downloaded:</em> reported complete downloads (total: current/all)</li>\n' \
                    '<li><em>transferred:</em> torrent size * total downloaded (does not include partial transfers)</li>\n' \
                    '</ul>\n')

            s.write('</body>\n' \
                '</html>\n')
            return (200, 'OK', {'Content-Type': 'text/html; charset=iso-8859-1'}, s.getvalue())
        except:
            print_exc()
            return (500, 'Internal Server Error', {'Content-Type': 'text/html; charset=iso-8859-1'}, 'Server Error')
486
 
487
 
488
    def scrapedata(self, hash, return_name = True):
        """Return the scrape dictionary for one torrent: connected seed
        count ('complete'), connected leecher count ('incomplete') and
        total reported completions ('downloaded'); includes the torrent
        'name' when allowed_dir is configured, names are shown, and
        *return_name* is true."""
        seeds = self.seedcount[hash]
        leechers = len(self.downloads[hash]) - seeds
        stats = {'complete': seeds,
                 'incomplete': leechers,
                 'downloaded': self.completed.get(hash, 0)}
        if return_name and self.show_names and self.config['allowed_dir']:
            stats['name'] = self.allowed[hash]['name']
        return stats
497
 
498
    def get_scrape(self, paramslist):
        """Build the bencoded scrape response for the given request
        parameters.  With 'info_hash' parameters present, scrapes just
        those torrents (requires scrape_allowed in specific/full);
        otherwise scrapes everything (requires scrape_allowed == full).
        Returns an HTTP response tuple (code, message, headers, body)."""
        results = {}
        mode = self.config['scrape_allowed']
        # Scrape against the allowed set when one is configured,
        # otherwise against every tracked torrent.
        if self.allowed is not None:
            known = self.allowed
        else:
            known = self.downloads
        if paramslist.has_key('info_hash'):
            if mode not in ['specific', 'full']:
                return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                    bencode({'failure reason':
                    'specific scrape function is not available with this tracker.'}))
            for hash in paramslist['info_hash']:
                if known.has_key(hash):
                    results[hash] = self.scrapedata(hash)
        else:
            if mode != 'full':
                return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                    bencode({'failure reason':
                    'full scrape function is not available with this tracker.'}))
            for hash in known.keys():
                results[hash] = self.scrapedata(hash)

        return (200, 'OK', {'Content-Type': 'text/plain'}, bencode({'files': results}))
525
 
526
 
527
    def get_file(self, hash):
        """Serve the .torrent file for the given info *hash*.

        Returns an HTTP response tuple (code, message, headers, body).
        Only available when the allow_get option is enabled and the hash
        is present in the allowed set.
        """
        if not self.allow_get:
            return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                'get function is not available with this tracker.')
        if not self.allowed.has_key(hash):
            return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
        fname = self.allowed[hash]['file']
        fpath = self.allowed[hash]['path']
        # Read and close explicitly instead of open(...).read(), which
        # left the file handle to be reclaimed by garbage collection.
        h = open(fpath, 'rb')
        try:
            data = h.read()
        finally:
            h.close()
        return (200, 'OK', {'Content-Type': 'application/x-bittorrent',
            'Content-Disposition': 'attachment; filename=' + fname},
            data)
538
 
539
 
540
    def check_allowed(self, infohash, paramslist):
        """Check whether an announce for *infohash* is permitted.

        Returns None when the request is allowed, otherwise an HTTP
        response tuple carrying a bencoded failure reason.  Checks, in
        order: the aggregator password, the allowed-torrent set (with
        optional per-torrent failure reasons), and multitracker rules.
        """
        def deny(reason):
            # All rejections share the same 200-with-failure-reason shape.
            return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                bencode({'failure reason': reason}))

        if self.aggregator_key is not None:
            authorized = ( paramslist.has_key('password')
                           and paramslist['password'][0] == self.aggregator_key )
            if not authorized:
                return deny('Requested download is not authorized for use with this tracker.')

        if self.allowed is not None:
            if not self.allowed.has_key(infohash):
                return deny('Requested download is not authorized for use with this tracker.')
            if self.config['allowed_controls']:
                if self.allowed[infohash].has_key('failure reason'):
                    return deny(self.allowed[infohash]['failure reason'])

        if paramslist.has_key('tracker'):
            if ( self.config['multitracker_allowed'] == 'none' or       # turned off
                          paramslist['peer_id'][0] == self.trackerid ): # oops! contacted myself
                return deny('disallowed')

            if ( self.config['multitracker_allowed'] == 'autodetect'
                        and not self.allowed[infohash].has_key('announce-list') ):
                return deny('Requested download is not authorized for multitracker use.')

        return None
571
 
572
 
573
    def add_data(self, infohash, event, ip, paramslist):
574
        peers = self.downloads.setdefault(infohash, {})
575
        ts = self.times.setdefault(infohash, {})
576
        self.completed.setdefault(infohash, 0)
577
        self.seedcount.setdefault(infohash, 0)
578
 
579
        def params(key, default = None, l = paramslist):
580
            if l.has_key(key):
581
                return l[key][0]
582
            return default
583
 
584
        myid = params('peer_id','')
585
        if len(myid) != 20:
586
            raise ValueError, 'id not of length 20'
587
        if event not in ['started', 'completed', 'stopped', 'snooped', None]:
588
            raise ValueError, 'invalid event'
589
        port = long(params('port',''))
590
        if port < 0 or port > 65535:
591
            raise ValueError, 'invalid port'
592
        left = long(params('left',''))
593
        if left < 0:
594
            raise ValueError, 'invalid amount left'
595
        uploaded = long(params('uploaded',''))
596
        downloaded = long(params('downloaded',''))
597
 
598
        peer = peers.get(myid)
599
        islocal = local_IPs.includes(ip)
600
        mykey = params('key')
601
        if peer:
602
            auth = peer.get('key',-1) == mykey or peer.get('ip') == ip
603
 
604
        gip = params('ip')
605
        if is_valid_ip(gip) and (islocal or not self.only_local_override_ip):
606
            ip1 = gip
607
        else:
608
            ip1 = ip
609
 
610
        if params('numwant') is not None:
611
            rsize = min(int(params('numwant')),self.response_size)
612
        else:
613
            rsize = self.response_size
614
 
615
        if event == 'stopped':
616
            if peer:
617
                if auth:
618
                    self.delete_peer(infohash,myid)
619
 
620
        elif not peer:
621
            ts[myid] = clock()
622
            peer = {'ip': ip, 'port': port, 'left': left}
623
            if mykey:
624
                peer['key'] = mykey
625
            if gip:
626
                peer['given ip'] = gip
627
            if port:
628
                if not self.natcheck or islocal:
629
                    peer['nat'] = 0
630
                    self.natcheckOK(infohash,myid,ip1,port,left)
631
                else:
632
                    NatCheck(self.connectback_result,infohash,myid,ip1,port,self.rawserver)
633
            else:
634
                peer['nat'] = 2**30
635
            if event == 'completed':
636
                self.completed[infohash] += 1
637
            if not left:
638
                self.seedcount[infohash] += 1
639
 
640
            peers[myid] = peer
641
 
642
        else:
643
            if not auth:
644
                return rsize    # return w/o changing stats
645
 
646
            ts[myid] = clock()
647
            if not left and peer['left']:
648
                self.completed[infohash] += 1
649
                self.seedcount[infohash] += 1
650
                if not peer.get('nat', -1):
651
                    for bc in self.becache[infohash]:
652
                        bc[1][myid] = bc[0][myid]
653
                        del bc[0][myid]
654
            elif left and not peer['left']:
655
                self.completed[infohash] -= 1
656
                self.seedcount[infohash] -= 1
657
                if not peer.get('nat', -1):
658
                    for bc in self.becache[infohash]:
659
                        bc[0][myid] = bc[1][myid]
660
                        del bc[1][myid]
661
            peer['left'] = left
662
 
663
            if port:
664
                recheck = False
665
                if ip != peer['ip']:
666
                    peer['ip'] = ip
667
                    recheck = True
668
                if gip != peer.get('given ip'):
669
                    if gip:
670
                        peer['given ip'] = gip
671
                    elif peer.has_key('given ip'):
672
                        del peer['given ip']
673
                    recheck = True
674
 
675
                natted = peer.get('nat', -1)
676
                if recheck:
677
                    if natted == 0:
678
                        l = self.becache[infohash]
679
                        y = not peer['left']
680
                        for x in l:
681
                            del x[y][myid]
682
                    if natted >= 0:
683
                        del peer['nat'] # restart NAT testing
684
                if natted and natted < self.natcheck:
685
                    recheck = True
686
 
687
                if recheck:
688
                    if not self.natcheck or islocal:
689
                        peer['nat'] = 0
690
                        self.natcheckOK(infohash,myid,ip1,port,left)
691
                    else:
692
                        NatCheck(self.connectback_result,infohash,myid,ip1,port,self.rawserver)
693
 
694
        return rsize
695
 
696
 
697
    def peerlist(self, infohash, stopped, tracker, is_seed, return_type, rsize):
        # Build the bencodable announce-response dict for one torrent:
        # seed/leech counts plus up to rsize peers drawn from the becache.
        # return_type: 0 = full peer dicts with peer ids, 1 = no_peer_id
        # dicts, 2 = compact binary strings (becache is filled by natcheckOK).
        data = {}    # return data
        seeds = self.seedcount[infohash]
        data['complete'] = seeds
        data['incomplete'] = len(self.downloads[infohash]) - seeds

        if ( self.config['allowed_controls']
                and self.allowed[infohash].has_key('warning message') ):
            data['warning message'] = self.allowed[infohash]['warning message']

        if tracker:
            # multitracker (tracker-to-tracker) request: serve only from the
            # dedicated cached_t cache of full-info peer entries
            data['interval'] = self.config['multitracker_reannounce_interval']
            if not rsize:
                return data
            cache = self.cached_t.setdefault(infohash, None)
            if ( not cache or len(cache[1]) < rsize
                 or cache[0] + self.config['min_time_between_cache_refreshes'] < clock() ):
                # stale or nearly exhausted: rebuild from leech + seed caches
                bc = self.becache.setdefault(infohash,[[{}, {}], [{}, {}], [{}, {}]])
                cache = [ clock(), bc[0][0].values() + bc[0][1].values() ]
                self.cached_t[infohash] = cache
                shuffle(cache[1])
                cache = cache[1]

            # consume entries so successive requests get different peers
            data['peers'] = cache[-rsize:]
            del cache[-rsize:]
            return data

        data['interval'] = self.reannounce_interval
        if stopped or not rsize:     # save some bandwidth
            data['peers'] = []
            return data

        # becache[infohash][return_type] is a [leech dict, seed dict] pair
        bc = self.becache.setdefault(infohash,[[{}, {}], [{}, {}], [{}, {}]])
        len_l = len(bc[0][0])
        len_s = len(bc[0][1])
        if not (len_l+len_s):   # caches are empty!
            data['peers'] = []
            return data
        # number of leeches to hand out, proportional to the leech/seed mix
        l_get_size = int(float(rsize)*(len_l)/(len_l+len_s))
        cache = self.cached.setdefault(infohash,[None,None,None])[return_type]
        if cache and ( not cache[1]
                       or (is_seed and len(cache[1]) < rsize)
                       or len(cache[1]) < l_get_size
                       or cache[0]+self.config['min_time_between_cache_refreshes'] < self.cachetime ):
            cache = None    # stale or too small -- force a rebuild below
        if not cache:
            peers = self.downloads[infohash]
            # vv collects peers harvested from other trackers, one list per
            # return_type encoding
            vv = [[],[],[]]
            for key, ip, port in self.t2tlist.harvest(infohash):   # empty if disabled
                if not peers.has_key(key):
                    vv[0].append({'ip': ip, 'port': port, 'peer id': key})
                    vv[1].append({'ip': ip, 'port': port})
                    vv[2].append(compact_peer_info(ip, port))
            # cache layout: [timestamp, shuffled leeches (+t2t), shuffled seeds]
            cache = [ self.cachetime,
                      bc[return_type][0].values()+vv[return_type],
                      bc[return_type][1].values() ]
            shuffle(cache[1])
            shuffle(cache[2])
            self.cached[infohash][return_type] = cache
            # share the harvested t2t peers with the other return-type caches
            for rr in xrange(len(self.cached[infohash])):
                if rr != return_type:
                    try:
                        self.cached[infohash][rr][1].extend(vv[rr])
                    except:
                        pass
        if len(cache[1]) < l_get_size:
            # not enough leeches cached: hand out everything that is left
            peerdata = cache[1]
            if not is_seed:
                peerdata.extend(cache[2])
            cache[1] = []
            cache[2] = []
        else:
            # seeds go only to non-seed requesters; top up with leeches
            if not is_seed:
                peerdata = cache[2][l_get_size-rsize:]
                del cache[2][l_get_size-rsize:]
                rsize -= len(peerdata)
            else:
                peerdata = []
            if rsize:
                peerdata.extend(cache[1][-rsize:])
                del cache[1][-rsize:]
        if return_type == 2:
            peerdata = ''.join(peerdata)    # compact: concatenated 6-byte entries
        data['peers'] = peerdata
        return data
782
 
783
 
784
    def get(self, connection, path, headers):
        # HTTPHandler callback: serve one tracker HTTP request.  Returns a
        # (status_code, status_text, response_headers, body) tuple.
        real_ip = connection.get_ip()
        ip = real_ip
        # normalize to IPv4 where possible; compact responses require IPv4
        if is_ipv4(ip):
            ipv4 = True
        else:
            try:
                ip = ipv6_to_ipv4(ip)
                ipv4 = True
            except ValueError:
                ipv4 = False

        # enforce the IP allow/ban lists before doing any other work
        if ( (self.allowed_IPs and not self.allowed_IPs.includes(ip))
             or (self.banned_IPs and self.banned_IPs.includes(ip)) ):
            return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                bencode({'failure reason':
                'your IP is not allowed on this tracker'}))

        # honor a proxy-forwarded client IP unless local-override-only is set
        nip = get_forwarded_ip(headers)
        if nip and not self.only_local_override_ip:
            ip = nip
            try:
                ip = to_ipv4(ip)
                ipv4 = True
            except ValueError:
                ipv4 = False

        # query parameters, each key mapped to a list of values;
        # params() returns the first value for a key (or the default)
        paramslist = {}
        def params(key, default = None, l = paramslist):
            if l.has_key(key):
                return l[key][0]
            return default

        try:
            (scheme, netloc, path, pars, query, fragment) = urlparse(path)
            if self.uq_broken == 1:
                # work around clients whose quoting mishandles '+'
                path = path.replace('+',' ')
                query = query.replace('+',' ')
            path = unquote(path)[1:]
            for s in query.split('&'):
                if s:
                    i = s.index('=')    # ValueError here -> 400 below
                    kw = unquote(s[:i])
                    paramslist.setdefault(kw, [])
                    paramslist[kw] += [unquote(s[i+1:])]

            # human-facing pages first
            if path == '' or path == 'index.html':
                return self.get_infopage()
            if (path == 'file'):
                return self.get_file(params('info_hash'))
            if path == 'favicon.ico' and self.favicon is not None:
                return (200, 'OK', {'Content-Type' : 'image/x-icon'}, self.favicon)

            # automated access from here on

            if path in ('scrape', 'scrape.php', 'tracker.php/scrape'):
                return self.get_scrape(paramslist)

            if not path in ('announce', 'announce.php', 'tracker.php/announce'):
                return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)

            # main tracker function

            filtered = self.Filter.check(real_ip, paramslist, headers)
            if filtered:
                return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                    bencode({'failure reason': filtered}))

            infohash = params('info_hash')
            if not infohash:
                raise ValueError, 'no info hash'

            notallowed = self.check_allowed(infohash, paramslist)
            if notallowed:
                return notallowed

            event = params('event')

            # record/refresh this peer; rsize = number of peers to return
            rsize = self.add_data(infohash, event, ip, paramslist)

        except ValueError, e:
            # any malformed announce parameter ends up here
            return (400, 'Bad Request', {'Content-Type': 'text/plain'}, 
                'you sent me garbage - ' + str(e))

        # forward announces to an aggregator unless they came from a tracker
        if self.aggregate_forward and not paramslist.has_key('tracker'):
            self.aggregate_senddata(query)

        if self.is_aggregator:      # don't return peer data here
            return (200, 'OK', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                    bencode({'response': 'OK'}))

        # response encoding: 2 = compact (IPv4 only), 1 = no peer ids, 0 = full
        if params('compact') and ipv4:
            return_type = 2
        elif params('no_peer_id'):
            return_type = 1
        else:
            return_type = 0

        data = self.peerlist(infohash, event=='stopped',
                             params('tracker'), not params('left'),
                             return_type, rsize)

        if paramslist.has_key('scrape'):    # deprecated
            data['scrape'] = self.scrapedata(infohash, False)

        if self.dedicated_seed_id:
            # NOTE(review): params() returns strings, so params('left') == 0
            # looks like it can never be true ('0' != 0) -- confirm intent
            if params('seed_id') == self.dedicated_seed_id and params('left') == 0:
                self.is_seeded[infohash] = True
            if params('check_seeded') and self.is_seeded.get(infohash):
                data['seeded'] = 1

        return (200, 'OK', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, bencode(data))
896
 
897
 
898
    def natcheckOK(self, infohash, peerid, ip, port, not_seed):
        # Peer answered its NAT check: publish it in all three response
        # caches.  Index [not not_seed] selects the leech (False/0) or
        # seed (True/1) dict within each cache.
        cache = self.becache.setdefault(
            infohash, [[{}, {}], [{}, {}], [{}, {}]])
        seedindex = not not_seed
        full = {'ip': ip, 'port': port, 'peer id': peerid}
        short = {'ip': ip, 'port': port}
        cache[0][seedindex][peerid] = Bencached(bencode(full))
        cache[1][seedindex][peerid] = Bencached(bencode(short))
        cache[2][seedindex][peerid] = compact_peer_info(ip, port)
904
 
905
 
906
    def natchecklog(self, peerid, ip, port, result):
907
        year, month, day, hour, minute, second, a, b, c = localtime(time())
908
        print '%s - %s [%02d/%3s/%04d:%02d:%02d:%02d] "!natcheck-%s:%i" %i 0 - -' % (
909
            ip, quote(peerid), day, months[month], year, hour, minute, second,
910
            ip, port, result)
911
 
912
    def connectback_result(self, result, downloadid, peerid, ip, port):
        # Callback from NatCheck: *result* is true when the connect-back to
        # ip:port succeeded.  Updates the peer's 'nat' failure counter and,
        # on success, publishes the peer via natcheckOK.
        record = self.downloads.get(downloadid, {}).get(peerid)
        valid = ( record is not None
                  and (record['ip'] == ip or record.get('given ip') == ip)
                  and record['port'] == port )
        if not valid:
            # stale or spoofed check -- log as a 404 and ignore it
            if self.config['log_nat_checks']:
                self.natchecklog(peerid, ip, port, 404)
            return
        if self.config['log_nat_checks']:
            self.natchecklog(peerid, ip, port, result and 200 or 503)
        if not record.has_key('nat'):
            # first result for this peer: nat == 0 means reachable
            record['nat'] = int(not result)
            if result:
                self.natcheckOK(downloadid,peerid,ip,port,record['left'])
        elif result and record['nat']:
            # a previously failing peer has become reachable
            record['nat'] = 0
            self.natcheckOK(downloadid,peerid,ip,port,record['left'])
        elif not result:
            record['nat'] += 1
935
 
936
 
937
    def remove_from_state(self, *keys):
        # Drop the named entries from the persisted state dict; missing
        # keys are silently ignored.
        for key in keys:
            try:
                del self.state[key]
            except:
                pass
943
 
944
    def save_state(self):
        # Periodic task: persist the tracker state dict to dfile, bencoded.
        # Reschedules itself every save_dfile_interval seconds.
        self.rawserver.add_task(self.save_state, self.save_dfile_interval)
        h = open(self.dfile, 'wb')
        try:
            h.write(bencode(self.state))
        finally:
            # original leaked the handle if bencode/write raised
            h.close()
949
 
950
 
951
    def parse_allowed(self):
952
        self.rawserver.add_task(self.parse_allowed, self.parse_dir_interval)
953
 
954
        if self.config['allowed_dir']:
955
            r = parsedir( self.config['allowed_dir'], self.allowed,
956
                          self.allowed_dir_files, self.allowed_dir_blocked,
957
                          [".torrent"] )
958
            ( self.allowed, self.allowed_dir_files, self.allowed_dir_blocked,
959
                added, garbage2 ) = r
960
 
961
            self.state['allowed'] = self.allowed
962
            self.state['allowed_dir_files'] = self.allowed_dir_files
963
 
964
            self.t2tlist.parse(self.allowed)
965
 
966
        else:
967
            f = self.config['allowed_list']
968
            if self.allowed_list_mtime == os.path.getmtime(f):
969
                return
970
            try:
971
                r = parsetorrentlist(f, self.allowed)
972
                (self.allowed, added, garbage2) = r
973
                self.state['allowed_list'] = self.allowed
974
            except (IOError, OSError):
975
                print '**warning** unable to read allowed torrent list'
976
                return
977
            self.allowed_list_mtime = os.path.getmtime(f)
978
 
979
        for infohash in added.keys():
980
            self.downloads.setdefault(infohash, {})
981
            self.completed.setdefault(infohash, 0)
982
            self.seedcount.setdefault(infohash, 0)
983
 
984
 
985
    def read_ip_lists(self):
986
        self.rawserver.add_task(self.read_ip_lists,self.parse_dir_interval)
987
 
988
        f = self.config['allowed_ips']
989
        if f and self.allowed_ip_mtime != os.path.getmtime(f):
990
            self.allowed_IPs = IP_List()
991
            try:
992
                self.allowed_IPs.read_fieldlist(f)
993
                self.allowed_ip_mtime = os.path.getmtime(f)
994
            except (IOError, OSError):
995
                print '**warning** unable to read allowed_IP list'
996
 
997
        f = self.config['banned_ips']
998
        if f and self.banned_ip_mtime != os.path.getmtime(f):
999
            self.banned_IPs = IP_Range_List()
1000
            try:
1001
                self.banned_IPs.read_rangelist(f)
1002
                self.banned_ip_mtime = os.path.getmtime(f)
1003
            except (IOError, OSError):
1004
                print '**warning** unable to read banned_IP list'
1005
 
1006
 
1007
    def delete_peer(self, infohash, peerid):
        # Remove one peer from all per-torrent tracking structures
        # (downloads, seedcount, becache, times).
        torrent_peers = self.downloads[infohash]
        peer = torrent_peers[peerid]
        seeding = not peer['left']
        if seeding:
            self.seedcount[infohash] -= 1
        if not peer.get('nat',-1):
            # peer passed its NAT check, so it is in the response caches too
            for pair in self.becache[infohash]:
                del pair[seeding][peerid]
        del self.times[infohash][peerid]
        del torrent_peers[peerid]
1019
 
1020
    def expire_downloaders(self):
        # Periodic task: drop peers not heard from since the previous pass,
        # then (unless keep_dead) forget torrents that now have no peers.
        cutoff = self.prevtime
        for infohash in self.times.keys():
            for peerid, lastseen in self.times[infohash].items():
                if lastseen < cutoff:
                    self.delete_peer(infohash, peerid)
        self.prevtime = clock()
        if self.keep_dead != 1:
            for infohash, peerdict in self.downloads.items():
                if not peerdict and (
                        self.allowed is None or not self.allowed.has_key(infohash) ):
                    del self.times[infohash]
                    del self.downloads[infohash]
                    del self.seedcount[infohash]
        self.rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval)
1034
 
1035
 
1036
def track(args):
1037
    if len(args) == 0:
1038
        print formatDefinitions(defaults, 80)
1039
        return
1040
    try:
1041
        config, files = parseargs(args, defaults, 0, 0)
1042
    except ValueError, e:
1043
        print 'error: ' + str(e)
1044
        print 'run with no arguments for parameter explanations'
1045
        return
1046
    r = RawServer(Event(), config['timeout_check_interval'],
1047
                  config['socket_timeout'], ipv6_enable = config['ipv6_enabled'])
1048
    t = Tracker(config, r)
1049
    r.bind(config['port'], config['bind'],
1050
           reuse = True, ipv6_socket_style = config['ipv6_binds_v4'])
1051
    r.listen_forever(HTTPHandler(t.get, config['min_time_between_log_flushes']))
1052
    t.save_state()
1053
    print '# Shutting down: ' + isotime()
1054
 
1055
def size_format(s):
    # Render a byte count as a human-readable string with binary units.
    # Sub-GiB values are truncated to whole units; GiB/TiB values keep
    # two decimal places (truncated, not rounded).
    if s < 1024:
        return str(s) + 'B'
    if s < 1048576:
        return str(s // 1024) + 'KiB'
    if s < 1073741824:
        return str(s // 1048576) + 'MiB'
    if s < 1099511627776:
        return str(int((s / 1073741824.0) * 100.0) / 100.0) + 'GiB'
    return str(int((s / 1099511627776.0) * 100.0) / 100.0) + 'TiB'
1067