# Written by Bram Cohen
# see LICENSE.txt for license information

# Standard library
import os
import re
import signal
import sys
from binascii import b2a_hex, a2b_hex, a2b_base64
from cStringIO import StringIO
from os import rename, getpid
from os.path import exists, isfile
from random import shuffle, seed, randrange
from string import lower
from threading import Event, Thread
from time import time, gmtime, strftime, localtime
from traceback import print_exc
from types import StringType, IntType, LongType, ListType, DictType
from urlparse import urlparse

# BitTornado / project modules
import BitTornado.__init__
from BitTornado.__init__ import version, createPeerID
from BitTornado.BTcrypto import CRYPTO_OK
from BitTornado.HTTPHandler import HTTPHandler, months, weekdays
from BitTornado.RawServer import RawServer, autodetect_ipv6, autodetect_socket_style
from BitTornado.bencode import bencode, bdecode, Bencached
from BitTornado.clock import clock
from BitTornado.iprangeparse import IP_List as IP_Range_List
from BitTornado.parseargs import parseargs, formatDefinitions
from BitTornado.parsedir import parsedir
from BitTornado.subnetparse import IP_List, ipv6_to_ipv4, to_ipv4, is_valid_ip, is_ipv4
from BitTornado.torrentlistparse import parsetorrentlist
from BitTornado.zurllib import urlopen, quote, unquote
from Filter import Filter
from NatCheck import NatCheck, CHECK_PEER_ID_ENCRYPTED
from T2T import T2TList
40
bool = lambda x: not not x
43
('port', 80, "Port to listen on."),
44
('dfile', None, 'file to store recent downloader info in'),
45
('bind', '', 'comma-separated list of ips/hostnames to bind to locally'),
46
# ('ipv6_enabled', autodetect_ipv6(),
48
'allow the client to connect to peers via IPv6'),
49
('ipv6_binds_v4', autodetect_socket_style(),
50
'set if an IPv6 server socket will also field IPv4 connections'),
51
('socket_timeout', 15, 'timeout for closing connections'),
52
('save_dfile_interval', 5 * 60, 'seconds between saving dfile'),
53
('timeout_downloaders_interval', 45 * 60, 'seconds between expiring downloaders'),
54
('reannounce_interval', 30 * 60, 'seconds downloaders should wait between reannouncements'),
55
('response_size', 50, 'number of peers to send in an info message'),
56
('timeout_check_interval', 5,
57
'time to wait between checking if any connections have timed out'),
59
"how many times to check if a downloader is behind a NAT (0 = don't check)"),
61
"whether to add entries to the log for nat-check results"),
62
('min_time_between_log_flushes', 3.0,
63
'minimum time it must have been since the last flush to do another one'),
64
('min_time_between_cache_refreshes', 600.0,
65
'minimum time in seconds before a cache is considered stale and is flushed'),
66
('allowed_dir', '', 'only allow downloads for .torrents in this dir'),
67
('allowed_list', '', 'only allow downloads for hashes in this list (hex format, one per line)'),
68
('allowed_controls', 0, 'allow special keys in torrents in the allowed_dir to affect tracker access'),
69
('multitracker_enabled', 0, 'whether to enable multitracker operation'),
70
('multitracker_allowed', 'autodetect', 'whether to allow incoming tracker announces (can be none, autodetect or all)'),
71
('multitracker_reannounce_interval', 2 * 60, 'seconds between outgoing tracker announces'),
72
('multitracker_maxpeers', 20, 'number of peers to get in a tracker announce'),
73
('aggregate_forward', '', 'format: <url>[,<password>] - if set, forwards all non-multitracker to this url with this optional password'),
74
('aggregator', '0', 'whether to act as a data aggregator rather than a tracker. If enabled, may be 1, or <password>; ' +
75
'if password is set, then an incoming password is required for access'),
76
('hupmonitor', 0, 'whether to reopen the log file upon receipt of HUP signal'),
78
'number of seconds to wait before assuming that an http connection has timed out'),
79
('parse_dir_interval', 60, 'seconds between reloading of allowed_dir or allowed_file ' +
80
'and allowed_ips and banned_ips lists'),
81
('show_infopage', 1, "whether to display an info page when the tracker's root dir is loaded"),
82
('infopage_redirect', '', 'a URL to redirect the info page to'),
83
('show_names', 1, 'whether to display names from allowed dir'),
84
('favicon', '', 'file containing x-icon data to return when browser requests favicon.ico'),
85
('allowed_ips', '', 'only allow connections from IPs specified in the given file; '+
86
'file contains subnet data in the format: aa.bb.cc.dd/len'),
87
('banned_ips', '', "don't allow connections from IPs specified in the given file; "+
88
'file contains IP range data in the format: xxx:xxx:ip1-ip2'),
89
('only_local_override_ip', 2, "ignore the ip GET parameter from machines which aren't on local network IPs " +
90
"(0 = never, 1 = always, 2 = ignore if NAT checking is not enabled)"),
91
('logfile', '', 'file to write the tracker logs, use - for stdout (default)'),
92
('allow_get', 0, 'use with allowed_dir; adds a /file?hash={hash} url that allows users to download the torrent file'),
93
('keep_dead', 0, 'keep dead torrents after they expire (so they still show up on your /scrape and web page)'),
94
('scrape_allowed', 'full', 'scrape access allowed (can be none, specific or full)'),
95
('dedicated_seed_id', '', 'allows tracker to monitor dedicated seed(s) and flag torrents as seeded'),
96
('compact_reqd', 1, "only allow peers that accept a compact response"),
99
def statefiletemplate(x):
100
if type(x) != DictType:
102
for cname, cinfo in x.items():
104
for y in cinfo.values(): # The 'peers' key is a dictionary of SHA hashes (torrent ids)
105
if type(y) != DictType: # ... for the active torrents, and each is a dictionary
107
for id, info in y.items(): # ... of client ids interested in that torrent
110
if type(info) != DictType: # ... each of which is also a dictionary
111
raise ValueError # ... which has an IP, a Port, and a Bytes Left count for that client for that torrent
112
if type(info.get('ip', '')) != StringType:
114
port = info.get('port')
115
if type(port) not in (IntType,LongType) or port < 0:
117
left = info.get('left')
118
if type(left) not in (IntType,LongType) or left < 0:
120
if type(info.get('supportcrypto')) not in (IntType,LongType):
122
if type(info.get('requirecrypto')) not in (IntType,LongType):
124
elif cname == 'completed':
125
if (type(cinfo) != DictType): # The 'completed' key is a dictionary of SHA hashes (torrent ids)
126
raise ValueError # ... for keeping track of the total completions per torrent
127
for y in cinfo.values(): # ... each torrent has an integer value
128
if type(y) not in (IntType,LongType):
129
raise ValueError # ... for the number of reported completions for that torrent
130
elif cname == 'allowed':
131
if (type(cinfo) != DictType): # a list of info_hashes and included data
133
if x.has_key('allowed_dir_files'):
134
adlist = [z[1] for z in x['allowed_dir_files'].values()]
135
for y in cinfo.keys(): # and each should have a corresponding key here
138
elif cname == 'allowed_dir_files':
139
if (type(cinfo) != DictType): # a list of files, their attributes and info hashes
142
for y in cinfo.values(): # each entry should have a corresponding info_hash
145
if not x['allowed'].has_key(y[1]):
147
if dirkeys.has_key(y[1]): # and each should have a unique info_hash
152
alas = 'your file may exist elsewhere in the universe\nbut alas, not here\n'
154
local_IPs = IP_List()
155
local_IPs.set_intranet_addresses()
158
def isotime(secs = None):
161
return strftime('%Y-%m-%d %H:%M UTC', gmtime(secs))
163
http_via_filter = re.compile(' for ([0-9.]+)\Z')
165
def _get_forwarded_ip(headers):
166
header = headers.get('x-forwarded-for')
169
x,y = header.split(',')
172
if is_valid_ip(x) and not local_IPs.includes(x):
175
header = headers.get('client-ip')
178
header = headers.get('via')
180
x = http_via_filter.search(header)
185
header = headers.get('from')
191
def get_forwarded_ip(headers):
192
x = _get_forwarded_ip(headers)
193
if x is None or not is_valid_ip(x) or local_IPs.includes(x):
197
def compact_peer_info(ip, port):
199
s = ( ''.join([chr(int(i)) for i in ip.split('.')])
200
+ chr((port & 0xFF00) >> 8) + chr(port & 0xFF) )
204
s = '' # not a valid IP, must be a domain name
208
def __init__(self, config, rawserver):
210
self.response_size = config['response_size']
211
self.dfile = config['dfile']
212
self.natcheck = config['nat_check']
213
favicon = config['favicon']
214
self.parse_dir_interval = config['parse_dir_interval']
218
h = open(favicon,'r')
219
self.favicon = h.read()
222
print "**warning** specified favicon file -- %s -- does not exist." % favicon
223
self.rawserver = rawserver
224
self.cached = {} # format: infohash: [[time1, l1, s1], [time2, l2, s2], ...]
225
self.cached_t = {} # format: infohash: [time, cache]
230
self.allowed_IPs = None
231
self.banned_IPs = None
232
if config['allowed_ips'] or config['banned_ips']:
233
self.allowed_ip_mtime = 0
234
self.banned_ip_mtime = 0
237
self.only_local_override_ip = config['only_local_override_ip']
238
if self.only_local_override_ip == 2:
239
self.only_local_override_ip = not config['nat_check']
241
if CHECK_PEER_ID_ENCRYPTED and not CRYPTO_OK:
242
print ('**warning** crypto library not installed,' +
243
' cannot completely verify encrypted peers')
245
if exists(self.dfile):
247
h = open(self.dfile, 'rb')
250
tempstate = bdecode(ds)
251
if not tempstate.has_key('peers'):
252
tempstate = {'peers': tempstate}
253
statefiletemplate(tempstate)
254
self.state = tempstate
256
print '**warning** statefile '+self.dfile+' corrupt; resetting'
257
self.downloads = self.state.setdefault('peers', {})
258
self.completed = self.state.setdefault('completed', {})
261
''' format: infohash: [[l0, s0], [l1, s1], ...]
262
l0,s0 = compact, not requirecrypto=1
263
l1,s1 = compact, only supportcrypto=1
264
l2,s2 = [compact, crypto_flag], all peers
267
l4,l4 = [ip,port] nopeerid
269
if config['compact_reqd']:
270
self.cache_default_len = 3
272
self.cache_default_len = 5
273
for infohash, ds in self.downloads.items():
274
self.seedcount[infohash] = 0
275
for x,y in ds.items():
277
if ( (self.allowed_IPs and not self.allowed_IPs.includes(ip))
278
or (self.banned_IPs and self.banned_IPs.includes(ip)) ):
282
self.seedcount[infohash] += 1
285
gip = y.get('given_ip')
286
if is_valid_ip(gip) and (
287
not self.only_local_override_ip or local_IPs.includes(ip) ):
289
self.natcheckOK(infohash,x,ip,y['port'],y)
291
for x in self.downloads.keys():
293
for y in self.downloads[x].keys():
296
self.trackerid = createPeerID('-T-')
299
self.reannounce_interval = config['reannounce_interval']
300
self.save_dfile_interval = config['save_dfile_interval']
301
self.show_names = config['show_names']
302
rawserver.add_task(self.save_state, self.save_dfile_interval)
303
self.prevtime = clock()
304
self.timeout_downloaders_interval = config['timeout_downloaders_interval']
305
rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval)
308
if (config['logfile']) and (config['logfile'] != '-'):
310
self.logfile = config['logfile']
311
self.log = open(self.logfile,'a')
312
sys.stdout = self.log
313
print "# Log Started: ", isotime()
315
print "**warning** could not redirect stdout to log file: ", sys.exc_info()[0]
317
if config['hupmonitor']:
318
def huphandler(signum, frame, self = self):
321
self.log = open(self.logfile,'a')
322
sys.stdout = self.log
323
print "# Log reopened: ", isotime()
325
print "**warning** could not reopen logfile"
327
signal.signal(signal.SIGHUP, huphandler)
329
self.allow_get = config['allow_get']
331
self.t2tlist = T2TList(config['multitracker_enabled'], self.trackerid,
332
config['multitracker_reannounce_interval'],
333
config['multitracker_maxpeers'], config['http_timeout'],
336
if config['allowed_list']:
337
if config['allowed_dir']:
338
print '**warning** allowed_dir and allowed_list options cannot be used together'
339
print '**warning** disregarding allowed_dir'
340
config['allowed_dir'] = ''
341
self.allowed = self.state.setdefault('allowed_list',{})
342
self.allowed_list_mtime = 0
344
self.remove_from_state('allowed','allowed_dir_files')
345
if config['multitracker_allowed'] == 'autodetect':
346
config['multitracker_allowed'] = 'none'
347
config['allowed_controls'] = 0
349
elif config['allowed_dir']:
350
self.allowed = self.state.setdefault('allowed',{})
351
self.allowed_dir_files = self.state.setdefault('allowed_dir_files',{})
352
self.allowed_dir_blocked = {}
354
self.remove_from_state('allowed_list')
358
self.remove_from_state('allowed','allowed_dir_files', 'allowed_list')
359
if config['multitracker_allowed'] == 'autodetect':
360
config['multitracker_allowed'] = 'none'
361
config['allowed_controls'] = 0
363
self.uq_broken = unquote('+') != ' '
364
self.keep_dead = config['keep_dead']
365
self.Filter = Filter(rawserver.add_task)
367
aggregator = config['aggregator']
368
if aggregator == '0':
369
self.is_aggregator = False
370
self.aggregator_key = None
372
self.is_aggregator = True
373
if aggregator == '1':
374
self.aggregator_key = None
376
self.aggregator_key = aggregator
377
self.natcheck = False
379
send = config['aggregate_forward']
381
self.aggregate_forward = None
384
self.aggregate_forward, self.aggregate_password = send.split(',')
386
self.aggregate_forward = send
387
self.aggregate_password = None
389
self.dedicated_seed_id = config['dedicated_seed_id']
393
self.cachetimeupdate()
395
def cachetimeupdate(self):
396
self.cachetime += 1 # raw clock, but more efficient for cache
397
self.rawserver.add_task(self.cachetimeupdate,1)
399
def aggregate_senddata(self, query):
400
url = self.aggregate_forward+'?'+query
401
if self.aggregate_password is not None:
402
url += '&password='+self.aggregate_password
403
rq = Thread(target = self._aggregate_senddata, args = [url])
407
def _aggregate_senddata(self, url): # just send, don't attempt to error check,
408
try: # discard any returned data
416
def get_infopage(self):
418
if not self.config['show_infopage']:
419
return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
420
red = self.config['infopage_redirect']
422
return (302, 'Found', {'Content-Type': 'text/html', 'Location': red},
423
'<A HREF="'+red+'">Click Here</A>')
426
s.write('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n' \
427
'<html><head><title>BitTorrent download info</title>\n')
428
if self.favicon is not None:
429
s.write('<link rel="shortcut icon" href="/favicon.ico">\n')
430
s.write('</head>\n<body>\n' \
431
'<h3>BitTorrent download info</h3>\n'\
433
'<li><strong>tracker version:</strong> %s</li>\n' \
434
'<li><strong>server time:</strong> %s</li>\n' \
435
'</ul>\n' % (version, isotime()))
436
if self.config['allowed_dir']:
438
names = [ (self.allowed[hash]['name'],hash)
439
for hash in self.allowed.keys() ]
441
names = [ (None,hash)
442
for hash in self.allowed.keys() ]
444
names = [ (None,hash) for hash in self.downloads.keys() ]
446
s.write('<p>not tracking any files yet...</p>\n')
452
tt = 0 # Total transferred
454
nf = 0 # Number of files displayed
455
if self.config['allowed_dir'] and self.show_names:
456
s.write('<table summary="files" border="1">\n' \
457
'<tr><th>info hash</th><th>torrent name</th><th align="right">size</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th><th align="right">transferred</th></tr>\n')
459
s.write('<table summary="files">\n' \
460
'<tr><th>info hash</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th></tr>\n')
461
for name,hash in names:
462
l = self.downloads[hash]
463
n = self.completed.get(hash, 0)
465
c = self.seedcount[hash]
469
if self.config['allowed_dir'] and self.show_names:
470
if self.allowed.has_key(hash):
472
sz = self.allowed[hash]['length'] # size
474
szt = sz * n # Transferred for this torrent
476
if self.allow_get == 1:
477
linkname = '<a href="/file?info_hash=' + quote(hash) + '">' + name + '</a>'
480
s.write('<tr><td><code>%s</code></td><td>%s</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n' \
481
% (b2a_hex(hash), linkname, size_format(sz), c, d, n, size_format(szt)))
483
s.write('<tr><td><code>%s</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td></tr>\n' \
484
% (b2a_hex(hash), c, d, n))
485
if self.config['allowed_dir'] and self.show_names:
486
s.write('<tr><td align="right" colspan="2">%i files</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n'
487
% (nf, size_format(ts), tc, td, tn, size_format(tt)))
489
s.write('<tr><td align="right">%i files</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td></tr>\n'
491
s.write('</table>\n' \
493
'<li><em>info hash:</em> SHA1 hash of the "info" section of the metainfo (*.torrent)</li>\n' \
494
'<li><em>complete:</em> number of connected clients with the complete file</li>\n' \
495
'<li><em>downloading:</em> number of connected clients still downloading</li>\n' \
496
'<li><em>downloaded:</em> reported complete downloads</li>\n' \
497
'<li><em>transferred:</em> torrent size * total downloaded (does not include partial transfers)</li>\n' \
500
s.write('</body>\n' \
502
return (200, 'OK', {'Content-Type': 'text/html; charset=iso-8859-1'}, s.getvalue())
505
return (500, 'Internal Server Error', {'Content-Type': 'text/html; charset=iso-8859-1'}, 'Server Error')
508
def scrapedata(self, hash, return_name = True):
509
l = self.downloads[hash]
510
n = self.completed.get(hash, 0)
511
c = self.seedcount[hash]
513
f = {'complete': c, 'incomplete': d, 'downloaded': n}
514
if return_name and self.show_names and self.config['allowed_dir']:
515
f['name'] = self.allowed[hash]['name']
518
def get_scrape(self, paramslist):
520
if paramslist.has_key('info_hash'):
521
if self.config['scrape_allowed'] not in ['specific', 'full']:
522
return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
523
bencode({'failure reason':
524
'specific scrape function is not available with this tracker.'}))
525
for hash in paramslist['info_hash']:
526
if self.allowed is not None:
527
if self.allowed.has_key(hash):
528
fs[hash] = self.scrapedata(hash)
530
if self.downloads.has_key(hash):
531
fs[hash] = self.scrapedata(hash)
533
if self.config['scrape_allowed'] != 'full':
534
return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
535
bencode({'failure reason':
536
'full scrape function is not available with this tracker.'}))
537
if self.allowed is not None:
538
keys = self.allowed.keys()
540
keys = self.downloads.keys()
542
fs[hash] = self.scrapedata(hash)
544
return (200, 'OK', {'Content-Type': 'text/plain'}, bencode({'files': fs}))
547
def get_file(self, hash):
548
if not self.allow_get:
549
return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
550
'get function is not available with this tracker.')
551
if not self.allowed.has_key(hash):
552
return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
553
fname = self.allowed[hash]['file']
554
fpath = self.allowed[hash]['path']
555
return (200, 'OK', {'Content-Type': 'application/x-bittorrent',
556
'Content-Disposition': 'attachment; filename=' + fname},
557
open(fpath, 'rb').read())
560
def check_allowed(self, infohash, paramslist):
561
if ( self.aggregator_key is not None
562
and not ( paramslist.has_key('password')
563
and paramslist['password'][0] == self.aggregator_key ) ):
564
return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
565
bencode({'failure reason':
566
'Requested download is not authorized for use with this tracker.'}))
568
if self.allowed is not None:
569
if not self.allowed.has_key(infohash):
570
return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
571
bencode({'failure reason':
572
'Requested download is not authorized for use with this tracker.'}))
573
if self.config['allowed_controls']:
574
if self.allowed[infohash].has_key('failure reason'):
575
return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
576
bencode({'failure reason': self.allowed[infohash]['failure reason']}))
578
if paramslist.has_key('tracker'):
579
if ( self.config['multitracker_allowed'] == 'none' or # turned off
580
paramslist['peer_id'][0] == self.trackerid ): # oops! contacted myself
581
return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
582
bencode({'failure reason': 'disallowed'}))
584
if ( self.config['multitracker_allowed'] == 'autodetect'
585
and not self.allowed[infohash].has_key('announce-list') ):
586
return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
587
bencode({'failure reason':
588
'Requested download is not authorized for multitracker use.'}))
592
def cache_default(self):
593
return [({},{}) for i in xrange(self.cache_default_len)]
595
def add_data(self, infohash, event, ip, paramslist):
596
peers = self.downloads.setdefault(infohash, {})
597
ts = self.times.setdefault(infohash, {})
598
self.completed.setdefault(infohash, 0)
599
self.seedcount.setdefault(infohash, 0)
601
def params(key, default = None, l = paramslist):
606
myid = params('peer_id','')
608
raise ValueError, 'id not of length 20'
609
if event not in ['started', 'completed', 'stopped', 'snooped', None]:
610
raise ValueError, 'invalid event'
611
port = params('cryptoport')
613
port = params('port','')
615
if port < 0 or port > 65535:
616
raise ValueError, 'invalid port'
617
left = long(params('left',''))
619
raise ValueError, 'invalid amount left'
620
uploaded = long(params('uploaded',''))
621
downloaded = long(params('downloaded',''))
622
if params('supportcrypto'):
625
s = int(params['requirecrypto'])
634
peer = peers.get(myid)
635
islocal = local_IPs.includes(ip)
636
mykey = params('key')
638
auth = peer.get('key',-1) == mykey or peer.get('ip') == ip
641
if is_valid_ip(gip) and (islocal or not self.only_local_override_ip):
646
if params('numwant') is not None:
647
rsize = min(int(params('numwant')),self.response_size)
649
rsize = self.response_size
651
if event == 'stopped':
654
self.delete_peer(infohash,myid)
658
peer = { 'ip': ip, 'port': port, 'left': left,
659
'supportcrypto': supportcrypto,
660
'requirecrypto': requirecrypto }
664
peer['given ip'] = gip
666
if not self.natcheck or islocal:
668
self.natcheckOK(infohash,myid,ip1,port,peer)
670
NatCheck(self.connectback_result,infohash,myid,ip1,port,
671
self.rawserver,encrypted=requirecrypto)
674
if event == 'completed':
675
self.completed[infohash] += 1
677
self.seedcount[infohash] += 1
683
return rsize # return w/o changing stats
686
if not left and peer['left']:
687
self.completed[infohash] += 1
688
self.seedcount[infohash] += 1
689
if not peer.get('nat', -1):
690
for bc in self.becache[infohash]:
691
bc[1][myid] = bc[0][myid]
693
elif left and not peer['left']:
694
self.completed[infohash] -= 1
695
self.seedcount[infohash] -= 1
696
if not peer.get('nat', -1):
697
for bc in self.becache[infohash]:
698
bc[0][myid] = bc[1][myid]
707
if gip != peer.get('given ip'):
709
peer['given ip'] = gip
710
elif peer.has_key('given ip'):
714
natted = peer.get('nat', -1)
717
l = self.becache[infohash]
722
del peer['nat'] # restart NAT testing
723
if natted and natted < self.natcheck:
727
if not self.natcheck or islocal:
729
self.natcheckOK(infohash,myid,ip1,port,peer)
731
NatCheck(self.connectback_result,infohash,myid,ip1,port,
732
self.rawserver,encrypted=requirecrypto)
737
def peerlist(self, infohash, stopped, tracker, is_seed,
738
return_type, rsize, supportcrypto):
739
data = {} # return data
740
seeds = self.seedcount[infohash]
741
data['complete'] = seeds
742
data['incomplete'] = len(self.downloads[infohash]) - seeds
744
if ( self.config['allowed_controls']
745
and self.allowed[infohash].has_key('warning message') ):
746
data['warning message'] = self.allowed[infohash]['warning message']
749
data['interval'] = self.config['multitracker_reannounce_interval']
752
cache = self.cached_t.setdefault(infohash, None)
753
if ( not cache or len(cache[1]) < rsize
754
or cache[0] + self.config['min_time_between_cache_refreshes'] < clock() ):
755
bc = self.becache.setdefault(infohash,self.cache_default())
756
cache = [ clock(), bc[0][0].values() + bc[0][1].values() ]
757
self.cached_t[infohash] = cache
761
data['peers'] = cache[-rsize:]
765
data['interval'] = self.reannounce_interval
766
if stopped or not rsize: # save some bandwidth
770
bc = self.becache.setdefault(infohash,self.cache_default())
771
len_l = len(bc[2][0])
772
len_s = len(bc[2][1])
773
if not (len_l+len_s): # caches are empty!
776
l_get_size = int(float(rsize)*(len_l)/(len_l+len_s))
777
cache = self.cached.setdefault(infohash,[None,None,None])[return_type]
778
if cache and ( not cache[1]
779
or (is_seed and len(cache[1]) < rsize)
780
or len(cache[1]) < l_get_size
781
or cache[0]+self.config['min_time_between_cache_refreshes'] < self.cachetime ):
784
peers = self.downloads[infohash]
785
if self.config['compact_reqd']:
788
vv = ([],[],[],[],[])
789
for key, ip, port in self.t2tlist.harvest(infohash): # empty if disabled
790
if not peers.has_key(key):
791
cp = compact_peer_info(ip, port)
793
vv[2].append((cp,'\x00'))
794
if not self.config['compact_reqd']:
795
vv[3].append({'ip': ip, 'port': port, 'peer id': key})
796
vv[4].append({'ip': ip, 'port': port})
797
cache = [ self.cachetime,
798
bc[return_type][0].values()+vv[return_type],
799
bc[return_type][1].values() ]
802
self.cached[infohash][return_type] = cache
803
for rr in xrange(len(self.cached[infohash])):
804
if rr != return_type:
806
self.cached[infohash][rr][1].extend(vv[rr])
809
if len(cache[1]) < l_get_size:
812
peerdata.extend(cache[2])
817
peerdata = cache[2][l_get_size-rsize:]
818
del cache[2][l_get_size-rsize:]
819
rsize -= len(peerdata)
823
peerdata.extend(cache[1][-rsize:])
824
del cache[1][-rsize:]
826
data['peers'] = ''.join(peerdata)
827
elif return_type == 1:
828
data['crypto_flags'] = "0x01"*len(peerdata)
829
data['peers'] = ''.join(peerdata)
830
elif return_type == 2:
831
data['crypto_flags'] = ''.join([p[1] for p in peerdata])
832
data['peers'] = ''.join([p[0] for p in peerdata])
834
data['peers'] = peerdata
838
def get(self, connection, path, headers):
839
real_ip = connection.get_ip()
845
ip = ipv6_to_ipv4(ip)
850
if ( (self.allowed_IPs and not self.allowed_IPs.includes(ip))
851
or (self.banned_IPs and self.banned_IPs.includes(ip)) ):
852
return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
853
bencode({'failure reason':
854
'your IP is not allowed on this tracker'}))
856
nip = get_forwarded_ip(headers)
857
if nip and not self.only_local_override_ip:
866
def params(key, default = None, l = paramslist):
872
(scheme, netloc, path, pars, query, fragment) = urlparse(path)
873
if self.uq_broken == 1:
874
path = path.replace('+',' ')
875
query = query.replace('+',' ')
876
path = unquote(path)[1:]
877
for s in query.split('&'):
881
paramslist.setdefault(kw, [])
882
paramslist[kw] += [unquote(s[i+1:])]
884
if path == '' or path == 'index.html':
885
return self.get_infopage()
887
return self.get_file(params('info_hash'))
888
if path == 'favicon.ico' and self.favicon is not None:
889
return (200, 'OK', {'Content-Type' : 'image/x-icon'}, self.favicon)
891
# automated access from here on
893
if path in ('scrape', 'scrape.php', 'tracker.php/scrape'):
894
return self.get_scrape(paramslist)
896
if not path in ('announce', 'announce.php', 'tracker.php/announce'):
897
return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
899
# main tracker function
901
filtered = self.Filter.check(real_ip, paramslist, headers)
903
return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
904
bencode({'failure reason': filtered}))
906
infohash = params('info_hash')
908
raise ValueError, 'no info hash'
910
notallowed = self.check_allowed(infohash, paramslist)
914
event = params('event')
916
rsize = self.add_data(infohash, event, ip, paramslist)
918
except ValueError, e:
919
return (400, 'Bad Request', {'Content-Type': 'text/plain'},
920
'you sent me garbage - ' + str(e))
922
if self.aggregate_forward and not paramslist.has_key('tracker'):
923
self.aggregate_senddata(query)
925
if self.is_aggregator: # don't return peer data here
926
return (200, 'OK', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
927
bencode({'response': 'OK'}))
929
if params('compact') and ipv4:
930
if params('requirecrypto'):
932
elif params('supportcrypto'):
936
elif self.config['compact_reqd'] and ipv4:
937
return (400, 'Bad Request', {'Content-Type': 'text/plain'},
938
'your client is outdated, please upgrade')
939
elif params('no_peer_id'):
944
data = self.peerlist(infohash, event=='stopped',
945
params('tracker'), not params('left'),
946
return_type, rsize, params('supportcrypto'))
948
if paramslist.has_key('scrape'): # deprecated
949
data['scrape'] = self.scrapedata(infohash, False)
951
if self.dedicated_seed_id:
952
if params('seed_id') == self.dedicated_seed_id and params('left') == 0:
953
self.is_seeded[infohash] = True
954
if params('check_seeded') and self.is_seeded.get(infohash):
957
return (200, 'OK', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, bencode(data))
960
def natcheckOK(self, infohash, peerid, ip, port, peer):
961
seed = not peer['left']
962
bc = self.becache.setdefault(infohash,self.cache_default())
963
cp = compact_peer_info(ip, port)
964
reqc = peer['requirecrypto']
965
bc[2][seed][peerid] = (cp,chr(reqc))
966
if peer['supportcrypto']:
967
bc[1][seed][peerid] = cp
969
bc[0][seed][peerid] = cp
970
if not self.config['compact_reqd']:
971
bc[3][seed][peerid] = Bencached(bencode({'ip': ip, 'port': port,
973
bc[4][seed][peerid] = Bencached(bencode({'ip': ip, 'port': port}))
976
def natchecklog(self, peerid, ip, port, result):
977
year, month, day, hour, minute, second, a, b, c = localtime(time())
978
print '%s - %s [%02d/%3s/%04d:%02d:%02d:%02d] "!natcheck-%s:%i" %i 0 - -' % (
979
ip, quote(peerid), day, months[month], year, hour, minute, second,
982
def connectback_result(self, result, downloadid, peerid, ip, port):
983
record = self.downloads.get(downloadid,{}).get(peerid)
985
or (record['ip'] != ip and record.get('given ip') != ip)
986
or record['port'] != port ):
987
if self.config['log_nat_checks']:
988
self.natchecklog(peerid, ip, port, 404)
990
if self.config['log_nat_checks']:
995
self.natchecklog(peerid, ip, port, x)
996
if not record.has_key('nat'):
997
record['nat'] = int(not result)
999
self.natcheckOK(downloadid,peerid,ip,port,record)
1000
elif result and record['nat']:
1002
self.natcheckOK(downloadid,peerid,ip,port,record)
1007
def remove_from_state(self, *l):
1014
def save_state(self):
1015
self.rawserver.add_task(self.save_state, self.save_dfile_interval)
1016
h = open(self.dfile, 'wb')
1017
h.write(bencode(self.state))
1021
def parse_allowed(self):
1022
self.rawserver.add_task(self.parse_allowed, self.parse_dir_interval)
1024
if self.config['allowed_dir']:
1025
r = parsedir( self.config['allowed_dir'], self.allowed,
1026
self.allowed_dir_files, self.allowed_dir_blocked,
1028
( self.allowed, self.allowed_dir_files, self.allowed_dir_blocked,
1029
added, garbage2 ) = r
1031
self.state['allowed'] = self.allowed
1032
self.state['allowed_dir_files'] = self.allowed_dir_files
1034
self.t2tlist.parse(self.allowed)
1037
f = self.config['allowed_list']
1038
if self.allowed_list_mtime == os.path.getmtime(f):
1041
r = parsetorrentlist(f, self.allowed)
1042
(self.allowed, added, garbage2) = r
1043
self.state['allowed_list'] = self.allowed
1044
except (IOError, OSError):
1045
print '**warning** unable to read allowed torrent list'
1047
self.allowed_list_mtime = os.path.getmtime(f)
1049
for infohash in added.keys():
1050
self.downloads.setdefault(infohash, {})
1051
self.completed.setdefault(infohash, 0)
1052
self.seedcount.setdefault(infohash, 0)
1055
def read_ip_lists(self):
1056
self.rawserver.add_task(self.read_ip_lists,self.parse_dir_interval)
1058
f = self.config['allowed_ips']
1059
if f and self.allowed_ip_mtime != os.path.getmtime(f):
1060
self.allowed_IPs = IP_List()
1062
self.allowed_IPs.read_fieldlist(f)
1063
self.allowed_ip_mtime = os.path.getmtime(f)
1064
except (IOError, OSError):
1065
print '**warning** unable to read allowed_IP list'
1067
f = self.config['banned_ips']
1068
if f and self.banned_ip_mtime != os.path.getmtime(f):
1069
self.banned_IPs = IP_Range_List()
1071
self.banned_IPs.read_rangelist(f)
1072
self.banned_ip_mtime = os.path.getmtime(f)
1073
except (IOError, OSError):
1074
print '**warning** unable to read banned_IP list'
1077
def delete_peer(self, infohash, peerid):
1078
dls = self.downloads[infohash]
1080
if not peer['left']:
1081
self.seedcount[infohash] -= 1
1082
if not peer.get('nat',-1):
1083
l = self.becache[infohash]
1084
y = not peer['left']
1086
if x[y].has_key(peerid):
1088
del self.times[infohash][peerid]
1091
def expire_downloaders(self):
1092
for x in self.times.keys():
1093
for myid, t in self.times[x].items():
1094
if t < self.prevtime:
1095
self.delete_peer(x,myid)
1096
self.prevtime = clock()
1097
if (self.keep_dead != 1):
1098
for key, value in self.downloads.items():
1099
if len(value) == 0 and (
1100
self.allowed is None or not self.allowed.has_key(key) ):
1102
del self.downloads[key]
1103
del self.seedcount[key]
1104
self.rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval)
1109
print formatDefinitions(defaults, 80)
1112
config, files = parseargs(args, defaults, 0, 0)
1113
except ValueError, e:
1114
print 'error: ' + str(e)
1115
print 'run with no arguments for parameter explanations'
1117
r = RawServer(Event(), config['timeout_check_interval'],
1118
config['socket_timeout'], ipv6_enable = config['ipv6_enabled'])
1119
t = Tracker(config, r)
1120
r.bind(config['port'], config['bind'],
1121
reuse = True, ipv6_socket_style = config['ipv6_binds_v4'])
1122
r.listen_forever(HTTPHandler(t.get, config['min_time_between_log_flushes']))
1124
print '# Shutting down: ' + isotime()
1130
r = str(int(s/1024)) + 'KiB'
1131
elif (s < 1073741824L):
1132
r = str(int(s/1048576)) + 'MiB'
1133
elif (s < 1099511627776L):
1134
r = str(int((s/1073741824.0)*100.0)/100.0) + 'GiB'
1136
r = str(int((s/1099511627776.0)*100.0)/100.0) + 'TiB'