1
# Written by Bram Cohen
2
# see LICENSE.txt for license information
4
from BitTornado.parseargs import parseargs, formatDefinitions
5
from BitTornado.RawServer import RawServer, autodetect_ipv6, autodetect_socket_style
6
from BitTornado.HTTPHandler import HTTPHandler, months, weekdays
7
from BitTornado.parsedir import parsedir
8
from NatCheck import NatCheck, CHECK_PEER_ID_ENCRYPTED
9
from BitTornado.BTcrypto import CRYPTO_OK
10
from T2T import T2TList
11
from BitTornado.subnetparse import IP_List, ipv6_to_ipv4, to_ipv4, is_valid_ip, is_ipv4
12
from BitTornado.iprangeparse import IP_List as IP_Range_List
13
from BitTornado.torrentlistparse import parsetorrentlist
14
from threading import Event, Thread
15
from BitTornado.bencode import bencode, bdecode, Bencached
16
from BitTornado.zurllib import urlopen, quote, unquote
17
from Filter import Filter
18
from urlparse import urlparse
19
from os import rename, getpid
20
from os.path import exists, isfile
21
from cStringIO import StringIO
22
from traceback import print_exc
23
from time import time, gmtime, strftime, localtime
24
from BitTornado.clock import clock
25
from random import shuffle, seed, randrange
27
from types import StringType, IntType, LongType, ListType, DictType
28
from binascii import b2a_hex, a2b_hex, a2b_base64
29
from string import lower
33
import BitTornado.__init__
34
from BitTornado.__init__ import version, createPeerID
40
# Compatibility shim for ancient Pythons (< 2.2.1) that lacked the builtin
# bool(); deliberately shadows the builtin and yields the same truth value.
bool = lambda x: not not x
43
('port', 80, "Port to listen on."),
44
('dfile', None, 'file to store recent downloader info in'),
45
('bind', '', 'comma-separated list of ips/hostnames to bind to locally'),
46
# ('ipv6_enabled', autodetect_ipv6(),
48
'allow the client to connect to peers via IPv6'),
49
('ipv6_binds_v4', autodetect_socket_style(),
50
'set if an IPv6 server socket will also field IPv4 connections'),
51
('socket_timeout', 15, 'timeout for closing connections'),
52
('save_dfile_interval', 5 * 60, 'seconds between saving dfile'),
53
('timeout_downloaders_interval', 45 * 60, 'seconds between expiring downloaders'),
54
('reannounce_interval', 30 * 60, 'seconds downloaders should wait between reannouncements'),
55
('response_size', 50, 'number of peers to send in an info message'),
56
('timeout_check_interval', 5,
57
'time to wait between checking if any connections have timed out'),
59
"how many times to check if a downloader is behind a NAT (0 = don't check)"),
61
"whether to add entries to the log for nat-check results"),
62
('min_time_between_log_flushes', 3.0,
63
'minimum time it must have been since the last flush to do another one'),
64
('min_time_between_cache_refreshes', 600.0,
65
'minimum time in seconds before a cache is considered stale and is flushed'),
66
('allowed_dir', '', 'only allow downloads for .torrents in this dir'),
67
('allowed_list', '', 'only allow downloads for hashes in this list (hex format, one per line)'),
68
('allowed_controls', 0, 'allow special keys in torrents in the allowed_dir to affect tracker access'),
69
('multitracker_enabled', 0, 'whether to enable multitracker operation'),
70
('multitracker_allowed', 'autodetect', 'whether to allow incoming tracker announces (can be none, autodetect or all)'),
71
('multitracker_reannounce_interval', 2 * 60, 'seconds between outgoing tracker announces'),
72
('multitracker_maxpeers', 20, 'number of peers to get in a tracker announce'),
73
('aggregate_forward', '', 'format: <url>[,<password>] - if set, forwards all non-multitracker to this url with this optional password'),
74
('aggregator', '0', 'whether to act as a data aggregator rather than a tracker. If enabled, may be 1, or <password>; ' +
75
'if password is set, then an incoming password is required for access'),
76
('hupmonitor', 0, 'whether to reopen the log file upon receipt of HUP signal'),
78
'number of seconds to wait before assuming that an http connection has timed out'),
79
('parse_dir_interval', 60, 'seconds between reloading of allowed_dir or allowed_file ' +
80
'and allowed_ips and banned_ips lists'),
81
('show_infopage', 1, "whether to display an info page when the tracker's root dir is loaded"),
82
('infopage_redirect', '', 'a URL to redirect the info page to'),
83
('show_names', 1, 'whether to display names from allowed dir'),
84
('favicon', '', 'file containing x-icon data to return when browser requests favicon.ico'),
85
('allowed_ips', '', 'only allow connections from IPs specified in the given file; '+
86
'file contains subnet data in the format: aa.bb.cc.dd/len'),
87
('banned_ips', '', "don't allow connections from IPs specified in the given file; "+
88
'file contains IP range data in the format: xxx:xxx:ip1-ip2'),
89
('only_local_override_ip', 2, "ignore the ip GET parameter from machines which aren't on local network IPs " +
90
"(0 = never, 1 = always, 2 = ignore if NAT checking is not enabled)"),
91
('logfile', '', 'file to write the tracker logs, use - for stdout (default)'),
92
('allow_get', 0, 'use with allowed_dir; adds a /file?hash={hash} url that allows users to download the torrent file'),
93
('keep_dead', 0, 'keep dead torrents after they expire (so they still show up on your /scrape and web page)'),
94
('scrape_allowed', 'full', 'scrape access allowed (can be none, specific or full)'),
95
('dedicated_seed_id', '', 'allows tracker to monitor dedicated seed(s) and flag torrents as seeded'),
96
('compact_reqd', 1, "only allow peers that accept a compact response"),
99
def statefiletemplate(x):
100
if type(x) != DictType:
102
for cname, cinfo in x.items():
104
for y in cinfo.values(): # The 'peers' key is a dictionary of SHA hashes (torrent ids)
105
if type(y) != DictType: # ... for the active torrents, and each is a dictionary
107
for id, info in y.items(): # ... of client ids interested in that torrent
110
if type(info) != DictType: # ... each of which is also a dictionary
111
raise ValueError # ... which has an IP, a Port, and a Bytes Left count for that client for that torrent
112
if type(info.get('ip', '')) != StringType:
114
port = info.get('port')
115
if type(port) not in (IntType,LongType) or port < 0:
117
left = info.get('left')
118
if type(left) not in (IntType,LongType) or left < 0:
120
if type(info.get('supportcrypto')) not in (IntType,LongType):
122
if type(info.get('requirecrypto')) not in (IntType,LongType):
124
elif cname == 'completed':
125
if (type(cinfo) != DictType): # The 'completed' key is a dictionary of SHA hashes (torrent ids)
126
raise ValueError # ... for keeping track of the total completions per torrent
127
for y in cinfo.values(): # ... each torrent has an integer value
128
if type(y) not in (IntType,LongType):
129
raise ValueError # ... for the number of reported completions for that torrent
130
elif cname == 'allowed':
131
if (type(cinfo) != DictType): # a list of info_hashes and included data
133
if x.has_key('allowed_dir_files'):
134
adlist = [z[1] for z in x['allowed_dir_files'].values()]
135
for y in cinfo.keys(): # and each should have a corresponding key here
138
elif cname == 'allowed_dir_files':
139
if (type(cinfo) != DictType): # a list of files, their attributes and info hashes
142
for y in cinfo.values(): # each entry should have a corresponding info_hash
145
if not x['allowed'].has_key(y[1]):
147
if dirkeys.has_key(y[1]): # and each should have a unique info_hash
152
alas = 'your file may exist elsewhere in the universe\nbut alas, not here\n'
154
local_IPs = IP_List()
155
local_IPs.set_intranet_addresses()
158
def isotime(secs = None):
    """Return epoch time *secs* (default: the current time) formatted as
    'YYYY-MM-DD HH:MM UTC'."""
    if secs is None:        # explicit, instead of relying on gmtime(None)
        secs = time()
    return strftime('%Y-%m-%d %H:%M UTC', gmtime(secs))
163
# Matches the trailing "for <ip>" clause of an HTTP Via header.
# Raw string: '\Z' in a non-raw literal is an invalid escape sequence
# (DeprecationWarning/SyntaxWarning on modern Pythons).
http_via_filter = re.compile(r' for ([0-9.]+)\Z')
165
def _get_forwarded_ip(headers):
166
header = headers.get('x-forwarded-for')
169
x,y = header.split(',')
172
if is_valid_ip(x) and not local_IPs.includes(x):
175
header = headers.get('client-ip')
178
header = headers.get('via')
180
x = http_via_filter.search(header)
185
header = headers.get('from')
191
def get_forwarded_ip(headers):
    """Return the proxy-reported client IP taken from *headers*, or None
    when no usable address was found.

    Addresses that are invalid or belong to the local/intranet ranges are
    rejected: a private address in a forwarding header tells us nothing
    about the client's public identity.
    """
    x = _get_forwarded_ip(headers)
    if x is None or not is_valid_ip(x) or local_IPs.includes(x):
        return None
    return x
197
def compact_peer_info(ip, port):
    """Encode *ip*:*port* in the 6-byte BEP 23 compact peer format
    (4 network-order address bytes + 2 big-endian port bytes).

    Returns '' when *ip* is not a dotted-quad IPv4 address (e.g. a DNS
    name), so callers can skip the peer in compact responses.
    """
    try:
        s = ( ''.join([chr(int(i)) for i in ip.split('.')])
              + chr((port & 0xFF00) >> 8) + chr(port & 0xFF) )
        if len(s) != 6:         # wrong number of dotted-quad octets
            raise ValueError
    except ValueError:          # non-numeric label or octet out of range
        s = ''  # not a valid IP, must be a domain name
    return s
208
def __init__(self, config, rawserver):
210
self.response_size = config['response_size']
211
self.dfile = config['dfile']
212
self.natcheck = config['nat_check']
213
favicon = config['favicon']
214
self.parse_dir_interval = config['parse_dir_interval']
218
h = open(favicon,'r')
219
self.favicon = h.read()
222
print "**warning** specified favicon file -- %s -- does not exist." % favicon
223
self.rawserver = rawserver
224
self.cached = {} # format: infohash: [[time1, l1, s1], [time2, l2, s2], ...]
225
self.cached_t = {} # format: infohash: [time, cache]
230
self.allowed_IPs = None
231
self.banned_IPs = None
232
if config['allowed_ips'] or config['banned_ips']:
233
self.allowed_ip_mtime = 0
234
self.banned_ip_mtime = 0
237
self.only_local_override_ip = config['only_local_override_ip']
238
if self.only_local_override_ip == 2:
239
self.only_local_override_ip = not config['nat_check']
241
if CHECK_PEER_ID_ENCRYPTED and not CRYPTO_OK:
242
print ('**warning** crypto library not installed,' +
243
' cannot completely verify encrypted peers')
245
if exists(self.dfile):
247
h = open(self.dfile, 'rb')
250
tempstate = bdecode(ds)
251
if not tempstate.has_key('peers'):
252
tempstate = {'peers': tempstate}
253
statefiletemplate(tempstate)
254
self.state = tempstate
256
print '**warning** statefile '+self.dfile+' corrupt; resetting'
257
self.downloads = self.state.setdefault('peers', {})
258
self.completed = self.state.setdefault('completed', {})
261
''' format: infohash: [[l0, s0], [l1, s1], ...]
262
l0,s0 = compact, not requirecrypto=1
263
l1,s1 = compact, only supportcrypto=1
264
l2,s2 = [compact, crypto_flag], all peers
267
l4,l4 = [ip,port] nopeerid
269
if config['compact_reqd']:
270
self.cache_default_len = 3
272
self.cache_default_len = 5
273
for infohash, ds in self.downloads.items():
274
self.seedcount[infohash] = 0
275
for x,y in ds.items():
277
if ( (self.allowed_IPs and not self.allowed_IPs.includes(ip))
278
or (self.banned_IPs and self.banned_IPs.includes(ip)) ):
282
self.seedcount[infohash] += 1
285
gip = y.get('given_ip')
286
if is_valid_ip(gip) and (
287
not self.only_local_override_ip or local_IPs.includes(ip) ):
289
self.natcheckOK(infohash,x,ip,y['port'],y)
291
for x in self.downloads.keys():
293
for y in self.downloads[x].keys():
296
self.trackerid = createPeerID('-T-')
299
self.reannounce_interval = config['reannounce_interval']
300
self.save_dfile_interval = config['save_dfile_interval']
301
self.show_names = config['show_names']
302
rawserver.add_task(self.save_state, self.save_dfile_interval)
303
self.prevtime = clock()
304
self.timeout_downloaders_interval = config['timeout_downloaders_interval']
305
rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval)
308
if (config['logfile']) and (config['logfile'] != '-'):
310
self.logfile = config['logfile']
311
self.log = open(self.logfile,'a')
312
sys.stdout = self.log
313
print "# Log Started: ", isotime()
315
print "**warning** could not redirect stdout to log file: ", sys.exc_info()[0]
317
if config['hupmonitor']:
318
def huphandler(signum, frame, self = self):
321
self.log = open(self.logfile,'a')
322
sys.stdout = self.log
323
print "# Log reopened: ", isotime()
325
print "**warning** could not reopen logfile"
327
signal.signal(signal.SIGHUP, huphandler)
329
self.allow_get = config['allow_get']
331
self.t2tlist = T2TList(config['multitracker_enabled'], self.trackerid,
332
config['multitracker_reannounce_interval'],
333
config['multitracker_maxpeers'], config['http_timeout'],
336
if config['allowed_list']:
337
if config['allowed_dir']:
338
print '**warning** allowed_dir and allowed_list options cannot be used together'
339
print '**warning** disregarding allowed_dir'
340
config['allowed_dir'] = ''
341
self.allowed = self.state.setdefault('allowed_list',{})
342
self.allowed_list_mtime = 0
344
self.remove_from_state('allowed','allowed_dir_files')
345
if config['multitracker_allowed'] == 'autodetect':
346
config['multitracker_allowed'] = 'none'
347
config['allowed_controls'] = 0
349
elif config['allowed_dir']:
350
self.allowed = self.state.setdefault('allowed',{})
351
self.allowed_dir_files = self.state.setdefault('allowed_dir_files',{})
352
self.allowed_dir_blocked = {}
354
self.remove_from_state('allowed_list')
358
self.remove_from_state('allowed','allowed_dir_files', 'allowed_list')
359
if config['multitracker_allowed'] == 'autodetect':
360
config['multitracker_allowed'] = 'none'
361
config['allowed_controls'] = 0
363
self.uq_broken = unquote('+') != ' '
364
self.keep_dead = config['keep_dead']
365
self.Filter = Filter(rawserver.add_task)
367
aggregator = config['aggregator']
368
if aggregator == '0':
369
self.is_aggregator = False
370
self.aggregator_key = None
372
self.is_aggregator = True
373
if aggregator == '1':
374
self.aggregator_key = None
376
self.aggregator_key = aggregator
377
self.natcheck = False
379
send = config['aggregate_forward']
381
self.aggregate_forward = None
384
self.aggregate_forward, self.aggregate_password = send.split(',')
386
self.aggregate_forward = send
387
self.aggregate_password = None
389
self.dedicated_seed_id = config['dedicated_seed_id']
393
self.cachetimeupdate()
395
def cachetimeupdate(self):
    """Tick the coarse one-second cache clock and reschedule this tick."""
    # A plain counter is cheaper than calling the real clock on every
    # cache-freshness comparison.
    self.cachetime = self.cachetime + 1
    self.rawserver.add_task(self.cachetimeupdate, 1)
399
def aggregate_senddata(self, query):
400
url = self.aggregate_forward+'?'+query
401
if self.aggregate_password is not None:
402
url += '&password='+self.aggregate_password
403
rq = Thread(target = self._aggregate_senddata, args = [url])
407
def _aggregate_senddata(self, url): # just send, don't attempt to error check,
408
try: # discard any returned data
416
def get_infopage(self):
418
if not self.config['show_infopage']:
419
return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
420
red = self.config['infopage_redirect']
422
return (302, 'Found', {'Content-Type': 'text/html', 'Location': red},
423
'<A HREF="'+red+'">Click Here</A>')
426
s.write('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n' \
427
'<html><head><title>BitTorrent download info</title>\n')
428
if self.favicon is not None:
429
s.write('<link rel="shortcut icon" href="/favicon.ico">\n')
430
s.write('</head>\n<body>\n' \
431
'<h3>BitTorrent download info</h3>\n'\
433
'<li><strong>tracker version:</strong> %s</li>\n' \
434
'<li><strong>server time:</strong> %s</li>\n' \
435
'</ul>\n' % (version, isotime()))
436
if self.config['allowed_dir']:
438
names = [ (self.allowed[hash]['name'],hash)
439
for hash in self.allowed.keys() ]
441
names = [ (None,hash)
442
for hash in self.allowed.keys() ]
444
names = [ (None,hash) for hash in self.downloads.keys() ]
446
s.write('<p>not tracking any files yet...</p>\n')
452
tt = 0 # Total transferred
454
nf = 0 # Number of files displayed
455
if self.config['allowed_dir'] and self.show_names:
456
s.write('<table summary="files" border="1">\n' \
457
'<tr><th>info hash</th><th>torrent name</th><th align="right">size</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th><th align="right">transferred</th></tr>\n')
459
s.write('<table summary="files">\n' \
460
'<tr><th>info hash</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th></tr>\n')
461
for name,hash in names:
462
l = self.downloads[hash]
463
n = self.completed.get(hash, 0)
465
c = self.seedcount[hash]
469
if self.config['allowed_dir'] and self.show_names:
470
if self.allowed.has_key(hash):
472
sz = self.allowed[hash]['length'] # size
474
szt = sz * n # Transferred for this torrent
476
if self.allow_get == 1:
477
linkname = '<a href="/file?info_hash=' + quote(hash) + '">' + name + '</a>'
480
s.write('<tr><td><code>%s</code></td><td>%s</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n' \
481
% (b2a_hex(hash), linkname, size_format(sz), c, d, n, size_format(szt)))
483
s.write('<tr><td><code>%s</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td></tr>\n' \
484
% (b2a_hex(hash), c, d, n))
485
if self.config['allowed_dir'] and self.show_names:
486
s.write('<tr><td align="right" colspan="2">%i files</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n'
487
% (nf, size_format(ts), tc, td, tn, size_format(tt)))
489
s.write('<tr><td align="right">%i files</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td></tr>\n'
491
s.write('</table>\n' \
493
'<li><em>info hash:</em> SHA1 hash of the "info" section of the metainfo (*.torrent)</li>\n' \
494
'<li><em>complete:</em> number of connected clients with the complete file</li>\n' \
495
'<li><em>downloading:</em> number of connected clients still downloading</li>\n' \
496
'<li><em>downloaded:</em> reported complete downloads</li>\n' \
497
'<li><em>transferred:</em> torrent size * total downloaded (does not include partial transfers)</li>\n' \
500
s.write('</body>\n' \
502
return (200, 'OK', {'Content-Type': 'text/html; charset=iso-8859-1'}, s.getvalue())
505
return (500, 'Internal Server Error', {'Content-Type': 'text/html; charset=iso-8859-1'}, 'Server Error')
508
def scrapedata(self, hash, return_name = True):
    """Build one /scrape 'files' entry for torrent *hash*.

    Returns a dict with 'complete' (seeds), 'incomplete' (leechers) and
    'downloaded' (reported completions); 'name' is added only when
    *return_name* is set and this tracker serves an allowed_dir with
    name display enabled.
    """
    l = self.downloads[hash]
    n = self.completed.get(hash, 0)
    c = self.seedcount[hash]
    d = len(l) - c                  # leechers = all peers minus seeds
    f = {'complete': c, 'incomplete': d, 'downloaded': n}
    if return_name and self.show_names and self.config['allowed_dir']:
        f['name'] = self.allowed[hash]['name']
    return f
518
def get_scrape(self, paramslist):
520
if paramslist.has_key('info_hash'):
521
if self.config['scrape_allowed'] not in ['specific', 'full']:
522
return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
523
bencode({'failure reason':
524
'specific scrape function is not available with this tracker.'}))
525
for hash in paramslist['info_hash']:
526
if self.allowed is not None:
527
if self.allowed.has_key(hash):
528
fs[hash] = self.scrapedata(hash)
530
if self.downloads.has_key(hash):
531
fs[hash] = self.scrapedata(hash)
533
if self.config['scrape_allowed'] != 'full':
534
return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
535
bencode({'failure reason':
536
'full scrape function is not available with this tracker.'}))
537
if self.allowed is not None:
538
keys = self.allowed.keys()
540
keys = self.downloads.keys()
542
fs[hash] = self.scrapedata(hash)
544
return (200, 'OK', {'Content-Type': 'text/plain'}, bencode({'files': fs}))
547
def get_file(self, hash):
    """Serve the stored .torrent file for *hash* (the /file?info_hash=...
    endpoint), when the allow_get option permits it.

    Returns an HTTP response tuple (status, reason, headers, body).
    """
    if not self.allow_get:
        return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
            'get function is not available with this tracker.')
    # 'in' instead of py2-only has_key()
    if hash not in self.allowed:
        return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
    fname = self.allowed[hash]['file']
    fpath = self.allowed[hash]['path']
    # Context manager so the descriptor is closed promptly (the original
    # open(...).read() leaked the handle until GC).
    with open(fpath, 'rb') as h:
        torrent_data = h.read()
    return (200, 'OK', {'Content-Type': 'application/x-bittorrent',
        'Content-Disposition': 'attachment; filename=' + fname},
        torrent_data)
560
def check_allowed(self, infohash, paramslist):
    """Vet an announce for *infohash* against the aggregator password,
    the allowed-torrents table and the multitracker policy.

    Returns None when the request is permitted, otherwise a complete
    (status, reason, headers, bencoded-body) HTTP response tuple.
    """
    if ( self.aggregator_key is not None
             and not ( 'password' in paramslist
                       and paramslist['password'][0] == self.aggregator_key ) ):
        return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
            bencode({'failure reason':
            'Requested download is not authorized for use with this tracker.'}))

    if self.allowed is not None:
        if infohash not in self.allowed:
            return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                bencode({'failure reason':
                'Requested download is not authorized for use with this tracker.'}))
        if self.config['allowed_controls']:
            if 'failure reason' in self.allowed[infohash]:
                return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                    bencode({'failure reason': self.allowed[infohash]['failure reason']}))

    if 'tracker' in paramslist:
        if ( self.config['multitracker_allowed'] == 'none' or       # turned off
                   paramslist['peer_id'][0] == self.trackerid ):    # oops! contacted myself
            return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                bencode({'failure reason': 'disallowed'}))

        if ( self.config['multitracker_allowed'] == 'autodetect'
                and 'announce-list' not in self.allowed[infohash] ):
            return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                bencode({'failure reason':
                'Requested download is not authorized for multitracker use.'}))

    return None     # every check passed
592
def cache_default(self):
    """Return one empty becache entry: cache_default_len pairs of
    (non-seed dict, seed dict), one pair per peer-list cache type."""
    # range (not xrange) works identically here under Python 2 and 3.
    return [({}, {}) for _ in range(self.cache_default_len)]
595
def add_data(self, infohash, event, ip, paramslist):
596
peers = self.downloads.setdefault(infohash, {})
597
ts = self.times.setdefault(infohash, {})
598
self.completed.setdefault(infohash, 0)
599
self.seedcount.setdefault(infohash, 0)
601
def params(key, default = None, l = paramslist):
606
myid = params('peer_id','')
608
raise ValueError, 'id not of length 20'
609
if event not in ['started', 'completed', 'stopped', 'snooped', None]:
610
raise ValueError, 'invalid event'
611
port = params('cryptoport')
613
port = params('port','')
615
if port < 0 or port > 65535:
616
raise ValueError, 'invalid port'
617
left = long(params('left',''))
619
raise ValueError, 'invalid amount left'
620
uploaded = long(params('uploaded',''))
621
downloaded = long(params('downloaded',''))
622
if params('supportcrypto'):
625
s = int(params['requirecrypto'])
634
peer = peers.get(myid)
635
islocal = local_IPs.includes(ip)
636
mykey = params('key')
638
auth = peer.get('key',-1) == mykey or peer.get('ip') == ip
641
if is_valid_ip(gip) and (islocal or not self.only_local_override_ip):
646
if params('numwant') is not None:
647
rsize = min(int(params('numwant')),self.response_size)
649
rsize = self.response_size
651
if event == 'stopped':
654
self.delete_peer(infohash,myid)
658
peer = { 'ip': ip, 'port': port, 'left': left,
659
'supportcrypto': supportcrypto,
660
'requirecrypto': requirecrypto }
664
peer['given ip'] = gip
666
if not self.natcheck or islocal:
668
self.natcheckOK(infohash,myid,ip1,port,peer)
670
NatCheck(self.connectback_result,infohash,myid,ip1,port,
671
self.rawserver,encrypted=requirecrypto)
674
if event == 'completed':
675
self.completed[infohash] += 1
677
self.seedcount[infohash] += 1
683
return rsize # return w/o changing stats
686
if not left and peer['left']:
687
self.completed[infohash] += 1
688
self.seedcount[infohash] += 1
689
if not peer.get('nat', -1):
690
for bc in self.becache[infohash]:
695
elif left and not peer['left']:
696
self.completed[infohash] -= 1
697
self.seedcount[infohash] -= 1
698
if not peer.get('nat', -1):
699
for bc in self.becache[infohash]:
711
if gip != peer.get('given ip'):
713
peer['given ip'] = gip
714
elif peer.has_key('given ip'):
718
natted = peer.get('nat', -1)
721
l = self.becache[infohash]
724
if x[y].has_key(myid):
727
del peer['nat'] # restart NAT testing
728
if natted and natted < self.natcheck:
732
if not self.natcheck or islocal:
734
self.natcheckOK(infohash,myid,ip1,port,peer)
736
NatCheck(self.connectback_result,infohash,myid,ip1,port,
737
self.rawserver,encrypted=requirecrypto)
742
def peerlist(self, infohash, stopped, tracker, is_seed,
743
return_type, rsize, supportcrypto):
744
data = {} # return data
745
seeds = self.seedcount[infohash]
746
data['complete'] = seeds
747
data['incomplete'] = len(self.downloads[infohash]) - seeds
749
if ( self.config['allowed_controls']
750
and self.allowed[infohash].has_key('warning message') ):
751
data['warning message'] = self.allowed[infohash]['warning message']
754
data['interval'] = self.config['multitracker_reannounce_interval']
757
cache = self.cached_t.setdefault(infohash, None)
758
if ( not cache or len(cache[1]) < rsize
759
or cache[0] + self.config['min_time_between_cache_refreshes'] < clock() ):
760
bc = self.becache.setdefault(infohash,self.cache_default())
761
cache = [ clock(), bc[0][0].values() + bc[0][1].values() ]
762
self.cached_t[infohash] = cache
766
data['peers'] = cache[-rsize:]
770
data['interval'] = self.reannounce_interval
771
if stopped or not rsize: # save some bandwidth
775
bc = self.becache.setdefault(infohash,self.cache_default())
776
len_l = len(bc[2][0])
777
len_s = len(bc[2][1])
778
if not (len_l+len_s): # caches are empty!
781
l_get_size = int(float(rsize)*(len_l)/(len_l+len_s))
782
cache = self.cached.setdefault(infohash,[None,None,None])[return_type]
783
if cache and ( not cache[1]
784
or (is_seed and len(cache[1]) < rsize)
785
or len(cache[1]) < l_get_size
786
or cache[0]+self.config['min_time_between_cache_refreshes'] < self.cachetime ):
789
peers = self.downloads[infohash]
790
if self.config['compact_reqd']:
793
vv = ([],[],[],[],[])
794
for key, ip, port in self.t2tlist.harvest(infohash): # empty if disabled
795
if not peers.has_key(key):
796
cp = compact_peer_info(ip, port)
798
vv[2].append((cp,'\x00'))
799
if not self.config['compact_reqd']:
800
vv[3].append({'ip': ip, 'port': port, 'peer id': key})
801
vv[4].append({'ip': ip, 'port': port})
802
cache = [ self.cachetime,
803
bc[return_type][0].values()+vv[return_type],
804
bc[return_type][1].values() ]
807
self.cached[infohash][return_type] = cache
808
for rr in xrange(len(self.cached[infohash])):
809
if rr != return_type:
811
self.cached[infohash][rr][1].extend(vv[rr])
814
if len(cache[1]) < l_get_size:
817
peerdata.extend(cache[2])
822
peerdata = cache[2][l_get_size-rsize:]
823
del cache[2][l_get_size-rsize:]
824
rsize -= len(peerdata)
828
peerdata.extend(cache[1][-rsize:])
829
del cache[1][-rsize:]
831
data['peers'] = ''.join(peerdata)
832
elif return_type == 1:
833
data['crypto_flags'] = "0x01"*len(peerdata)
834
data['peers'] = ''.join(peerdata)
835
elif return_type == 2:
836
data['crypto_flags'] = ''.join([p[1] for p in peerdata])
837
data['peers'] = ''.join([p[0] for p in peerdata])
839
data['peers'] = peerdata
843
def get(self, connection, path, headers):
844
real_ip = connection.get_ip()
850
ip = ipv6_to_ipv4(ip)
855
if ( (self.allowed_IPs and not self.allowed_IPs.includes(ip))
856
or (self.banned_IPs and self.banned_IPs.includes(ip)) ):
857
return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
858
bencode({'failure reason':
859
'your IP is not allowed on this tracker'}))
861
nip = get_forwarded_ip(headers)
862
if nip and not self.only_local_override_ip:
871
def params(key, default = None, l = paramslist):
877
(scheme, netloc, path, pars, query, fragment) = urlparse(path)
878
if self.uq_broken == 1:
879
path = path.replace('+',' ')
880
query = query.replace('+',' ')
881
path = unquote(path)[1:]
882
for s in query.split('&'):
886
paramslist.setdefault(kw, [])
887
paramslist[kw] += [unquote(s[i+1:])]
889
if path == '' or path == 'index.html':
890
return self.get_infopage()
892
return self.get_file(params('info_hash'))
893
if path == 'favicon.ico' and self.favicon is not None:
894
return (200, 'OK', {'Content-Type' : 'image/x-icon'}, self.favicon)
896
# automated access from here on
898
if path in ('scrape', 'scrape.php', 'tracker.php/scrape'):
899
return self.get_scrape(paramslist)
901
if not path in ('announce', 'announce.php', 'tracker.php/announce'):
902
return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
904
# main tracker function
906
filtered = self.Filter.check(real_ip, paramslist, headers)
908
return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
909
bencode({'failure reason': filtered}))
911
infohash = params('info_hash')
913
raise ValueError, 'no info hash'
915
notallowed = self.check_allowed(infohash, paramslist)
919
event = params('event')
921
rsize = self.add_data(infohash, event, ip, paramslist)
923
except ValueError, e:
924
return (400, 'Bad Request', {'Content-Type': 'text/plain'},
925
'you sent me garbage - ' + str(e))
927
if self.aggregate_forward and not paramslist.has_key('tracker'):
928
self.aggregate_senddata(query)
930
if self.is_aggregator: # don't return peer data here
931
return (200, 'OK', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
932
bencode({'response': 'OK'}))
934
if params('compact') and ipv4:
935
if params('requirecrypto'):
937
elif params('supportcrypto'):
941
elif self.config['compact_reqd'] and ipv4:
942
return (400, 'Bad Request', {'Content-Type': 'text/plain'},
943
'your client is outdated, please upgrade')
944
elif params('no_peer_id'):
949
data = self.peerlist(infohash, event=='stopped',
950
params('tracker'), not params('left'),
951
return_type, rsize, params('supportcrypto'))
953
if paramslist.has_key('scrape'): # deprecated
954
data['scrape'] = self.scrapedata(infohash, False)
956
if self.dedicated_seed_id:
957
if params('seed_id') == self.dedicated_seed_id and params('left') == 0:
958
self.is_seeded[infohash] = True
959
if params('check_seeded') and self.is_seeded.get(infohash):
962
return (200, 'OK', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, bencode(data))
965
def natcheckOK(self, infohash, peerid, ip, port, peer):
    """Record *peer* as reachable: insert it into every becache bucket
    (indexed non-seed/seed) that its crypto capabilities allow."""
    seed = not peer['left']         # becache sub-dicts are [leechers, seeds]
    bc = self.becache.setdefault(infohash, self.cache_default())
    cp = compact_peer_info(ip, port)
    reqc = peer['requirecrypto']
    bc[2][seed][peerid] = (cp, chr(reqc))   # compact + crypto flag: all peers
    if peer['supportcrypto']:
        bc[1][seed][peerid] = cp            # compact: crypto-capable peers
    if not reqc:
        bc[0][seed][peerid] = cp            # compact: peers not requiring crypto
    if not self.config['compact_reqd']:
        # Pre-bencoded non-compact forms, with and without peer id.
        bc[3][seed][peerid] = Bencached(bencode({'ip': ip, 'port': port,
                                                 'peer id': peerid}))
        bc[4][seed][peerid] = Bencached(bencode({'ip': ip, 'port': port}))
981
def natchecklog(self, peerid, ip, port, result):
    """Append an Apache-combined-style log line recording the outcome of a
    NAT check against peerid at ip:port."""
    year, month, day, hour, minute, second, a, b, c = localtime(time())
    # Single-argument print(...) parses identically under Python 2 and 3.
    print('%s - %s [%02d/%3s/%04d:%02d:%02d:%02d] "!natcheck-%s:%i" %i 0 - -' % (
        ip, quote(peerid), day, months[month], year, hour, minute, second,
        ip, port, result))
987
def connectback_result(self, result, downloadid, peerid, ip, port):
988
record = self.downloads.get(downloadid,{}).get(peerid)
990
or (record['ip'] != ip and record.get('given ip') != ip)
991
or record['port'] != port ):
992
if self.config['log_nat_checks']:
993
self.natchecklog(peerid, ip, port, 404)
995
if self.config['log_nat_checks']:
1000
self.natchecklog(peerid, ip, port, x)
1001
if not record.has_key('nat'):
1002
record['nat'] = int(not result)
1004
self.natcheckOK(downloadid,peerid,ip,port,record)
1005
elif result and record['nat']:
1007
self.natcheckOK(downloadid,peerid,ip,port,record)
1012
def remove_from_state(self, *l):
1019
def save_state(self):
    """Persist the tracker state dict to dfile (bencoded) and reschedule
    the next periodic save."""
    self.rawserver.add_task(self.save_state, self.save_dfile_interval)
    h = open(self.dfile, 'wb')
    try:
        h.write(bencode(self.state))
    finally:
        # Close even if bencode/write raises, so the descriptor never leaks.
        h.close()
1026
def parse_allowed(self):
1027
self.rawserver.add_task(self.parse_allowed, self.parse_dir_interval)
1029
if self.config['allowed_dir']:
1030
r = parsedir( self.config['allowed_dir'], self.allowed,
1031
self.allowed_dir_files, self.allowed_dir_blocked,
1033
( self.allowed, self.allowed_dir_files, self.allowed_dir_blocked,
1034
added, garbage2 ) = r
1036
self.state['allowed'] = self.allowed
1037
self.state['allowed_dir_files'] = self.allowed_dir_files
1039
self.t2tlist.parse(self.allowed)
1042
f = self.config['allowed_list']
1043
if self.allowed_list_mtime == os.path.getmtime(f):
1046
r = parsetorrentlist(f, self.allowed)
1047
(self.allowed, added, garbage2) = r
1048
self.state['allowed_list'] = self.allowed
1049
except (IOError, OSError):
1050
print '**warning** unable to read allowed torrent list'
1052
self.allowed_list_mtime = os.path.getmtime(f)
1054
for infohash in added.keys():
1055
self.downloads.setdefault(infohash, {})
1056
self.completed.setdefault(infohash, 0)
1057
self.seedcount.setdefault(infohash, 0)
1060
def read_ip_lists(self):
    """Reload the allowed_ips / banned_ips files when their mtimes change,
    and reschedule the next poll.

    On a read failure the freshly-created (empty) list stays in place and
    the stale mtime is kept, so the next poll retries the load.
    """
    self.rawserver.add_task(self.read_ip_lists, self.parse_dir_interval)

    f = self.config['allowed_ips']
    if f and self.allowed_ip_mtime != os.path.getmtime(f):
        self.allowed_IPs = IP_List()
        try:
            self.allowed_IPs.read_fieldlist(f)
            self.allowed_ip_mtime = os.path.getmtime(f)
        except (IOError, OSError):
            print('**warning** unable to read allowed_IP list')

    f = self.config['banned_ips']
    if f and self.banned_ip_mtime != os.path.getmtime(f):
        self.banned_IPs = IP_Range_List()
        try:
            self.banned_IPs.read_rangelist(f)
            self.banned_ip_mtime = os.path.getmtime(f)
        except (IOError, OSError):
            print('**warning** unable to read banned_IP list')
1082
def delete_peer(self, infohash, peerid):
    """Remove *peerid* from every per-torrent table for *infohash*:
    downloads, times, the becache response caches and the seed count."""
    dls = self.downloads[infohash]
    peer = dls[peerid]
    if not peer['left']:
        self.seedcount[infohash] -= 1
    # nat == 0 means the peer passed its NAT check and so was cached.
    if not peer.get('nat', -1):
        l = self.becache[infohash]
        y = not peer['left']        # index: False=leecher dict, True=seed dict
        for x in l:
            if peerid in x[y]:      # 'in' instead of py2-only has_key()
                del x[y][peerid]
    del self.times[infohash][peerid]
    del dls[peerid]
1096
def expire_downloaders(self):
    """Drop peers not heard from since the previous sweep, then (unless
    keep_dead is set) delete torrents left with no peers; reschedules
    itself for the next sweep."""
    # list(...) snapshots: delete_peer / del mutate these mappings while
    # we iterate (py2 .keys()/.items() copied implicitly, py3 does not).
    for x in list(self.times.keys()):
        for myid, t in list(self.times[x].items()):
            if t < self.prevtime:
                self.delete_peer(x, myid)
    self.prevtime = clock()
    if (self.keep_dead != 1):
        for key, value in list(self.downloads.items()):
            if len(value) == 0 and (
                    self.allowed is None or key not in self.allowed ):
                del self.times[key]
                del self.downloads[key]
                del self.seedcount[key]
    self.rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval)
1114
print formatDefinitions(defaults, 80)
1117
config, files = parseargs(args, defaults, 0, 0)
1118
except ValueError, e:
1119
print 'error: ' + str(e)
1120
print 'run with no arguments for parameter explanations'
1122
r = RawServer(Event(), config['timeout_check_interval'],
1123
config['socket_timeout'], ipv6_enable = config['ipv6_enabled'])
1124
t = Tracker(config, r)
1125
r.bind(config['port'], config['bind'],
1126
reuse = True, ipv6_socket_style = config['ipv6_binds_v4'])
1127
r.listen_forever(HTTPHandler(t.get, config['min_time_between_log_flushes']))
1129
print '# Shutting down: ' + isotime()
1135
r = str(int(s/1024)) + 'KiB'
1136
elif (s < 1073741824L):
1137
r = str(int(s/1048576)) + 'MiB'
1138
elif (s < 1099511627776L):
1139
r = str(int((s/1073741824.0)*100.0)/100.0) + 'GiB'
1141
r = str(int((s/1099511627776.0)*100.0)/100.0) + 'TiB'