# Written by Bram Cohen
# see LICENSE.txt for license information
from parseargs import parseargs, formatDefinitions
5
from RawServer import RawServer
6
from HTTPHandler import HTTPHandler
7
from NatCheck import NatCheck
8
from threading import Event
9
from bencode import bencode, bdecode, Bencached
10
from zurllib import urlopen, quote, unquote
11
from urlparse import urlparse
13
from os.path import exists, isfile
14
from cStringIO import StringIO
15
from time import time, gmtime, strftime
16
from random import shuffle
18
from types import StringType, IntType, LongType, ListType, DictType
19
from binascii import b2a_hex, a2b_hex, a2b_base64
21
from __init__ import version
24
('port', 80, "Port to listen on."),
25
('dfile', None, 'file to store recent downloader info in'),
26
('bind', '', 'ip to bind to locally'),
27
('socket_timeout', 15, 'timeout for closing connections'),
28
('save_dfile_interval', 5 * 60, 'seconds between saving dfile'),
29
('timeout_downloaders_interval', 45 * 60, 'seconds between expiring downloaders'),
30
('reannounce_interval', 30 * 60, 'seconds downloaders should wait between reannouncements'),
31
('response_size', 50, 'number of peers to send in an info message'),
32
('timeout_check_interval', 5,
33
'time to wait between checking if any connections have timed out'),
35
"how many times to check if a downloader is behind a NAT (0 = don't check)"),
36
('min_time_between_log_flushes', 3.0,
37
'minimum time it must have been since the last flush to do another one'),
38
('allowed_dir', '', 'only allow downloads for .torrents in this dir'),
39
('parse_allowed_interval', 15, 'minutes between reloading of allowed_dir'),
40
('show_names', 1, 'whether to display names from allowed dir'),
41
('favicon', '', 'file containing x-icon data to return when browser requests favicon.ico'),
42
('only_local_override_ip', 1, "ignore the ip GET parameter from machines which aren't on local network IPs"),
43
('logfile', '', 'file to write the tracker logs, use - for stdout (default)'),
44
('allow_get', 0, 'use with allowed_dir; adds a /file?hash={hash} url that allows users to download the torrent file'),
45
('keep_dead', 0, 'keep dead torrents after they expire (so they still show up on your /scrape and web page)'),
46
('max_give', 200, 'maximum number of peers to give with any one request'),
49
def statefiletemplate(x):
50
if type(x) != DictType:
52
for cname, cinfo in x.items():
54
for y in cinfo.values(): # The 'peers' key is a dictionary of SHA hashes (torrent ids)
55
if type(y) != DictType: # ... for the active torrents, and each is a dictionary
57
for id, info in y.items(): # ... of client ids interested in that torrent
60
if type(info) != DictType: # ... each of which is also a dictionary
61
raise ValueError # ... which has an IP, a Port, and a Bytes Left count for that client for that torrent
62
if type(info.get('ip', '')) != StringType:
64
port = info.get('port')
65
if type(port) not in (IntType, LongType) or port < 0:
67
left = info.get('left')
68
if type(left) not in (IntType, LongType) or left < 0:
70
elif cname == 'completed':
71
if (type(cinfo) != DictType): # The 'completed' key is a dictionary of SHA hashes (torrent ids)
72
raise ValueError # ... for keeping track of the total completions per torrent
73
for y in cinfo.values(): # ... each torrent has an integer value
74
if type(y) not in (IntType, LongType): # ... for the number of reported completions for that torrent
77
def parseTorrents(dir):
80
for f in os.listdir(dir):
81
if f[-8:] == '.torrent':
83
p = os.path.join(dir,f)
84
d = bdecode(open(p, 'rb').read())
85
h = sha(bencode(d['info'])).digest()
88
a[h]['name'] = i.get('name', f)
92
if i.has_key('length'):
94
elif i.has_key('files'):
96
if li.has_key('length'):
101
print "Error parsing " + f, sys.exc_info()[0]
104
alas = 'your file may exist elsewhere in the universe\nbut alas, not here\n'
106
def isotime(secs = None):
    """Return *secs* (seconds since the epoch; None means "now") as a
    'YYYY-MM-DD HH:MM UTC' string."""
    when = gmtime(secs)
    return strftime('%Y-%m-%d %H:%M UTC', when)
111
def compact_peer_info(ip, port):
    """Pack a dotted-quad IP string and a port number into the 6-character
    "compact" peer representation (4 address bytes + 2 port bytes,
    big-endian), as used in compact announce responses."""
    octets = [chr(int(piece)) for piece in ip.split('.')]
    port_hi = chr((port >> 8) & 0xFF)
    port_lo = chr(port & 0xFF)
    return ''.join(octets) + port_hi + port_lo
115
def __init__(self, config, rawserver):
116
self.response_size = config['response_size']
117
self.dfile = config['dfile']
118
self.natcheck = config['nat_check']
119
self.max_give = config['max_give']
120
self.reannounce_interval = config['reannounce_interval']
121
self.save_dfile_interval = config['save_dfile_interval']
122
self.show_names = config['show_names']
123
self.only_local_override_ip = config['only_local_override_ip']
124
favicon = config['favicon']
128
h = open(favicon, 'rb')
129
self.favicon = h.read()
132
print "**warning** specified favicon file -- %s -- does not exist." % favicon
133
self.rawserver = rawserver
139
if exists(self.dfile):
140
h = open(self.dfile, 'rb')
143
tempstate = bdecode(ds)
146
if tempstate.has_key('peers'):
147
self.state = tempstate
150
self.state['peers'] = tempstate
151
self.downloads = self.state.setdefault('peers', {})
152
self.completed = self.state.setdefault('completed', {})
153
statefiletemplate(self.state)
154
for x, dl in self.downloads.items():
156
for y, dat in dl.items():
158
if not dat.get('nat',1):
160
gip = dat.get('given ip')
161
if gip and is_valid_ipv4(gip) and (not self.only_local_override_ip or is_local_ip(ip)):
163
self.becache1.setdefault(x,{})[y] = Bencached(bencode({'ip': ip,
164
'port': dat['port'], 'peer id': y}))
165
self.becache2.setdefault(x,{})[y] = compact_peer_info(ip, dat['port'])
166
rawserver.add_task(self.save_dfile, self.save_dfile_interval)
167
self.prevtime = time()
168
self.timeout_downloaders_interval = config['timeout_downloaders_interval']
169
rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval)
172
if (config['logfile'] != '') and (config['logfile'] != '-'):
174
self.logfile = config['logfile']
175
self.log = open(self.logfile,'a')
176
sys.stdout = self.log
177
print "# Log Started: ", isotime()
179
print "Error trying to redirect stdout to log file:", sys.exc_info()[0]
180
self.allow_get = config['allow_get']
181
if config['allowed_dir'] != '':
182
self.allowed_dir = config['allowed_dir']
183
self.parse_allowed_interval = config['parse_allowed_interval']
187
if unquote('+') != ' ':
191
self.keep_dead = config['keep_dead']
193
def get(self, connection, path, headers):
195
(scheme, netloc, path, pars, query, fragment) = urlparse(path)
196
if self.uq_broken == 1:
197
path = path.replace('+',' ')
198
query = query.replace('+',' ')
199
path = unquote(path)[1:]
201
for s in query.split('&'):
204
params[unquote(s[:i])] = unquote(s[i+1:])
205
except ValueError, e:
206
return (400, 'Bad Request', {'Content-Type': 'text/plain'},
207
'you sent me garbage - ' + str(e))
208
if path == '' or path == 'index.html':
210
s.write('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n' \
211
'<html><head><title>BitTorrent download info</title>\n')
212
if self.favicon != None:
213
s.write('<link rel="shortcut icon" href="/favicon.ico" />\n')
214
s.write('</head>\n<body>\n' \
215
'<h3>BitTorrent download info</h3>\n'\
217
'<li><strong>tracker version:</strong> %s</li>\n' \
218
'<li><strong>server time:</strong> %s</li>\n' \
219
'</ul>\n' % (version, isotime()))
220
names = self.downloads.keys()
226
tt = 0 # Total transferred
228
nf = 0 # Number of files displayed
231
if self.allowed != None and self.show_names:
232
s.write('<table summary="files" border="1">\n' \
233
'<tr><th>info hash</th><th>torrent name</th><th align="right">size</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th><th align="right">transferred</th></tr>\n')
235
s.write('<table summary="files">\n' \
236
'<tr><th>info hash</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th></tr>\n')
238
l = self.downloads[name]
239
n = self.completed.get(name, 0)
243
if type(i) == DictType:
253
if self.allowed != None and self.show_names:
254
if self.allowed.has_key(name):
256
sz = self.allowed[name]['length'] # size
258
szt = sz * n # Transferred for this torrent
260
if self.allow_get == 1:
261
linkname = '<a href="/file?info_hash=' + b2a_hex(name) + '">' + self.allowed[name]['name'] + '</a>'
263
linkname = self.allowed[name]['name']
264
s.write('<tr><td><code>%s</code></td><td>%s</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n' \
265
% (b2a_hex(name), linkname, size_format(sz), c, d, n, size_format(szt)))
267
s.write('<tr><td><code>%s</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td></tr>\n' \
268
% (b2a_hex(name), c, d, n))
270
for i in self.completed.values():
272
if self.allowed != None and self.show_names:
273
s.write('<tr><td align="right" colspan="2">%i files</td><td align="right">%s</td><td align="right">%i/%i</td><td align="right">%i/%i</td><td align="right">%i/%i</td><td align="right">%s</td></tr>\n'
274
% (nf, size_format(ts), len(uc), tc, len(ud), td, tn, ttn, size_format(tt)))
276
s.write('<tr><td align="right">%i files</td><td align="right">%i/%i</td><td align="right">%i/%i</td><td align="right">%i/%i</td></tr>\n'
277
% (nf, len(uc), tc, len(ud), td, tn, ttn))
278
s.write('</table>\n' \
280
'<li><em>info hash:</em> SHA1 hash of the "info" section of the metainfo (*.torrent)</li>\n' \
281
'<li><em>complete:</em> number of connected clients with the complete file (total: unique IPs/total connections)</li>\n' \
282
'<li><em>downloading:</em> number of connected clients still downloading (total: unique IPs/total connections)</li>\n' \
283
'<li><em>downloaded:</em> reported complete downloads (total: current/all)</li>\n' \
284
'<li><em>transferred:</em> torrent size * total downloaded (does not include partial transfers)</li>\n' \
287
s.write('<p>not tracking any files yet...</p>\n')
288
s.write('</body>\n' \
290
return (200, 'OK', {'Content-Type': 'text/html; charset=iso-8859-1'}, s.getvalue())
291
elif path == 'scrape':
294
if params.has_key('info_hash'):
295
if self.downloads.has_key(params['info_hash']):
296
names = [ params['info_hash'] ]
297
# else return nothing
299
names = self.downloads.keys()
302
l = self.downloads[name]
303
n = self.completed.get(name, 0)
304
c = len([1 for i in l.values() if type(i) == DictType and i['left'] == 0])
306
fs[name] = {'complete': c, 'incomplete': d, 'downloaded': n}
307
if (self.allowed is not None) and self.allowed.has_key(name) and self.show_names:
308
fs[name]['name'] = self.allowed[name]['name']
310
return (200, 'OK', {'Content-Type': 'text/plain'}, bencode(r))
311
elif (path == 'file') and (self.allow_get == 1) and params.has_key('info_hash') and self.allowed.has_key(a2b_hex(params['info_hash'])):
312
hash = a2b_hex(params['info_hash'])
313
fname = self.allowed[hash]['file']
314
fpath = self.allowed[hash]['path']
315
return (200, 'OK', {'Content-Type': 'application/x-bittorrent', 'Content-Disposition': 'attachment; filename=' + fname}, open(fpath, 'rb').read())
316
elif path == 'favicon.ico' and self.favicon != None:
317
return (200, 'OK', {'Content-Type' : 'image/x-icon'}, self.favicon)
318
if path != 'announce':
319
return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
321
if not params.has_key('info_hash'):
322
raise ValueError, 'no info hash'
323
if params.has_key('ip') and not is_valid_ipv4(params['ip']):
324
raise ValueError('DNS name or invalid IP address given for IP')
325
infohash = params['info_hash']
326
if self.allowed != None:
327
if not self.allowed.has_key(infohash):
328
return (200, 'OK', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, bencode({'failure reason':
329
'Requested download is not authorized for use with this tracker.'}))
330
ip = connection.get_ip()
332
if params.has_key('ip') and is_valid_ipv4(params['ip']) and (
333
not self.only_local_override_ip or is_local_ip(ip)):
335
if params.has_key('event') and params['event'] not in ['started', 'completed', 'stopped']:
336
raise ValueError, 'invalid event'
337
port = long(params.get('port', ''))
338
uploaded = long(params.get('uploaded', ''))
339
downloaded = long(params.get('downloaded', ''))
340
left = long(params.get('left', ''))
341
myid = params.get('peer_id', '')
343
raise ValueError, 'id not of length 20'
344
rsize = self.response_size
345
if params.has_key('numwant'):
346
rsize = min(long(params['numwant']), self.max_give)
347
except ValueError, e:
348
return (400, 'Bad Request', {'Content-Type': 'text/plain'},
349
'you sent me garbage - ' + str(e))
350
peers = self.downloads.setdefault(infohash, {})
351
self.completed.setdefault(infohash, 0)
352
ts = self.times.setdefault(infohash, {})
354
if peers.has_key(myid):
356
if myinfo.has_key('key'):
357
if params.get('key') != myinfo['key']:
358
return (200, 'OK', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
359
bencode({'failure reason': 'key did not match key supplied earlier'}))
361
elif myinfo['ip'] == ip:
365
if params.get('event', '') != 'stopped' and confirm:
367
if not peers.has_key(myid):
368
peers[myid] = {'ip': ip, 'port': port, 'left': left}
369
if params.has_key('key'):
370
peers[myid]['key'] = params['key']
371
if params.has_key('ip') and is_valid_ipv4(params['ip']):
372
peers[myid]['given ip'] = params['ip']
376
if not self.natcheck or ip_override:
377
self.becache1.setdefault(infohash,{})[myid] = Bencached(bencode({'ip': mip, 'port': port, 'peer id': myid}))
378
self.becache2.setdefault(infohash,{})[myid] = compact_peer_info(mip, port)
380
peers[myid]['left'] = left
381
peers[myid]['ip'] = ip
382
if params.get('event', '') == 'completed':
383
self.completed[infohash] = 1 + self.completed[infohash]
385
peers[myid]['nat'] = 2**30
386
elif self.natcheck and not ip_override:
387
to_nat = peers[myid].get('nat', -1)
388
if to_nat and to_nat < self.natcheck:
389
NatCheck(self.connectback_result, infohash, myid, ip, port, self.rawserver)
391
peers[myid]['nat'] = 0
393
if peers.has_key(myid):
394
if self.becache1[infohash].has_key(myid):
395
del self.becache1[infohash][myid]
396
del self.becache2[infohash][myid]
399
data = {'interval': self.reannounce_interval}
400
if params.get('compact', 0):
404
cache = self.cache2.setdefault(infohash, [])
405
if len(cache) < rsize:
407
cache.extend(self.becache2.setdefault(infohash, {}).values())
409
del self.cache1.get(infohash, [])[:]
410
data['peers'] = ''.join(cache[-rsize:])
416
cache = self.cache1.setdefault(infohash, [])
417
if len(cache) < rsize:
419
cache.extend(self.becache1.setdefault(infohash, {}).values())
421
del self.cache2.get(infohash, [])[:]
422
data['peers'] = cache[-rsize:]
424
connection.answer((200, 'OK', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, bencode(data)))
426
def connectback_result(self, result, downloadid, peerid, ip, port):
427
record = self.downloads.get(downloadid, {}).get(peerid)
428
if record is None or record['ip'] != ip or record['port'] != port:
430
if not record.has_key('nat'):
431
record['nat'] = int(not result)
438
self.becache1.setdefault(downloadid,{})[peerid] = Bencached(bencode({'ip': ip, 'port': port, 'peer id': peerid}))
439
self.becache2.setdefault(downloadid,{})[peerid] = compact_peer_info(ip, port)
441
def save_dfile(self):
    """Serialize the tracker state to self.dfile and reschedule the next save.

    Re-queues itself on the raw server first so a write failure does not
    stop future save attempts.
    """
    self.rawserver.add_task(self.save_dfile, self.save_dfile_interval)
    h = open(self.dfile, 'wb')
    try:
        h.write(bencode(self.state))
    finally:
        # close even if bencode/write raises, so the handle is not leaked
        # (the visible code never closed it)
        h.close()
447
def parse_allowed(self):
    """Rebuild self.allowed from the .torrent files in allowed_dir and
    queue the next rescan (parse_allowed_interval is in minutes)."""
    rescan_delay = 60 * self.parse_allowed_interval
    self.rawserver.add_task(self.parse_allowed, rescan_delay)
    self.allowed = parseTorrents(self.allowed_dir)
451
def expire_downloaders(self):
452
for x in self.times.keys():
453
for myid, t in self.times[x].items():
454
if t < self.prevtime:
455
if self.becache1.get(x, {}).has_key(myid):
456
del self.becache1[x][myid]
457
del self.becache2[x][myid]
458
del self.times[x][myid]
459
del self.downloads[x][myid]
460
self.prevtime = time()
461
if (self.keep_dead != 1):
462
for key, value in self.downloads.items():
465
del self.downloads[key]
466
self.rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval)
468
def is_valid_ipv4(ip):
470
x = compact_peer_info(ip, 0)
473
except (ValueError, IndexError):
479
v = [long(x) for x in ip.split('.')]
480
if v[0] == 10 or v[0] == 127 or v[:2] in ([192, 168], [169, 254]):
482
if v[0] == 172 and v[1] >= 16 and v[1] <= 31:
489
print formatDefinitions(defaults, 80)
492
config, files = parseargs(args, defaults, 0, 0)
493
except ValueError, e:
494
print 'error: ' + str(e)
495
print 'run with no arguments for parameter explanations'
497
r = RawServer(Event(), config['timeout_check_interval'], config['socket_timeout'])
498
t = Tracker(config, r)
499
r.bind(config['port'], config['bind'], True)
500
r.listen_forever(HTTPHandler(t.get, config['min_time_between_log_flushes']))
502
print '# Shutting down: ' + isotime()
508
r = str(int(s/1024)) + 'KiB'
509
elif (s < 1073741824l):
510
r = str(int(s/1048576)) + 'MiB'
511
elif (s < 1099511627776l):
512
r = str(int((s/1073741824.0)*100.0)/100.0) + 'GiB'
514
r = str(int((s/1099511627776.0)*100.0)/100.0) + 'TiB'