# Written by Bram Cohen
# multitracker extensions by John Hoffman
# see LICENSE.txt for license information
from copy import copy
from os import listdir
from os.path import getsize, split, join, abspath, isdir
from sha import sha
from string import strip
from threading import Event
from time import time
from traceback import print_exc

from BitTornado.bencode import bencode
from btformats import check_info
16
from sys import getfilesystemencoding
17
ENCODING = getfilesystemencoding()
19
from sys import getdefaultencoding
20
ENCODING = getdefaultencoding()
24
'a list of announce URLs - explained below'),
26
'a list of http seed URLs - explained below'),
27
('piece_size_pow2', 0,
28
"which power of 2 to set the piece size to (0 = automatic)"),
30
"optional human-readable comment to put in .torrent"),
31
('filesystem_encoding', '',
32
"optional specification for filesystem encoding " +
33
"(set automatically in recent Python versions)"),
35
"optional target file for the torrent")
38
default_piece_len_exp = 18
40
ignore = ['core', 'CVS']
42
def print_announcelist_details():
43
print (' announce_list = optional list of redundant/backup tracker URLs, in the format:')
44
print (' url[,url...][|url[,url...]...]')
45
print (' where URLs separated by commas are all tried first')
46
print (' before the next group of URLs separated by the pipe is checked.')
47
print (" If none is given, it is assumed you don't want one in the metafile.")
48
print (' If announce_list is given, clients which support it')
49
print (' will ignore the <announce> value.')
51
print (' http://tracker1.com|http://tracker2.com|http://tracker3.com')
52
print (' (tries trackers 1-3 in order)')
53
print (' http://tracker1.com,http://tracker2.com,http://tracker3.com')
54
print (' (tries trackers 1-3 in a randomly selected order)')
55
print (' http://tracker1.com|http://backup1.com,http://backup2.com')
56
print (' (tries tracker 1 first, then tries between the 2 backups randomly)')
58
print (' httpseeds = optional list of http-seed URLs, in the format:')
59
print (' url[|url...]')
61
def make_meta_file(file, url, params = {}, flag = Event(),
62
progress = lambda x: None, progress_percent = 1):
63
if params.has_key('piece_size_pow2'):
64
piece_len_exp = params['piece_size_pow2']
66
piece_len_exp = default_piece_len_exp
67
if params.has_key('target') and params['target'] != '':
74
f = join(a, b + '.torrent')
76
if piece_len_exp == 0: # automatic
78
if size > 8L*1024*1024*1024: # > 8 gig =
79
piece_len_exp = 21 # 2 meg pieces
80
elif size > 2*1024*1024*1024: # > 2 gig =
81
piece_len_exp = 20 # 1 meg pieces
82
elif size > 512*1024*1024: # > 512M =
83
piece_len_exp = 19 # 512K pieces
84
elif size > 64*1024*1024: # > 64M =
85
piece_len_exp = 18 # 256K pieces
86
elif size > 16*1024*1024: # > 16M =
87
piece_len_exp = 17 # 128K pieces
88
elif size > 4*1024*1024: # > 4M =
89
piece_len_exp = 16 # 64K pieces
91
piece_len_exp = 15 # 32K pieces
92
piece_length = 2 ** piece_len_exp
95
if params.has_key('filesystem_encoding'):
96
encoding = params['filesystem_encoding']
102
info = makeinfo(file, piece_length, encoding, flag, progress, progress_percent)
107
data = {'info': info, 'announce': strip(url), 'creation date': long(time())}
109
if params.has_key('comment') and params['comment']:
110
data['comment'] = params['comment']
112
if params.has_key('real_announce_list'): # shortcut for progs calling in from outside
113
data['announce-list'] = params['real_announce_list']
114
elif params.has_key('announce_list') and params['announce_list']:
116
for tier in params['announce_list'].split('|'):
117
l.append(tier.split(','))
118
data['announce-list'] = l
120
if params.has_key('real_httpseeds'): # shortcut for progs calling in from outside
121
data['httpseeds'] = params['real_httpseeds']
122
elif params.has_key('httpseeds') and params['httpseeds']:
123
data['httpseeds'] = params['httpseeds'].split('|')
125
h.write(bencode(data))
132
for s in subfiles(abspath(file)):
133
total += getsize(s[1])
137
def uniconvertl(l, e):
141
r.append(uniconvert(s, e))
143
raise UnicodeError('bad filename: '+join(*l))
146
def uniconvert(s, e):
148
if s.__class__.__name__ != 'unicode':
151
raise UnicodeError('bad filename: '+s)
152
return s.encode('utf-8')
154
def makeinfo(file, piece_length, encoding, flag, progress, progress_percent=1):
157
subs = subfiles(file)
166
totalsize += getsize(f)
171
fs.append({'length': size, 'path': uniconvertl(p, encoding)})
174
a = min(size - pos, piece_length - done)
182
if done == piece_length:
183
pieces.append(sh.digest())
187
progress(totalhashed / totalsize)
192
pieces.append(sh.digest())
193
return {'pieces': ''.join(pieces),
194
'piece length': piece_length, 'files': fs,
195
'name': uniconvert(split(file)[1], encoding) }
202
x = h.read(min(piece_length, size - p))
205
pieces.append(sha(x).digest())
210
progress(float(p) / size)
212
progress(min(piece_length, size - p))
214
return {'pieces': ''.join(pieces),
215
'piece length': piece_length, 'length': size,
216
'name': uniconvert(split(file)[1], encoding) }
221
while len(stack) > 0:
225
if s not in ignore and s[:1] != '.':
226
stack.append((copy(p) + [s], join(n, s)))
232
def completedir(dir, url, params = {}, flag = Event(),
233
vc = lambda x: None, fc = lambda x: None):
237
if params.has_key('target'):
238
target = params['target']
244
if f[-len(ext):] != ext and (f + ext) not in files:
245
togen.append(join(dir, f))
252
def callback(x, subtotal = subtotal, total = total, vc = vc):
254
vc(float(subtotal[0]) / total)
259
if t not in ignore and t[0] != '.':
261
params['target'] = join(target,t+ext)
262
make_meta_file(i, url, params, flag, progress = callback, progress_percent = 0)