# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright 2002 Ben Escoto <ben@emerose.org>
# Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
#
# This file is part of duplicity.
#
# Duplicity is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# Duplicity is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

"""Define some lazy data structures and functions acting on them"""
from duplicity.static import * #@UnusedWildImport
30
"""Hold static methods for the manipulation of lazy iterators"""
32
def filter(predicate, iterator): #@NoSelf
    """Like filter in a lazy functional programming language

    Lazily yields only the elements of iterator for which
    predicate(elem) is true.
    """
    for i in iterator:
        if predicate(i):
            yield i
def map(function, iterator): #@NoSelf
    """Like map in a lazy functional programming language

    Lazily yields function(elem) for each element of iterator.
    """
    for i in iterator:
        yield function(i)
def foreach(function, iterator): #@NoSelf
    """Run function on each element in iterator

    Called only for side effects; the return values of function are
    discarded and nothing is returned.
    """
    for i in iterator:
        function(i)
def cat(*iters): #@NoSelf
    """Lazily concatenate iterators

    Yields every element of the first iterator, then every element of
    the second, and so on.
    """
    for subiter in iters:
        for i in subiter:
            yield i
def cat2(iter_of_iters): #@NoSelf
    """Lazily concatenate iterators, iterated by big iterator

    Same as cat, but the iterators to concatenate are themselves
    produced lazily by iter_of_iters.
    """
    for subiter in iter_of_iters:
        for i in subiter:
            yield i
def empty(iter): #@NoSelf
    """True if iterator has length 0

    Consumes at most one element of iter.  Returns 1 when iter yields
    nothing, None otherwise (the truthy/None convention used throughout
    this module).
    """
    for i in iter: #@UnusedVariable
        return None
    return 1
def equal(iter1, iter2, verbose = None, operator = lambda x, y: x == y): #@NoSelf
    """True if iterator 1 has same elements as iterator 2

    Use equality operator, or == if it is unspecified.

    Both iterators are consumed.  Returns 1 on equality, None when the
    lengths differ or some pair of elements compares unequal; when
    verbose is set, the first mismatch is printed.
    """
    for i1 in iter1:
        try:
            # next() builtin (Python 2.6+) instead of iter2.next(),
            # so this also runs under Python 3.
            i2 = next(iter2)
        except StopIteration:
            # iter2 ran out first
            if verbose:
                print("End when i1 = %s" % (i1,))
            return None
        if not operator(i1, i2):
            if verbose:
                print("%s not equal to %s" % (i1, i2))
            return None
    try:
        i2 = next(iter2)
    except StopIteration:
        return 1
    # iter1 ran out first
    if verbose:
        print("End when i2 = %s" % (i2,))
    return None
def Or(iter): #@NoSelf
    """True if any element in iterator is true.  Short circuiting

    Returns the first truthy element; if none is truthy, returns the
    last element seen (None for an empty iterator).
    """
    i = None
    for i in iter:
        if i:
            return i
    return i
def And(iter): #@NoSelf
    """True if all elements in iterator are true.  Short circuiting

    Returns the first falsy element; if all are truthy, returns the
    last element seen (1 for an empty iterator).
    """
    i = 1
    for i in iter:
        if not i:
            return i
    return i
def len(iter): #@NoSelf
    """Return length of iterator

    Consumes iter entirely.  Counts with next() rather than building a
    list, so memory use is constant.
    """
    i = 0
    while 1:
        try:
            # next() builtin instead of iter.next() for Python 3 compat
            next(iter)
        except StopIteration:
            return i
        i = i + 1
def foldr(f, default, iter): #@NoSelf
    """foldr the "fundamental list recursion operator"?

    Computes f(e1, f(e2, ... f(en, default))).  Implemented iteratively
    (materialize once, fold right-to-left) rather than by recursing per
    element, so it cannot hit the interpreter recursion limit on long
    iterators; f is still applied strictly, as before.
    """
    items = list(iter)
    result = default
    for item in reversed(items):
        result = f(item, result)
    return result
def foldl(f, default, iter): #@NoSelf
    """the fundamental list iteration operator..

    Computes f(f(f(default, e1), e2), ...) over all elements, returning
    the accumulated value (default for an empty iterator).
    """
    while 1:
        try:
            # named next_elem to avoid shadowing the next() builtin
            next_elem = next(iter)
        except StopIteration:
            return default
        default = f(default, next_elem)
def multiplex(iter, num_of_forks, final_func = None, closing_func = None): #@NoSelf
    """Split a single iterator into a number of streams

    The return val will be a list with length num_of_forks, each
    of which will be an iterator like iter.  final_func is the
    function that will be called on each element in iter just as
    it is being removed from the buffer.  closing_func is called
    when all the streams are finished.

    """
    if num_of_forks == 2 and not final_func and not closing_func:
        # Time-sensitive fast path, see IterMultiplex2
        im2 = IterMultiplex2(iter)
        return (im2.yielda(), im2.yieldb())
    if not final_func:
        final_func = lambda i: None
    if not closing_func:
        closing_func = lambda: None

    # buffer is a list of elements that some iterators need and others
    # don't; newest elements are inserted at the front.
    buffer = []

    # buffer[forkposition[i]] is the next element yielded by iterator
    # i.  If it is -1, yield from the original iter
    starting_forkposition = [-1] * num_of_forks
    forkposition = starting_forkposition[:]
    called_closing_func = [None]

    def get_next(fork_num):
        """Return the next element requested by fork_num"""
        if forkposition[fork_num] == -1:
            try:
                buffer.insert(0, next(iter))
            except StopIteration:
                # call closing_func if necessary: only when every fork
                # has drained the buffer, and at most once.
                if (forkposition == starting_forkposition and
                        not called_closing_func[0]):
                    closing_func()
                    # Bug fix: was reset to None, which let closing_func
                    # run again on every subsequent exhausted fork.
                    called_closing_func[0] = 1
                raise
            for i in range(num_of_forks):
                forkposition[i] += 1

        return_val = buffer[forkposition[fork_num]]
        forkposition[fork_num] -= 1

        blen = len(buffer)
        if not (blen - 1) in forkposition:
            # Last position in buffer no longer needed
            assert forkposition[fork_num] == blen - 2
            final_func(buffer[blen - 1])
            del buffer[blen - 1]
        return return_val

    def make_iterator(fork_num):
        # Catch StopIteration from get_next explicitly: letting it
        # escape a generator is a RuntimeError under PEP 479.
        while 1:
            try:
                elem = get_next(fork_num)
            except StopIteration:
                return
            yield elem

    return tuple(map(make_iterator, range(num_of_forks)))
class IterMultiplex2:
    """Multiplex an iterator into 2 parts

    This is a special optimized case of the Iter.multiplex function,
    used when there is no closing_func or final_func, and we only want
    to split it into 2.  By profiling, this is a time sensitive class.

    """
    def __init__(self, iter):
        self.a_leading_by = 0  # How many places a is ahead of b
        self.buffer = []  # elements one side consumed, the other still needs
        self.iter = iter  # the underlying single iterator

    def yielda(self):
        """Return first iterator"""
        buf, iter = self.buffer, self.iter
        while 1:
            if self.a_leading_by >= 0:
                # a is in front, add new element
                try:
                    elem = next(iter)
                except StopIteration:
                    # PEP 479: must not let StopIteration escape a generator
                    return
                buf.append(elem)
            else:
                # b is in front, subtract an element
                elem = buf.pop(0)
            self.a_leading_by += 1
            yield elem

    def yieldb(self):
        """Return second iterator"""
        buf, iter = self.buffer, self.iter
        while 1:
            if self.a_leading_by <= 0:
                # b is in front, add new element
                try:
                    elem = next(iter)
                except StopIteration:
                    return
                buf.append(elem)
            else:
                # a is in front, subtract an element
                elem = buf.pop(0)
            self.a_leading_by -= 1
            yield elem
class IterTreeReducer:
    """Tree style reducer object for iterator - stolen from rdiff-backup

    The indicies of a RORPIter form a tree type structure.  This class
    can be used on each element of an iter in sequence and the result
    will be as if the corresponding tree was reduced.  This tries to
    bridge the gap between the tree nature of directories, and the
    iterator nature of the connection between hosts and the temporal
    order in which the files are processed.

    This will usually be used by subclassing ITRBranch below and then
    call the initializer below with the new class.

    """
    def __init__(self, branch_class, branch_args):
        """ITR initializer

        branch_class is instantiated (with branch_args) once for the
        root and once per subtree encountered.
        """
        self.branch_class = branch_class
        self.branch_args = branch_args
        self.index = None  # last index processed; None until first call
        self.root_branch = branch_class(*branch_args)
        self.branches = [self.root_branch]  # innermost branch is last

    def finish_branches(self, index):
        """Run Finish() on all branches index has passed

        When we pass out of a branch, delete it and process it with
        the parent.  The innermost branches will be the last in the
        list.  Return None if we are out of the entire tree, and 1
        otherwise.

        """
        branches = self.branches
        while 1:
            to_be_finished = branches[-1]
            base_index = to_be_finished.base_index
            if base_index != index[:len(base_index)]:
                # out of the tree, finish with to_be_finished
                to_be_finished.call_end_proc()
                del branches[-1]
                if not branches:
                    return None
                branches[-1].branch_process(to_be_finished)
            else:
                return 1

    def add_branch(self):
        """Return branch of type self.branch_class, add to branch list"""
        branch = self.branch_class(*self.branch_args)
        self.branches.append(branch)
        return branch

    def process_w_branch(self, index, branch, args):
        """Run start_process on latest branch"""
        robust.check_common_error(branch.on_error,
                                  branch.start_process, args)
        if not branch.caught_exception:
            branch.start_successful = 1
        branch.base_index = index

    def Finish(self):
        """Call at end of sequence to tie everything up"""
        while 1:
            to_be_finished = self.branches.pop()
            to_be_finished.call_end_proc()
            if not self.branches:
                break
            self.branches[-1].branch_process(to_be_finished)

    def __call__(self, *args):
        """Process args, where args[0] is current position in iterator

        Returns true if args successfully processed, false if index is
        not in the current tree and thus the final result is
        available.

        Also note below we set self.index after doing the necessary
        start processing, in case there is a crash in the middle.

        """
        index = args[0]
        if self.index is None:
            # First element: start processing at the root branch
            self.process_w_branch(index, self.root_branch, args)
            self.index = index
            return 1

        if index <= self.index:
            # Indices must arrive in strictly increasing order
            log.Warn(_("Warning: oldindex %s >= newindex %s") %
                     (self.index, index))
            return 1

        if self.finish_branches(index) is None:
            return None # We are no longer in the main tree
        last_branch = self.branches[-1]
        if last_branch.start_successful:
            if last_branch.can_fast_process(*args):
                robust.check_common_error(last_branch.on_error,
                                          last_branch.fast_process, args)
            else:
                branch = self.add_branch()
                self.process_w_branch(index, branch, args)
        else:
            last_branch.log_prev_error(index)

        self.index = index
        return 1
347
"""Helper class for IterTreeReducer above
349
There are five stub functions below: start_process, end_process,
350
branch_process, fast_process, and can_fast_process. A class that
351
subclasses this one will probably fill in these functions to do
355
base_index = index = None
357
caught_exception = start_successful = None
359
def call_end_proc(self):
    """Runs the end_process on self, checking for errors

    Marks the branch as having caught an exception if it was already
    finished or never started successfully, then runs end_process under
    robust error checking and flags the branch as finished.
    """
    if self.finished or not self.start_successful:
        self.caught_exception = 1

    # Since all end_process does is copy over attributes, might as
    # well run it even if we did get errors earlier.
    robust.check_common_error(self.on_error, self.end_process)

    self.finished = 1
def start_process(self, *args):
    """Do some initial processing (stub)"""
    pass
def end_process(self):
    """Do any final processing before leaving branch (stub)"""
    pass
def branch_process(self, branch):
    """Process a branch right after it is finished (stub)

    Sanity check: callers must only hand over branches on which
    call_end_proc() has already run.
    """
    assert branch.finished
    pass
def can_fast_process(self, *args):
    """True if object can be processed without new branch (stub)"""
    return None
def fast_process(self, *args):
    """Process args without new child branch (stub)"""
    pass
def on_error(self, exc, *args):
    """This is run on any exception in start/end-process

    Records that an exception occurred and logs a warning naming the
    file being processed, derived from args[0] (an index tuple) when
    available, else from self.index.
    """
    self.caught_exception = 1
    if args and args[0] and isinstance(args[0], tuple):
        filename = os.path.join(*args[0])
    elif self.index:
        filename = os.path.join(*self.index)
    else:
        # empty index means the root of the backup set
        filename = "."
    log.Warn(_("Error '%s' processing %s") % (exc, filename))
def log_prev_error(self, index):
    """Call function if no pending exception

    Logs that index is skipped because an earlier error left this
    branch without a successful start.
    """
    if not index:
        # empty index means the root of the backup set
        index_str = "."
    else:
        index_str = os.path.join(*index)
    log.Warn(_("Skipping %s because of previous error") % index_str)
411
from duplicity import log
412
from duplicity import robust