~rmcbride/ubuntu/lucid/ubuntuone-client/fixucg

Viewing changes to tests/syncdaemon/test_hashqueue.py

  • Committer: Bazaar Package Importer
  • Author(s): Rodney Dawes
  • Date: 2009-08-26 12:15:00 UTC
  • mto: (17.1.1 ubuntuone-client)
  • mto: This revision was merged to the branch mainline in revision 7.
  • Revision ID: james.westby@ubuntu.com-20090826121500-56s42txajzxvgsdv
Tags: upstream-0.93.0
Import upstream version 0.93.0

=== modified file 'tests/syncdaemon/test_hashqueue.py'
--- tests/syncdaemon/test_hashqueue.py
+++ tests/syncdaemon/test_hashqueue.py
@@ -72,8 +72,7 @@
         # "I see dead threads"
         self.assertFalse(hasher.isAlive())
 
-
-    def test_called_back(self):
+    def test_called_back_ok(self):
         '''Tests that the hasher produces correct info.'''
         # create the hasher
         mark = object()
@@ -92,12 +91,13 @@
         testfile = os.path.join(self.test_dir, "testfile")
         with open(testfile, "w") as fh:
             fh.write("foobar")
-        queue.put(testfile)
+        queue.put((testfile, "mdid"))
 
         def check_info(args):
             """check the info pushed by the hasher"""
             # pylint: disable-msg=W0612
             event, path, hash, crc, size, stat = args
+            self.assertEqual(event, "HQ_HASH_NEW")
             # calculate what we should receive
             realh = content_hash_factory()
             realh.hash_object.update("foobar")
@@ -113,6 +113,35 @@
         # release the processor and check
         return d
 
+    def test_called_back_error(self):
+        '''Tests that the hasher signals error when no file.'''
+        # create the hasher
+        mark = object()
+        queue = hash_queue.UniqueQueue()
+        d = defer.Deferred()
+        class Helper(object):
+            '''helper class'''
+            def push(self, *args):
+                '''callback'''
+                d.callback(args)
+        receiver = Helper()
+        hasher = hash_queue._Hasher(queue, mark, receiver)
+        hasher.start()
+
+        # send what to hash
+        queue.put(("not_to_be_found", "foo"))
+
+        def check_info(args):
+            """check the info pushed by the hasher"""
+            event, mdid = args
+            self.assertEqual(event, "HQ_HASH_ERROR")
+            self.assertEqual(mdid, "foo")
+
+
+        d.addCallback(check_info)
+        # release the processor and check
+        return d
+
     def test_order(self):
         '''The hasher should return in order.'''
         # calculate what we should receive
@@ -153,7 +182,7 @@
         # send what to hash
         for i in range(10):
             tfile = os.path.join(self.test_dir, "tfile"+str(i))
-            queue.put(tfile)
+            queue.put((tfile, "mdid"))
         return d
 
     def test_large_content(self):
@@ -189,7 +218,7 @@
         # send what to hash
         with open(testfile, "w") as fh:
             fh.write(testinfo)
-        queue.put(testfile)
+        queue.put((testfile, "mdid"))
         return d
 
 
@@ -211,7 +240,7 @@
                       self._testMethodName)
         testcase.BaseTwistedTestCase.tearDown(self)
 
-    def test_called_back(self):
+    def test_called_back_ok(self):
         '''Tests that the hasher produces correct info.'''
         # create the hasher
         d = defer.Deferred()
@@ -228,12 +257,13 @@
         testfile = os.path.join(self.test_dir, "testfile")
         with open(testfile, "w") as fh:
             fh.write("foobar")
-        hq.insert(testfile)
+        hq.insert(testfile, "mdid")
 
         def check_info(args):
             """check the info pushed by the hasher"""
             # pylint: disable-msg=W0612
             event, path, hash, crc, size, stat = args
+            self.assertEqual(event, "HQ_HASH_NEW")
             # calculate what we should receive
             realh = content_hash_factory()
             realh.hash_object.update("foobar")
@@ -248,6 +278,31 @@
         d.addCallback(check_info)
         return d
 
+    def test_called_back_error(self):
+        '''Tests that the hasher generates an error when no file.'''
+        # create the hasher
+        d = defer.Deferred()
+        class Helper(object):
+            '''helper class'''
+            def push(self, *args):
+                '''callback'''
+                d.callback(args)
+        receiver = Helper()
+        hq = hash_queue.HashQueue(receiver)
+        self.addCleanup(hq.shutdown)
+
+        # send what to hash
+        hq.insert("not_to_be_found", "foo")
+
+        def check_info(args):
+            """check the info pushed by the hasher"""
+            event, mdid = args
+            self.assertEqual(event, "HQ_HASH_ERROR")
+            self.assertEqual(mdid, "foo")
+
+        d.addCallback(check_info)
+        return d
+
     def test_order(self):
         '''The hasher should return in order.'''
         # calculate what we should receive
@@ -284,7 +339,7 @@
         # send what to hash
         for i in range(10):
             tfile = os.path.join(self.test_dir, "tfile"+str(i))
-            hq.insert(tfile)
+            hq.insert(tfile, "mdid")
         return d
 
     def test_unique(self):
@@ -335,8 +390,8 @@
         # send to hash twice
         for i in range(10):
             tfile = os.path.join(self.test_dir, "tfile"+str(i))
-            hq.insert(tfile)
-            hq.insert(tfile)
+            hq.insert(tfile, "mdid")
+            hq.insert(tfile, "mdid")
         # start the hasher
         self.log.debug('Hasher started (forced)')
         hq.hasher.start()
@@ -344,7 +399,7 @@
         # the hasher is running
         for i in range(9, 10):
             tfile = os.path.join(self.test_dir, "tfile"+str(i))
-            hq.insert(tfile)
+            hq.insert(tfile, "mdid")
         return d
 
     def test_interrupt_current(self):
@@ -380,9 +435,9 @@
 
         # read in small chunks, so we have more iterations
         hq.hasher.chunk_size = 2**8
-        hq.insert(testfile)
+        hq.insert(testfile, "mdid")
         # insert it again, to cancel the first one
-        reactor.callLater(0.1, hq.insert, testfile)
+        reactor.callLater(0.1, hq.insert, testfile, "mdid")
         return d
     # timeout is used by trial, pylint: disable-msg=W0612
     test_interrupt_current.timeout = 5
@@ -396,7 +451,7 @@
         receiver = Helper()
         hq = hash_queue.HashQueue(receiver)
         hq.shutdown()
-        self.assertRaises(RuntimeError, hq.insert, 'foo')
+        self.assertRaises(RuntimeError, hq.insert, 'foo', "mdid")
 
     def test_shutdown_while_hashing(self):
         """Test that the HashQueue is shutdown ASAP while it's hashing."""
@@ -419,7 +474,7 @@
         self.addCleanup(hq.shutdown)
         # read in small chunks, so we have more iterations
         hq.hasher.chunk_size = 2**10
-        hq.insert(testfile)
+        hq.insert(testfile, "mdid")
         time.sleep(0.1)
         hq.shutdown()
         # block until the hash is stopped and the queue is empty
@@ -439,7 +494,7 @@
         receiver = Helper()
         hq = hash_queue.HashQueue(receiver)
         hq.shutdown()
-        self.assertRaises(RuntimeError, hq.insert, '/foo/bar')
+        self.assertRaises(RuntimeError, hq.insert, '/foo/bar', "mdid")
 
 
 class UniqueQueueTests(TwistedTestCase):
@@ -448,17 +503,17 @@
     def test_unique_elements(self):
         """Test that the queue actually holds unique elements."""
         queue = hash_queue.UniqueQueue()
-        queue.put('item1')
-        queue.put('item1')
+        queue.put(('item1', "mdid"))
+        queue.put(('item1', "mdid"))
         self.assertEquals(1, queue.qsize())
         self.assertEquals(1, len(queue._set))
         queue.get()
         self.assertEquals(0, queue.qsize())
         self.assertEquals(0, len(queue._set))
-        queue.put('item1')
-        queue.put('item2')
-        queue.put('item1')
-        queue.put('item2')
+        queue.put(('item1', "mdid"))
+        queue.put(('item2', "mdid"))
+        queue.put(('item1', "mdid"))
+        queue.put(('item2', "mdid"))
         self.assertEquals(2, queue.qsize())
         self.assertEquals(2, len(queue._set))
         queue.get()
@@ -469,14 +524,14 @@
     def test_clear(self):
         '''test clear method'''
         queue = hash_queue.UniqueQueue()
-        queue.put('item1')
-        queue.put('item2')
+        queue.put(('item1', "mdid"))
+        queue.put(('item2', "mdid"))
         self.assertEquals(2, queue.qsize())
         # check that queue.clear actually clear the queue
         queue.clear()
         self.assertEquals(0, queue.qsize())
-        queue.put('item3')
-        queue.put('item4')
+        queue.put(('item3', "mdid"))
+        queue.put(('item4', "mdid"))
         queue.get()
         self.assertEquals(2, queue.unfinished_tasks)
         self.assertEquals(1, queue.qsize())
@@ -502,7 +557,7 @@
         d.addCallback(check)
         t = threading.Thread(target=consumer, args=(queue, d))
         t.setDaemon(True)
-        queue.put('item1')
+        queue.put(('item1', "mdid"))
         t.start()
         reactor.callLater(0.1, queue.clear)
         return d
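
Note: the change running through this diff is an interface shift from bare paths to (path, mdid) pairs, so UniqueQueue.put() and HashQueue.insert() carry the metadata id alongside the path, and the error path reports it through HQ_HASH_ERROR. As a rough illustration of the de-duplication behaviour that test_unique_elements and test_clear exercise, here is a minimal standalone Python sketch of a queue that drops duplicate (path, mdid) items. It is a toy for illustration only, not the ubuntuone-client UniqueQueue implementation; the class and method names are assumptions.

# Minimal sketch of a de-duplicating FIFO queue, mirroring the behaviour
# checked by test_unique_elements: putting an equal item twice only
# enqueues it once. NOT the ubuntuone-client UniqueQueue; names here are
# illustrative assumptions.
import collections
import threading


class DedupQueue(object):
    """A FIFO queue that silently drops items already waiting in it."""

    def __init__(self):
        self._queue = collections.deque()
        self._set = set()              # items currently queued
        self._lock = threading.Lock()

    def put(self, item):
        """Enqueue item unless an equal item is already waiting."""
        with self._lock:
            if item not in self._set:
                self._set.add(item)
                self._queue.append(item)

    def get(self):
        """Pop the oldest item (raises IndexError if empty)."""
        with self._lock:
            item = self._queue.popleft()
            self._set.discard(item)
            return item

    def qsize(self):
        """Number of distinct items waiting."""
        with self._lock:
            return len(self._queue)


if __name__ == "__main__":
    q = DedupQueue()
    q.put(("item1", "mdid"))
    q.put(("item1", "mdid"))   # duplicate: ignored
    assert q.qsize() == 1
    assert q.get() == ("item1", "mdid")
    assert q.qsize() == 0

The tests above suggest the real UniqueQueue builds on the standard Queue class (it exposes qsize(), unfinished_tasks and an internal _set, plus a clear() method); the sketch keeps only the put/get/qsize surface that the de-duplication tests rely on.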