~free.ekanayaka/landscape-client/karmic-1.5.4-0ubuntu0.9.10.0

« back to all changes in this revision

Viewing changes to landscape/lib/tests/test_fetch.py

  • Committer: Bazaar Package Importer
  • Author(s): Free Ekanayaka
  • Date: 2009-12-16 10:50:05 UTC
  • mfrom: (1.2.2 upstream)
  • Revision ID: james.westby@ubuntu.com-20091216105005-bmki8i2of1dmcdkc
Tags: 1.4.0-0ubuntu0.9.10.0
* New upstream release (LP: #497351)

* Bug fixes:
  - Fix landscape daemons fail to start when too many groups are
    available (LP: #456124)
  - Fix landscape programs wake up far too much. (LP: #340843)
  - Fix Package manager fails with 'no such table: task' (LP: #465846)
  - Fix test suite leaving temporary files around (LP: #476418)
  - Fix the 1hr long wait for user data to be uploaded following a
    resynchronisation (LP: #369000)

* Add support for Ubuntu release upgrades:
  - Add helper function to fetch many files at once (LP: #450629)
  - Handle release-upgrade messages in the packagemanager
    plugin (LP: #455217)
  - Add a release-upgrader task handler (LP: #462543)
  - Support upgrade-tool environment variables (LP: #463321)

* Add initial support for Smart package locking:
  - Detect and report changes about Smart package locks (LP: #488108)

* Packaging fixes:
  - Turn unnecessary Pre-Depends on python-gobject into a regular Depends
  - If it's empty, remove /etc/landscape upon purge

Show diffs side-by-side

added added

removed removed

Lines of Context:
 
1
import os
 
2
 
1
3
import pycurl
2
4
 
3
 
from landscape.lib.fetch import fetch, fetch_async, HTTPCodeError, PyCurlError
 
5
from twisted.internet.defer import FirstError
 
6
 
 
7
from landscape.lib.fetch import (
 
8
    fetch, fetch_async, fetch_many_async, fetch_to_files,
 
9
    url_to_filename, HTTPCodeError, PyCurlError)
4
10
from landscape.tests.helpers import LandscapeTest
5
11
 
6
12
 
19
25
        raise RuntimeError("Stub doesn't know about %d info" % what)
20
26
 
21
27
    def setopt(self, option, value):
 
28
        if isinstance(value, unicode):
 
29
            raise AssertionError("setopt() doesn't accept unicode values")
22
30
        if self.performed:
23
31
            raise AssertionError("setopt() can't be called after perform()")
24
32
        self.options[option] = value
32
40
        self.performed = True
33
41
 
34
42
 
 
43
class CurlManyStub(object):
    """Stub curl handle shared across several URLs.

    Each URL in C{url_results} gets its own L{CurlStub}; selecting a URL
    with C{setopt(pycurl.URL, ...)} routes subsequent calls to that stub.
    A result value may be a plain body string (served with HTTP 200) or a
    C{(body, http_code)} tuple.
    """

    def __init__(self, url_results):
        self.curls = {}
        for url, result in url_results.items():
            if isinstance(result, str):
                body, http_code = result, 200
            else:
                body, http_code = result[0], result[1]
            self.curls[url] = CurlStub(body, http_code=http_code)
        self.current = None

    def getinfo(self, what):
        # Mirror real curl semantics: info is only available after perform().
        if not self.current.performed:
            raise AssertionError("getinfo() can't be called before perform()")
        info = self.current.getinfo(what)
        # Reset so the next transfer must select a URL again first.
        self.current = None
        return info

    def setopt(self, option, value):
        # Setting the URL option switches to the stub registered for it.
        if option is pycurl.URL:
            self.current = self.curls[value]
        self.current.setopt(option, value)

    def perform(self):
        self.current.perform()
 
72
 
 
73
 
35
74
class Any(object):
    """Matcher object that compares equal to anything.

    Used as a wildcard in assertEquals() comparisons, e.g. for the
    WRITEFUNCTION curl option whose value is an arbitrary callable.
    """

    def __eq__(self, other):
        return True

    def __ne__(self, other):
        # Keep != consistent with ==: without this, Python 2 falls back to
        # identity comparison and Any() != x would be True for distinct x.
        return False
38
78
 
109
149
 
110
150
    def test_headers(self):
111
151
        curl = CurlStub("result")
112
 
        result = fetch("http://example.com", headers={"a":"1", "b":"2"},
113
 
                       curl=curl)
 
152
        result = fetch("http://example.com",
 
153
                       headers={"a":"1", "b":"2"}, curl=curl)
114
154
        self.assertEquals(result, "result")
115
155
        self.assertEquals(curl.options,
116
156
                          {pycurl.URL: "http://example.com",
125
165
 
126
166
    def test_timeouts(self):
127
167
        curl = CurlStub("result")
128
 
        result = fetch("http://example.com", connect_timeout=5, total_timeout=30,
129
 
                       curl=curl)
 
168
        result = fetch("http://example.com", connect_timeout=5,
 
169
                       total_timeout=30, curl=curl)
130
170
        self.assertEquals(result, "result")
131
171
        self.assertEquals(curl.options,
132
172
                          {pycurl.URL: "http://example.com",
138
178
                           pycurl.NOSIGNAL: 1,
139
179
                           pycurl.WRITEFUNCTION: Any()})
140
180
 
 
181
    def test_unicode(self):
 
182
        """
 
183
        The L{fetch} function converts the C{url} parameter to C{str} before
 
184
        passing it to curl.
 
185
        """
 
186
        curl = CurlStub("result")
 
187
        result = fetch(u"http://example.com", curl=curl)
 
188
        self.assertEquals(result, "result")
 
189
        self.assertEquals(curl.options[pycurl.URL], "http://example.com")
 
190
        self.assertTrue(isinstance(curl.options[pycurl.URL], str))
 
191
 
141
192
    def test_non_200_result(self):
142
193
        curl = CurlStub("result", http_code=404)
143
194
        try:
216
267
        d.addErrback(got_error)
217
268
        self.assertFailure(d, HTTPCodeError)
218
269
        return d
 
270
 
 
271
    def test_fetch_many_async(self):
 
272
        """
 
273
        L{fetch_many_async} retrieves multiple URLs, and returns a
 
274
        C{DeferredList} firing its callback when all the URLs have
 
275
        successfully completed.
 
276
        """
 
277
        url_results = {"http://good/": "good",
 
278
                       "http://better/": "better"}
 
279
 
 
280
        def callback(result, url):
 
281
            self.assertIn(result, url_results.values())
 
282
            self.assertIn(url, url_results)
 
283
            url_results.pop(url)
 
284
 
 
285
        def errback(failure, url):
 
286
            self.fail()
 
287
 
 
288
        curl = CurlManyStub(url_results)
 
289
        d = fetch_many_async(url_results.keys(), callback=callback,
 
290
                             errback=errback, curl=curl)
 
291
 
 
292
        def completed(result):
 
293
            self.assertEquals(url_results, {})
 
294
 
 
295
        return d.addCallback(completed)
 
296
 
 
297
    def test_fetch_many_async_with_error(self):
 
298
        """
 
299
        L{fetch_many_async} aborts as soon as one URL fails.
 
300
        """
 
301
        url_results = {"http://right/": "right",
 
302
                       "http://wrong/": ("wrong", 501),
 
303
                       "http://impossilbe/": "impossible"}
 
304
        failed_urls = []
 
305
 
 
306
        def errback(failure, url):
 
307
            failed_urls.append(url)
 
308
            self.assertEquals(failure.value.body, "wrong")
 
309
            self.assertEquals(failure.value.http_code, 501)
 
310
            return failure
 
311
 
 
312
        curl = CurlManyStub(url_results)
 
313
        result = fetch_many_async(url_results.keys(), callback=None,
 
314
                                  errback=errback, curl=curl)
 
315
 
 
316
        def check_failure(failure):
 
317
            self.assertTrue(isinstance(failure.subFailure.value,
 
318
                                       HTTPCodeError))
 
319
            self.assertEquals(failed_urls, ["http://wrong/"])
 
320
 
 
321
        self.assertFailure(result, FirstError)
 
322
        return result.addCallback(check_failure)
 
323
 
 
324
    def test_url_to_filename(self):
 
325
        """
 
326
        L{url_to_filename} extracts the filename part of an URL, optionally
 
327
        prepending a directory path to it.
 
328
        """
 
329
        self.assertEquals(url_to_filename("http://some/file"), "file")
 
330
        self.assertEquals(url_to_filename("http://some/file/"), "file")
 
331
        self.assertEquals(url_to_filename("http://some/file", directory="dir"),
 
332
                          os.path.join("dir", "file"))
 
333
 
 
334
    def test_fetch_to_files(self):
 
335
        """
 
336
        L{fetch_to_files} fetches a list of URLs and save their content
 
337
        in the given directory.
 
338
        """
 
339
        url_results = {"http://good/file": "file",
 
340
                       "http://even/better-file": "better-file"}
 
341
        directory = self.makeDir()
 
342
        curl = CurlManyStub(url_results)
 
343
 
 
344
        result = fetch_to_files(url_results.keys(), directory, curl=curl)
 
345
 
 
346
        def check_files(ignored):
 
347
            for result in url_results.itervalues():
 
348
                fd = open(os.path.join(directory, result))
 
349
                self.assertEquals(fd.read(), result)
 
350
                fd.close()
 
351
 
 
352
        result.addCallback(check_files)
 
353
        return result
 
354
 
 
355
    def test_fetch_to_files_with_trailing_slash(self):
 
356
        """
 
357
        L{fetch_to_files} discards trailing slashes from the final component
 
358
        of the given URLs when saving them as files.
 
359
        """
 
360
        directory = self.makeDir()
 
361
        curl = CurlStub("data")
 
362
 
 
363
        result = fetch_to_files(["http:///with/slash/"], directory, curl=curl)
 
364
 
 
365
        def check_files(ignored):
 
366
            os.path.exists(os.path.join(directory, "slash"))
 
367
 
 
368
        result.addCallback(check_files)
 
369
        return result
 
370
 
 
371
    def test_fetch_to_files_with_errors(self):
 
372
        """
 
373
        L{fetch_to_files} optionally logs an error message as soon as one URL
 
374
        fails, and aborts.
 
375
        """
 
376
        url_results = {"http://im/right": "right",
 
377
                       "http://im/wrong": ("wrong", 404),
 
378
                       "http://im/not": "not"}
 
379
        directory = self.makeDir()
 
380
        messages = []
 
381
        logger = lambda message: messages.append(message)
 
382
        curl = CurlManyStub(url_results)
 
383
        failed_urls = []
 
384
 
 
385
        result = fetch_to_files(url_results.keys(), directory, logger=logger,
 
386
                                curl=curl)
 
387
 
 
388
        def check_messages(failure):
 
389
            self.assertEquals(len(messages), 1)
 
390
            self.assertEquals(messages[0],
 
391
                              "Couldn't fetch file from http://im/wrong "
 
392
                              "(Server returned HTTP code 404)")
 
393
            messages.pop()
 
394
 
 
395
        def check_files(ignored):
 
396
            self.assertEquals(messages, [])
 
397
            self.assertFalse(os.path.exists(os.path.join(directory, "wrong")))
 
398
 
 
399
        result.addErrback(check_messages)
 
400
        result.addCallback(check_files)
 
401
        return result
 
402
 
 
403
    def test_fetch_to_files_with_non_existing_directory(self):
 
404
        """
 
405
        The deferred list returned by L{fetch_to_files} results in a failure
 
406
        if the destination directory doesn't exist.
 
407
        """
 
408
        url_results = {"http://im/right": "right"}
 
409
        directory = "i/dont/exist/"
 
410
        curl = CurlManyStub(url_results)
 
411
 
 
412
        result = fetch_to_files(url_results.keys(), directory, curl=curl)
 
413
 
 
414
        def check_error(failure):
 
415
            error = str(failure.value.subFailure.value)
 
416
            self.assertEquals(error, "[Errno 2] No such file or directory: "
 
417
                              "'i/dont/exist/right'")
 
418
            self.assertFalse(os.path.exists(os.path.join(directory, "right")))
 
419
 
 
420
        result.addErrback(check_error)
 
421
        return result