~hopem/charms/trusty/cinder/ensure-apache-restart

« back to all changes in this revision

Viewing changes to hooks/charmhelpers/fetch/archiveurl.py

  • Committer: Liam Young
  • Date: 2015-01-09 16:02:39 UTC
  • mfrom: (65 cinder.next)
  • mto: This revision was merged to the branch mainline in revision 67.
  • Revision ID: liam.young@canonical.com-20150109160239-qldk423wxfno2ao3
Merged next in and resolved conflicts

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
1
import os
2
 
import urllib2
3
 
from urllib import urlretrieve
4
 
import urlparse
5
2
import hashlib
 
3
import re
 
4
 
 
5
import six
 
6
if six.PY3:
 
7
    from urllib.request import (
 
8
        build_opener, install_opener, urlopen, urlretrieve,
 
9
        HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler,
 
10
    )
 
11
    from urllib.parse import urlparse, urlunparse, parse_qs
 
12
    from urllib.error import URLError
 
13
else:
 
14
    from urllib import urlretrieve
 
15
    from urllib2 import (
 
16
        build_opener, install_opener, urlopen,
 
17
        HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler,
 
18
        URLError
 
19
    )
 
20
    from urlparse import urlparse, urlunparse, parse_qs
6
21
 
7
22
from charmhelpers.fetch import (
8
23
    BaseFetchHandler,
15
30
from charmhelpers.core.host import mkdir, check_hash
16
31
 
17
32
 
 
33
def splituser(host):
    """Split ``user@host`` into its two parts.

    Replacement for urllib.splituser(), whose availability via six is
    broken.  Splits on the *last* ``@`` (the leading group is greedy).

    :param host: netloc string, possibly of the form ``user@host``.
    :returns: tuple ``(user, host)``; ``user`` is ``None`` when no ``@``
        is present.
    """
    found = re.match('^(.*)@(.*)$', host)
    # No '@' at all: the whole string is the host part.
    return found.group(1, 2) if found else (None, host)
 
40
 
 
41
 
 
42
def splitpasswd(user):
    """Split ``user:password`` into its two parts.

    Replacement for urllib.splitpasswd(), which six does not provide.
    Splits on the *first* ``:`` so passwords may themselves contain
    colons.

    :param user: credential string, possibly of the form ``user:pass``.
    :returns: tuple ``(user, password)``; ``password`` is ``None`` when
        no ``:`` is present.
    """
    name, sep, passwd = user.partition(':')
    if sep:
        return name, passwd
    # No separator found: whole string is the username.
    return user, None
 
49
 
 
50
 
18
51
class ArchiveUrlFetchHandler(BaseFetchHandler):
19
52
    """
20
53
    Handler to download archive files from arbitrary URLs.
42
75
        """
43
76
        # propagate all exceptions
44
77
        # URLError, OSError, etc
45
 
        proto, netloc, path, params, query, fragment = urlparse.urlparse(source)
 
78
        proto, netloc, path, params, query, fragment = urlparse(source)
46
79
        if proto in ('http', 'https'):
47
 
            auth, barehost = urllib2.splituser(netloc)
 
80
            auth, barehost = splituser(netloc)
48
81
            if auth is not None:
49
 
                source = urlparse.urlunparse((proto, barehost, path, params, query, fragment))
50
 
                username, password = urllib2.splitpasswd(auth)
51
 
                passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
 
82
                source = urlunparse((proto, barehost, path, params, query, fragment))
 
83
                username, password = splitpasswd(auth)
 
84
                passman = HTTPPasswordMgrWithDefaultRealm()
52
85
                # Realm is set to None in add_password to force the username and password
53
86
                # to be used whatever the realm
54
87
                passman.add_password(None, source, username, password)
55
 
                authhandler = urllib2.HTTPBasicAuthHandler(passman)
56
 
                opener = urllib2.build_opener(authhandler)
57
 
                urllib2.install_opener(opener)
58
 
        response = urllib2.urlopen(source)
 
88
                authhandler = HTTPBasicAuthHandler(passman)
 
89
                opener = build_opener(authhandler)
 
90
                install_opener(opener)
 
91
        response = urlopen(source)
59
92
        try:
60
93
            with open(dest, 'w') as dest_file:
61
94
                dest_file.write(response.read())
91
124
        url_parts = self.parse_url(source)
92
125
        dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched')
93
126
        if not os.path.exists(dest_dir):
94
 
            mkdir(dest_dir, perms=0755)
 
127
            mkdir(dest_dir, perms=0o755)
95
128
        dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path))
96
129
        try:
97
130
            self.download(source, dld_file)
98
 
        except urllib2.URLError as e:
 
131
        except URLError as e:
99
132
            raise UnhandledSource(e.reason)
100
133
        except OSError as e:
101
134
            raise UnhandledSource(e.strerror)
102
 
        options = urlparse.parse_qs(url_parts.fragment)
 
135
        options = parse_qs(url_parts.fragment)
103
136
        for key, value in options.items():
104
 
            if key in hashlib.algorithms:
 
137
            if not six.PY3:
 
138
                algorithms = hashlib.algorithms
 
139
            else:
 
140
                algorithms = hashlib.algorithms_available
 
141
            if key in algorithms:
105
142
                check_hash(dld_file, value, key)
106
143
        if checksum:
107
144
            check_hash(dld_file, checksum, hash_type)