7
from urllib.request import (
8
build_opener, install_opener, urlopen, urlretrieve,
9
HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler,
11
from urllib.parse import urlparse, urlunparse, parse_qs
12
from urllib.error import URLError
14
from urllib import urlretrieve
16
build_opener, install_opener, urlopen,
17
HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler,
20
from urlparse import urlparse, urlunparse, parse_qs
5
22
from charmhelpers.fetch import (
10
27
get_archive_handler,
13
from charmhelpers.core.host import mkdir
30
from charmhelpers.core.host import mkdir, check_hash
34
def splituser(host):
    '''urllib.splituser(), but six's support of this seems broken'''
    _userprog = re.compile('^(.*)@(.*)$')
    match = _userprog.match(host)
    if match:
        # Greedy first group: 'a@b@c' splits at the LAST '@', like
        # the stdlib urllib.splituser did.
        return match.group(1, 2)
    # No credentials present in the netloc.
    return None, host
42
def splitpasswd(user):
    '''urllib.splitpasswd(), but six's support of this is missing'''
    _passwdprog = re.compile('^([^:]*):(.*)$', re.S)
    match = _passwdprog.match(user)
    if match:
        # Non-greedy user part: 'u:p:q' splits at the FIRST ':',
        # so passwords may themselves contain colons.
        return match.group(1, 2)
    # No password given.
    return user, None
16
51
class ArchiveUrlFetchHandler(BaseFetchHandler):
    """
    Handler to download archive files from arbitrary URLs.

    Can fetch from http, https, ftp, and file URLs.

    Can install either tarballs (.tar, .tgz, .tbz2, etc) or zip files.

    Installs the contents of the archive in $CHARM_DIR/fetched/.
    """
    def can_handle(self, source):
        """Return True if `source` is an archive URL this handler supports.

        :param str source: candidate source URL.
        :returns: True when the URL scheme and archive type are supported,
            otherwise an explanatory string (which is falsy to callers that
            test `if handler.can_handle(...) is True`).
        """
        url_parts = self.parse_url(source)
        if url_parts.scheme not in ('http', 'https', 'ftp', 'file'):
            return "Wrong source type"
        if get_archive_handler(self.base_url(source)):
            return True
        return False

    def download(self, source, dest):
        """
        Download an archive file.

        :param str source: URL pointing to an archive file.
        :param str dest: Local path location to download archive file to.
        """
        # propagate all exceptions
        # URLError, OSError, etc
        proto, netloc, path, params, query, fragment = urlparse(source)
        if proto in ('http', 'https'):
            auth, barehost = splituser(netloc)
            if auth is not None:
                # Strip the credentials out of the URL and install a
                # basic-auth opener carrying them instead.
                source = urlunparse((proto, barehost, path, params, query, fragment))
                username, password = splitpasswd(auth)
                passman = HTTPPasswordMgrWithDefaultRealm()
                # Realm is set to None in add_password to force the username and password
                # to be used whatever the realm
                passman.add_password(None, source, username, password)
                authhandler = HTTPBasicAuthHandler(passman)
                opener = build_opener(authhandler)
                install_opener(opener)
        response = urlopen(source)
        try:
            # Archives are binary: 'wb' is required — text mode would
            # raise a TypeError on the bytes returned by response.read()
            # under Python 3 and could mangle data via newline translation.
            with open(dest, 'wb') as dest_file:
                dest_file.write(response.read())
        except Exception as e:
            # Don't leave a truncated partial download behind.
            if os.path.isfile(dest):
                os.unlink(dest)
            raise e

    # Mandatory file validation via Sha1 or MD5 hashing.
    def download_and_validate(self, url, hashsum, validate="sha1"):
        """Download `url` to a temporary file and verify its checksum.

        :param str url: URL to fetch.
        :param str hashsum: expected digest of the downloaded file.
        :param str validate: hashlib algorithm name used for `hashsum`.
        :returns: path of the downloaded temporary file.
        """
        tempfile, headers = urlretrieve(url)
        check_hash(tempfile, hashsum, validate)
        return tempfile

    def install(self, source, dest=None, checksum=None, hash_type='sha1'):
        """
        Download and install an archive file, with optional checksum validation.

        The checksum can also be given on the `source` URL's fragment.
        For example::

            handler.install('http://example.com/file.tgz#sha1=deadbeef')

        :param str source: URL pointing to an archive file.
        :param str dest: Local destination path to install to. If not given,
            installs to `$CHARM_DIR/archives/archive_file_name`.
        :param str checksum: If given, validate the archive file after download.
        :param str hash_type: Algorithm used to generate `checksum`.
            Can be any hash algorithm supported by :mod:`hashlib`,
            such as md5, sha1, sha256, sha512, etc.
        :raises UnhandledSource: if the download fails.
        """
        url_parts = self.parse_url(source)
        dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched')
        if not os.path.exists(dest_dir):
            mkdir(dest_dir, perms=0o755)
        dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path))
        try:
            self.download(source, dld_file)
        except URLError as e:
            raise UnhandledSource(e.reason)
        except OSError as e:
            raise UnhandledSource(e.strerror)
        # Checksums may ride along on the URL fragment, e.g. '#sha1=...'.
        options = parse_qs(url_parts.fragment)
        for key, value in options.items():
            # hashlib.algorithms was removed in Python 3 in favour of
            # algorithms_available; support both interpreter lines.
            algorithms = getattr(hashlib, 'algorithms_available',
                                 getattr(hashlib, 'algorithms', ()))
            if key in algorithms:
                check_hash(dld_file, value, key)
        if checksum:
            check_hash(dld_file, checksum, hash_type)
        return extract(dld_file, dest)