# Copyright 2014-2015 Canonical Limited.
#
# This file is part of charm-helpers.
#
# charm-helpers is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License version 3 as
# published by the Free Software Foundation.
#
# charm-helpers is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with charm-helpers.  If not, see <http://www.gnu.org/licenses/>.
import os
import hashlib
import re
import sys

from charmhelpers.fetch import (
    BaseFetchHandler,
    UnhandledSource,
)
from charmhelpers.payload.archive import (
    get_archive_handler,
    extract,
)
from charmhelpers.core.host import mkdir, check_hash

if sys.version_info[0] >= 3:
    from urllib.request import (
        build_opener, install_opener, urlopen, urlretrieve,
        HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler,
    )
    from urllib.parse import urlparse, urlunparse, parse_qs
    from urllib.error import URLError
else:
    from urllib import urlretrieve
    from urllib2 import (
        build_opener, install_opener, urlopen,
        HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler,
        URLError,
    )
    from urlparse import urlparse, urlunparse, parse_qs
def splituser(host):
    '''urllib.splituser(), but six's support of this seems broken'''
    # Split "user[:pass]@host" into (userinfo, host). The greedy first
    # group means the LAST '@' separates credentials from the host.
    _userprog = re.compile('^(.*)@(.*)$')
    match = _userprog.match(host)
    if match:
        return match.group(1, 2)
    # No credentials present: keep the whole netloc as the host.
    return None, host
def splitpasswd(user):
    '''urllib.splitpasswd(), but six's support of this is missing'''
    # Split "user:password" on the FIRST ':' (password may itself
    # contain ':' — re.S lets '.' match newlines in odd passwords).
    _passwdprog = re.compile('^([^:]*):(.*)$', re.S)
    match = _passwdprog.match(user)
    if match:
        return match.group(1, 2)
    # No password component present.
    return user, None
class ArchiveUrlFetchHandler(BaseFetchHandler):
    """
    Handler to download archive files from arbitrary URLs.

    Can fetch from http, https, ftp, and file URLs.

    Can install either tarballs (.tar, .tgz, .tbz2, etc) or zip files.

    Installs the contents of the archive in $CHARM_DIR/fetched/.
    """
    def can_handle(self, source):
        url_parts = self.parse_url(source)
        if url_parts.scheme not in ('http', 'https', 'ftp', 'file'):
            # NOTE(review): returning a non-empty string here is truthy,
            # so bool(can_handle(...)) misreads failure as success; kept
            # as-is for backward compatibility with existing callers.
            return "Wrong source type"
        if get_archive_handler(self.base_url(source)):
            return True
        return False

    def download(self, source, dest):
        """
        Download an archive file.

        :param str source: URL pointing to an archive file.
        :param str dest: Local path location to download archive file to.
        """
        # propagate all exceptions
        # URLError, OSError, etc
        proto, netloc, path, params, query, fragment = urlparse(source)
        if proto in ('http', 'https'):
            auth, barehost = splituser(netloc)
            if auth is not None:
                # Strip the credentials out of the URL and register them
                # with an opener instead.
                source = urlunparse((proto, barehost, path, params,
                                     query, fragment))
                username, password = splitpasswd(auth)
                passman = HTTPPasswordMgrWithDefaultRealm()
                # Realm is set to None in add_password to force the
                # username and password to be used whatever the realm.
                passman.add_password(None, source, username, password)
                authhandler = HTTPBasicAuthHandler(passman)
                opener = build_opener(authhandler)
                install_opener(opener)
        response = urlopen(source)
        try:
            # 'wb': response.read() returns bytes; text mode would fail
            # on py3 and corrupt binary archives anywhere.
            with open(dest, 'wb') as dest_file:
                dest_file.write(response.read())
        except Exception as e:
            # Don't leave a partial download behind.
            if os.path.isfile(dest):
                os.unlink(dest)
            raise e

    # Mandatory file validation via Sha1 or MD5 hashing.
    def download_and_validate(self, url, hashsum, validate="sha1"):
        """Download ``url`` to a temp file, verify its hash, and return
        the temp file path.  Raises if the hash does not match."""
        tempfile, headers = urlretrieve(url)
        check_hash(tempfile, hashsum, validate)
        return tempfile

    def install(self, source, dest=None, checksum=None, hash_type='sha1'):
        """
        Download and install an archive file, with optional checksum validation.

        The checksum can also be given on the `source` URL's fragment.
        For example::

            handler.install('http://example.com/file.tgz#sha1=deadbeef')

        :param str source: URL pointing to an archive file.
        :param str dest: Local destination path to install to. If not given,
            installs to `$CHARM_DIR/archives/archive_file_name`.
        :param str checksum: If given, validate the archive file after download.
        :param str hash_type: Algorithm used to generate `checksum`.
            Can be any hash algorithm supported by :mod:`hashlib`,
            such as md5, sha1, sha256, sha512, etc.
        """
        url_parts = self.parse_url(source)
        dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched')
        if not os.path.exists(dest_dir):
            mkdir(dest_dir, perms=0o755)
        dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path))
        try:
            self.download(source, dld_file)
        except URLError as e:
            raise UnhandledSource(e.reason)
        except OSError as e:
            raise UnhandledSource(e.strerror)
        # Checksums supplied on the URL fragment, e.g. '#sha256=...'.
        options = parse_qs(url_parts.fragment)
        for key, value in options.items():
            # hashlib.algorithms only exists on py2; algorithms_available
            # is its py3 (and py2.7.9+) replacement.
            algorithms = getattr(hashlib, 'algorithms_available',
                                 getattr(hashlib, 'algorithms', ()))
            if key in algorithms:
                # NOTE(review): parse_qs yields a list here; presumably
                # check_hash accepts one — confirm against its signature.
                check_hash(dld_file, value, key)
        if checksum:
            check_hash(dld_file, checksum, hash_type)
        return extract(dld_file, dest)