# Copyright 2014-2015 Canonical Limited.
#
# This file is part of charm-helpers.
#
# charm-helpers is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License version 3 as
# published by the Free Software Foundation.
#
# charm-helpers is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with charm-helpers.  If not, see <http://www.gnu.org/licenses/>.
3
import os
import hashlib
import re

from charmhelpers.fetch import (
    BaseFetchHandler,
    UnhandledSource,
)
from charmhelpers.core.host import mkdir, check_hash

try:
    # Python 3
    from urllib.request import (
        build_opener, install_opener, urlopen, urlretrieve,
        HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler,
    )
    from urllib.parse import urlparse, urlunparse, parse_qs
    from urllib.error import URLError
except ImportError:
    # Python 2
    from urllib import urlretrieve
    from urllib2 import (
        build_opener, install_opener, urlopen,
        HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler,
        URLError,
    )
    from urlparse import urlparse, urlunparse, parse_qs
50
def splituser(host):
    '''urllib.splituser(), but six's support of this seems broken'''
    # Greedy first group: 'a@b@c' splits as ('a@b', 'c'), matching the
    # stdlib splituser() behaviour.
    _userprog = re.compile('^(.*)@(.*)$')
    match = _userprog.match(host)
    if match:
        return match.group(1, 2)
    # BUG FIX: the visible code called match.group() unconditionally,
    # which raises AttributeError when the netloc has no '@'.  Return
    # (no-auth, host) instead, like the stdlib helper.
    return None, host
58
def splitpasswd(user):
    '''urllib.splitpasswd(), but six's support of this is missing'''
    # Non-greedy on the username side ([^:]*) so 'u:p:q' splits as
    # ('u', 'p:q'); re.S lets the password contain newlines.
    _passwdprog = re.compile('^([^:]*):(.*)$', re.S)
    match = _passwdprog.match(user)
    if match:
        return match.group(1, 2)
    # BUG FIX: the visible code called match.group() unconditionally,
    # which raises AttributeError when there is no ':' separator.
    # Return (user, no-password) instead, like the stdlib helper.
    return user, None
18
67
class ArchiveUrlFetchHandler(BaseFetchHandler):
    """Handler for fetching archives over plain http/https URLs."""

    def download(self, source, dest):
        """Download the resource at ``source`` to the local path ``dest``.

        If the URL's netloc embeds ``user:password@`` credentials, they
        are stripped from the URL and installed as an HTTP basic-auth
        opener before the request is made.

        All exceptions are propagated to the caller.
        """
        # propogate all exceptions
        # URLError, OSError, etc
        proto, netloc, path, params, query, fragment = urlparse(source)
        if proto in ('http', 'https'):
            auth, barehost = splituser(netloc)
            if auth is not None:
                # Rebuild the URL without the embedded credentials.
                source = urlunparse((proto, barehost, path, params,
                                     query, fragment))
                username, password = splitpasswd(auth)
                passman = HTTPPasswordMgrWithDefaultRealm()
                # Realm is set to None in add_password to force the
                # username and password to be used whatever the realm.
                passman.add_password(None, source, username, password)
                authhandler = HTTPBasicAuthHandler(passman)
                opener = build_opener(authhandler)
                install_opener(opener)
        response = urlopen(source)
        # BUG FIX: the response body is bytes (an archive); the visible
        # code opened dest in text mode ('w'), which corrupts the data on
        # Python 2 (newline translation on Windows) and raises TypeError
        # on Python 3.  Write in binary mode.
        with open(dest, 'wb') as dest_file:
            dest_file.write(response.read())
91
140
url_parts = self.parse_url(source)
92
141
dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched')
93
142
if not os.path.exists(dest_dir):
94
mkdir(dest_dir, perms=0755)
143
mkdir(dest_dir, perms=0o755)
95
144
dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path))
97
146
self.download(source, dld_file)
98
except urllib2.URLError as e:
147
except URLError as e:
99
148
raise UnhandledSource(e.reason)
100
149
except OSError as e:
101
150
raise UnhandledSource(e.strerror)
102
options = urlparse.parse_qs(url_parts.fragment)
151
options = parse_qs(url_parts.fragment)
103
152
for key, value in options.items():
104
if key in hashlib.algorithms:
154
algorithms = hashlib.algorithms
156
algorithms = hashlib.algorithms_available
157
if key in algorithms:
105
158
check_hash(dld_file, value, key)
107
160
check_hash(dld_file, checksum, hash_type)