import os
import hashlib
import re

from urllib.request import (
    build_opener, install_opener, urlopen, urlretrieve,
    HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler,
)
from urllib.parse import urlparse, urlunparse, parse_qs
from urllib.error import URLError

from charmhelpers.fetch import (
    BaseFetchHandler,
    UnhandledSource,
)
from charmhelpers.core.host import mkdir, check_hash
def splituser(host):
    '''urllib.splituser(), but six's support of this seems broken'''
    _userprog = re.compile('^(.*)@(.*)$')
    match = _userprog.match(host)
    if match:
        # Greedy first group: 'a@b@c' splits at the LAST '@', matching
        # the legacy urllib.splituser() behavior.
        return match.group(1, 2)
    # No credentials present: legacy urllib returns (None, host).
    return None, host
def splitpasswd(user):
    '''urllib.splitpasswd(), but six's support of this is missing'''
    # re.S so a password containing a newline is still captured by '.'.
    _passwdprog = re.compile('^([^:]*):(.*)$', re.S)
    match = _passwdprog.match(user)
    if match:
        # Non-greedy-by-construction first group: split at the FIRST ':',
        # so 'u:p:q' -> ('u', 'p:q'), matching legacy urllib.splitpasswd().
        return match.group(1, 2)
    # No ':' present: legacy urllib returns (user, None).
    return user, None
class ArchiveUrlFetchHandler(BaseFetchHandler):
    """
    Handler to download archive files from arbitrary URLs.
    """

    def download(self, source, dest):
        """Download the archive at ``source`` to the local path ``dest``.

        If the URL carries ``user:password`` credentials in its netloc,
        they are stripped from the URL and installed as an HTTP basic-auth
        handler instead.

        :param source: URL to fetch (http/https, or any urlopen-able scheme).
        :param dest: local filesystem path to write the downloaded bytes to.
        :raises: URLError, OSError, etc. — deliberately not caught here.
        """
        # propagate all exceptions
        # URLError, OSError, etc
        proto, netloc, path, params, query, fragment = urlparse(source)
        if proto in ('http', 'https'):
            auth, barehost = splituser(netloc)
            if auth is not None:
                # Rebuild the URL without the embedded credentials and
                # register them with an auth handler instead.
                source = urlunparse((proto, barehost, path, params, query, fragment))
                username, password = splitpasswd(auth)
                passman = HTTPPasswordMgrWithDefaultRealm()
                # Realm is set to None in add_password to force the username and password
                # to be used whatever the realm
                passman.add_password(None, source, username, password)
                authhandler = HTTPBasicAuthHandler(passman)
                opener = build_opener(authhandler)
                install_opener(opener)
        response = urlopen(source)
        # 'wb' is required on Python 3: response.read() returns bytes and
        # writing bytes to a text-mode file raises TypeError.
        with open(dest, 'wb') as dest_file:
            dest_file.write(response.read())

    def install(self, source, dest=None, checksum=None, hash_type='sha1'):
        """Download ``source`` into $CHARM_DIR/fetched and verify its hash.

        Hash checks may be requested two ways: via the URL fragment
        (e.g. ``...#sha256=deadbeef``) and/or via the explicit ``checksum``
        / ``hash_type`` arguments.

        NOTE(review): the signature was reconstructed from the names the
        body uses (source, checksum, hash_type) — confirm against callers.

        :raises UnhandledSource: when the download fails (URLError/OSError).
        """
        url_parts = self.parse_url(source)
        dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched')
        if not os.path.exists(dest_dir):
            mkdir(dest_dir, perms=0o755)
        dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path))
        try:
            self.download(source, dld_file)
        except URLError as e:
            raise UnhandledSource(e.reason)
        except OSError as e:
            raise UnhandledSource(e.strerror)
        # Hash options arrive in the URL fragment, e.g. '#sha256=deadbeef'.
        options = parse_qs(url_parts.fragment)
        for key, value in options.items():
            # hashlib.algorithms was removed in Python 3;
            # algorithms_available is the Python 3 equivalent (a superset).
            algorithms = hashlib.algorithms_available
            if key in algorithms:
                check_hash(dld_file, value, key)
        if checksum:
            check_hash(dld_file, checksum, hash_type)