import os
import urllib2
import urlparse

from charmhelpers.fetch import (
    BaseFetchHandler,
    UnhandledSource
)
from charmhelpers.payload.archive import (
    get_archive_handler,
    extract,
)
from charmhelpers.core.host import mkdir
class ArchiveUrlFetchHandler(BaseFetchHandler):
17
"""Handler for archives via generic URLs"""
18
def can_handle(self, source):
19
url_parts = self.parse_url(source)
20
if url_parts.scheme not in ('http', 'https', 'ftp', 'file'):
21
return "Wrong source type"
22
if get_archive_handler(self.base_url(source)):
26
def download(self, source, dest):
27
# propogate all exceptions
28
# URLError, OSError, etc
29
proto, netloc, path, params, query, fragment = urlparse.urlparse(source)
30
if proto in ('http', 'https'):
31
auth, barehost = urllib2.splituser(netloc)
33
source = urlparse.urlunparse((proto, barehost, path, params, query, fragment))
34
username, password = urllib2.splitpasswd(auth)
35
passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
36
# Realm is set to None in add_password to force the username and password
37
# to be used whatever the realm
38
passman.add_password(None, source, username, password)
39
authhandler = urllib2.HTTPBasicAuthHandler(passman)
40
opener = urllib2.build_opener(authhandler)
41
urllib2.install_opener(opener)
42
response = urllib2.urlopen(source)
44
with open(dest, 'w') as dest_file:
45
dest_file.write(response.read())
46
except Exception as e:
47
if os.path.isfile(dest):
51
def install(self, source):
52
url_parts = self.parse_url(source)
53
dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched')
54
if not os.path.exists(dest_dir):
55
mkdir(dest_dir, perms=0755)
56
dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path))
58
self.download(source, dld_file)
59
except urllib2.URLError as e:
60
raise UnhandledSource(e.reason)
62
raise UnhandledSource(e.strerror)
63
return extract(dld_file)