~nskaggs/juju-ci-tools/add-essential-operations

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
#!/usr/bin/env python
"""Manage pip dependencies for juju qa using a cache in S3."""

from __future__ import print_function

import argparse
import os
import platform
import subprocess
import sys

import boto.s3.connection
import boto.s3.key

import utility


BUCKET = "juju-pip-archives"
PREFIX = "juju-ci-tools/"
REQUIREMENTS = os.path.join(os.path.realpath(os.path.dirname(__file__)),
                            "requirements.txt")
MAC_WIN_REQS = os.path.join(os.path.realpath(os.path.dirname(__file__)),
                            "mac-win-requirements.txt")
OBSOLETE = os.path.join(os.path.realpath(os.path.dirname(__file__)),
                        "obsolete-requirements.txt")


def get_requirements():
    """Return the path of the requirements file for this platform.

    Ubuntu and Debian hosts get the full requirements; everything else
    (MacOS, Windows) gets the reduced mac-win list.
    """
    # platform.dist() was deprecated in Python 3.5 and removed in 3.8,
    # so guard the lookup instead of calling it unconditionally.
    dist = getattr(platform, "dist", None)
    if dist is not None:
        distro = dist()[0]
    elif sys.platform.startswith("linux"):
        # No platform.dist() on this interpreter; assume a Debian-family
        # CI host when on Linux. NOTE(review): confirm against any
        # non-Debian Linux slaves before relying on this fallback.
        distro = "Ubuntu"
    else:
        distro = ""
    if distro in ('Ubuntu', 'debian'):
        return REQUIREMENTS
    else:
        return MAC_WIN_REQS


def s3_anon():
    """Return an S3 connection that needs no credentials."""
    connection = boto.s3.connection.S3Connection(anon=True)
    return connection


def s3_auth_with_rc(cloud_city):
    """Gives authenticated S3 connection using cloud-city credentials.

    Reads AWS_ACCESS_KEY and AWS_SECRET_KEY from the ec2rc file in the
    cloud_city directory; keys not present remain None.
    """
    access_key = secret_key = None
    with open(os.path.join(cloud_city, "ec2rc")) as rc:
        for line in rc:
            # partition never raises on a missing "=" (unlike indexing
            # split()'s result), so malformed lines are skipped safely.
            name, sep, value = line.rstrip().partition("=")
            if not sep:
                continue
            if name == "AWS_ACCESS_KEY":
                access_key = value
            elif name == "AWS_SECRET_KEY":
                secret_key = value
    return boto.s3.connection.S3Connection(access_key, secret_key)


def run_pip_install(extra_args, requirements, verbose=False):
    """Run pip install in a subprocess with given additional arguments."""
    # Quiet flag goes before the subcommand; extra args come last.
    quiet = [] if verbose else ["-q"]
    cmd = ["pip"] + quiet + ["install", "-r", requirements] + list(extra_args)
    subprocess.check_call(cmd)


def run_pip_uninstall(obsolete_requirements):
    """Run pip uninstall for each package version in obsolete_requirements.

    pip uninstall the package without regard to its version. In most cases,
    calling install with a new package version implicitly upgrades.
    There are only a few package version that cannot by upgraded, they must
    be removed before install. This function uninstalls packages only when
    their version matches the obsolete.

    The obsolete_requirements entries must match the output of pip list. eg:
        azure (0.8.0)
        bibbel (1.2.3)
    """
    pip_cmd = ['pip']
    list_cmd = pip_cmd + ['list']
    # check_output returns bytes on Python 3; decode so the intersection
    # with the str lines of the obsolete file is not silently empty.
    output = subprocess.check_output(list_cmd)
    if not isinstance(output, str):
        output = output.decode('utf-8')
    installed_packages = set(output.splitlines())
    with open(obsolete_requirements, 'r') as o_file:
        obsolete_packages = o_file.read().splitlines()
    removable = installed_packages.intersection(obsolete_packages)
    for package_version in removable:
        # Only the name is passed to uninstall; the version part merely
        # gated membership in removable above.
        package, _version = package_version.split()
        uninstall_cmd = pip_cmd + ['uninstall', '-y', package]
        subprocess.check_call(uninstall_cmd)


def command_install(bucket, requirements, verbose=False):
    """Fetch cached archives from S3 and pip install from them offline."""
    with utility.temp_dir() as archives_dir:
        # Mirror every cached archive into the temporary directory.
        for key in bucket.list(prefix=PREFIX):
            name = key.name[len(PREFIX):]
            key.get_contents_to_filename(os.path.join(archives_dir, name))
        run_pip_uninstall(OBSOLETE)
        run_pip_install(
            ["--user", "--no-index", "--find-links",
             "file://" + archives_dir],
            requirements, verbose=verbose)


def command_update(s3, requirements, verbose=False):
    """Download current deps from PyPI and upload them to the S3 cache."""
    bucket = s3.lookup(BUCKET)
    if bucket is None:
        if verbose:
            print("Creating bucket {}".format(BUCKET))
        bucket = s3.create_bucket(BUCKET, policy="public-read")
    with utility.temp_dir() as archives_dir:
        run_pip_install(
            ["--download", archives_dir], requirements, verbose=verbose)
        # Push each downloaded archive under the shared key prefix,
        # world-readable so anonymous installs work.
        for name in os.listdir(archives_dir):
            key = boto.s3.key.Key(bucket)
            key.key = PREFIX + name
            key.set_contents_from_filename(
                os.path.join(archives_dir, name), policy="public-read")


def command_list(bucket, verbose=False):
    """Print the archive names currently cached under PREFIX in S3."""
    prefix_len = len(PREFIX)
    for key in bucket.list(prefix=PREFIX):
        print(key.name[prefix_len:])


def command_delete(bucket, verbose=False):
    """Remove every cached archive under PREFIX from the S3 bucket."""
    for archive_key in bucket.list(prefix=PREFIX):
        if verbose:
            print("Deleting {}".format(archive_key.name))
        archive_key.delete()


def get_parser(argv0):
    """Return parser for program arguments."""
    parser = argparse.ArgumentParser(
        prog=argv0, description="Manage pip dependencies")
    parser.add_argument(
        "-v", "--verbose", action="store_true", help="Show more output.")
    parser.add_argument(
        "--cloud-city", default="~/cloud-city", type=os.path.expanduser,
        help="Location of cloud-city repository for credentials.")
    parser.add_argument(
        "--requirements", default=get_requirements(), type=os.path.expanduser,
        help="Location requirements file to use.")
    subparsers = parser.add_subparsers(dest="command")
    # Subcommands became optional on Python 3.3+; require one so a bare
    # invocation errors out (the Python 2 behavior) instead of letting
    # main() fall through with args.command == None.
    subparsers.required = True
    subparsers.add_parser("install", help="Download deps from S3 and install.")
    subparsers.add_parser(
        "update", help="Get latest deps from PyPI and upload to S3.")
    subparsers.add_parser("list", help="Show packages currently in S3.")
    subparsers.add_parser("delete", help="Delete packages currently in S3.")
    return parser


def main(argv):
    """Parse arguments, build an S3 connection, and run the command."""
    parser = get_parser(argv[0])
    args = parser.parse_args(argv[1:])
    have_creds = os.path.isdir(args.cloud_city)
    # Mutating commands need real credentials; bail out early without them.
    if args.command in ("update", "delete") and not have_creds:
        parser.error("Need cloud-city credentials to modify S3 cache.")
    if have_creds:
        s3 = s3_auth_with_rc(args.cloud_city)
    else:
        s3 = s3_anon()
    if args.command == "update":
        command_update(s3, args.requirements, args.verbose)
    else:
        bucket = s3.get_bucket(BUCKET)
        if args.command == "install":
            command_install(bucket, args.requirements, args.verbose)
        elif args.command == "list":
            command_list(bucket, args.verbose)
        elif args.command == "delete":
            command_delete(bucket, args.verbose)
    return 0


# Script entry point: propagate main's return value as the exit status.
if __name__ == "__main__":
    sys.exit(main(sys.argv))