"""Manage the blessed juju revision testing candidates."""

from __future__ import print_function

from argparse import ArgumentParser
import datetime
import json
import os
import shutil
import subprocess
import sys
import traceback

# NOTE(review): original lines 7-32 were lost from this chunk. They held the
# project-local imports this module references (add_credential_args,
# BUILD_REVISION, get_artifacts, get_build_data, get_credentials, JENKINS_URL,
# PUBLISH_REVISION, extract_deb, get_deb_arch, run_command, s3_cmd, temp_dir)
# and must be restored from the original file.
def get_build_parameters(build_data):
    """Return a dict of parameter name -> value from Jenkins build data.

    :param build_data: a Jenkins build-info dict; its 'actions' list may
        contain one entry keyed 'parameters', a list of
        {'name': ..., 'value': ...} dicts.
    :return: dict mapping each parameter name to its value ({} if none).
    """
    # NOTE(review): the dict init, inner loop and return were lost in this
    # chunk (original lines 34, 39, 41-44); reconstructed from the surviving
    # assignment `parameters[act['name']] = act['value']` and the caller,
    # which does `parameters['revision_build']` on the result.
    parameters = {}
    action_list = build_data['actions']
    for acts in action_list:
        for key in acts.keys():
            if key == 'parameters':
                for act in acts[key]:
                    parameters[act['name']] = act['value']
                break
    return parameters
def find_publish_revision_number(credentials, br_number, limit=20):
    """Return the publish-revision number paired with build-revision number.

    Walks backwards from the most recent successful publish-revision build
    looking for the one whose 'revision_build' parameter matches br_number.

    :param credentials: Jenkins credentials passed to get_build_data.
    :param br_number: the build-revision number to match (compared as str).
    :param limit: maximum number of builds to walk back through.
    :return: the matching publish-revision build number, or None.
    """
    # NOTE(review): the init, empty-data guard, break and return were lost in
    # this chunk (original lines 47, 52-53, 59, 61-63); reconstructed — the
    # surviving `found_number = job_number` requires both an initializer and
    # a return, and the countdown requires a loop exit on match.
    found_number = None
    job_number = 'lastSuccessfulBuild'
    for _ in range(limit):
        build_data = get_build_data(
            JENKINS_URL, credentials, PUBLISH_REVISION, build=job_number)
        if not build_data:
            return None
        # Ensure we have the real job number (an int), not an alias.
        job_number = build_data['number']
        parameters = get_build_parameters(build_data)
        if parameters['revision_build'] == str(br_number):
            found_number = job_number
            break
        job_number = job_number - 1
    return found_number
def prepare_dir(dir_path, dry_run=False, verbose=False):
    """Create or clean a directory.

    An existing directory is removed (with its contents) and recreated empty.

    :param dir_path: the directory to create or reset.
    :param dry_run: when True, report but make no filesystem changes.
    :param verbose: when True, print what is being done.
    """
    # NOTE(review): the verbose/dry_run guards, else branch and makedirs were
    # lost in this chunk (original lines 67, 69, 71-77); reconstructed to
    # match the dry_run/verbose conventions used throughout this module.
    if os.path.isdir(dir_path):
        if verbose:
            print('Cleaning %s' % dir_path)
        if not dry_run:
            shutil.rmtree(dir_path)
    elif verbose:
        print('Creating %s' % dir_path)
    if not dry_run:
        os.makedirs(dir_path)
def download_candidate_files(credentials, release_number, path, br_number,
                             pr_number=None, dry_run=False, verbose=False):
    """Download the files from the build-revision and publish-revision jobs.

    The buildvars.json for the specific build-revision number is downloaded.
    All the binary and source packages from the last successful build of
    publish revision are downloaded.

    :param credentials: Jenkins credentials.
    :param release_number: names the <release_number>-artifacts dir to fill.
    :param path: parent directory for the artifacts dir.
    :param br_number: the build-revision build number.
    :param pr_number: the publish-revision build number; looked up from
        br_number when not supplied.
    :param dry_run: when True, make no changes.
    :param verbose: when True, print progress.
    """
    artifact_dir_name = '%s-artifacts' % release_number
    candidate_dir = os.path.join(path, artifact_dir_name)
    prepare_dir(candidate_dir, dry_run, verbose)
    # NOTE(review): the two get_artifacts( call heads and the pr_number guard
    # were lost in this chunk (original lines 89, 92, 94); reconstructed from
    # the surviving argument lines — confirm against the original file.
    get_artifacts(
        credentials, BUILD_REVISION, br_number, 'buildvars.json',
        candidate_dir, dry_run=dry_run, verbose=verbose)
    if not pr_number:
        pr_number = find_publish_revision_number(credentials, br_number)
    get_artifacts(
        credentials, PUBLISH_REVISION, pr_number, 'juju-core*', candidate_dir,
        dry_run=dry_run, verbose=verbose)
def get_artifact_dirs(path):
    """List the directory names under path that contain artifacts.

    Only entries named '*-artifacts' that are real directories qualify.

    :param path: the directory to scan.
    :return: list of matching directory names (not full paths).
    """
    # NOTE(review): the accumulator, append and return were lost in this
    # chunk (original lines 101, 105-107); reconstructed — callers iterate
    # the result and re-join it with path, so names are returned.
    dirs = []
    for name in os.listdir(path):
        artifacts_path = os.path.join(path, name)
        if name.endswith('-artifacts') and os.path.isdir(artifacts_path):
            dirs.append(name)
    return dirs
def get_package(artifacts_path, version):
    """Return the path to the expected juju-core package for the localhost.

    :param artifacts_path: directory holding the downloaded .deb packages.
    :param version: the juju version baked into the package file name.
    :return: the full path of the package matching this host's Ubuntu
        release and architecture.
    """
    # decode() keeps the format() result a str on Python 3, where
    # check_output returns bytes; it is a no-op change on Python 2.
    release = subprocess.check_output(
        ['lsb_release', '-sr']).strip().decode('utf-8')
    arch = get_deb_arch()
    package_name = 'juju-core_{}-0ubuntu1~{}.1~juju1_{}.deb'.format(
        version, release, arch)
    package_path = os.path.join(artifacts_path, package_name)
    # NOTE(review): original lines 116-118 were lost from this chunk; they
    # may have validated the path. Callers use the result as a path, so at
    # minimum the return is required.
    return package_path
def extract_candidates(path, dry_run=False, verbose=False):
    """Extract all the candidate juju binaries for the local machine.

    Each candidate will be extracted to a directory named after the version
    the artifacts (packages) were made from. Thus the package that matches
    the localhost's series and architecture in the master-artifacts/ directory
    will be extracted to a sibling directory named "master/" The buildvars.json
    data will be copied to the top of "master" to provide information about
    the origin of the binaries.

    :param path: the directory containing the *-artifacts dirs.
    :param dry_run: when True, make no changes.
    :param verbose: when True, print progress.
    """
    for dir_name in get_artifact_dirs(path):
        artifacts_path = os.path.join(path, dir_name)
        buildvars_path = os.path.join(artifacts_path, 'buildvars.json')
        with open(buildvars_path) as bf:
            buildvars = json.load(bf)
        version = buildvars['version']
        package_path = get_package(artifacts_path, version)
        candidate_path = os.path.join(path, version)
        # NOTE(review): the verbose/dry_run guard lines were lost in this
        # chunk (original lines 137, 140, 142, 144); reconstructed to match
        # the module's dry_run/verbose conventions — confirm against the
        # original file.
        if verbose:
            print('extracting %s to %s' % (package_path, candidate_path))
        prepare_dir(candidate_path, dry_run, verbose)
        if not dry_run:
            extract_deb(package_path, candidate_path)
        if verbose:
            print('Copying %s to %s' % (buildvars_path, candidate_path))
        if not dry_run:
            new_path = os.path.join(candidate_path, 'buildvars.json')
            shutil.copyfile(buildvars_path, new_path)
            shutil.copystat(buildvars_path, new_path)
def get_scripts(juju_release_tools=None):
    """Return a tuple of paths to the assemble and publish scripts.

    :param juju_release_tools: optional directory holding the scripts; when
        given, the returned paths are joined under it, otherwise the bare
        script names are returned.
    :return: (assemble_script, publish_script) paths.
    """
    assemble_script = 'assemble-streams.bash'
    publish_script = 'publish-public-tools.bash'
    if juju_release_tools:
        assemble_script = os.path.join(
            juju_release_tools, assemble_script)
        publish_script = os.path.join(
            juju_release_tools, publish_script)
    return assemble_script, publish_script
def publish_candidates(path, streams_path,
                       juju_release_tools=None, dry_run=False, verbose=False):
    """Assemble and publish weekly streams from the candidates.

    Gathers every package from the *-artifacts dirs into a temp dir, runs
    the assemble script over them, publishes the streams, syncs the saved
    buildvars.json records to S3, then extracts the candidates locally.

    :param path: the directory containing the *-artifacts dirs.
    :param streams_path: the streams data dir passed to the scripts.
    :param juju_release_tools: optional dir holding the release scripts.
    :param dry_run: when True, make no changes.
    :param verbose: when True, print progress.
    """
    timestamp = datetime.datetime.utcnow().strftime('%Y_%m_%dT%H_%M_%S')
    with temp_dir() as debs_path:
        for dir_name in get_artifact_dirs(path):
            artifacts_path = os.path.join(path, dir_name)
            branch_name = dir_name.split('-')[0]
            for deb_name in os.listdir(artifacts_path):
                deb_path = os.path.join(artifacts_path, deb_name)
                # NOTE(review): original line 172 was lost; reconstructed as
                # the module's usual verbose guard around the print.
                if verbose:
                    print('Copying %s' % deb_path)
                new_path = os.path.join(debs_path, deb_name)
                shutil.copyfile(deb_path, new_path)
                if deb_name == 'buildvars.json':
                    # buildvars.json is also in the artifacts_path; copied by
                    # download_candidate_files(). Set it aside so it can be
                    # sync'd to S3 as a record of what was published.
                    buildvar_dir = '{}/weekly/{}/{}'.format(
                        path, timestamp, branch_name)
                    if not os.path.isdir(buildvar_dir):
                        os.makedirs(buildvar_dir)
                    buildvar_path = '{}/{}'.format(buildvar_dir, deb_name)
                    shutil.copyfile(deb_path, buildvar_path)
        assemble_script, publish_script = get_scripts(juju_release_tools)
        # XXX sinzui 2014-12-01: IGNORE uses the local juju, but when
        # testing juju's that change generate-tools, we may need to use
        # the highest version.
        # NOTE(review): the command list head/tail were lost in this chunk
        # (original lines 190, 192); reconstructed from the surviving
        # argument line and run_command call.
        command = [
            assemble_script, '-t', debs_path, 'weekly', 'IGNORE',
            streams_path]
        run_command(command, dry_run=dry_run, verbose=verbose)
    publish(streams_path, publish_script, dry_run=dry_run, verbose=verbose)
    # Sync buildvars.json files out to s3.
    url = 's3://juju-qa-data/juju-releases/weekly/'
    s3_path = '{}/weekly/{}'.format(path, timestamp)
    # NOTE(review): original lines 198 and 200 were lost; reconstructed as
    # verbose/dry_run guards per the module convention — confirm.
    if verbose:
        print('Calling s3cmd to sync %s out to %s' % (s3_path, url))
    if not dry_run:
        s3_cmd(['sync', s3_path, url])
    extract_candidates(path, dry_run=dry_run, verbose=verbose)
def publish(streams_path, publish_script, dry_run=False, verbose=False):
    """Run the publish script, retrying up to three times.

    :param streams_path: dir containing the juju-dist tree to publish.
    :param publish_script: path to publish-public-tools.bash.
    :param dry_run: when True, make no changes.
    :param verbose: when True, print progress.
    :raises subprocess.CalledProcessError: when the third attempt fails.
    """
    juju_dist_path = os.path.join(streams_path, 'juju-dist')
    command = [publish_script, 'weekly', juju_dist_path, 'cpc']
    # NOTE(review): the try/break and the retry tail were lost in this chunk
    # (original lines 209, 211, 214-217); reconstructed from the surviving
    # for/except skeleton and the comment below.
    for attempt in range(3):
        try:
            run_command(command, dry_run=dry_run, verbose=verbose)
            break
        except subprocess.CalledProcessError:
            # Raise an error when the third attempt fails; the cloud is ill.
            if attempt == 2:
                raise
def parse_args(args=None):
    """Return the parsed args and credentials for this program.

    :param args: argv-style list, or None to use sys.argv.
    :return: (parsed_args, credentials) tuple.
    """
    parser = ArgumentParser("Manage the successful Juju CI candidates.")
    # NOTE(review): the add_argument( call heads and the -p/--pr-number and
    # 'extract' name lines were lost in this chunk (original lines 221, 224,
    # 235, 244); reconstructed from the surviving option strings and from
    # main()'s use of args.pr_number — confirm against the original file.
    parser.add_argument(
        '-d', '--dry-run', action='store_true', default=False,
        help='Do not make changes.')
    parser.add_argument(
        '-v', '--verbose', action='store_true', default=False,
        help='Increase verbosity.')
    subparsers = parser.add_subparsers(help='sub-command help', dest="command")
    # ./candidate download -b 1234 master ~/candidate
    parser_update = subparsers.add_parser(
        'download', help='download a candidate')
    parser_update.add_argument(
        '-b', '--br-number', default='lastSuccessfulBuild',
        help="The specific build-revision number.")
    parser_update.add_argument(
        '-p', '--pr-number', default=None,
        help="The specific publish-revision-revision number.")
    parser_update.add_argument(
        'release_number', help='The successfully test branch release number.')
    parser_update.add_argument(
        'path', help='The path to save the candiate data to.')
    add_credential_args(parser_update)
    # ./candidate extract master ~/candidate
    parser_extract = subparsers.add_parser(
        'extract',
        help='extract candidates that match the local series and arch.')
    parser_extract.add_argument(
        'path', help='The path to the candiate data dir.')
    # ./candidate --juju-release-tools $JUJU_RELEASE_TOOLS \
    #   publish ~/candidate ~/streams
    parser_publish = subparsers.add_parser(
        'publish', help='Publish streams for the candidates')
    parser_publish.add_argument(
        '-t', '--juju-release-tools',
        help='The path to the juju-release-tools dir.')
    parser_publish.add_argument(
        'path', help='The path to the candiate data dir.')
    parser_publish.add_argument(
        'streams_path', help='The path to the streams data dir.')
    parsed_args = parser.parse_args(args)
    return parsed_args, get_credentials(parsed_args)
def main(argv=None):
    """Manage successful Juju CI candiates.

    :param argv: argv-style argument list (without the program name).
    :return: process exit code (0 on success, 2 on error).
    """
    # NOTE(review): the def line, try:, two call heads, and the error/return
    # tail were lost in this chunk (original lines 263, 266, 272, 275,
    # 280-281, 283-288); reconstructed from the surviving dispatch and the
    # traceback.print_tb line — confirm against the original file.
    args, credentials = parse_args(argv)
    try:
        if args.command == 'download':
            download_candidate_files(
                credentials, args.release_number, args.path, args.br_number,
                args.pr_number, dry_run=args.dry_run, verbose=args.verbose)
        elif args.command == 'extract':
            extract_candidates(
                args.path, dry_run=args.dry_run, verbose=args.verbose)
        elif args.command == 'publish':
            publish_candidates(
                args.path, args.streams_path,
                juju_release_tools=args.juju_release_tools,
                dry_run=args.dry_run, verbose=args.verbose)
    except Exception as e:
        print(e)
        if args.verbose:
            traceback.print_tb(sys.exc_info()[2])
        return 2
    return 0
# Script entry point: run main() with the CLI args and exit with its code.
if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))