#!/usr/bin/python3
# Generate a list of autopkgtest request.cgi URLs to
# re-run all autopkgtests which regressed
# Copyright (C) 2015-2016 Canonical Ltd.
# Author: Martin Pitt <martin.pitt@ubuntu.com>

# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.

# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.

# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301
# USA

from datetime import datetime, timedelta

import argparse
import gzip
import io
import json
import lzma
import os
import re
import urllib.error
import urllib.parse
import urllib.request

import dateutil.parser
from dateutil.tz import tzutc
import yaml

request_url = 'https://autopkgtest.ubuntu.com/request.cgi'
running_url = 'https://autopkgtest.ubuntu.com/static/running.json'
queues_url = 'https://autopkgtest.ubuntu.com/queues.json'
default_series = 'hirsute'
args = None


def get_cache_dir():
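    '''Return the tool's cache directory, creating it if necessary

    This is $XDG_CACHE_HOME/ubuntu-archive-tools, falling back to
    ~/.cache/ubuntu-archive-tools.
    '''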
    cache_dir = os.environ.get('XDG_CACHE_HOME',
                               os.path.expanduser(os.path.join('~', '.cache')))
    uat_cache = os.path.join(cache_dir, 'ubuntu-archive-tools')
    os.makedirs(uat_cache, exist_ok=True)
    return uat_cache


def parse_args():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description='Generate %s URLs to re-run regressions' % request_url,
        epilog='''Typical workflow:
 - export autopkgtest.ubuntu.com session cookie into ~/.cache/autopkgtest.cookie
   Use a browser plugin or get the value from the settings and create it with
   printf "autopkgtest.ubuntu.com\\tTRUE\\t/\\tTRUE\\t0\\tsession\\tVALUE\\n" > ~/.cache/autopkgtest.cookie
   (The cookie is valid for one month)

 - retry-autopkgtest-regressions [opts...] | vipe | xargs -rn1 -P10 wget --load-cookies ~/.cache/autopkgtest.cookie -O-
   edit the URL list to pick/remove requests as desired, then close the editor to let it run
''')
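
    # Example invocation (hypothetical package): retry only the regressions
    # triggered by glibc that are at least two days old:
    #   retry-autopkgtest-regressions -s hirsute --blocks glibc --min-age 2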
    parser.add_argument('-s', '--series', default=default_series,
                        help='Ubuntu series (default: %(default)s)')
    parser.add_argument('--bileto', metavar='TICKETNUMBER',
                        help='Run for bileto ticket')
    parser.add_argument('--all-proposed', action='store_true',
                        help='run tests against all of proposed, i.e. with '
                        'apt pinning disabled')
    parser.add_argument('--state', default=[],
                        help='Generate commands for the given test state(s) '
                        '(default: REGRESSION). --state=RUNNING also enables '
                        'triggering already queued and running tests',
                        nargs='+', action='extend')
    parser.add_argument('--max-age', type=float, metavar='DAYS',
                        help='only consider candidates which are at most '
                        'this number of days old (float allowed)')
    parser.add_argument('--min-age', type=float, metavar='DAYS',
                        help='only consider candidates which are at least '
                        'this number of days old (float allowed)')
    parser.add_argument('--blocks',
                        help='rerun only those tests that were triggered '
                        'by the named package')
    parser.add_argument('--blocked-by',
                        help='rerun only those tests that are blocked by (rdeps of) '
                        'the named package')
    parser.add_argument('--no-proposed', action='store_true',
                        help='run tests against release+updates instead of '
                        'against proposed, to re-establish a baseline for the '
                        'test.  This currently only works for packages that '
                        'do not themselves have a newer version in proposed.')
    parser.add_argument('--only-unknown', action='store_true',
                        help='only include tests with version of "unknown". '
                        'This version can be returned in the case of breakage '
                        'in the base system, or some types of infrastructure '
                        'issues, and it can be helpful to mass retry these '
                        'only.')
    parser.add_argument('--log-regex', default=None,
                        help='only consider tests with logs matching the '
                        '(Python) regular expression')
    parser.add_argument('--run-autopkgtest', action='store_true',
                        help='for administrators: Generate run-autopkgtest '
                        'commands to be run on the controller host.')
    parser.add_argument('--force-cached', action='store_true',
                        help='Do not try to download files again; use the '
                        'cached version when it is present. This is useful '
                        'when triggering various subsets of tests in a row.')
    args = parser.parse_args()

    if not args.state:
        args.state = ['REGRESSION']

    return args


def get_url(url, force_cached):
    ''' Return a file-like object for the URL, caching it where possible

    Proposed-migration excuses and autopkgtest logs are cached under the
    directory from get_cache_dir(); a cached copy is refreshed when the
    server reports a newer Last-Modified timestamp.
    '''
    cache_file = None

    # only cache proposed-migration files and test logs; bileto urls are
    # usually too small to matter and we don't do proper cache expiry
    m = re.search('people.canonical.com/~ubuntu-archive/proposed-migration/'
                  '([^/]*)/([^/]*)',
                  url)
    if m:
        cache_dir = get_cache_dir()
        cache_file = os.path.join(cache_dir, '%s_%s' % (m.group(1), m.group(2)))
    else:
        # test logs can be cached, too
        m = re.search(
            'https://objectstorage.prodstack[-0-9]*.canonical.com/v1/'
            'AUTH_[a-f0-9]*/autopkgtest-[a-z]*/([^/]*)/([^/]*)'
            '/[a-z0-9]*/([^/]*)/([_a-f0-9]*)@/log.gz',
            url)
        if m:
            cache_dir = get_cache_dir()
            cache_file = os.path.join(
                cache_dir, '%s_%s_%s_%s.gz' % (
                    m.group(1), m.group(2), m.group(3), m.group(4)))

    if cache_file:
        try:
            prev_mtime = os.stat(cache_file).st_mtime
        except FileNotFoundError:
            prev_mtime = 0
        prev_timestamp = datetime.fromtimestamp(prev_mtime, tz=tzutc())
        new_timestamp = datetime.now(tz=tzutc()).timestamp()
        # only short-circuit to the cache when we actually have a copy
        if force_cached and prev_mtime:
            return open(cache_file, 'rb')

    f = urllib.request.urlopen(url)

    if cache_file:
        remote_ts = dateutil.parser.parse(f.headers['last-modified'])
        if remote_ts > prev_timestamp:
            with open('%s.new' % cache_file, 'wb') as new_cache:
                for line in f:
                    new_cache.write(line)
            os.rename('%s.new' % cache_file, cache_file)
            os.utime(cache_file, times=(new_timestamp, new_timestamp))
        f.close()
        f = open(cache_file, 'rb')
    return f


def already_triggered(release, arch, pkg, triggers, extra_params, running, queues):
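    '''Return True if an identical request is already queued or running

    A request counts as identical when the package, the trigger set, and the
    extra parameters (e.g. all-proposed) all match for this release/arch.
    '''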

    # check queues
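    # assumed shape of queues.json, based on what this code consumes:
    #   queue name -> release -> arch -> ["pkg {json params}", ...]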
    for queue_name, releases in queues.items():
        if queue_name == 'upstream':
            continue
        for queue_release, arches in releases.items():
            if queue_release != release:
                continue
            for queue_arch, queue in arches.items():
                if not queue or queue_arch != arch:
                    continue
                for raw_item in queue:
                    buf = io.StringIO(raw_item)
                    queued_pkg = buf.readline().strip()
                    item_params_raw = buf.readline()
                    if not item_params_raw:
                        # single-line item: "pkg {json params}"
                        m = re.search('([^ ^{]*) (.*)', queued_pkg)
                        queued_pkg = m.group(1)
                        item_params_raw = m.group(2)
                    item_params_json = json.loads(item_params_raw)
                    queued_triggers = item_params_json['triggers']
                    queued_extra_params = []
                    try:
                        queued_extra_params.append(('all-proposed', item_params_json['all-proposed']))
                    except KeyError:
                        pass

                    if queued_pkg == pkg \
                       and sorted(queued_triggers) == sorted(triggers) \
                       and sorted(queued_extra_params) == sorted(extra_params):
                        return True

    # check running
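    # assumed shape of running.json, based on what this code consumes:
    #   pkg -> submission id -> release -> arch -> [params, ...];
    # only the params dict (index 0) is used here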
    for running_pkg, submissions in running.items():
        for submission in submissions.values():
            for running_release, arches in submission.items():
                if running_release != release:
                    continue
                for running_arch, running_params in arches.items():
                    params = running_params[0]
                    if running_arch != arch:
                        continue
                    # upstream CI builds (systemd, ubuntu-image etc)
                    if "build-git" in params:
                        continue
                    # everything else should have a trigger
                    running_triggers = params['triggers']
                    running_extra_params = []
                    try:
                        running_extra_params.append(('all-proposed', params['all-proposed']))
                    except KeyError:
                        pass
                    if running_pkg == pkg \
                       and sorted(running_triggers) == sorted(triggers) \
                       and sorted(running_extra_params) == sorted(extra_params):
                        return True
    return False


def get_regressions(excuses_url, release, retry_states, min_age, max_age,
                    blocks, blocked_by, no_proposed, log_regex, force_cached):
    '''Return dictionary with regressions

    Return dict: release → pkg → arch → [trigger, ...]
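
    Example of the returned structure (hypothetical data):
        {'hirsute': {'glibc': {'amd64': ['glibc/2.33-0ubuntu2']}}}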
    '''

    # load YAML excuses

    try:
        f = get_url(excuses_url, force_cached)
        lzma_f = lzma.open(f)
        excuses = yaml.load(lzma_f, Loader=yaml.CSafeLoader)
        lzma_f.close()
    except urllib.error.HTTPError as e:
        if e.code == 404:
            # some versions of britney output this file uncompressed, try
            # that location too (slice the suffix off; rstrip() strips a
            # character set, not a suffix)
            f = get_url(excuses_url[:-len('.xz')], force_cached)
            excuses = yaml.load(f, Loader=yaml.CSafeLoader)
            f.close()
        else:
            raise

    regressions = {}

    for excuse in excuses['sources']:
        matches_blocks = blocks and blocks == excuse['source']
        try:
            matches_blocked_by = blocked_by and blocked_by in excuse['dependencies']['migrate-after']
        except KeyError:
            matches_blocked_by = False

        if (blocks or blocked_by) and not (matches_blocks or matches_blocked_by):
            continue

        try:
            age = excuse['policy_info']['age']['current-age']
        except KeyError:
            age = None

        # excuses are sorted by ascending age
        if min_age is not None and age is not None and age < min_age:
            continue
        if max_age is not None and age is not None and age > max_age:
            break
        for pkg, archinfo in excuse.get('policy_info', {}).get('autopkgtest', {}).items():
            try:
                pkg, pkg_ver = re.split('[ /]+', pkg, maxsplit=1)  # split off version (either / or space separated)
            # no package version, happens when we are still RUNNING
            except ValueError:
                if no_proposed:
                    continue
                pkg_ver = None
            if no_proposed:
                trigger = pkg + '/' + pkg_ver
            else:
                trigger = excuse['source'] + '/' + excuse['new-version']
            if args.only_unknown and (not pkg_ver or pkg_ver != 'unknown'):
                continue
            # there's an entry called "verdict" for the overall verdict of the
            # policy, which is at the same level as the results
            if pkg == 'verdict':
                continue
            for arch, state in archinfo.items():
                if state[0] in retry_states:
                    if log_regex and state[1].endswith(".gz"):
                        log = gzip.open(get_url(state[1], force_cached),
                                        mode='rt', errors='replace')
                        if not re.findall(log_regex, log.read(), re.MULTILINE):
                            continue
                    regressions.setdefault(release, {}).setdefault(
                        pkg, {}).setdefault(arch, []).append(trigger)

    return regressions


args = parse_args()

extra_params = []
if args.all_proposed:
    extra_params.append(('all-proposed', '1'))

if args.log_regex:
    # expire old cache
    cache_dir = get_cache_dir()
    mtime_limit = datetime.now() - timedelta(weeks=4)
    for entry in os.listdir(cache_dir):
        cache_file = os.path.join(cache_dir, entry)
        try:
            mtime = os.stat(cache_file).st_mtime
            if datetime.fromtimestamp(mtime) < mtime_limit:
                os.remove(cache_file)
        except FileNotFoundError:
            pass


if args.bileto:
    url_root = 'https://bileto.ubuntu.com'
    ticket_url = url_root + '/v2/ticket/%s' % args.bileto
    excuses_url = None
    with urllib.request.urlopen(ticket_url) as f:
        ticket = json.loads(f.read().decode('utf-8'))['tickets'][0]
    ppa_name = ticket.get('ppa', '')
    for line in ticket.get('autopkgtest', '').splitlines():
        if args.series in line:
            excuses_url = line
            break
    if excuses_url is None:
        raise SystemExit('ERROR: ticket %s has no %s autopkgtest results' %
                         (args.bileto, args.series))
    if excuses_url.startswith('/'):
        excuses_url = url_root + excuses_url
    excuses_url = excuses_url.replace('.html', '.yaml.xz')
    extra_params += [('ppa', 'ci-train-ppa-service/%s' % ppa_name)]
else:
    excuses_url = 'http://people.canonical.com/~ubuntu-archive/proposed-migration/%s/update_excuses.yaml.xz' % args.series
regressions = get_regressions(excuses_url, args.series, args.state,
                              args.min_age, args.max_age, args.blocks,
                              args.blocked_by, args.no_proposed,
                              args.log_regex, args.force_cached)


# load JSON running and queued tests
running = json.loads(
    get_url(running_url, args.force_cached).read().decode('utf-8'))
queues = json.loads(
    get_url(queues_url, args.force_cached).read().decode('utf-8'))

for release, pkgmap in regressions.items():
    for pkg, archmap in pkgmap.items():
        for arch, triggers in archmap.items():
            if 'RUNNING' not in args.state \
               and already_triggered(release, arch, pkg, triggers,
                                     extra_params, running, queues):
                continue
            if not args.run_autopkgtest:
                params = [('release', release), ('arch', arch), ('package', pkg)]
                params += [('trigger', t) for t in triggers]
                params += extra_params
                url = request_url + '?' + urllib.parse.urlencode(params)
                print(url)
            else:
                print("run-autopkgtest {allproposed}-s {release} -a {arch} {triggers} {pkg}".format(
                    allproposed='--all-proposed ' if args.all_proposed else '',
                    arch=arch,
                    pkg=pkg,
                    release=release,
                    triggers=" ".join("--trigger=%s" % t for t in triggers)))
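
# Example output (hypothetical package and trigger), one request per line:
#   https://autopkgtest.ubuntu.com/request.cgi?release=hirsute&arch=amd64&package=glibc&trigger=glibc%2F2.33-0ubuntu2
# or, with --run-autopkgtest:
#   run-autopkgtest -s hirsute -a amd64 --trigger=glibc/2.33-0ubuntu2 glibc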