#!/usr/bin/python3
# Generate a list of autopkgtest request.cgi URLs to
# re-run all autopkgtests which regressed
# Copyright (C) 2015-2016 Canonical Ltd.
# Author: Martin Pitt <martin.pitt@ubuntu.com>

# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.

# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.

# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301
# USA

import argparse
import json
import os
import re
import urllib.parse
import urllib.request
from datetime import datetime

import dateutil.parser
import yaml
from dateutil.tz import tzutc

request_url = 'https://autopkgtest.ubuntu.com/request.cgi'
default_series = 'bionic'
args = None


def get_cache_dir():
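    '''Return the ubuntu-archive-tools cache directory, creating it if needed.

    Honours $XDG_CACHE_HOME and falls back to ~/.cache.
    '''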
    cache_dir = os.environ.get('XDG_CACHE_HOME',
                               os.path.expanduser(os.path.join('~', '.cache')))
    uat_cache = os.path.join(cache_dir, 'ubuntu-archive-tools')
    os.makedirs(uat_cache, exist_ok=True)
    return uat_cache


def parse_args():
    parser = argparse.ArgumentParser(
        'Generate %s URLs to re-run regressions' % request_url,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description='''Typical workflow:
 - export autopkgtest.ubuntu.com session cookie into ~/.cache/autopkgtest.cookie
   Use a browser plugin or get the value from the settings and create it with
   printf "autopkgtest.ubuntu.com\\tTRUE\\t/\\tTRUE\\t0\\tsession\\tVALUE\\n" > ~/.cache/autopkgtest.cookie
   (The cookie is valid for one month)

 - retry-autopkgtest-regressions [opts...] | vipe | xargs -rn1 -P10 wget --load-cookies ~/.cache/autopkgtest.cookie -O-
   edit URL list to pick/remove requests as desired, then close editor to let it run
''')
    parser.add_argument('-s', '--series', default=default_series,
                        help='Ubuntu series (default: %(default)s)')
    parser.add_argument('--bileto', metavar='TICKETNUMBER',
                        help='Run for bileto ticket')
    parser.add_argument('--all-proposed', action='store_true',
                        help='run tests against all of proposed, i.e. with '
                        'apt pinning disabled')
    parser.add_argument('--state', default='REGRESSION',
                        help='generate commands for given test state (default: %(default)s)')
    parser.add_argument('--max-age', type=float, metavar='DAYS',
                        help='only consider candidates which are at most '
                        'this number of days old (float allowed)')
    parser.add_argument('--min-age', type=float, metavar='DAYS',
                        help='only consider candidates which are at least '
                        'this number of days old (float allowed)')
    parser.add_argument('--blocks',
                        help='rerun only those tests that were triggered '
                        'by the named package')
    parser.add_argument('--no-proposed', action='store_true',
                        help='run tests against release+updates instead of '
                        'against proposed, to re-establish a baseline for the '
                        'test.  This currently only works for packages that '
                        'do not themselves have a newer version in proposed.')
    args = parser.parse_args()

    return args


def get_regressions(excuses_url, release, retry_state, min_age, max_age,
                    blocks, no_proposed):
    '''Return dictionary with regressions

    Return dict: release → pkg → arch → [trigger, ...]
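
    An illustrative (hypothetical) example:
        {'bionic': {'systemd': {'amd64': ['glibc/2.27-0ubuntu2']}}}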
    '''
    cache_file = None

    # load YAML excuses

    # ignore bileto URLs for caching purposes: they're usually too small to
    # matter, and we don't do proper cache expiry anyway
    m = re.search('people.canonical.com/~ubuntu-archive/proposed-migration/'
                  '([^/]*)/([^/]*)',
                  excuses_url)
    if m:
        cache_dir = get_cache_dir()
        cache_file = os.path.join(cache_dir, '%s_%s' % (m.group(1), m.group(2)))
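        # the cache key is '<series>_<file>', e.g. 'bionic_update_excuses.yaml'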
        try:
            prev_mtime = os.stat(cache_file).st_mtime
        except FileNotFoundError:
            prev_mtime = 0
        prev_timestamp = datetime.fromtimestamp(prev_mtime, tz=tzutc())
        new_timestamp = datetime.now(tz=tzutc()).timestamp()

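    # fetch the excuses file; when a cache exists, re-download only if the
    # server's copy (Last-Modified) is newer than the cached one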
    f = urllib.request.urlopen(excuses_url)
    if cache_file:
        remote_ts = dateutil.parser.parse(f.headers['last-modified'])
        if remote_ts > prev_timestamp:
            with open('%s.new' % cache_file, 'wb') as new_cache:
                for line in f:
                    new_cache.write(line)
            os.rename('%s.new' % cache_file, cache_file)
            os.utime(cache_file, times=(new_timestamp, new_timestamp))
        f.close()
        f = open(cache_file, 'rb')

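    # update_excuses.yaml can be large; the libyaml-backed CSafeLoader parses
    # it much faster than the pure-Python SafeLoader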
    excuses = yaml.load(f, Loader=yaml.CSafeLoader)
    f.close()
    regressions = {}
    for excuse in excuses['sources']:
        if blocks and blocks != excuse['source']:
            continue
        try:
            age = excuse['policy_info']['age']['current-age']
        except KeyError:
            age = None

        # excuses are sorted by ascending age
        if min_age is not None and age is not None and age < min_age:
            continue
        if max_age is not None and age is not None and age > max_age:
            break
        for pkg, archinfo in excuse.get('policy_info', {}).get('autopkgtest', {}).items():
            try:
                # split off the version (separated by either '/' or a space)
                pkg, pkg_ver = re.split('[ /]+', pkg, maxsplit=1)
            except ValueError:
                # no separator found, so the package version is unknown
                pass
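            # the trigger names the package/version whose migration caused
            # this test run; with --no-proposed we use the tested package's
            # own version instead, to re-establish a baseline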
            if no_proposed:
                trigger = pkg + '/' + pkg_ver
            else:
                trigger = excuse['source'] + '/' + excuse['new-version']
            for arch, state in archinfo.items():
                if state[0] == retry_state:
                    regressions.setdefault(release, {}).setdefault(
                        pkg, {}).setdefault(arch, []).append(trigger)

    return regressions


args = parse_args()

extra_params = []
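# 'all-proposed=1' asks the test infrastructure to disable apt pinning, so the
# tests run against everything in -proposed rather than just the triggers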
if args.all_proposed:
    extra_params.append(('all-proposed', '1'))

if args.bileto:
    url_root = 'https://bileto.ubuntu.com'
    ticket_url = url_root + '/v2/ticket/%s' % args.bileto
    excuses_url = None
    with urllib.request.urlopen(ticket_url) as f:
        ticket = json.loads(f.read().decode('utf-8'))['tickets'][0]
    ppa_name = ticket.get('ppa', '')
    for line in ticket.get('autopkgtest', '').splitlines():
        if args.series in line:
            excuses_url = line
            break
    if excuses_url is None:
        raise SystemExit('ERROR: no %s autopkgtest results in ticket %s' %
                         (args.series, args.bileto))
    if excuses_url.startswith('/'):
        excuses_url = url_root + excuses_url
    excuses_url = excuses_url.replace('.html', '.yaml')
    extra_params += [('ppa', 'ci-train-ppa-service/stable-phone-overlay'),
                     ('ppa', 'ci-train-ppa-service/%s' % ppa_name)]
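    # bileto tests run against the ticket's own PPA, layered on top of the
    # stable phone overlay PPA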
else:
    excuses_url = 'http://people.canonical.com/~ubuntu-archive/proposed-migration/%s/update_excuses.yaml' % args.series
regressions = get_regressions(excuses_url, args.series, args.state,
                              args.min_age, args.max_age, args.blocks,
                              args.no_proposed)

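# emit one request.cgi URL per (release, package, arch), batching all triggers
# for that combination into a single test request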
for release, pkgmap in regressions.items():
    for pkg, archmap in pkgmap.items():
        for arch, triggers in archmap.items():
            params = [('release', release), ('arch', arch), ('package', pkg)]
            params += [('trigger', t) for t in triggers]
            params += extra_params
            url = request_url + '?' + urllib.parse.urlencode(params)
            print(url)
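
# Example of an emitted URL (hypothetical package and trigger):
# https://autopkgtest.ubuntu.com/request.cgi?release=bionic&arch=amd64&package=systemd&trigger=glibc%2F2.27-0ubuntu2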