~ubuntu-archive/ubuntu-archive-scripts/trunk

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
#!/usr/bin/python3

# This script highlights packages that are being kept out of Debian
# testing by release critical bugs whose removal from the Ubuntu devel
# series would aid the migration of other packages out of proposed.
#
# The packages that are being kept out of Debian testing by release
# critical bugs can be found fairly easily by looking at the output
# from Debian's britney runs. We do this first (and call these
# "rc-gone packages").
#
# Such packages can inhibit the migration of other packages in two
# ways:
#
# 1) autopkgtest regressions
# 2) by becoming uninstallable
#
# The first is fairly easy to find: scan through Ubuntu's excuses.yaml
# and look for the rc-gone package in the list of autopkgtest
# regressions for any package.
#
# The second is a bit more mind-bending to detect. If the package is
# caught up in a transition, a rebuild will be attempted as a matter
# of course, and if this succeeds and passes its autopkgtests then
# removing the package will not aid proposed migration. So we look for
# rc-gone packages in proposed that are missing builds or failing
# autopkgtests. For such source packages, we see if any of their
# binary packages are reported as being made uninstallable by the
# migration of any source package in proposed. If so, removing the
# rc-gone package will help proposed migration.

import argparse
import os
import shlex
import subprocess

import attr
from jinja2 import Environment, FileSystemLoader
import yaml

# Jinja2 environment used to render the HTML report; templates live in a
# "templates" directory next to this script.  Autoescaping is enabled
# because package names and removal comments end up in HTML output.
env = Environment(
    loader=FileSystemLoader(os.path.dirname(os.path.abspath(__file__)) + '/templates'),
    autoescape=True,
    extensions=['jinja2.ext.i18n'],
)
# The template uses i18n tags; install no-op translations so it renders
# without a real gettext catalog.
env.install_null_translations(True)

def parse_args():
    """Parse this script's command-line options and return the namespace."""
    parser = argparse.ArgumentParser()
    for option in ('--ubuntu-excuses', '--ubuntu-update_output',
                   '--debian-excuses', '--output'):
        parser.add_argument(option, action='store')
    return parser.parse_args()

args = parse_args()

def run_output(*cmd, encoding='ascii', **extra):
    """Run *cmd* and return its stripped standard output as a string.

    By default the command must succeed (check=True) and its output is
    decoded with *encoding*.  Any extra keyword arguments (e.g.
    check=False) are passed straight through to subprocess.run().
    """
    kw = dict(check=True, stdout=subprocess.PIPE)
    kw.update(extra)
    cp = subprocess.run(cmd, **kw)
    return cp.stdout.decode(encoding).strip()

all_arches = set()

def extract_bin_pkg_arch_to_blocked_src_pkgs(output_fp):
    """Map (binary package, arch) to the source packages whose migration
    would make that binary uninstallable, from britney's update_output.

    Also records every architecture listed on the "Arch order is:" line
    in the module-level ``all_arches`` set (usually only one or two
    architectures appear as keys in the returned mapping).
    """
    # We're looking for sequences of lines like this:

    # skipped: camlp5 (0, 3, 57)
    #     got: 13+0: a-1:a-0:a-0:i-6:p-0:s-6
    #      * s390x: hol-light, libaac-tactics-ocaml-dev, libcoq-ocaml-dev, libledit-ocaml-dev, ocaml-ulex08

    # (Britney tries to migrate batches of packages but it always
    # tries each package on its own as well).

    # For each potential migration, britney checks architectures in
    # sequence and stops when it finds one that regresses (or proceeds
    # with the migration if it doesn't find one). This means that we
    # can miss blocking packages here --- for example if migrating $src
    # would make say $binpkg/amd64 uninstallable, but britney happens
    # to check arm64 -- where $binpkg does not exist -- and there are
    # regressions there, we will never find out about the problem
    # $binpkg causes.  This isn't really too bad because clearly in
    # this case the migration of $src is blocked by other things that
    # need to be resolved, but it does mean that packages might appear
    # and disapper from the report depending on the order that britney
    # checks architectures in (which is not consistent from run to
    # run). C'est la vie.

    bin_pkg_arch_to_blocked_src_pkgs = {}
    srcpkg = None
    arch_prefix = "Arch order is: "
    for line in output_fp:
        line = line.strip()
        if line.startswith(arch_prefix):
            all_arches.update(line[len(arch_prefix):].split(', '))
        parts = line.split(None, 2)
        if len(parts) >= 2:
            if parts[0] in {"Trying", "trying"}:
                srcpkg = None
            if parts[0] == 'skipped:':
                srcpkg = None
                # If parts[2] is '(' then this line is about trying to
                # migrate a single package, which is what we are
                # looking for.  Guard the index: a truncated line such
                # as "skipped: foo" has no third field and previously
                # raised IndexError here.
                if len(parts) >= 3 and parts[2].startswith('('):
                    srcpkg = parts[1]
            if (srcpkg is not None and srcpkg[0] != '-'
                    and parts[0] == '*' and len(parts) >= 3):
                # parts[1] is "${arch}:"
                # parts[2] is a comma+space separated list of binary package names.
                arch = parts[1][:-1]
                for binpkg in parts[2].split(', '):
                    bin_pkg_arch_to_blocked_src_pkgs.setdefault(
                        (binpkg, arch), set()).add(srcpkg)
    return bin_pkg_arch_to_blocked_src_pkgs


def chdist_grep_dctrl_packages(arch, *args):
    """Return binary package names matching *args* in the series/arch chdist."""
    dist = "{}-{}".format(series, arch)
    output = run_output(
        "chdist", "grep-dctrl-packages", dist, "-nsPackage", *args,
        check=False)
    return output.splitlines()


def chdist_grep_dctrl_sources(arch, *args):
    """Return source package names matching *args* in the series/arch chdist."""
    dist = "{}-{}".format(series, arch)
    output = run_output(
        "chdist", "grep-dctrl-sources", dist, "-nsPackage", *args,
        check=False)
    return output.splitlines()


@attr.s
class RCGone:
    """A source package kept out of Debian testing by RC bugs.

    Tracks which Ubuntu packages in proposed this package blocks (by
    autopkgtest regression or by uninstallability) and builds the text
    shown in the report, including suggested removal commands.
    """
    # Debian source package name (britney's "item-name").
    source_package_name = attr.ib()
    # Debian bug numbers (strings) that got it removed from testing.
    bugs = attr.ib()
    # Source packages in proposed blocked via autopkgtest regressions.
    block_by_regression = attr.ib(default=attr.Factory(set))
    # Source packages in proposed blocked via binary uninstallability.
    block_by_uninstallability = attr.ib(default=attr.Factory(set))
    # Ubuntu suites the package is published in (series, series-proposed).
    suites = attr.ib(default=attr.Factory(set))
    # Lazily computed caches for reverse_depends() / binary_pkgs().
    _rdeps_lines = attr.ib(default=None)
    _binpkgs = attr.ib(default=None)

    def binary_pkgs(self):
        """Return the cached set of (binary package, arch) pairs built by
        this source package, as reported by chdist."""
        if self._binpkgs is None:
            self._binpkgs = set()
            for arch in all_arches:
                arch_binpkgs = chdist_grep_dctrl_packages(
                    arch, "-wS", self.source_package_name)
                self._binpkgs.update({(binpkg, arch) for binpkg in arch_binpkgs})
        return self._binpkgs

    def _append_arch_section(self, lines, title, rdep_map):
        # rdep_map maps rdep -> binpkg -> set of arches.  Append a section
        # header plus one "* rdep for binpkg on arches" line per pair.
        # (Shared by the Reverse-Depends and Reverse-Recommends sections,
        # which previously duplicated this loop verbatim.)
        if not rdep_map:
            return
        lines.append(title)
        for rdep in sorted(rdep_map):
            for binpkg in sorted(rdep_map[rdep]):
                if rdep_map[rdep][binpkg] == all_arches:
                    arches = "all architectures"
                else:
                    arches = ", ".join(rdep_map[rdep][binpkg])
                lines.append(" ".join(["*", rdep, "for", binpkg, "on", arches]))

    def reverse_depends(self):
        """Return (cached) human-readable lines describing the reverse
        dependencies of this package's binaries in the devel series."""
        if self._rdeps_lines is None:
            # These are maps rdep -> binpkg -> arches
            reverse_recommends = {}
            reverse_depends = {}
            # This just maps rbdep -> binpkgs
            reverse_build_depends = {}
            for binpkg, arch in self.binary_pkgs():
                for rec in chdist_grep_dctrl_packages(
                        arch, "-wFRecommends", binpkg):
                    # Ignore relationships within the same source package.
                    if (rec, arch) in self.binary_pkgs():
                        continue
                    reverse_recommends.setdefault(rec, {}).setdefault(binpkg, set()).add(arch)
                for dep in chdist_grep_dctrl_packages(
                        arch, "-wFDepends", binpkg):
                    if (dep, arch) in self.binary_pkgs():
                        continue
                    reverse_depends.setdefault(dep, {}).setdefault(binpkg, set()).add(arch)
                for bdep in chdist_grep_dctrl_sources(
                        arch, "-wFBuild-Depends", binpkg, "--or",
                        "-wFBuild-Depends-Indep", binpkg):
                    reverse_build_depends.setdefault(bdep, set()).add(binpkg)
            lines = []
            self._append_arch_section(lines, "Reverse-Depends", reverse_depends)
            self._append_arch_section(lines, "Reverse-Recommends", reverse_recommends)
            if reverse_build_depends:
                lines.append("Reverse-Build-Depends")
                for rdep in sorted(reverse_build_depends):
                    for binpkg in sorted(reverse_build_depends[rdep]):
                        lines.append(" ".join(["*", rdep, "for", binpkg]))
            self._rdeps_lines = lines
        return self._rdeps_lines

    @property
    def comment(self):
        """Removal comment summarising the Debian bugs and what is blocked."""
        comment = "removed from testing (Debian bug"
        if len(self.bugs) > 1:
            comment += "s"
        comment += " " + ", ".join('#' + b for b in self.bugs) + ")"
        if self.block_by_regression:
            comment += ', blocks {} by autopkgtest regression'.format(', '.join(sorted(self.block_by_regression)))
        if self.block_by_uninstallability:
            comment += ', blocks {} by uninstallability'.format(', '.join(sorted(self.block_by_uninstallability)))
        return comment

    @property
    def rdeps_text_short(self):
        # The first ten reverse-dependency lines.
        return "\n".join(self.reverse_depends()[:10])

    @property
    def rdeps_text_more(self):
        # Any reverse-dependency lines past the first ten.
        return "\n".join(self.reverse_depends()[10:])

    @property
    def removal_commands(self):
        """Return remove-package command lines, one per suite the package
        is in; commented out when reverse dependencies exist (removal
        would then need more thought)."""
        suites = self.suites
        if not suites:
            suites = [series]
        cmds = []
        for suite in suites:
            cmd = " ".join(["remove-package", "-s", suite, "-y", "-m", shlex.quote(self.comment), self.source_package_name])
            if self.reverse_depends():
                cmd = '#' + cmd
            cmds.append(cmd)
        return cmds

    @property
    def css_class(self):
        """CSS class for the report row: removal is only "ok" when the
        package has no reverse dependencies."""
        if self.reverse_depends():
            return 'removal-not-ok'
        else:
            return 'removal-ok'


# The Ubuntu series this run targets: taken from $SERIES if set,
# otherwise asked of distro-info (-d prints the current devel series).
if 'SERIES' in os.environ:
    series = os.environ['SERIES']
else:
    series = run_output('distro-info', '-d')


def main():
    """Build the rcbuggy-problem-packages HTML report.

    Loads Debian excuses (to find rc-gone packages), Ubuntu excuses (to
    find autopkgtest blockage and suite membership) and Ubuntu
    update_output (to find uninstallability blockage), then renders the
    report template to args.output.
    """
    print("loading data")
    # excuses.yaml may be shipped xz-compressed; pick a matching opener.
    if args.ubuntu_excuses.endswith('.xz'):
        import lzma
        excuses_opener = lzma.open
    else:
        excuses_opener = open
    with excuses_opener(args.ubuntu_excuses) as fp:
        ubuntu_excuses = yaml.load(fp, Loader=yaml.CSafeLoader)
    with open(args.ubuntu_update_output) as fp:
        bin_pkg_arch_to_blocked_src_pkgs = extract_bin_pkg_arch_to_blocked_src_pkgs(fp)
    with open(args.debian_excuses) as fp:
        debian_excuses = yaml.load(fp, Loader=yaml.CSafeLoader)

    print("finding rcgone packages")
    # item-name -> RCGone, for packages kept out of testing by RC bugs.
    rc_gones = {}

    for source in debian_excuses['sources']:
        # old-version == '-' means the package is not currently in testing.
        if source['old-version'] == '-':
            info = source['policy_info']
            rc_bugs = info.get('rc-bugs', {})
            if rc_bugs.get('verdict') == "REJECTED_PERMANENTLY":
                bugs = []
                for k in 'shared-bugs', 'unique-source-bugs', 'unique-target-bugs':
                    bugs.extend(rc_bugs[k])
                rc_gones[source['item-name']] = RCGone(
                    source_package_name=source['item-name'],
                    bugs=bugs)
    # Sources stuck in proposed for reasons a rebuild would not fix -- see
    # the uninstallability discussion in the header comment.
    in_proposed_by_autopkgtest_or_missing_binaries = set()
    print("checking autopkgtests")
    for source in ubuntu_excuses['sources']:
        item = source['item-name']
        if 'autopkgtest' in source['reason']:
            in_proposed_by_autopkgtest_or_missing_binaries.add(item)
            for package, results in sorted(source['policy_info']['autopkgtest'].items()):
                if '/' not in package:
                    # This only happens when all tests are still running
                    continue
                package, version = package.split('/')
                # Skip packages we don't track and Ubuntu-local versions
                # (an rc-gone package with Ubuntu changes needs a human).
                if package not in rc_gones or '-0ubuntu' in version:
                    continue
                for arch, result in sorted(results.items()):
                    # result holds [outcome, log, history, ...]; only the
                    # outcome matters here (trailing fields unused).
                    outcome, log, history, wtf1, wtf2 = result
                    if outcome == "REGRESSION" and package != item:
                        rc_gones[package].block_by_regression.add(item)
                        break
        if 'missing-builds' in source and '-0ubuntu' not in source['new-version']:
            in_proposed_by_autopkgtest_or_missing_binaries.add(item)
        if item in rc_gones:
            # Record the suites the rc-gone package is published in, so
            # removal_commands can target each one.
            if source['new-version'] != '-':
                rc_gones[item].suites.add(series+"-proposed")
            if source['old-version'] != '-':
                rc_gones[item].suites.add(series)
    print("checking uninstallability")
    for rc_gone in rc_gones.values():
        # Only packages that a rebuild cannot rescue count (see header).
        if rc_gone.source_package_name not in in_proposed_by_autopkgtest_or_missing_binaries:
            continue
        for bin_pkg, arch in set(rc_gone.binary_pkgs()):
            rc_gone.block_by_uninstallability.update(bin_pkg_arch_to_blocked_src_pkgs.get((bin_pkg, arch), set()))
    print("finding reverse-deps")
    # Only report packages whose removal would actually help migration.
    packages = []
    for _, rc_gone in sorted(rc_gones.items()):
        if not rc_gone.block_by_regression and not rc_gone.block_by_uninstallability:
            continue
        rc_gone.reverse_depends()
        packages.append(rc_gone)

    print("rendering")
    t = env.get_template('rcbuggy-problem-packages.html')
    with open(args.output, 'w', encoding='utf-8') as fp:
        fp.write(t.render(
            packages=packages,
            now=ubuntu_excuses["generated-date"].strftime("%Y.%m.%d %H:%M:%S")))


# Standard script entry point guard.
if __name__ == '__main__':
    main()