'''Functions to manage sandboxes'''

# Copyright (C) 2006 - 2013 Canonical Ltd.
# Author: Martin Pitt <martin.pitt@ubuntu.com>
#         Kyle Nitzsche <kyle.nitzsche@canonical.com>
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version. See http://www.gnu.org/copyleft/gpl.html for
# the full text of the license.

import atexit, os, os.path, re, shutil, tempfile

import apport


def needed_packages(report):
    '''Determine necessary packages for given report.

    Return list of (pkgname, version) pairs. version might be None for
    unknown package versions.
    '''
    pkgs = {}

    # first, grab the versions that we captured at crash time
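    # Package/Dependencies fields contain one "name version" pair per line,
    # optionally followed by an "[origin: ...]" marker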
    for l in (report.get('Package', '') + '\n' + report.get('Dependencies', '')).splitlines():
        if not l.strip():
            continue
        try:
            (pkg, version) = l.split()[:2]
        except ValueError:
            apport.warning('invalid Package/Dependencies line: %s', l)
            # invalid line, ignore
            continue
        pkgs[pkg] = version

    return [(p, v) for (p, v) in pkgs.items()]


def report_package_versions(report):
    '''Return package -> version dictionary from report'''
    pkg_vers = {}
    for l in (report.get('Package', '') + '\n' + report.get('Dependencies', '')).splitlines():
        if not l.strip():
            continue
        try:
            (pkg, version) = l.split()[:2]
        except ValueError:
            apport.warning('invalid Package/Dependencies line: %s', l)
            # invalid line, ignore
            continue
        pkg_vers[pkg] = version

    return pkg_vers


def needed_runtime_packages(report, sandbox, pkgmap_cache_dir, pkg_versions, verbose=False):
    '''Determine necessary runtime packages for given report.

    This determines libraries dynamically loaded at runtime in two cases:
    1. The executable has already run: /proc/pid/maps is used, from the report
    2. The executable has not already run: shared_libraries() is used

    The libraries are resolved to the packages that installed them.

    Return list of (pkgname, version) pairs; version is taken from
    pkg_versions and might be None for unknown package versions.

    When pkgmap_cache_dir is specified, it is used as a cache for
    get_file_package().
    '''
    # check list of libraries that the crashed process referenced at
    # runtime and warn about those which are not available
    pkgs = set()
    libs = set()

    if 'ProcMaps' in report:
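        # /proc/$pid/maps lines have the form
        # "address perms offset dev inode [pathname [(deleted)]]"; pick
        # executable mappings ('x' in perms) whose pathname looks like a
        # shared library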
        for l in report['ProcMaps'].splitlines():
            if not l.strip():
                continue
            cols = l.split()
            if len(cols) in (6, 7) and 'x' in cols[1] and '.so' in cols[5]:
                lib = os.path.realpath(cols[5])
                libs.add(lib)
    else:
        # 'ProcMaps' key is absent in apport-valgrind use case
        libs = apport.fileutils.shared_libraries(report['ExecutablePath']).values()

    if not os.path.exists(pkgmap_cache_dir):
        os.makedirs(pkgmap_cache_dir)
    # grab as much as we can
    for l in libs:
        pkg = apport.packaging.get_file_package(l, True, pkgmap_cache_dir,
                                                release=report['DistroRelease'],
                                                arch=report.get('Architecture'))
        if pkg:
            if verbose:
                apport.log('dynamically loaded %s needs package %s, queueing' % (l, pkg))
            pkgs.add(pkg)
        else:
            apport.warning('%s is needed, but cannot be mapped to a package', l)

    return [(p, pkg_versions.get(p)) for p in pkgs]


def _move_base_files_first(pkgs):
    """Move base-files to the front or add it if missing."""
    base_files_version = None
    for i, (pkg, version) in enumerate(pkgs):
        if pkg == "base-files":
            base_files_version = version
            pkgs.pop(i)
            break
    pkgs.insert(0, ("base-files", base_files_version))


def make_sandbox(report, config_dir, cache_dir=None, sandbox_dir=None,
                 extra_packages=[], verbose=False, log_timestamps=False,
                 dynamic_origins=False):
    '''Build a sandbox with the packages that belong to a particular report.

    This downloads and unpacks all packages from the report's Package and
    Dependencies fields, plus all packages that ship the files from ProcMaps
    (often, runtime plugins do not appear in Dependencies), plus optionally
    some extra ones, for the distro release and architecture of the report.

    For unpackaged executables, there are no Dependencies. Packages for
    shared libraries are unpacked.

    report is an apport.Report object to build a sandbox for. Presence of the
    Package field determines whether to determine dependencies through
    packaging (via the optional report['Dependencies'] field), or through ldd
    via needed_runtime_packages() -> shared_libraries(). Usually
    report['Architecture'] and report['Uname'] are present.

    config_dir points to a directory with by-release configuration files for
    the packaging system, or "system"; this is passed to
    apport.packaging.install_packages(), see that method for details.

    cache_dir points to a directory where the downloaded packages and debug
    symbols are kept, which is useful if you create sandboxes very often. If
    not given, the downloaded packages get deleted at program exit.

    sandbox_dir points to a directory with a permanently unpacked sandbox
    with the already unpacked packages. This speeds up operations even
    further if you need to create sandboxes for different reports very
    often; but the sandboxes can become very big over time, and you must
    ensure that an already existing sandbox matches the DistroRelease: and
    Architecture: of the report. If not given, a temporary directory will be
    created which gets deleted at program exit.

    extra_packages can specify a list of additional packages to install which
    are not derived from the report and will be installed along with their
    dependencies.

    If verbose is True (False by default), this will write some additional
    logging to stdout.

    If log_timestamps is True, these log messages will be prefixed with the
    current time.

    If dynamic_origins is True (False by default), the sandbox will be built
    with packages from foreign origins that appear in the report's
    Packages:/Dependencies:.

    Return a tuple (sandbox_dir, cache_dir, outdated_msg).
    '''
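    # Illustrative use (paths are hypothetical; tools such as apport-retrace
    # call this with values from their command line):
    #   sandbox, cache, outdated_msg = make_sandbox(
    #       report, 'system', cache_dir='/var/tmp/apport-cache')
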
    # sandbox
    if sandbox_dir:
        sandbox_dir = os.path.abspath(sandbox_dir)
        if not os.path.isdir(sandbox_dir):
            os.makedirs(sandbox_dir)
        permanent_rootdir = True
    else:
        sandbox_dir = tempfile.mkdtemp(prefix='apport_sandbox_')
        atexit.register(shutil.rmtree, sandbox_dir)
        permanent_rootdir = False

    # cache
    if cache_dir:
        cache_dir = os.path.abspath(cache_dir)
    else:
        cache_dir = tempfile.mkdtemp(prefix='apport_cache_')
        atexit.register(shutil.rmtree, cache_dir)

    pkgmap_cache_dir = os.path.join(cache_dir, report['DistroRelease'])
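    # the file -> package map cache is keyed by release, so a single
    # cache_dir can serve reports from different releases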
    pkgs = []

    # when ProcMaps is available and we don't have any third-party packages,
    # it is enough to get the libraries in it and map their files to
    # packages; otherwise, get Package/Dependencies
    if 'ProcMaps' not in report or '[origin' in (report.get('Package', '') + report.get('Dependencies', '')):
        pkgs = needed_packages(report)

    # add user-specified extra packages, if any
    extra_pkgs = []
    for p in extra_packages:
        extra_pkgs.append((p, None))

    if config_dir == 'system':
        config_dir = None
    origins = None
    if dynamic_origins:
        pkg_list = report.get('Package', '') + '\n' + report.get('Dependencies', '')
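        # foreign origins are marked in the report like
        # "libfoo 1.2-3 [origin: LP-PPA-some-team]" (illustrative values)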
        m = re.compile(r'\[origin: ([a-zA-Z0-9][a-zA-Z0-9\+\.\-]+)\]')
        origins = set(m.findall(pkg_list))
        if origins:
            apport.log("Origins: %s" % origins)

    # Install base-files first to get correct usrmerge
    _move_base_files_first(pkgs)
    # unpack packages, if any, using cache and sandbox
    try:
        outdated_msg = apport.packaging.install_packages(
            sandbox_dir, config_dir, report['DistroRelease'], pkgs,
            verbose, cache_dir, permanent_rootdir,
            architecture=report.get('Architecture'), origins=origins)
    except SystemError as e:
        apport.fatal(str(e))
    # install the extra packages and their deps
    if extra_pkgs:
        try:
            outdated_msg += apport.packaging.install_packages(
                sandbox_dir, config_dir, report['DistroRelease'], extra_pkgs,
                verbose, cache_dir, permanent_rootdir,
                architecture=report.get('Architecture'), origins=origins,
                install_dbg=False, install_deps=True)
        except SystemError as e:
            apport.fatal(str(e))

    pkg_versions = report_package_versions(report)
    pkgs = needed_runtime_packages(report, sandbox_dir, pkgmap_cache_dir, pkg_versions, verbose)
    # package hooks might reassign Package:, check that we have the originally
    # crashing binary
    for path in ('InterpreterPath', 'ExecutablePath'):
        if path in report:
            pkg = apport.packaging.get_file_package(report[path], True, pkgmap_cache_dir,
                                                    release=report['DistroRelease'],
                                                    arch=report.get('Architecture'))
            # Because of UsrMerge the two systemctl binaries may share the
            # same location; however, since systemd and systemctl conflict,
            # we can assume that if SourcePackage was set to systemd it is
            # correct. For an example see LP: #1872211.
            if pkg == 'systemctl':
                if report['SourcePackage'] == 'systemd':
                    report['ExecutablePath'] = '/bin/systemctl'
                    pkg = 'systemd'
            if pkg:
                apport.log('Installing extra package %s to get %s' % (pkg, path), log_timestamps)
                pkgs.append((pkg, pkg_versions.get(pkg)))
            else:
                apport.fatal('Cannot find package which ships %s %s', path, report[path])
    # unpack packages for executable using cache and sandbox
    if pkgs:
        try:
            outdated_msg += apport.packaging.install_packages(
                sandbox_dir, config_dir, report['DistroRelease'], pkgs,
                verbose, cache_dir, permanent_rootdir,
                architecture=report.get('Architecture'), origins=origins)
        except SystemError as e:
            apport.fatal(str(e))

    # sanity check: for a packaged binary we require having the executable in
    # the sandbox; TODO: for an unpackaged binary we don't currently copy its
    # potential local library dependencies (like those in build trees) into
    # the sandbox, and we call gdb/valgrind on the binary outside the sandbox.
    if 'Package' in report:
        for path in ('InterpreterPath', 'ExecutablePath'):
            if path in report and not os.path.exists(sandbox_dir + report[path]):
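                # on an unmerged sandbox, a merged-/usr path like
                # /usr/bin/foo may have been unpacked as /bin/foo;
                # report[path][4:] drops the '/usr' prefix to try that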
                if report[path].startswith('/usr'):
                    if os.path.exists(sandbox_dir + report[path][4:]):
                        report[path] = report[path][4:]
                    else:
                        apport.fatal('%s %s does not exist (report specified package %s)',
                                     path, sandbox_dir + report[path], report['Package'])
                else:
                    apport.fatal('%s %s does not exist (report specified package %s)',
                                 path, sandbox_dir + report[path], report['Package'])
    if outdated_msg:
        report['RetraceOutdatedPackages'] = outdated_msg

    apport.memdbg('built sandbox')

    return sandbox_dir, cache_dir, outdated_msg