1
by kklimonda at syntaxhighlighted
initial piuparts 0.36 import |
1 |
#!/usr/bin/python
|
2 |
#
|
|
3 |
# Copyright 2005 Lars Wirzenius (liw@iki.fi)
|
|
4 |
#
|
|
5 |
# This program is free software; you can redistribute it and/or modify it
|
|
6 |
# under the terms of the GNU General Public License as published by the
|
|
7 |
# Free Software Foundation; either version 2 of the License, or (at your
|
|
8 |
# option) any later version.
|
|
9 |
#
|
|
10 |
# This program is distributed in the hope that it will be useful, but
|
|
11 |
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
12 |
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
|
|
13 |
# Public License for more details.
|
|
14 |
#
|
|
15 |
# You should have received a copy of the GNU General Public License along with
|
|
16 |
# this program; if not, write to the Free Software Foundation, Inc.,
|
|
17 |
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
|
|
18 |
||
19 |
||
20 |
"""Debian package installation and uninstallation tester.
|
|
21 |
||
22 |
This program sets up a minimal Debian system in a chroot, and installs
|
|
23 |
and uninstalls packages and their dependencies therein, looking for
|
|
24 |
problems.
|
|
25 |
||
26 |
See the manual page (piuparts.1, generated from piuparts.docbook) for
|
|
27 |
more usage information.
|
|
28 |
||
29 |
Lars Wirzenius <liw@iki.fi>
|
|
30 |
"""
|
|
31 |
||
32 |
||
33 |
VERSION = "__PIUPARTS_VERSION__" |
|
34 |
||
35 |
||
36 |
import time |
|
37 |
import logging |
|
38 |
import optparse |
|
39 |
import sys |
|
40 |
import commands |
|
41 |
import tempfile |
|
42 |
import shutil |
|
43 |
import os |
|
44 |
import tarfile |
|
45 |
import stat |
|
46 |
import re |
|
47 |
import pickle |
|
48 |
import sets |
|
49 |
import subprocess |
|
50 |
import unittest |
|
51 |
import urllib |
|
52 |
from debian_bundle import deb822 |
|
53 |
||
54 |
||
55 |
class Defaults:

    """Default settings which depend on flavor of Debian.

    Some settings, such as the default mirror and distribution, depend on
    which flavor of Debian we run under: Debian itself, or a derived
    distribution such as Ubuntu. This class abstracts away the defaults
    so that the rest of the code can just refer to the values defined
    herein.

    """

    # NOTE: this is an abstract interface; the methods below are stubs
    # (they implicitly return None) and are overridden by the concrete
    # subclasses DebianDefaults and UbuntuDefaults.

    def get_components(self):
        """Return list of default components for a mirror."""

    def get_mirror(self):
        """Return default mirror."""

    def get_distribution(self):
        """Return default distribution."""

    def find_default_mirrors(self):
        """Return list of default mirrors."""
class DebianDefaults(Defaults):

    """Defaults for running piuparts on Debian itself."""

    def get_components(self):
        # The classic Debian archive areas.
        return ["main", "contrib", "non-free"]

    def get_mirror(self):
        return [("http://ftp.debian.org/debian", self.get_components())]

    def get_distribution(self):
        return ["sid"]

    def find_default_mirrors(self):
        """Find the default Debian mirrors.

        Returns a list of (mirror_url, components) tuples taken from the
        first "deb" line in /etc/apt/sources.list, or None if the file
        cannot be read.
        """
        mirrors = []
        try:
            # Use open() instead of the deprecated file() builtin (and
            # for consistency with UbuntuDefaults.find_default_mirrors);
            # close the file even if reading fails part-way through.
            f = open("/etc/apt/sources.list", "r")
            try:
                for line in f:
                    parts = line.split()
                    if len(parts) > 2 and parts[0] == "deb":
                        mirrors.append((parts[1], parts[3:]))
                        break # Only use the first one, at least for now.
            finally:
                f.close()
        except IOError:
            return None
        return mirrors
106 |
class UbuntuDefaults(Defaults):

    """Defaults for running piuparts on Ubuntu."""

    def get_components(self):
        # Ubuntu's four archive components.
        return ["main", "universe", "restricted", "multiverse"]

    def get_mirror(self):
        return [("http://archive.ubuntu.com/ubuntu", self.get_components())]

    def get_distribution(self):
        return ["lucid"]

    def find_default_mirrors(self):
        """Return mirrors based on the first "deb" line of sources.list.

        Unlike the Debian variant, the component list always comes from
        get_components(). Returns None when the file is unreadable.
        """
        found = []
        try:
            sources = open("/etc/apt/sources.list", "r")
            for entry in sources:
                fields = entry.split()
                if len(fields) > 2 and fields[0] == "deb":
                    found.append((fields[1], self.get_components()))
                    break # Only use the first one, at least for now.
            sources.close()
        except IOError:
            return None
        return found
class DefaultsFactory:

    """Instantiate the right defaults class."""

    def guess_flavor(self):
        # Ask lsb_release for the distributor ID (e.g. "Debian" or
        # "Ubuntu") and normalize it to lower case.
        p = subprocess.Popen(["lsb_release", "-i", "-s"],
                             stdout=subprocess.PIPE)
        stdout, stderr = p.communicate()
        return stdout.strip().lower()

    def new_defaults(self):
        # Return the Defaults subclass matching settings.defaults,
        # guessing the flavor via lsb_release when it is unset.
        # Calls panic() for an unrecognized flavor.
        if not settings.defaults:
            settings.defaults = self.guess_flavor()
            print "Guessed:", settings.defaults
        if settings.defaults.lower() == "debian":
            return DebianDefaults()
        if settings.defaults.lower() == "ubuntu":
            return UbuntuDefaults()
        logging.error("Unknown set of defaults: %s" % settings.defaults)
        panic()
class Settings:

    """Global settings for this program."""

    def __init__(self):
        # Debian flavor ("debian"/"ubuntu"); guessed by DefaultsFactory
        # when left as None.
        self.defaults = None
        # Where temporary chroots and files are created.
        self.tmpdir = None
        # Directory holding pre_*/post_* hook scripts, or None.
        self.scriptsdir = None
        self.keep_tmpdir = False
        self.single_changes_list = False
        # limit output of logfiles to the last megabyte:
        self.max_command_output_size = 1024 * 1024
        self.args_are_package_files = True
        # Mirror and distribution configuration used when writing the
        # chroot's sources.list.
        self.debian_mirrors = []
        self.debian_distros = []
        self.bindmounts = []
        # Optional tarballs: unpack the chroot from basetgz, save it to
        # savetgz.
        self.basetgz = None
        self.savetgz = None
        self.endmeta = None
        self.saveendmeta = None
        # Behavior toggles for the various tests.
        self.warn_on_others = False
        self.keep_sources_list = False
        self.skip_minimize = False
        self.list_installed_files = False
        self.no_upgrade_test = False
        self.skip_cronfiles_test = False
        self.check_broken_symlinks = True
        self.debfoster_options = None
        # Exact paths that are expected to change during a test run and
        # are therefore ignored when comparing chroot states.
        self.ignored_files = [
            "/dev/MAKEDEV",
            "/etc/aliases",
            "/etc/apt/apt.conf",
            "/etc/apt/secring.gpg",
            "/etc/apt/trustdb.gpg",
            "/etc/apt/trusted.gpg",
            "/etc/apt/trusted.gpg~",
            "/etc/crypttab",
            "/etc/exports",
            "/etc/group",
            "/etc/group-",
            "/etc/gshadow",
            "/etc/gshadow-",
            "/etc/hosts.allow.bak",
            "/etc/inetd.conf",
            "/etc/init.d/gnocatan-meta-server",
            "/etc/inittab",
            "/etc/inputrc",
            "/etc/keys",
            "/etc/ld.so.cache",
            "/etc/ld.so.conf",
            "/etc/ld.so.conf.old",
            "/etc/mailname",
            "/etc/modprobe.d",
            "/etc/modules.conf",
            "/etc/modules.conf.old",
            "/etc/mtab",
            "/etc/news",
            "/etc/news/organization",
            "/etc/news/server",
            "/etc/news/servers",
            "/etc/nologin",
            "/etc/passwd",
            "/etc/passwd-",
            "/etc/printcap",
            "/etc/shadow",
            "/etc/shadow-",
            "/etc/shells",
            "/etc/skel/.zshrc",
            "/home/ftp",
            "/usr/sbin/policy-rc.d",
            "/usr/share/doc/base-config",
            "/usr/share/doc/base-config/README.Debian",
            "/usr/share/doc/base-config/changelog.gz",
            "/usr/share/doc/base-config/copyright",
            "/usr/share/info/dir",
            "/usr/share/info/dir.old",
            "/var/cache/apt/archives/lock",
            "/var/cache/apt/pkgcache.bin",
            "/var/cache/apt/srcpkgcache.bin",
            "/var/cache/debconf",
            "/var/cache/debconf/config.dat",
            "/var/cache/debconf/config.dat-old",
            "/var/cache/debconf/passwords.dat",
            "/var/cache/debconf/templates.dat",
            "/var/cache/debconf/templates.dat-old",
            "/var/cache/ldconfig/aux-cache",
            "/var/cache/man/index.db",
            "/var/games",
            "/var/lib/apt/extended_states",
            "/var/lib/dpkg/available",
            "/var/lib/dpkg/available-old",
            "/var/lib/dpkg/diversions",
            "/var/lib/dpkg/diversions-old",
            "/var/lib/dpkg/info/base-config.conffiles",
            "/var/lib/dpkg/info/base-config.list",
            "/var/lib/dpkg/info/base-config.md5sums",
            "/var/lib/dpkg/info/base-config.postinst",
            "/var/lib/dpkg/lock",
            "/var/lib/dpkg/status",
            "/var/lib/dpkg/status-old",
            "/var/lib/dpkg/statoverride",
            "/var/lib/dpkg/statoverride-old",
            "/var/lib/dpkg/firebird",
            "/var/lib/logrotate/status",
            "/var/lib/rbldns",
            "/var/log/apt/term.log",
            "/var/log/dpkg.log",
            "/var/log/faillog",
            "/var/log/lastlog",
            "/",
            # Ubuntu specific files
            "/etc/blkid.tab", # LP: FIXME
            "/usr/lib/python2.5/site-packages/python-support.pth", # LP: FIXME
            ]
        # Regular expressions for paths ignored in the same way.
        self.ignored_patterns = [
            "/dev/",
            "/etc/ssl/certs(/.*)?",
            "/lib/modules/.*/modules.*",
            "/usr/lib/python2\../site-packages/debconf.py[co]",
            "/var/backups/.*",
            "/var/lib/cvs(/.*)?",
            "/var/lib/dpkg/alternatives",
            "/var/lib/dpkg/triggers/.*",
            "/var/lib/ldap(/.*)?",
            "/var/lib/maxdb(/.*)?",
            "/var/lib/onak(/.*)?",
            "/var/lib/papercut(/.*)?",
            "/var/log/exim/.*",
            "/var/log/exim4/.*",
            "/var/mail/.*",
            "/var/spool/exim/.*",
            "/var/spool/exim4/.*",
            "/var/spool/news(/.*)?",
            "/var/spool/squid(/.*)?",
            "/var/run/.*",
            "/var/www(/.*)?",
            "/tmp/.*",
            # Ubuntu specific patterns
            "/lib/udev/devices/std(err|in|out)", # LP: FIXME
            ]


# Single module-wide settings instance, shared by everything below.
settings = Settings()
|
297 |
||
298 |
||
299 |
# Registry of cleanup hooks executed by panic(), keyed by registration
# order; 'counter' is the next id to hand out.
on_panic_hooks = {}
counter = 0


def do_on_panic(hook):
    """Register a cleanup callable to run on panic(); return its id."""
    global counter
    hook_id = counter
    counter += 1
    on_panic_hooks[hook_id] = hook
    return hook_id
309 |
||
310 |
||
311 |
def dont_do_on_panic(id):
    """Unregister a panic hook previously registered with do_on_panic()."""
    del on_panic_hooks[id]
313 |
||
314 |
||
315 |
class TimeOffsetFormatter(logging.Formatter):

    """Log formatter stamping records with time elapsed since startup."""

    def __init__(self, fmt=None, datefmt=None):
        # Remember when we were created; timestamps are relative to this.
        self.startup_time = time.time()
        logging.Formatter.__init__(self, fmt, datefmt)

    def formatTime(self, record, datefmt):
        """Render the elapsed wall-clock time as e.g. "3m12.5s"."""
        elapsed = time.time() - self.startup_time
        minutes = int(elapsed / 60)
        seconds = elapsed % 60.0
        return "%dm%.1fs" % (minutes, seconds)
326 |
||
327 |
||
328 |
# Custom log level below DEBUG for raw command output, and the list of
# handlers we attach (kept so dump() can flush them).
DUMP = logging.DEBUG - 1
HANDLERS = []


def setup_logging(log_level, log_file_name):
    """Attach stdout (and optionally file) handlers to the root logger."""
    logging.addLevelName(DUMP, "DUMP")

    root = logging.getLogger()
    root.setLevel(log_level)
    formatter = TimeOffsetFormatter("%(asctime)s %(levelname)s: %(message)s")

    targets = [logging.StreamHandler(sys.stdout)]
    if log_file_name:
        targets.append(logging.FileHandler(log_file_name))
    for handler in targets:
        handler.setFormatter(formatter)
        root.addHandler(handler)
        HANDLERS.append(handler)
|
348 |
||
349 |
||
350 |
def dump(msg):
    """Log msg at the custom DUMP level and flush every handler."""
    logger = logging.getLogger()
    logger.log(DUMP, msg)
    # Flush so partial output survives if we crash or panic() soon after.
    for handler in HANDLERS:
        handler.flush()
|
355 |
||
356 |
||
357 |
def panic(exit=1):
    """Run all registered panic hooks in registration order, then exit.

    'exit' is the process exit status (default 1).
    """
    for i in range(counter):
        # Hooks may have been unregistered with dont_do_on_panic(),
        # leaving gaps in the id sequence.
        if i in on_panic_hooks:
            on_panic_hooks[i]()
    sys.exit(exit)
|
362 |
||
363 |
||
364 |
def indent_string(str):
    """Indent all lines in a string with two spaces and return result."""
    indented = []
    for line in str.split("\n"):
        indented.append("  " + line)
    return "\n".join(indented)
|
367 |
||
368 |
||
369 |
# Characters that never need quoting for the shell.
safechars = ("abcdefghijklmnopqrstuvwxyz" +
             "ABCDEFGHIJKLMNOPQRSTUVWXYZ" +
             "0123456789" +
             ",.-_!%/=+:")

def shellquote(str):
    """Quote a string so the shell treats it as one literal word."""
    if str == "":
        return "''"

    quoted = []
    for ch in str:
        if ch == "'":
            # Single quotes get wrapped in double quotes.
            quoted.append("\"'\"")
        elif ch in safechars:
            quoted.append(ch)
        else:
            # Everything else is backslash-escaped.
            quoted.append("\\" + ch)
    return "".join(quoted)
|
387 |
||
388 |
||
389 |
def run(command, ignore_errors=False):
    """Run an external command and die with error message if it fails.

    'command' must be a list of argument strings. Returns a tuple
    (exit status, combined stdout+stderr output). When ignore_errors is
    True a non-zero exit is logged but not fatal; otherwise panic() is
    called.
    """
    assert type(command) == type([])
    logging.debug("Starting command: %s" % command)
    # Force a predictable locale so command output is parseable.
    env = os.environ.copy()
    env["LC_ALL"] = "C"
    env["LANGUAGES"] = ""
    p = subprocess.Popen(command, env=env, stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    (output, _) = p.communicate()

    if output:
        # Record the command's output at DUMP level, indented.
        dump("\n" + indent_string(output.rstrip("\n")))

    if p.returncode == 0:
        logging.debug("Command ok: %s" % repr(command))
    elif ignore_errors:
        logging.debug("Command failed (status=%d), but ignoring error: %s" %
                      (p.returncode, repr(command)))
    else:
        logging.error("Command failed (status=%d): %s\n%s" %
                      (p.returncode, repr(command), indent_string(output)))
        panic()
    return p.returncode, output
|
413 |
||
414 |
||
415 |
def create_temp_file():
    """Create a temporary file and return its full path.

    Returns the (fd, path) pair from tempfile.mkstemp; the file lives
    inside settings.tmpdir.
    """
    (fd, path) = tempfile.mkstemp(dir=settings.tmpdir)
    logging.debug("Created temporary file %s" % path)
    return (fd, path)
|
420 |
||
421 |
||
422 |
def create_file(name, contents): |
|
423 |
"""Create a new file with the desired name and contents."""
|
|
424 |
try: |
|
425 |
f = file(name, "w") |
|
426 |
f.write(contents) |
|
427 |
f.close() |
|
428 |
except IOError, detail: |
|
429 |
logging.error("Couldn't create file %s: %s" % (name, detail)) |
|
430 |
panic() |
|
431 |
||
432 |
||
433 |
def remove_files(filenames):
    """Remove some files.

    Calls panic() if any file cannot be removed.
    """
    for filename in filenames:
        logging.debug("Removing %s" % filename)
        try:
            os.remove(filename)
        except OSError, detail:
            logging.error("Couldn't remove %s: %s" % (filename, detail))
            panic()
|
442 |
||
443 |
||
444 |
def make_metapackage(name, depends, conflicts):
    """Return the path to a .deb created just for satisfying dependencies

    Caller is responsible for removing the temporary directory containing the
    .deb when finished.
    """
    # Inspired by pbuilder's pbuilder-satisfydepends-aptitude

    # Build a minimal DEBIAN/control in a fresh temp dir, then let
    # dpkg-deb assemble the dummy package.
    tmpdir = tempfile.mkdtemp(dir=settings.tmpdir)
    os.makedirs(os.path.join(tmpdir, name, 'DEBIAN'))
    control = deb822.Deb822()
    control['Package'] = name
    control['Version'] = '0.invalid.0'
    control['Architecture'] = 'all'
    control['Maintainer'] = ('piuparts developers team '
                             '<piuparts-devel@lists.alioth.debian.org>')
    control['Description'] = ('Dummy package to satisfy dependencies - '
                              'created by piuparts\n'
                              ' This package was created automatically by '
                              'piuparts and can safely be removed')
    if depends:
        control['Depends'] = depends
    if conflicts:
        control['Conflicts'] = conflicts

    create_file(os.path.join(tmpdir, name, 'DEBIAN', 'control'),
                control.dump())

    run(['dpkg-deb', '-b', os.path.join(tmpdir, name)])
    return os.path.join(tmpdir, name) + '.deb'
|
474 |
||
475 |
||
476 |
def is_broken_symlink(root, dirpath, filename):
    """Is symlink dirpath+filename broken?

    When resolving the symlink, pretend (similar to chroot) that root is
    the root of the filesystem. Note that this does NOT work completely
    correctly if the symlink target contains .. path components. This is
    good enough for my immediate purposes, but nowhere near good enough
    for anything that needs to be secure. For that, use chroot and have
    the kernel resolve symlinks instead.

    """

    pathname = os.path.join(dirpath, filename)
    hops = 0
    while os.path.islink(pathname):
        if hops >= 10: # let's avoid infinite loops...
            return True
        hops += 1
        target = os.readlink(pathname)
        if os.path.isabs(target):
            # Re-root absolute targets under 'root'. Assume Unix filenames.
            pathname = os.path.join(root, target[1:])
        else:
            pathname = os.path.join(os.path.dirname(pathname), target)

    # The symlink chain, if any, has now been resolved. Does the target
    # exist?
    return not os.path.exists(pathname)
|
503 |
||
504 |
||
505 |
class IsBrokenSymlinkTests(unittest.TestCase):

    """Unit tests for is_broken_symlink(), run via unittest."""

    # All test links are created inside this scratch directory.
    testdir = "is-broken-symlink-testdir"

    def symlink(self, target, name):
        # Create a symlink inside testdir and remember it for cleanup.
        pathname = os.path.join(self.testdir, name)
        os.symlink(target, pathname)
        self.symlinks.append(pathname)

    def setUp(self):
        self.symlinks = []
        os.mkdir(self.testdir)
        self.symlink("notexist", "relative-broken")
        self.symlink("relative-broken", "relative-broken-to-symlink")
        self.symlink(".", "relative-works")
        self.symlink("relative-works", "relative-works-to-symlink")
        # Absolute targets are resolved relative to testdir, so /etc is
        # broken (testdir/etc does not exist) while / works.
        self.symlink("/etc", "absolute-broken")
        self.symlink("absolute-broken", "absolute-broken-to-symlink")
        self.symlink("/", "absolute-works")
        self.symlink("/absolute-works", "absolute-works-to-symlink")

    def tearDown(self):
        shutil.rmtree(self.testdir)

    def testRelativeBroken(self):
        self.failUnless(is_broken_symlink(self.testdir, self.testdir,
                                          "relative-broken"))

    def testRelativeBrokenToSymlink(self):
        self.failUnless(is_broken_symlink(self.testdir, self.testdir,
                                          "relative-broken-to-symlink"))

    def testAbsoluteBroken(self):
        self.failUnless(is_broken_symlink(self.testdir, self.testdir,
                                          "absolute-broken"))

    def testAbsoluteBrokenToSymlink(self):
        self.failUnless(is_broken_symlink(self.testdir, self.testdir,
                                          "absolute-broken-to-symlink"))

    def testRelativeWorks(self):
        self.failIf(is_broken_symlink(self.testdir, self.testdir,
                                      "relative-works"))

    def testRelativeWorksToSymlink(self):
        self.failIf(is_broken_symlink(self.testdir, self.testdir,
                                      "relative-works-to-symlink"))

    def testAbsoluteWorks(self):
        self.failIf(is_broken_symlink(self.testdir, self.testdir,
                                      "absolute-works"))

    def testAbsoluteWorksToSymlink(self):
        self.failIf(is_broken_symlink(self.testdir, self.testdir,
                                      "absolute-works-to-symlink"))

    def testMultiLevelNestedSymlinks(self):
        # target/first-link -> ../target/second-link -> ../target

        os.mkdir(os.path.join(self.testdir, "target"))
        self.symlink("../target", "target/second-link")
        self.symlink("../target/second-link", "target/first-link")
        self.failIf(is_broken_symlink(self.testdir, self.testdir,
                                      "target/first-link"))
|
569 |
||
570 |
||
571 |
class Chroot: |
|
572 |
||
573 |
"""A chroot for testing things in."""
|
|
574 |
||
575 |
    def __init__(self):
        # Path to the chroot directory; set by create_temp_dir().
        self.name = None
|
577 |
||
578 |
    def create_temp_dir(self):
        """Create a temporary directory for the chroot."""
        self.name = tempfile.mkdtemp(dir=settings.tmpdir)
        # Mode 0755: readable/traversable by everyone, writable by owner.
        os.chmod(self.name, 0755)
        logging.debug("Created temporary directory %s" % self.name)
|
583 |
||
584 |
def create(self): |
|
585 |
"""Create a chroot according to user's wishes."""
|
|
586 |
self.create_temp_dir() |
|
587 |
id = do_on_panic(self.remove) |
|
588 |
||
589 |
if settings.basetgz: |
|
590 |
self.unpack_from_tgz(settings.basetgz) |
|
591 |
else: |
|
592 |
self.setup_minimal_chroot() |
|
593 |
||
594 |
self.configure_chroot() |
|
595 |
self.mount_proc() |
|
596 |
self.mount_selinux() |
|
597 |
if settings.basetgz: |
|
598 |
self.run(["apt-get", "-yf", "upgrade"]) |
|
599 |
self.minimize() |
|
600 |
self.run(["apt-get", "clean"]) |
|
601 |
||
602 |
#copy scripts dir into the chroot
|
|
603 |
if settings.scriptsdir is not None: |
|
604 |
dest = self.relative("tmp/scripts/") |
|
605 |
if not os.path.exists(self.relative("tmp/scripts/")): |
|
606 |
os.mkdir(dest) |
|
607 |
logging.debug("Copying scriptsdir to %s" % dest) |
|
608 |
for file in os.listdir(settings.scriptsdir): |
|
609 |
if (file.startswith("post_") or file.startswith("pre_")) and os.path.isfile(os.path.join((settings.scriptsdir), file)): |
|
610 |
shutil.copy(os.path.join((settings.scriptsdir), file), dest) |
|
611 |
||
612 |
if settings.savetgz: |
|
613 |
self.pack_into_tgz(settings.savetgz) |
|
614 |
||
615 |
dont_do_on_panic(id) |
|
616 |
||
617 |
    def remove(self):
        """Remove a chroot and all its contents.

        Honors settings.keep_tmpdir: when set, the tree is left in place
        for inspection.
        """
        if not settings.keep_tmpdir and os.path.exists(self.name):
            # Unmount pseudo-filesystems before deleting the tree.
            self.unmount_proc()
            self.unmount_selinux()
            shutil.rmtree(self.name)
            logging.debug("Removed directory tree at %s" % self.name)
        elif settings.keep_tmpdir:
            logging.debug("Keeping directory tree at %s" % self.name)
|
626 |
||
627 |
    def create_temp_tgz_file(self):
        """Return the path to a file to be used as a temporary tgz file"""
        # Yes, create_temp_file() would work just as well, but putting it in
        # the interface for Chroot allows the VirtServ hack to work.
        # NOTE(review): the open fd from mkstemp is discarded here without
        # being closed — confirm whether that leak is intentional.
        (fd, temp_tgz) = create_temp_file()
        return temp_tgz
|
633 |
||
634 |
    def pack_into_tgz(self, result):
        """Tar and compress all files in the chroot."""
        logging.debug("Saving %s to %s." % (self.name, result))

        # Skip /proc contents: mount_proc() mounts proc inside the chroot
        # during create().
        run(['tar', '--exclude', './proc/*', '-czf', result, '-C', self.name, './'])
|
639 |
||
640 |
    def unpack_from_tgz(self, tarball):
        """Unpack a tarball to a chroot."""
        logging.debug("Unpacking %s into %s" % (tarball, self.name))
        run(["tar", "-C", self.name, "-zxf", tarball])
|
644 |
||
645 |
    def run(self, command, ignore_errors=False):
        """Run a command inside the chroot via chroot(8).

        Returns the (status, output) pair from the module-level run().
        """
        return run(["chroot", self.name] + command,
                   ignore_errors=ignore_errors)
|
648 |
||
649 |
def create_apt_sources(self, distro): |
|
650 |
"""Create an /etc/apt/sources.list with a given distro."""
|
|
651 |
lines = [] |
|
652 |
for mirror, components in settings.debian_mirrors: |
|
653 |
lines.append("deb %s %s %s\n" % |
|
654 |
(mirror, distro, " ".join(components))) |
|
655 |
create_file(os.path.join(self.name, "etc/apt/sources.list"), |
|
656 |
"".join(lines)) |
|
657 |
||
658 |
    def create_apt_conf(self):
        """Create /etc/apt/apt.conf inside the chroot.

        Allows unauthenticated packages, assumes "yes" to prompts, and
        disables installation of Recommends/Suggests.
        """
        create_file(self.relative("etc/apt/apt.conf"),
                    'APT::Get::AllowUnauthenticated "yes";\n' +
                    'APT::Get::Assume-Yes "yes";\n' +
                    'APT::Install-Recommends "0";\n' +
                    'APT::Install-Suggests "0";\n')
|
665 |
||
666 |
    def create_policy_rc_d(self):
        """Create a policy-rc.d that prevents daemons from running."""
        full_name = os.path.join(self.name, "usr/sbin/policy-rc.d")
        # Exit status 101 tells invoke-rc.d not to start any service.
        create_file(full_name, "#!/bin/sh\nexit 101\n")
        os.chmod(full_name, 0777)
        logging.debug("Created policy-rc.d and chmodded it.")
|
672 |
||
673 |
    def setup_minimal_chroot(self):
        """Set up a minimal Debian system in a chroot.

        Bootstraps the first configured distro from the first configured
        mirror using debootstrap.
        """
        logging.debug("Setting up minimal chroot for %s at %s." %
                      (settings.debian_distros[0], self.name))
        run(["debootstrap", "--resolve-deps", settings.debian_distros[0],
             self.name, settings.debian_mirrors[0][0]])
|
679 |
||
680 |
def minimize(self): |
|
681 |
"""Minimize a chroot by removing (almost all) unnecessary packages"""
|
|
682 |
if False: |
|
683 |
logging.debug("NOT minimizing chroot because of dpkg bug") |
|
684 |
return
|
|
685 |
||
686 |
if settings.skip_minimize: |
|
687 |
return
|
|
688 |
||
689 |
self.run(["apt-get", "install", "debfoster"]) |
|
690 |
self.run(["debfoster"] + settings.debfoster_options) |
|
691 |
remove_files([self.relative("var/lib/debfoster/keepers")]) |
|
692 |
self.run(["dpkg", "--purge", "debfoster"]) |
|
693 |
||
694 |
    def configure_chroot(self):
        """Configure a chroot according to current settings"""
        if not settings.keep_sources_list:
            self.create_apt_sources(settings.debian_distros[0])
        self.create_apt_conf()
        self.create_policy_rc_d()
        # Set up any requested bind mounts inside the chroot.
        for bindmount in settings.bindmounts:
            run(["mkdir", "-p", self.relative(bindmount)])
            run(["mount", "-obind", bindmount, self.relative(bindmount)])
        self.run(["apt-get", "update"])
|
704 |
||
705 |
    def upgrade_to_distros(self, distros, packages):
        """Upgrade a chroot installation to each successive distro.

        For each distro: rewrite sources.list, run the pre_distupgrade
        hooks, dist-upgrade, force-install 'packages', then run the
        post_distupgrade hooks.
        """
        for distro in distros:
            logging.debug("Upgrading %s to %s" % (self.name, distro))
            self.create_apt_sources(distro)
            # Run custom scripts before upgrade
            if settings.scriptsdir is not None:
                self.run_scripts("pre_distupgrade")
            self.run(["apt-get", "update"])
            self.run(["apt-get", "-yf", "dist-upgrade"])
            # Sometimes dist-upgrade won't upgrade the packages we want
            # to test because the new version depends on a newer library,
            # and installing that would require removing the old version
            # of the library, and we've told apt-get not to remove
            # packages. So, we force the installation like this.
            self.install_packages_by_name(packages)
            # Run custom scripts after upgrade
            if settings.scriptsdir is not None:
                self.run_scripts("post_distupgrade")
            self.check_for_no_processes()
|
725 |
||
726 |
    def apt_get_knows(self, package_names):
        """Does apt-get (or apt-cache) know about a set of packages?

        Returns False as soon as apt-cache has no record (or empty
        output) for one of the names; True when all are known.
        """
        for name in package_names:
            (status, output) = self.run(["apt-cache", "show", name],
                                        ignore_errors=True)
            # NOTE(review): 'status' is Popen.returncode from run(), but
            # os.WIFEXITED/WEXITSTATUS expect a raw wait(2) status word —
            # confirm this combination behaves as intended.
            if not os.WIFEXITED(status):
                logging.error("Error occurred when running apt-cache " +
                              " in chroot:\n" + output)
                panic()
            if os.WEXITSTATUS(status) != 0 or not output.strip():
                return False
        return True
|
738 |
||
739 |
    def copy_files(self, source_names, target_name):
        """Copy files in 'source_name' to file/dir 'target_name', relative
        to the root of the chroot.

        Calls panic() if any copy fails.
        """
        target_name = os.path.join(self.name, target_name)
        logging.debug("Copying %s to %s" %
                      (", ".join(source_names), target_name))
        for source_name in source_names:
            try:
                shutil.copy(source_name, target_name)
            except IOError, detail:
                logging.error("Error copying %s to %s: %s" %
                              (source_name, target_name, detail))
                panic()
|
752 |
||
753 |
    def list_installed_files (self, pre_info, post_info):
        """List the new files installed, removed and modified between two dir trees.
        Actually, it is a nice output of the function diff_meta_data."""
        (new, removed, modified) = diff_meta_data(pre_info, post_info)
        file_owners = self.get_files_owned_by_packages()

        if new:
            logging.debug("New installed files on system:\n" + file_list(new, file_owners))
        else:
            logging.debug("The package did not install any new file.\n")

        if removed:
            logging.debug("The following files have disappeared:\n" +
                          file_list(removed, file_owners))

        if modified:
            logging.debug("The following files have been modified:\n" +
                          file_list(modified, file_owners))
        else:
            logging.debug("The package did not modify any file.\n")
|
773 |
||
774 |
||
775 |
    def install_package_files(self, filenames):
        # Copy the .deb files into the chroot's /tmp, install them with
        # dpkg -i, then let apt-get fix up missing dependencies. Hook
        # scripts run before and after; the copied files are removed at
        # the end.
        if filenames:
            self.copy_files(filenames, "tmp")
            tmp_files = [os.path.basename(a) for a in filenames]
            tmp_files = [os.path.join("tmp", name) for name in tmp_files]

            if settings.scriptsdir is not None:
                self.run_scripts("pre_install")

            if settings.list_installed_files:
                # Snapshot metadata so we can report what each step
                # installed.
                pre_info = self.save_meta_data()

                self.run(["dpkg", "-i"] + tmp_files, ignore_errors=True)
                self.list_installed_files (pre_info, self.save_meta_data())

                self.run(["apt-get", "-yf", "--no-remove", "install"])
                self.list_installed_files (pre_info, self.save_meta_data())

            else:
                self.run(["dpkg", "-i"] + tmp_files, ignore_errors=True)
                self.run(["apt-get", "-yf", "--no-remove", "install"])

            if settings.scriptsdir is not None:
                self.run_scripts("post_install")

            self.run(["apt-get", "clean"])
            remove_files([os.path.join(self.name, name)
                          for name in tmp_files])
|
803 |
||
804 |
def get_selections(self): |
|
805 |
"""Get current package selections in a chroot."""
|
|
806 |
(status, output) = self.run(["dpkg", "--get-selections", "*"]) |
|
807 |
list = [line.split() for line in output.split("\n") if line.strip()] |
|
808 |
dict = {} |
|
809 |
for name, status in list: |
|
810 |
dict[name] = status |
|
811 |
return dict |
|
812 |
||
813 |
    def remove_or_purge(self, operation, packages):
        """Remove or purge packages in a chroot.

        'operation' is "remove" or "purge" (passed to dpkg as --remove
        or --purge). Failures on individual packages are ignored; a
        final --remove --pending sweeps up what is left.
        """
        for name in packages:
            self.run(["dpkg", "--" + operation, name], ignore_errors=True)
        self.run(["dpkg", "--remove", "--pending"], ignore_errors=True)
|
818 |
||
819 |
||
820 |
def restore_selections(self, changes, packages):
    """Restore package selections in a chroot by applying 'changes'.
    'changes' is a return value from diff_selections."""

    # Split the required changes into the packages being explicitly
    # tested ('nondeps') and everything else ('deps') so they can be
    # purged in separate passes below.
    deps = {}
    nondeps = {}
    for name, state in changes.iteritems():
        if name in packages:
            nondeps[name] = state
        else:
            deps[name] = state

    deps_to_remove = [name for name, state in deps.iteritems()
                      if state == "remove"]
    deps_to_purge = [name for name, state in deps.iteritems()
                     if state == "purge"]
    nondeps_to_remove = [name for name, state in nondeps.iteritems()
                         if state == "remove"]
    nondeps_to_purge = [name for name, state in nondeps.iteritems()
                        if state == "purge"]

    # First remove all packages.
    self.remove_or_purge("remove", deps_to_remove + deps_to_purge +
                         nondeps_to_remove + nondeps_to_purge)
    # Run custom scripts after remove all packages.
    if settings.scriptsdir is not None:
        self.run_scripts("post_remove")

    # Cron files left behind by removed (but not yet purged) packages
    # must not produce output when run; check that before purging.
    if not settings.skip_cronfiles_test:
        cronfiles, cronfiles_list = self.check_if_cronfiles(packages)

    if not settings.skip_cronfiles_test and cronfiles:
        self.check_output_cronfiles(cronfiles_list)

    # Then purge all packages being depended on.
    self.remove_or_purge("purge", deps_to_purge)

    # Finally, purge actual packages.
    self.remove_or_purge("purge", nondeps_to_purge)

    # Run custom scripts after purge all packages.
    if settings.scriptsdir is not None:
        self.run_scripts("post_purge")

    # Now do a final run to see that everything worked.
    self.run(["dpkg", "--purge", "--pending"])
    self.run(["dpkg", "--remove", "--pending"])
def save_meta_data(self):
    """Return the filesystem meta data for all objects in the chroot."""
    # Joining with "." makes 'root' end in "/.", so that slicing with
    # len(root) below yields keys like "/etc/passwd" — paths as seen
    # from inside the chroot.
    root = os.path.join(self.name, ".")
    dict = {}
    proc = os.path.join(root, "proc")
    for dirpath, dirnames, filenames in os.walk(root):
        assert dirpath[:len(root)] == root
        # Skip the whole /proc tree: its contents are volatile kernel
        # state and would produce spurious differences.
        if dirpath[:len(proc) + 1] in [proc, proc + "/"]:
            continue
        # Record the directory itself plus every file inside it.
        for name in [dirpath] + \
                    [os.path.join(dirpath, f) for f in filenames]:
            st = os.lstat(name)
            if stat.S_ISLNK(st.st_mode):
                target = os.readlink(name)
            else:
                target = None
            # Value: (lstat result, symlink target or None).
            dict[name[len(root):]] = (st, target)
    return dict
def relative(self, pathname):
    """Return the host-side path for 'pathname' inside the chroot.

    Exactly one leading slash is stripped so that os.path.join does not
    discard the chroot prefix.
    """
    inside = pathname
    if inside.startswith('/'):
        inside = inside[1:]
    return os.path.join(self.name, inside)
def get_files_owned_by_packages(self):
    """Return dict[filename] = [packagenamelist].

    The ownership information is read from the chroot's dpkg database
    (var/lib/dpkg/info/*.list files).
    """
    info_dir = self.relative("var/lib/dpkg/info")
    owners = {}
    for basename in os.listdir(info_dir):
        if not basename.endswith(".list"):
            continue
        pkg = basename[:-len(".list")]
        # open() instead of the deprecated file(); close in finally so
        # a read error cannot leak the file handle.
        f = open(os.path.join(info_dir, basename), "r")
        try:
            for line in f:
                pathname = line.strip()
                if pathname in owners:
                    owners[pathname].append(pkg)
                else:
                    owners[pathname] = [pkg]
        finally:
            f.close()
    return owners
def install_packages_by_name(self, packages):
    """Install the named packages inside the chroot via apt-get."""
    if not packages:
        return
    apt_cmd = ["apt-get", "-y", "install"] + packages
    if settings.list_installed_files:
        # Snapshot metadata first so newly installed files can be listed.
        before = self.save_meta_data()
        self.run(apt_cmd)
        self.list_installed_files(before, self.save_meta_data())
    else:
        self.run(apt_cmd)
def check_for_no_processes(self):
    """Check there are no processes running inside the chroot.

    Uses lsof on the chroot directory; any open file under it means a
    process is still alive, which is a test failure.
    """
    (status, output) = run(["lsof", "-w", "+D", self.name], ignore_errors=True)
    process_count = len(output.split("\n")) - 1
    if process_count <= 0:
        return
    logging.error("FAIL: Processes are running inside chroot:\n%s" %
                  indent_string(output))
    panic()
def mount_selinux(self):
    """Expose the host's selinuxfs inside the chroot when SELinux is active."""
    if not selinux_enabled():
        return
    run(["mkdir", "-p", self.relative("/selinux")])
    run(["mount", "-t", "selinuxfs", "/selinux", self.relative("/selinux")])
    logging.info("SElinux mounted into chroot")
def unmount_selinux(self):
    """Undo mount_selinux: detach selinuxfs from the chroot."""
    if not selinux_enabled():
        return
    run(["umount", self.relative("/selinux")])
    logging.info("SElinux unmounted from chroot")
def mount_proc(self):
    """Mount the proc pseudo-filesystem inside the chroot."""
    mount_cmd = ["mount", "-t", "proc", "proc", "/proc"]
    self.run(mount_cmd)
def unmount_proc(self):
    """Unmount /proc and all configured bind mounts inside the chroot."""
    self.run(["umount", "/proc"], ignore_errors=True)
    # Bind mounts were set up from the host side, so unmount them via
    # their host-side paths with the host's umount.
    for mountpoint in settings.bindmounts:
        run(["umount", self.relative(mountpoint)], ignore_errors=True)
def is_ignored(self, pathname):
    """Is a file (or dir or whatever) excluded from comparisons?

    A path is ignored if listed verbatim in settings.ignored_files or if
    it matches any regex in settings.ignored_patterns.
    """
    if pathname in settings.ignored_files:
        return True
    matched = False
    for pattern in settings.ignored_patterns:
        if re.search(pattern, pathname):
            matched = True
            break
    return matched
def check_for_broken_symlinks(self):
    """Check that all symlinks in chroot are non-broken."""
    if not settings.check_broken_symlinks:
        return
    broken = []
    for dirpath, dirnames, filenames in os.walk(self.name):
        # Remove /proc within chroot to avoid lots of spurious errors.
        if dirpath == self.name and "proc" in dirnames:
            dirnames.remove("proc")
        for filename in filenames:
            full_name = name = os.path.join(dirpath, filename)
            if name.startswith(self.name):
                # Report paths as seen from inside the chroot.
                name = name[len(self.name):]
            ret = is_broken_symlink(self.name, dirpath, filename)
            if ret and not self.is_ignored(name):
                try:
                    target = os.readlink(full_name)
                except os.error:
                    # Not a symlink after all (or it vanished); report
                    # the target as unknown rather than crashing.
                    target = "<unknown>"
                broken.append("%s -> %s" % (name, target))
    if broken:
        logging.error("FAIL: Broken symlinks:\n%s" %
                      indent_string("\n".join(broken)))
        panic()
    else:
        logging.debug("No broken symlinks as far as we can find.")
def check_if_cronfiles(self, packages):
    """Check if the packages have cron files under /etc/cron.d and in case
    positive, it returns the list of files.

    Returns (has_cronfiles, pathnames): a flag plus the executable
    /etc/cron.* files that the given packages ship, according to the
    chroot's dpkg file lists.
    """
    info_dir = self.relative("var/lib/dpkg/info")
    cron_files = []
    for p in packages:
        list_path = os.path.join(info_dir, p + ".list")
        if not os.path.exists(list_path):
            continue

        # open() instead of the deprecated file(); close in finally so a
        # read error cannot leak the handle.
        f = open(list_path, "r")
        try:
            for line in f:
                pathname = line.strip()
                if not pathname.startswith("/etc/cron."):
                    continue
                host_path = self.relative(pathname.strip("/"))
                if not os.path.isfile(host_path):
                    continue
                mode = os.lstat(host_path)[stat.ST_MODE]
                # XXX /etc/cron.d/ files are NOT executables
                if mode & stat.S_IEXEC:
                    cron_files.append(pathname)
                    logging.info("Package " + p + " contains cron file: " + pathname)
        finally:
            f.close()

    return len(cron_files) > 0, cron_files
def check_output_cronfiles(self, list):
    """Check if a given list of cronfiles has any output. Executes
    cron file as cron would do (except for SHELL)."""
    had_output = False
    for cronfile in list:
        # The file may already have been purged; skip silently.
        if not os.path.exists(self.relative(cronfile.strip("/"))):
            continue
        (retval, output) = self.run([cronfile])
        if output:
            had_output = True
            logging.error("FAIL: Cron file %s has output with package removed" % cronfile)
    if had_output:
        panic()
def run_scripts(self, step):
    """Run custom scripts for the given step (e.g. post_install, post_remove).

    Scripts live in tmp/scripts/ inside the chroot; those whose names
    start with 'step' are executed in sorted (alphabetical) order.
    """
    logging.info("Running scripts " + step)
    basepath = self.relative("tmp/scripts/")
    if not os.path.exists(basepath):
        logging.error("Scripts directory %s does not exist" % basepath)
        panic()
    for script_name in sorted(os.listdir(basepath)):
        if script_name.startswith(step):
            self.run([os.path.join("tmp/scripts", script_name)])
class VirtServ(Chroot):
    # Provides a thing that looks to the rest of piuparts much like
    # a chroot but is actually provided by an adt virtualisation server.
    # See /usr/share/doc/autopkgtest/README.virtualisation-server.

    def __init__(self, cmdline):
        # Command line that starts the adt-virt server process.
        self._cmdline = cmdline
        # Fake chroot prefix used to mangle/unmangle testbed paths.
        self.name = '/ADT-VIRT'
        # Popen handle of the running server, or None when not started.
        self._vs = None

    def _awaitok(self, cmd):
        # Read one protocol reply line; anything but "ok ..." is fatal.
        r = self._vs.stdout.readline().rstrip('\n')
        l = r.split(' ')
        if l[0] != 'ok': self._fail('virtserver response to %s: %s' % (cmd,r))
        logging.debug('adt-virt << %s', r)
        # Return the reply's arguments (everything after "ok").
        return l[1:]

    def _vs_send(self, cmd):
        # 'cmd' is either a complete command string or a list whose
        # elements may be 1-tuples, meaning "URL-quote this argument".
        if type(cmd) == type([]):
            def maybe_quote(a):
                if type(a) != type(()): return a
                (a,) = a
                return urllib.quote(a)
            cmd = ' '.join(map(maybe_quote,cmd))
        logging.debug('adt-virt >> %s', cmd)
        print >>self._vs.stdin, cmd
        # Return the command verb, for matching up the reply.
        return cmd.split(' ')[0]

    def _command(self, cmd):
        # argument forms:  complete-command-string
        #                  [arg, ...]  where arg may be (arg,) to quote it
        cmdp = self._vs_send(cmd)
        self._vs.stdin.flush()
        return self._awaitok(cmdp)

    def _getfilecontents(self, filename):
        # Copy a file up from the testbed into a temp file and return
        # its contents.
        # NOTE(review): if create_temp_file() itself raises, 'tf' is
        # unbound and the finally clause raises NameError — confirm.
        try:
            (_,tf) = create_temp_file()
            self._command(['copyup',(filename,),(tf,)])
            f = file(tf)
            d = f.read()
            f.close()
        finally:
            os.remove(tf)
        return d

    def create_temp_dir(self):
        # Lazily start the adt-virt server and record its capabilities.
        if self._vs is None:
            logging.debug('adt-virt || %s' % self._cmdline)
            self._vs = subprocess.Popen(self._cmdline, shell=True,
                stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=None)
            self._awaitok('banner')
            self._caps = self._command('capabilities')

    def shutdown(self):
        # Politely quit the server and reap the process.
        if self._vs is None: return
        self._vs_send('quit')
        self._vs.stdin.close()
        self._vs.stdout.close()
        self._vs.wait()
        self._vs = None

    def remove(self):
        self._command('close')

    def _fail(self,m):
        # Protocol or testbed error: log and abort the whole run.
        logging.error("adt-virt-* error: "+m)
        panic()

    def _open(self):
        # Open a testbed; remember its scratch directory for commands.
        self._scratch = self._command('open')[0]

    # this is a hack to make install_and_upgrade_between distros
    # work; we pretend to save the chroot to a tarball but in
    # fact we do nothing and then we can `restore' the `tarball' with
    # adt-virt revert
    def create_temp_tgz_file(self):
        return self
    def pack_into_tgz(self, tgz):
        if tgz is not self: self._fail('packing into tgz not supported')
        if not 'revert' in self._caps: self._fail('testbed cannot revert')
    def unpack_from_tgz(self, tgz):
        if tgz is not self: self._fail('unpacking from tgz not supported')
        self._open()

    def _execute(self, cmdl, tolerate_errors=False):
        # Run a command on the testbed with a clean locale, stderr
        # folded into stdout; returns (exit status, stdout path,
        # stderr path) — the paths are testbed-side files.
        assert type(cmdl) == type([])
        prefix = ['sh','-ec','''
            LC_ALL=C
            unset LANGUAGES
            export LC_ALL
            exec 2>&1
            exec "$@"
            ''','<command>']
        ca = ','.join(map(urllib.quote, prefix + cmdl))
        stdout = '%s/cmd-stdout' % self._scratch
        stderr = '%s/cmd-stderr-base' % self._scratch
        cmd = ['execute',ca,
               '/dev/null',(stdout,),(stderr,),
               '/root','timeout=600']
        es = int(self._command(cmd)[0])
        if es and not tolerate_errors:
            stderr_data = self._getfilecontents(stderr)
            logging.error("Execution failed (status=%d): %s\n%s" %
                (es, `cmdl`, indent_string(stderr_data)))
            panic()
        return (es, stdout, stderr)

    def _execute_getoutput(self, cmdl):
        # Run an internal command that must succeed silently; copy its
        # stdout into a local temp file and return that file's name.
        # The caller is responsible for removing the temp file.
        (es,stdout,stderr) = self._execute(cmdl)
        stderr_data = self._getfilecontents(stderr)
        if es or stderr_data:
            logging.error('Internal command failed (status=%d): %s\n%s' %
                (es, `cmdl`, indent_string(stderr_data)))
            panic()
        (_,tf) = create_temp_file()
        try:
            self._command(['copyup',(stdout,),(tf,)])
        except:
            os.remove(tf)
            raise
        return tf

    def run(self, command, ignore_errors=False):
        # Chroot-style run(): execute in / on the testbed, return
        # (status, combined output).
        cmdl = ['sh','-ec','cd /\n' + ' '.join(command)]
        (es,stdout,stderr) = self._execute(cmdl, tolerate_errors=True)
        stdout_data = self._getfilecontents(stdout)
        print >>sys.stderr, "VirtServ run", `command`,`cmdl`, '==>', `es`,`stdout`,`stderr`, '|', stdout_data
        if es == 0 or ignore_errors: return (es, stdout_data)
        stderr_data = self._getfilecontents(stderr)
        logging.error('Command failed (status=%d): %s\n%s' %
            (es, `command`, indent_string(stdout_data + stderr_data)))
        panic()

    def setup_minimal_chroot(self):
        self._open()

    def _tbpath(self, with_junk):
        # Strip the fake '/ADT-VIRT' prefix to recover the real
        # testbed-side path.
        if not with_junk.startswith(self.name):
            logging.error("Un-mangling testbed path `%s' but it does not"
                "start with expected manglement `%s'" %
                (with_junk, self.name))
            panic()
        return with_junk[len(self.name):]

    def chmod(self, path, mode):
        self._execute(['chmod', ('0%o' % mode), self._tbpath(path)])
    def remove_files(self, paths):
        self._execute(['rm','--'] + map(self._tbpath, paths))
    def copy_file(self, our_src, tb_dest):
        self._command(['copydown',(our_src,),
                       (self._tbpath(tb_dest)+'/'+os.path.basename(our_src),)])
    def create_file(self, path, data):
        # NOTE(review): this writes 'tf' (the temp file's own name)
        # instead of 'data' — looks like a bug; should probably be
        # f.write(data). Confirm before relying on this method.
        path = self._tbpath(path)
        try:
            (_,tf) = create_temp_file()
            f = file(tf,'w')
            f.write(tf)
            f.close()
            self._command(['copydown',(tf,),(path,)])
        finally:
            os.remove(tf)

    # Minimal stand-in for os.lstat results built from 'find' output.
    class DummyStat: pass

    def save_meta_data(self):
        # Map 'find' %y type letters to stat mode bits.
        mode_map = {
            's': stat.S_IFSOCK,
            'l': stat.S_IFLNK,
            'f': stat.S_IFREG,
            'b': stat.S_IFBLK,
            'd': stat.S_IFDIR,
            'c': stat.S_IFCHR,
            'p': stat.S_IFIFO,
            }

        dict = {}

        # NUL-separated fields survive arbitrary filenames; each record
        # is: type mode uid gid size path link-target "\n".
        tf = self._execute_getoutput(['find','/','-xdev','-printf',
            "%y %m %U %G %s %p %l \\n".replace(' ','\\0')])
        try:
            f = file(tf)

            while 1:
                line = ''
                # Accumulate input until a full 8-field record is seen
                # (embedded newlines in filenames span readline() calls).
                while 1:
                    splut = line.split('\0')
                    if len(splut) == 8 and splut[7] == '\n': break
                    if len(splut) >= 8:
                        self._fail('aaargh wrong output from find: %s' %
                            urllib.quote(line), `splut`)
                    l = f.readline()
                    if not l:
                        if not line: break
                        self._fail('aargh missing final newline from find'
                            ': %s, %s' % (`l`[0:200], `splut`[0:200]))
                    line += l
                if not line: break

                st = VirtServ.DummyStat()
                st.st_mode = mode_map[splut[0]] | int(splut[1],8)
                (st.st_uid, st.st_gid, st.st_size) = map(int, splut[2:5])

                dict[splut[5]] = (st, splut[6])

            f.close()
        finally:
            os.remove(tf)

        return dict

    def get_files_owned_by_packages(self):
        # Grep every dpkg .list file at once; each output line is
        # "<pkg>.list:<pathname>".
        tf = self._execute_getoutput(['bash','-ec','''
            cd /var/lib/dpkg/info
            find . -name "*.list" -type f -print0 | \\
                xargs -r0 egrep . /dev/null
            test "${PIPESTATUS[*]}" = "0 0"
            '''])
        dict = {}
        try:
            f = file(tf)
            for l in f:
                (lf,pathname) = l.rstrip('\n').split(':',1)
                assert lf.endswith('.list')
                pkg = lf[:-5]
                if pathname in dict:
                    dict[pathname].append(pkg)
                else:
                    dict[pathname] = [pkg]

            f.close()
        finally:
            os.remove(tf)
        return dict

    def check_for_broken_symlinks(self):
        if not settings.check_broken_symlinks:
            return
        # List symlinks whose targets do not resolve; any output line
        # is a broken link.
        tf = self._execute_getoutput(['bash','-ec','''
            find / -xdev -type l -print0 | \\
                xargs -r0 -i'{}' \\
                find '{}' -maxdepth 0 -follow -type l -ls
            test "${PIPESTATUS[*]}" = "0 0"
            '''])
        try:
            f = file(tf)
            broken = False
            for l in f:
                logging.error("FAIL: Broken symlink: " + l)
                broken = True
            if broken: panic()
            logging.debug("No broken symlinks found.")
        finally:
            os.remove(tf)

    # Process and /proc handling do not apply to an adt-virt testbed.
    def check_for_no_processes(self): pass # ?!
    def mount_proc(self): pass
    def unmount_proc(self): pass
def selinux_enabled(enabled_test="/usr/sbin/selinuxenabled"):
    """Return True if SELinux is enabled on the host, False otherwise.

    Runs the standard 'selinuxenabled' helper, which exits 0 when
    SELinux is active. The original implicitly returned None when the
    helper was missing; this now always returns a bool.
    """
    if not os.access(enabled_test, os.X_OK):
        # Helper not installed (or not executable): assume no SELinux.
        return False
    retval, output = run([enabled_test], ignore_errors=True)
    return retval == 0
def objects_are_different(pair1, pair2):
    """Are filesystem objects different based on their meta data?

    Each pair is (stat-like object, symlink target or None). For regular
    files the size is also compared; mtime is deliberately ignored.
    """
    (meta1, target1) = pair1
    (meta2, target2) = pair2
    if target1 != target2:
        return True
    if (meta1.st_mode != meta2.st_mode or
        meta1.st_uid != meta2.st_uid or
        meta1.st_gid != meta2.st_gid):
        return True
    if stat.S_ISREG(meta1.st_mode):
        return meta1.st_size != meta2.st_size  # or mtime, deliberately not
    return False
def diff_meta_data(tree1, tree2):
    """Compare two dir trees and return list of new files (only in 'tree2'),
    removed files (only in 'tree1'), and modified files."""

    remaining1 = tree1.copy()
    remaining2 = tree2.copy()

    # Drop explicitly ignored files from both sides.
    for name in settings.ignored_files:
        remaining1.pop(name, None)
        remaining2.pop(name, None)

    # Drop everything matching an ignored pattern.
    for pattern in settings.ignored_patterns:
        compiled = re.compile(pattern)
        for tree in (remaining1, remaining2):
            for name in list(tree.keys()):
                if compiled.search(name):
                    del tree[name]

    # Entries in both trees are either modified or unchanged; either way
    # they are consumed, leaving only removed/new entries behind.
    modified = []
    for name in list(remaining1.keys()):
        if name in remaining2:
            if objects_are_different(remaining1[name], remaining2[name]):
                modified.append((name, remaining1[name]))
            del remaining1[name]
            del remaining2[name]

    removed = list(remaining1.items())
    new = list(remaining2.items())

    return new, removed, modified
def file_list(meta_infos, file_owners):
    """Return list of indented filenames, annotated with owning packages."""
    parts = []
    for name, data in sorted(meta_infos):
        parts.append(" %s\t" % name)
        if name in file_owners:
            parts.append(" owned by: %s\n" % ", ".join(file_owners[name]))
        else:
            parts.append(" not owned\n")
    return "".join(parts)
def offending_packages(meta_infos, file_owners):
    """Return the set of packages that own any of the given files.

    Uses the built-in set type (Python 2.4+) instead of the deprecated
    'sets' module; callers only iterate and test membership, so this is
    a drop-in replacement.
    """
    pkgset = set()
    for name, data in meta_infos:
        if name in file_owners:
            for pkg in file_owners[name]:
                pkgset.add(pkg)
    return pkgset
def prune_files_list(files, depsfiles):
    """Remove elements from 'files' that are in 'depsfiles', and return the
    list of removed elements.

    'files' is modified in place.
    """
    pruned = []
    for candidate in depsfiles:
        if candidate in files:
            files.remove(candidate)
            pruned.append(candidate)
    return pruned
def diff_selections(chroot, selections):
    """Compare original and current package selection.

    Return dict where dict[package_name] = original_status, that is, the
    value in the dict is the state the package must be set to in order
    to restore the original selections.
    """
    changes = {}
    current = chroot.get_selections()
    for name, state in current.items():
        if name not in selections:
            # Package appeared after the snapshot: schedule a purge.
            changes[name] = "purge"
        elif selections[name] != state and selections[name] == "purge":
            changes[name] = selections[name]
    return changes
def get_package_names_from_package_files(filenames):
    """Return list of package names given list of package file names.

    The names are read from each .deb's control data via 'dpkg --info'.
    """
    names = []
    for filename in filenames:
        (status, output) = run(["dpkg", "--info", filename])
        for raw_line in output.split("\n"):
            line = raw_line.lstrip()
            if line.startswith("Package:"):
                names.append(line.split(":", 1)[1].strip())
    return names
# Method to process a changes file, returning a list of all the .deb packages
# from the 'Files' stanza.
def process_changes(changes):
    """Return the .deb file names listed in a .changes file's Files stanza.

    Returns None when the changes file is unreadable, and an empty list
    when no Files stanza can be found (the original code raised
    IndexError in that case).
    """
    # Determine the path to the changes file, then check if it's readable.
    dir_path = ""
    changes_path = ""
    if not os.path.dirname(changes):
        changes_path = os.path.basename(changes)
    else:
        dir_path = os.path.dirname(changes) + "/"
        changes_path = os.path.abspath(changes)
    if not os.access(changes_path, os.R_OK):
        logging.warn(changes_path + " is not readable. Skipping.")
        return

    # Determine the packages in the changes file through the 'Files' stanza.
    field = 'Files'
    pattern = re.compile(
        r'^' + field + r':' + r''' # The field we want the contents from
        (.*?)                      # The contents of the field
        \n([^ ]|$)                 # Start of a new field or EOF
        ''',
        re.MULTILINE | re.DOTALL | re.VERBOSE)
    f = open(changes_path)
    file_text = f.read()
    f.close()
    matches = pattern.split(file_text)

    # No Files stanza: the split produced no captured groups. Guard this
    # so a malformed changes file cannot raise IndexError below.
    if len(matches) < 2:
        logging.warn(changes_path + " contains no Files stanza. Skipping.")
        return []

    # Append all the packages found in the changes file to a package list.
    package_list = []
    newline_p = re.compile('\n')
    package_p = re.compile(r'.*?([^ ]+\.deb)$')
    for line in newline_p.split(matches[1]):
        if package_p.match(line):
            package = dir_path + package_p.split(line)[1]
            package_list.append(package)

    # Return the list.
    return package_list
def check_results(chroot, root_info, file_owners, deps_info=None):
    """Check that current chroot state matches 'root_info'.

    If settings.warn_on_others is True and deps_info is not None, then only
    print a warning rather than failing if the current chroot contains files
    that are in deps_info but not in root_info. (In this case, deps_info
    should be the result of chroot.save_meta_data() right after the
    dependencies are installed, but before the actual packages to test are
    installed.)

    Returns True when the chroot is clean, False otherwise. Fixes the
    typos in the original log messages ("puring", "dissappeared",
    "bbeing").
    """
    current_info = chroot.save_meta_data()
    if settings.warn_on_others and deps_info is not None:
        (new, removed, modified) = diff_meta_data(root_info, current_info)
        (depsnew, depsremoved, depsmodified) = diff_meta_data(root_info,
                                                              deps_info)

        # Differences that dependencies (not the packages under test)
        # caused are only warned about, not treated as failures.
        warnnew = prune_files_list(new, depsnew)
        warnremoved = prune_files_list(removed, depsremoved)
        warnmodified = prune_files_list(modified, depsmodified)

    else:
        (new, removed, modified) = diff_meta_data(root_info, current_info)

    ok = True
    if new:
        logging.error("FAIL: Package purging left files on system:\n" +
                      file_list(new, file_owners))
        ok = False
    if removed:
        logging.error("FAIL: After purging files have disappeared:\n" +
                      file_list(removed, file_owners))
        ok = False
    if modified:
        logging.error("FAIL: After purging files have been modified:\n" +
                      file_list(modified, file_owners))
        ok = False

    if ok and settings.warn_on_others and deps_info is not None:
        if warnnew:
            msg = ("Warning: Package purging left files on system:\n" +
                   file_list(warnnew, file_owners) +
                   "These files seem to have been left by dependencies rather "
                   "than by packages\nbeing explicitly tested.\n")
            logging.info(msg)
        if warnremoved:
            msg = ("After purging files have disappeared:\n" +
                   file_list(warnremoved, file_owners) +
                   "This seems to have been caused by dependencies rather "
                   "than by packages\nbeing explicitly tested.\n")
            logging.info(msg)
        if warnmodified:
            msg = ("After purging files have been modified:\n" +
                   file_list(warnmodified, file_owners) +
                   "This seems to have been caused by dependencies rather "
                   "than by packages\nbeing explicitly tested.\n")
            logging.info(msg)

    return ok
def install_purge_test(chroot, root_info, selections, package_list, packages):
    """Do an install-purge test. Return True if successful, False if not.
    Assume 'root' is a directory already populated with a working
    chroot, with packages in states given by 'selections'."""

    # Install packages into the chroot.

    if settings.warn_on_others:
        # Create a metapackage with dependencies from the given packages
        if package_list:
            control_infos = []
            # We were given package files, so let's get the Depends and
            # Conflicts directly from the .debs
            for deb in package_list:
                returncode, output = run(["dpkg", "-f", deb])
                control = deb822.Deb822(output)
                control_infos.append(control)
        else:
            # We have package names. Use apt to get all their control
            # information.
            apt_cache_args = ["apt-cache", "show"]
            apt_cache_args.extend(packages)
            returncode, output = chroot.run(apt_cache_args)
            control_infos = deb822.Deb822.iter_paragraphs(output.splitlines())

        # Merge all Depends/Conflicts fields into one metapackage so the
        # dependencies can be installed (and later removed) in one go.
        depends = []
        conflicts = []
        for control in control_infos:
            if control.get("depends"):
                depends.append(control["depends"])
            if control.get("conflicts"):
                conflicts.append(control["conflicts"])
        all_depends = ", ".join(depends)
        all_conflicts = ", ".join(conflicts)
        metapackage = make_metapackage("piuparts-depends-dummy",
                                       all_depends, all_conflicts)

        # Install the metapackage
        chroot.install_package_files([metapackage])
        # Now remove it
        metapackagename = os.path.basename(metapackage)[:-4]
        chroot.remove_or_purge("purge", [metapackagename])
        shutil.rmtree(os.path.dirname(metapackage))

        # Save the file ownership information so we can tell which
        # modifications were caused by the actual packages we are testing,
        # rather than by their dependencies.
        deps_info = chroot.save_meta_data()
    else:
        deps_info = None

    if package_list:
        chroot.install_package_files(package_list)
    else:
        chroot.install_packages_by_name(packages)
        chroot.run(["apt-get", "clean"])

    chroot.check_for_no_processes()
    chroot.check_for_broken_symlinks()

    file_owners = chroot.get_files_owned_by_packages()

    # Remove all packages from the chroot that weren't there initially.
    changes = diff_selections(chroot, selections)
    chroot.restore_selections(changes, packages)

    chroot.check_for_broken_symlinks()

    return check_results(chroot, root_info, file_owners, deps_info=deps_info)
def install_upgrade_test(chroot, root_info, selections, package_list, package_names):
    """Install package via apt-get, then upgrade from package files.
    Return True if successful, False if not."""

    # First install via apt-get.
    chroot.install_packages_by_name(package_names)

    if settings.scriptsdir is not None:
        chroot.run_scripts("pre_upgrade")

    chroot.check_for_broken_symlinks()

    # Then from the package files.
    chroot.install_package_files(package_list)

    file_owners = chroot.get_files_owned_by_packages()

    # Remove all packages from the chroot that weren't there
    # initially.
    changes = diff_selections(chroot, selections)
    chroot.restore_selections(changes, package_names)

    chroot.check_for_no_processes()
    chroot.check_for_broken_symlinks()

    return check_results(chroot, root_info, file_owners)
def save_meta_data(filename, root_info, selections):
    """Save directory tree meta data into a file for fast access later.

    The data is pickled, so the file is opened in binary mode: text mode
    corrupts binary pickle protocols and breaks on platforms that do
    newline translation. The handle is closed even if pickling fails.
    """
    logging.debug("Saving chroot meta data to %s" % filename)
    f = open(filename, "wb")
    try:
        pickle.dump((root_info, selections), f)
    finally:
        f.close()
def load_meta_data(filename):
    """Load meta data saved by 'save_meta_data'.

    The file is opened in binary mode to match the pickled contents;
    the handle is closed even if unpickling fails.
    """
    logging.debug("Loading chroot meta data from %s" % filename)
    f = open(filename, "rb")
    try:
        (root_info, selections) = pickle.load(f)
    finally:
        f.close()
    return root_info, selections
def install_and_upgrade_between_distros(filenames, packages):
    """Install package and upgrade it between distributions, then remove.

    The first entry of settings.debian_distros is the starting
    distribution; the chroot is upgraded through the remaining ones.
    'filenames' are the package files under test, 'packages' the
    corresponding package names.

    Return True if successful, False if not."""

    chroot = get_chroot()
    chroot.create()
    # 'panic_id' rather than 'id', which shadows the builtin.
    panic_id = do_on_panic(chroot.remove)

    # Keep a tarball of the starting-point chroot so it can be
    # recreated after the end-state metadata has been collected.
    if settings.basetgz:
        root_tgz = settings.basetgz
    else:
        root_tgz = chroot.create_temp_tgz_file()
        chroot.pack_into_tgz(root_tgz)

    # Reference end state (directory tree info + package selections):
    # load it from a previously saved file (-B), or compute it by
    # upgrading a pristine chroot through all the distributions.
    if settings.endmeta:
        root_info, selections = load_meta_data(settings.endmeta)
    else:
        chroot.upgrade_to_distros(settings.debian_distros[1:], [])
        chroot.run(["apt-get", "clean"])

        root_info = chroot.save_meta_data()
        selections = chroot.get_selections()

        if settings.saveendmeta:
            save_meta_data(settings.saveendmeta, root_info, selections)

    # Throw the chroot away and start over from the tarball of the
    # first distribution for the real test run.
    chroot.remove()
    dont_do_on_panic(panic_id)
    chroot = get_chroot()
    chroot.create_temp_dir()
    panic_id = do_on_panic(chroot.remove)
    chroot.unpack_from_tgz(root_tgz)

    chroot.check_for_no_processes()

    # Install the archive version in the first distribution...
    chroot.run(["apt-get", "update"])
    chroot.install_packages_by_name(packages)

    chroot.check_for_no_processes()

    # ...upgrade through the remaining distributions...
    chroot.upgrade_to_distros(settings.debian_distros[1:], packages)

    chroot.check_for_no_processes()

    # ...and finally upgrade to the package files under test.
    chroot.install_package_files(filenames)
    chroot.run(["apt-get", "clean"])

    chroot.check_for_no_processes()

    file_owners = chroot.get_files_owned_by_packages()

    # Remove everything not in the reference selections, then compare
    # against the reference end state.
    changes = diff_selections(chroot, selections)
    chroot.restore_selections(changes, packages)
    result = check_results(chroot, root_info, file_owners)

    chroot.check_for_no_processes()

    # Only delete the tarball if we created it ourselves.
    if root_tgz != settings.basetgz:
        remove_files([root_tgz])
    chroot.remove()
    dont_do_on_panic(panic_id)

    return result
|
1713 |
||
1714 |
||
1715 |
def parse_mirror_spec(str, defaultcomponents=None):
    """Parse a mirror specification from the --mirror option argument.

    The spec is the mirror URL optionally followed by whitespace-separated
    component names; when no components are given, a copy of
    'defaultcomponents' is used (empty list if omitted).

    Return (mirror, componentslist)."""
    # None-sentinel instead of a mutable default argument; a fresh copy
    # is returned either way so callers can't mutate the default.
    # (The parameter name 'str' shadows the builtin but is kept for
    # interface compatibility.)
    parts = str.split()
    return parts[0], parts[1:] or list(defaultcomponents or [])
|
1720 |
||
1721 |
||
1722 |
def forget_ignores(option, opt, value, parser, *args, **kwargs):
    """optparse callback for -n/--no-ignores.

    Drop every ignore rule accumulated so far, including the built-in
    ones: the parser's own option lists as well as the global settings
    lists (bind mounts, ignored files, ignored patterns).
    """
    parser.values.ignore = []
    parser.values.ignore_regex = []
    for attr in ("bindmounts", "ignored_files", "ignored_patterns"):
        setattr(settings, attr, [])
|
1728 |
||
1729 |
||
1730 |
def set_basetgz_to_pbuilder(option, opt, value, parser, *args, **kwargs):
    """optparse callback for -p/--pbuilder.

    Equivalent to --basetgz pointed at pbuilder's standard base tarball.
    """
    parser.values.basetgz = "/var/cache/pbuilder/base.tgz"
|
1732 |
||
1733 |
||
1734 |
def parse_command_line():
    """Parse the command line, change global settings, return non-options.

    Fills in the global 'settings' object from the parsed options,
    initializes logging, and exits (status 1) on usage errors.
    """

    parser = optparse.OptionParser(usage="%prog [options] package ...",
                                   version="piuparts %s" % VERSION)

    parser.add_option("-D", "--defaults", action="store",
                      help="Choose which set of defaults to use "
                           "(debian/ubuntu).")

    parser.add_option("-a", "--apt", action="store_true", default=False,
                      help="Command line arguments are package names " +
                           "to be installed via apt.")

    parser.add_option("-b", "--basetgz", metavar="TARBALL",
                      help="Use TARBALL as the contents of the initial " +
                           "chroot, instead of building a new one with " +
                           "debootstrap.")

    parser.add_option("-B", "--end-meta", metavar="FILE",
                      help="XXX")

    parser.add_option("--bindmount", action="append", metavar="DIR",
                      default=[],
                      help="Directory to be bind-mounted inside the chroot.")

    parser.add_option("-d", "--distribution", action="append", metavar="NAME",
                      help="Which Debian distribution to use: a code name " +
                           "(etch, lenny, sid) or experimental. The " +
                           "default is sid (i.e., unstable).")

    parser.add_option("-i", "--ignore", action="append", metavar="FILENAME",
                      default=[],
                      help="Add FILENAME to list of filenames to be " +
                           "ignored when comparing changes to chroot.")

    parser.add_option("-I", "--ignore-regex", action="append",
                      metavar="REGEX", default=[],
                      help="Add REGEX to list of Perl compatible regular " +
                           "expressions for filenames to be " +
                           "ignored when comparing changes to chroot.")

    parser.add_option("-k", "--keep-tmpdir",
                      action="store_true", default=False,
                      help="Don't remove the temporary directory for the " +
                           "chroot when the program ends.")

    parser.add_option("--keep-sources-list",
                      action="store_true", default=False,
                      help="Don't modify the chroot's " +
                           "etc/apt/sources.list (only makes sense " +
                           "with --basetgz).")

    parser.add_option("--warn-on-others",
                      action="store_true", default=False,
                      help="Print a warning rather than failing if "
                           "files are left behind, modified, or removed "
                           "by a package that was not given on the "
                           "command-line. Behavior with multiple packages "
                           "given could be problematic, particularly if the "
                           "dependency tree of one package in the list "
                           "includes another in the list. Therefore, it is "
                           "recommended to use this option with one package "
                           "at a time.")

    parser.add_option("--skip-minimize",
                      action="store_true", default=False,
                      help="Skip minimize chroot step.")

    parser.add_option("--list-installed-files",
                      action="store_true", default=False,
                      help="List files added to the chroot after the " +
                           "installation of the package.")

    parser.add_option("--no-upgrade-test",
                      action="store_true", default=False,
                      help="Skip testing the upgrade from an existing version " +
                           "in the archive.")

    parser.add_option("--skip-cronfiles-test",
                      action="store_true", default=False,
                      help="Skip testing the output from the cron files.")

    parser.add_option("--scriptsdir", metavar="DIR",
                      help="Directory where are placed the custom scripts.")

    parser.add_option("-l", "--log-file", metavar="FILENAME",
                      help="Write log file to FILENAME in addition to " +
                           "the standard output.")

    parser.add_option("-m", "--mirror", action="append", metavar="URL",
                      default=[],
                      help="Which Debian mirror to use.")

    parser.add_option("-n", "--no-ignores", action="callback",
                      callback=forget_ignores,
                      help="Forget all ignores set so far, including " +
                           "built-in ones.")

    parser.add_option("-N", "--no-symlinks", action="store_true",
                      default=False,
                      help="Don't check for broken symlinks.")

    parser.add_option("-p", "--pbuilder", action="callback",
                      callback=set_basetgz_to_pbuilder,
                      help="Use /var/cache/pbuilder/base.tgz as the base " +
                           "tarball.")

    parser.add_option('', "--adt-virt",
                      metavar='CMDLINE', default=None,
                      help="Use CMDLINE via autopkgtest (adt-virt-*)"
                           " protocol instead of managing a chroot.")

    parser.add_option("-s", "--save", metavar="FILENAME",
                      help="Save the chroot into FILENAME.")

    parser.add_option("-S", "--save-end-meta", metavar="FILE",
                      help="XXX")

    parser.add_option("-t", "--tmpdir", metavar="DIR",
                      help="Use DIR for temporary storage. Default is " +
                           "$TMPDIR or /tmp.")

    parser.add_option("--single-changes-list", default=False,
                      action="store_true",
                      help="test all packages from all changes files together.")

    parser.add_option("-v", "--verbose",
                      action="store_true", default=False,
                      help="No meaning anymore.")

    parser.add_option("--debfoster-options",
                      default="-o MaxPriority=important -o UseRecommends=no -f -n apt debfoster",
                      help="Run debfoster with different parameters (default: -o MaxPriority=important -o UseRecommends=no -f -n apt debfoster).")

    (opts, args) = parser.parse_args()

    settings.defaults = opts.defaults
    settings.args_are_package_files = not opts.apt
    settings.basetgz = opts.basetgz
    # Fix: -B/--end-meta and -S/--save-end-meta were parsed but never
    # copied into settings, so install_and_upgrade_between_distros()
    # (which reads settings.endmeta/saveendmeta) never saw them.
    settings.endmeta = opts.end_meta
    settings.saveendmeta = opts.save_end_meta
    settings.bindmounts += opts.bindmount
    settings.debian_distros = opts.distribution
    settings.ignored_files += opts.ignore
    settings.ignored_patterns += opts.ignore_regex
    settings.keep_tmpdir = opts.keep_tmpdir
    settings.single_changes_list = opts.single_changes_list
    settings.keep_sources_list = opts.keep_sources_list
    settings.skip_minimize = opts.skip_minimize
    settings.list_installed_files = opts.list_installed_files
    settings.no_upgrade_test = opts.no_upgrade_test
    settings.skip_cronfiles_test = opts.skip_cronfiles_test
    log_file_name = opts.log_file

    defaults = DefaultsFactory().new_defaults()

    settings.debian_mirrors = [parse_mirror_spec(x, defaults.get_components())
                               for x in opts.mirror]
    settings.check_broken_symlinks = not opts.no_symlinks
    settings.savetgz = opts.save
    settings.warn_on_others = opts.warn_on_others
    settings.debfoster_options = opts.debfoster_options.split()

    if opts.adt_virt is None:
        settings.adt_virt = None
    else:
        settings.adt_virt = VirtServ(opts.adt_virt)

    if opts.tmpdir is not None:
        settings.tmpdir = opts.tmpdir
        if not os.path.isdir(settings.tmpdir):
            logging.error("Temporary directory is not a directory: %s" %
                          settings.tmpdir)
            panic()

    setup_logging(DUMP, log_file_name)

    # Collected exit status; renamed from 'exit', which shadows the
    # builtin.
    exit_status = None

    if not settings.tmpdir:
        if "TMPDIR" in os.environ:
            settings.tmpdir = os.environ["TMPDIR"]
        else:
            settings.tmpdir = "/tmp"

    if opts.scriptsdir is not None:
        settings.scriptsdir = opts.scriptsdir
        if not os.path.isdir(settings.scriptsdir):
            logging.error("Scripts directory is not a directory: %s" %
                          settings.scriptsdir)
            panic()

    if not settings.debian_distros:
        settings.debian_distros = defaults.get_distribution()

    if not settings.debian_mirrors:
        # Prefer the mirrors the host system is configured with, then
        # fall back to the flavor's hard-coded default.
        settings.debian_mirrors = defaults.find_default_mirrors()
        if not settings.debian_mirrors:
            settings.debian_mirrors = defaults.get_mirror()

    if settings.keep_sources_list and \
       (not settings.basetgz or len(settings.debian_distros) > 1):
        logging.error("--keep-sources-list only makes sense with --basetgz "
                      "and only one distribution")
        exit_status = 1

    if not args:
        logging.error("Need command line arguments: " +
                      "names of packages or package files")
        exit_status = 1

    if exit_status is not None:
        sys.exit(exit_status)

    return args
|
1949 |
||
1950 |
||
1951 |
def get_chroot():
    """Return the container backend to run the tests in.

    The adt-virt server takes precedence when one was configured via
    --adt-virt; otherwise a plain Chroot is used.
    """
    if settings.adt_virt is not None:
        return settings.adt_virt
    return Chroot()
|
1954 |
||
1955 |
# Process the packages given in a list
def process_packages(package_list):
    # Find the names of packages.
    if settings.args_are_package_files:
        packages = get_package_names_from_package_files(package_list)
    else:
        # Arguments were already names (-a/--apt): there are no local
        # package files to install directly.
        packages = package_list
        package_list = []

    if len(settings.debian_distros) == 1:
        # Single-distribution mode: install/purge test and, unless
        # disabled, the upgrade-from-archive test, in one chroot.
        chroot = get_chroot()
        chroot.create()
        # NOTE(review): 'id' shadows the builtin of the same name.
        id = do_on_panic(chroot.remove)

        root_info = chroot.save_meta_data()
        selections = chroot.get_selections()

        if not install_purge_test(chroot, root_info, selections,
                                  package_list, packages):
            logging.error("FAIL: Installation and purging test.")
            panic()
        logging.info("PASS: Installation and purging test.")

        if not settings.no_upgrade_test:
            # The upgrade test needs local package files and an archive
            # version known to apt-get; otherwise it is skipped.
            if not settings.args_are_package_files:
                logging.info("Can't test upgrades: -a or --apt option used.")
            elif not chroot.apt_get_knows(packages):
                logging.info("Can't test upgrade: packages not known by apt-get.")
            elif install_upgrade_test(chroot, root_info, selections, package_list,
                                      packages):
                logging.info("PASS: Installation, upgrade and purging tests.")
            else:
                logging.error("FAIL: Installation, upgrade and purging tests.")
                panic()

        chroot.remove()
        dont_do_on_panic(id)
    else:
        # Several distributions given: test upgrading between them.
        if install_and_upgrade_between_distros(package_list, packages):
            logging.info("PASS: Upgrading between Debian distributions.")
        else:
            logging.error("FAIL: Upgrading between Debian distributions.")
            panic()

    if settings.adt_virt is not None: settings.adt_virt.shutdown()
|
2000 |
||
2001 |
def main():
    """Main program. But you knew that."""

    args = parse_command_line()

    logging.info("-" * 78)
    logging.info("To quickly glance what went wrong, scroll down to the bottom of this logfile.")
    logging.info("FAQ available at http://wiki.debian.org/piuparts/FAQ")
    logging.info("-" * 78)
    logging.info("piuparts version %s starting up." % VERSION)
    logging.info("Command line arguments: %s" % " ".join(sys.argv))
    logging.info("Running on: %s %s %s %s %s" % os.uname())

    # Make sure debconf does not ask questions and stop everything.
    # Packages that don't use debconf will lose.
    os.environ["DEBIAN_FRONTEND"] = "noninteractive"

    # Sort arguments into .changes files and plain package arguments.
    # Each changes file is tested as its own group unless
    # --single-changes-list folds everything into one run.
    changes_packages_list = []
    regular_packages_list = []
    # Raw string: '\.' in a plain string relies on a deprecated escape.
    changes_p = re.compile(r'.*\.changes$')
    for arg in args:
        if changes_p.match(arg):
            package_list = process_changes(arg)
            if settings.single_changes_list:
                for package in package_list:
                    regular_packages_list.append(package)
            else:
                changes_packages_list.append(package_list)
        else:
            regular_packages_list.append(arg)

    if changes_packages_list:
        for package_list in changes_packages_list:
            process_packages(package_list)

    if regular_packages_list:
        process_packages(regular_packages_list)

    logging.info("PASS: All tests.")
    logging.info("piuparts run ends.")
|
2042 |
||
2043 |
||
2044 |
if __name__ == "__main__":
    # check if user has root privileges
    if os.getuid():
        print 'You need to be root to use piuparts.'
        # NOTE(review): exits with status 0 even though this is a
        # refusal to run -- confirm callers don't treat 0 as success.
        sys.exit(0)

    try:
        # "piuparts unittest" runs the built-in test suite instead of
        # a normal run; the argument is stripped so unittest.main()
        # doesn't try to interpret it.
        if sys.argv[1:] == ["unittest"]:
            del sys.argv[1]
            unittest.main()
        else:
            main()
    except KeyboardInterrupt:
        print ''
        print 'Piuparts interrupted by the user, exiting...'
        sys.exit(1)