8
from random import randint
12
def archive(ppds_directory):
    """Returns a string with the decompressor, its dependencies and the archive.

    It reads the template at pyppd/pyppd-ppdfile.in, inserts the dependencies
    and the archive encoded in base64, and returns as a string.

    Returns None when no PPDs were found in ppds_directory.
    """
    logging.info('Compressing folder "%s".' % ppds_directory)
    ppds_compressed = compress(ppds_directory)
    # Nothing was archived (no PPDs found): propagate the failure to the caller.
    if not ppds_compressed:
        return None

    ppds_compressed_b64 = base64.b64encode(ppds_compressed)

    logging.info('Populating template.')
    template = read_file_in_syspath("pyppd/pyppd-ppdfile.in")
    compressor_py = read_file_in_syspath("pyppd/compressor.py")

    # Splice the decompressor source and the base64 payload into the template.
    template = template.replace("@compressor@", compressor_py)
    template = template.replace("@ppds_compressed_b64@", ppds_compressed_b64)

    return template
def compress(directory):
    """Compresses and indexes *.ppd and *.ppd.gz in directory returning a string.

    The directory is walked recursively, concatenating all ppds found in a string.
    For each, it tests if its filename ends in *.gz. If so, opens with gzip. If
    not, opens directly. Then, it parses and saves its name, description (in the
    format CUPS needs (which can be more than one)) and it's position in the ppds
    string (start position and length) into a dictionary, used as an index.
    Then, it compresses the string, adds into the dictionary as key ARCHIVE and
    returns a compressed pickle dump of it.

    Returns None when no PPDs are found in directory.
    """
    ppds = ""
    ppds_index = {}
    abs_directory = os.path.abspath(directory)

    for ppd_path in find_files(directory, ("*.ppd", "*.ppd.gz")):
        # Remove 'directory/' from the filename
        ppd_filename = ppd_path[len(abs_directory)+1:]

        if ppd_path.lower().endswith(".gz"):
            ppd_file = gzip.open(ppd_path).read()
            # We don't want the .gz extension in our filename
            ppd_filename = ppd_filename[:-3]
        else:
            ppd_file = open(ppd_path).read()

        # Record where this PPD will live inside the concatenated string.
        start = len(ppds)
        length = len(ppd_file)
        logging.debug('Found %s (%d bytes).' % (ppd_path, length))

        ppd_parsed = ppd.parse(ppd_file, ppd_filename)
        ppd_descriptions = map(str, ppd_parsed)
        # Keyed by the first parsed entry's URI; value is (offset, size, descriptions).
        ppds_index[ppd_parsed[0].uri] = (start, length, ppd_descriptions)
        logging.debug('Adding %d entry(ies): %s.' % (len(ppd_descriptions), map(str, ppd_parsed)))
        ppds += ppd_file

    if not ppds:
        logging.error('No PPDs found in folder "%s".' % directory)
        return None

    logging.info('Compressing archive.')
    ppds_index['ARCHIVE'] = compressor.compress(ppds)
    logging.info('Generating and compressing pickle dump.')
    ppds_pickle = compressor.compress(cPickle.dumps(ppds_index))

    return ppds_pickle
def read_file_in_syspath(filename):
    """Reads the file in filename in each sys.path.

    If we couldn't find, throws the last IOError caught.
    """
    last_exception = None
    for path in sys.path:
        try:
            return open(path + "/" + filename).read()
        except IOError as ex:
            # Keep trying the remaining sys.path entries; remember the last
            # failure so it can be re-raised if the file is found nowhere.
            last_exception = ex
    raise last_exception
def find_files(directory, patterns):
    """Yields each file that matches any of patterns in directory."""
    logging.debug('Searching for "%s" files in folder "%s".' %
                  (", ".join(patterns), directory))
    abs_directory = os.path.abspath(directory)
    # Walk the tree once; check every filename against every pattern, yielding
    # the absolute path of each match.
    for root, dirnames, filenames in os.walk(abs_directory):
        for pattern in patterns:
            for filename in fnmatch.filter(filenames, pattern):
                yield os.path.join(root, filename)