26
from packages import YumAvailablePackage
27
from repomd import mdUtils, mdErrors
29
# Simple subclass of YumAvailablePackage that can load 'simple headers' from
30
# the database when they are requested
31
class YumAvailablePackageSqlite(YumAvailablePackage):
32
def __init__(self, pkgdict, repoid):
33
YumAvailablePackage.__init__(self,pkgdict,repoid)
34
self.sack = pkgdict.sack
35
self.pkgId = pkgdict.pkgId
36
self.simple['id'] = self.pkgId
39
def loadChangelog(self):
40
if hasattr(self, 'dbusedother'):
43
self.changelog = self.sack.getChangelog(self.pkgId)
45
def returnSimple(self, varname):
46
if (not self.simple.has_key(varname) and not hasattr(self,'dbusedsimple')):
47
# Make sure we only try once to get the stuff from the database
49
details = self.sack.getPackageDetails(self.pkgId)
50
self.importFromDict(details,self.simple['repoid'])
52
return YumAvailablePackage.returnSimple(self,varname)
55
if (hasattr(self,'dbusedfiles')):
58
self.files = self.sack.getFiles(self.pkgId)
28
from packages import PackageObject, RpmBase, YumAvailablePackage
32
from sqlutils import executeSQL
33
import rpmUtils.miscutils
36
def catchSqliteException(func):
    """This decorator converts sqlite exceptions into RepoError"""
    def newFunc(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except sqlutils.sqlite.Error:
            # Translate any low-level sqlite failure into the generic
            # repo error the rest of yum already knows how to handle.
            raise Errors.RepoError

    # Copy the wrapped function's metadata by hand
    # (pre-functools.wraps idiom, consistent with this file's vintage).
    newFunc.__name__ = func.__name__
    newFunc.__doc__ = func.__doc__
    newFunc.__dict__.update(func.__dict__)
    return newFunc
50
def _share_data(value):
    """Return the canonical shared copy of *value*.

    The value is looked up in the module-level store; when it is not
    there yet, this very object is inserted and becomes the shared
    version used from now on.
    """
    shared = _store.setdefault(value, value)
    return shared
55
class YumAvailablePackageSqlite(YumAvailablePackage, PackageObject, RpmBase):
56
def __init__(self, repo, db_obj):
57
self.prco = { 'obsoletes': (),
65
self._loadedfiles = False
66
self._read_db_obj(db_obj)
68
self.ver = self.version
69
self.rel = self.release
70
self.pkgtup = (self.name, self.arch, self.epoch, self.version, self.release)
72
self._changelog = None
75
files = property(fget=lambda self: self._loadFiles())
77
def _read_db_obj(self, db_obj, item=None):
78
"""read the db obj. If asked for a specific item, return it.
79
otherwise populate out into the object what exists"""
83
except (IndexError, KeyError):
86
for item in ['name', 'arch', 'epoch', 'version', 'release']:
88
setattr(self, item, _share_data(db_obj[item]))
89
except (IndexError, KeyError):
92
for item in ['pkgId', 'pkgKey']:
94
setattr(self, item, db_obj[item])
95
except (IndexError, KeyError):
99
checksum_type = _share_data(db_obj['checksum_type'])
100
check_sum = (checksum_type, db_obj['pkgId'], True)
101
self._checksums = [ check_sum ]
102
except (IndexError, KeyError):
105
@catchSqliteException
def _sql_MD(self, MD, sql, *args):
    """ Exec SQL against an MD of the repo, return a cursor. """
    # Look up the sqlite connection for this metadata type
    # (primarydb / filelistsdb / otherdb) keyed by our repo.
    cache = getattr(self.sack, MD + 'db')[self.repo]
    cur = cache.cursor()
    executeSQL(cur, sql, *args)
    return cur
114
def __getattr__(self, varname):
115
db2simplemap = { 'packagesize' : 'size_package',
116
'archivesize' : 'size_archive',
117
'installedsize' : 'size_installed',
118
'buildtime' : 'time_build',
119
'hdrstart' : 'rpm_header_start',
120
'hdrend' : 'rpm_header_end',
121
'basepath' : 'location_base',
122
'relativepath': 'location_href',
123
'filetime' : 'time_file',
124
'packager' : 'rpm_packager',
125
'group' : 'rpm_group',
126
'buildhost' : 'rpm_buildhost',
127
'sourcerpm' : 'rpm_sourcerpm',
128
'vendor' : 'rpm_vendor',
129
'license' : 'rpm_license',
130
'checksum_value' : 'pkgId',
134
if db2simplemap.has_key(varname):
135
dbname = db2simplemap[varname]
136
r = self._sql_MD('primary',
137
"SELECT %s FROM packages WHERE pkgId = ?" % dbname,
138
(self.pkgId,)).fetchone()
140
if varname in {'vendor' : 1, 'packager' : 1, 'buildhost' : 1,
141
'license' : 1, 'group' : 1}:
142
value = _share_data(value)
143
setattr(self, varname, value)
147
def _loadFiles(self):
148
if self._loadedfiles:
153
#FIXME - this should be try, excepting
154
self.sack.populate(self.repo, mdtype='filelists')
155
cur = self._sql_MD('filelists',
156
"SELECT dirname, filetypes, filenames " \
157
"FROM filelist JOIN packages USING(pkgKey) " \
158
"WHERE packages.pkgId = ?", (self.pkgId,))
160
dirname = ob['dirname']
161
filetypes = decodefiletypelist(ob['filetypes'])
162
filenames = decodefilenamelist(ob['filenames'])
165
filename = dirname+'/'+filenames.pop()
167
filename = filenames.pop()
168
filetype = _share_data(filetypes.pop())
169
result.setdefault(filetype,[]).append(filename)
170
self._loadedfiles = True
175
def _loadChangelog(self):
177
if not self._changelog:
178
if not self.sack.otherdb.has_key(self.repo):
180
self.sack.populate(self.repo, mdtype='otherdata')
181
except Errors.RepoError:
182
self._changelog = result
184
cur = self._sql_MD('other',
185
"SELECT date, author, changelog " \
186
"FROM changelog JOIN packages USING(pkgKey) " \
187
"WHERE pkgId = ?", (self.pkgId,))
188
# Check count(pkgId) here, the same way we do in searchFiles()?
189
# Failure mode is much less of a problem.
191
result.append( (ob['date'], _share_data(ob['author']),
193
self._changelog = result
197
def returnIdSum(self):
    """Return the (checksum type, checksum value) pair identifying this package."""
    csum_type = self.checksum_type
    return (csum_type, self.pkgId)
60
200
def returnChangelog(self):
62
return YumAvailablePackage.returnChangelog(self)
201
self._loadChangelog()
202
return self._changelog
64
204
def returnFileEntries(self, ftype='file'):
66
return YumAvailablePackage.returnFileEntries(self,ftype)
206
return RpmBase.returnFileEntries(self,ftype)
68
208
def returnFileTypes(self):
70
return YumAvailablePackage.returnFileTypes(self)
72
def returnPrco(self, prcotype):
73
if not self.prco[prcotype]:
74
self.prco = self.sack.getPrco(self.pkgId, prcotype)
75
return self.prco[prcotype]
77
class YumSqlitePackageSack(repos.YumPackageSack):
210
return RpmBase.returnFileTypes(self)
212
def simpleFiles(self, ftype='file'):
    """Return the file names of the given type recorded for this package
    in the primary metadata."""
    query = "SELECT name as fname FROM files WHERE pkgKey = ? and type = ?"
    rows = self._sql_MD('primary', query, (self.pkgKey, ftype))
    # Python 2 map() over the cursor returns a list; the comprehension
    # below produces the identical list of name columns.
    return [row['fname'] for row in rows]
217
def returnPrco(self, prcotype, printable=False):
    """Return this package's provides/requires/conflicts/obsoletes of the
    given type, lazily loading them from the primary db on first access.

    While unloaded, self.prco[prcotype] is still the initial tuple; once
    loaded it is a list of (name, flags, (epoch, version, release))."""
    prcotype = _share_data(prcotype)
    if isinstance(self.prco[prcotype], tuple):
        sql = "SELECT name, version, release, epoch, flags " \
              "FROM %s WHERE pkgKey = ?" % prcotype
        cur = self._sql_MD('primary', sql, (self.pkgKey,))
        self.prco[prcotype] = [ ]
        for ob in cur:
            # Share every string (and the tuple itself) to keep many
            # packages with identical prco data from duplicating memory.
            prco_set = (_share_data(ob['name']), _share_data(ob['flags']),
                        (_share_data(ob['epoch']),
                         _share_data(ob['version']),
                         _share_data(ob['release'])))
            self.prco[prcotype].append(_share_data(prco_set))

    return RpmBase.returnPrco(self, prcotype, printable)
233
class YumSqlitePackageSack(yumRepo.YumPackageSack):
78
234
""" Implementation of a PackageSack that uses sqlite cache instead of fully
79
235
expanded metadata objects to provide information """
81
237
def __init__(self, packageClass):
82
238
# Just init as usual and create a dict to hold the databases
83
repos.YumPackageSack.__init__(self,packageClass)
239
yumRepo.YumPackageSack.__init__(self, packageClass)
241
self.filelistsdb = {}
244
self._excludes = set() # of (repo, pkgKey)
245
self._all_excludes = {}
246
self._search_cache = {
252
@catchSqliteException
def _sql_MD(self, MD, repo, sql, *args):
    """ Exec SQL against an MD of the repo, return a cursor. """
    # Pick the sqlite connection for this metadata type
    # (primarydb / filelistsdb / otherdb) for the given repo.
    cache = getattr(self, MD + 'db')[repo]
    cur = cache.cursor()
    executeSQL(cur, sql, *args)
    return cur
261
def _sql_MD_pkg_num(self, MD, repo):
    """ Give a count of pkgIds in the given repo DB """
    sql = "SELECT count(pkgId) FROM packages"
    # Query the requested metadata DB rather than hard-coding 'primary':
    # searchFiles() cross-checks the 'primary' count against the
    # 'filelists' count, which is vacuous if both hit the same DB.
    return self._sql_MD(MD, repo, sql).fetchone()[0]
267
# First check if everything is excluded
269
for (repo, cache) in self.primarydb.items():
270
if repo not in self._all_excludes:
277
for repo in self.excludes:
278
exclude_num += len(self.excludes[repo])
279
if hasattr(self, 'pkgobjlist'):
280
return len(self.pkgobjlist) - exclude_num
283
sql = "SELECT count(pkgId) FROM packages"
284
for repo in self.primarydb:
285
pkg_num += self._sql_MD_pkg_num('primary', repo)
286
return pkg_num - exclude_num
288
@catchSqliteException
290
for dataobj in self.primarydb.values() + \
291
self.filelistsdb.values() + \
292
self.otherdb.values():
295
self.filelistsdb = {}
298
self._excludes = set()
299
self._all_excludes = {}
300
self._search_cache = {
304
if hasattr(self, 'pkgobjlist'):
307
yumRepo.YumPackageSack.close(self)
89
309
def buildIndexes(self):
310
# We don't need to play with returnPackages() caching as it handles
311
# additions to excludes after the cache is built.
93
314
def _checkIndexes(self, failure='error'):
97
318
# Because we don't want to remove a package from the database we just
98
319
# add it to the exclude list
99
320
def delPackage(self, obj):
101
self.excludes[repoid][obj.pkgId] = 1
103
def addDict(self, repoid, datatype, dataobj, callback=None):
104
if (not self.excludes.has_key(repoid)):
105
self.excludes[repoid] = {}
321
if not self.excludes.has_key(obj.repo):
322
self.excludes[obj.repo] = {}
323
self.excludes[obj.repo][obj.pkgId] = 1
324
self._excludes.add( (obj.repo, obj.pkgKey) )
326
def _delAllPackages(self, repo):
327
""" Exclude all packages from the repo. """
328
self._all_excludes[repo] = True
330
def _excluded(self, repo, pkgId):
    """Return True when the given pkgId is excluded for this repo,
    either because the whole repo is excluded or because the pkgId is
    in the repo's exclude map."""
    if repo in self._all_excludes:
        return True

    if repo in self.excludes and pkgId in self.excludes[repo]:
        return True

    return False
339
def _pkgKeyExcluded(self, repo, pkgKey):
    """Return True when the (repo, pkgKey) pair is excluded, either via
    a whole-repo exclude or an individual pkgKey exclude."""
    if repo in self._all_excludes:
        return True

    return (repo, pkgKey) in self._excludes
345
def _pkgExcluded(self, po):
    """Check whether the given package object has been excluded."""
    repo = po.repo
    key = po.pkgKey
    return self._pkgKeyExcluded(repo, key)
348
def _packageByKey(self, repo, pkgKey):
    """Return the package object for a repo's pkgKey, creating and
    caching it in self._key2pkg on first request."""
    if repo not in self._key2pkg:
        self._key2pkg[repo] = {}
    repo_cache = self._key2pkg[repo]
    if pkgKey not in repo_cache:
        sql = "SELECT pkgKey, pkgId, name, epoch, version, release " \
              "FROM packages WHERE pkgKey = ?"
        cur = self._sql_MD('primary', repo, sql, (pkgKey,))
        # self.pc is the package class this sack was built with.
        repo_cache[pkgKey] = self.pc(repo, cur.fetchone())
    return repo_cache[pkgKey]
359
def addDict(self, repo, datatype, dataobj, callback=None):
360
if self.added.has_key(repo):
361
if datatype in self.added[repo]:
364
self.added[repo] = []
366
if not self.excludes.has_key(repo):
367
self.excludes[repo] = {}
106
369
if datatype == 'metadata':
107
if (self.primarydb.has_key(repoid)):
109
self.added[repoid] = ['primary']
110
self.primarydb[repoid] = dataobj
370
self.primarydb[repo] = dataobj
111
371
elif datatype == 'filelists':
112
if (self.filelistsdb.has_key(repoid)):
114
self.added[repoid] = ['filelists']
115
self.filelistsdb[repoid] = dataobj
372
self.filelistsdb[repo] = dataobj
116
373
elif datatype == 'otherdata':
117
if (self.otherdb.has_key(repoid)):
119
self.added[repoid] = ['otherdata']
120
self.otherdb[repoid] = dataobj
374
self.otherdb[repo] = dataobj
122
376
# We can not handle this yet...
123
377
raise "Sorry sqlite does not support %s" % (datatype)
125
def getChangelog(self,pkgId):
127
for (rep,cache) in self.otherdb.items():
129
cur.execute("select changelog.date as date,\
130
changelog.author as author,\
131
changelog.changelog as changelog from packages,changelog where packages.pkgId = %s and packages.pkgKey = changelog.pkgKey",pkgId)
132
for ob in cur.fetchall():
133
result.append(( ob['date'],
139
def getPrco(self,pkgId, prcotype=None):
140
if prcotype is not None:
141
result = {'requires': [], 'provides': [], 'obsoletes': [], 'conflicts': []}
143
result = { prcotype: [] }
144
for (rep, cache) in self.primarydb.items():
146
for prco in result.keys():
147
cur.execute("select %s.name as name, %s.version as version,\
148
%s.release as release, %s.epoch as epoch, %s.flags as flags\
150
where packages.pkgId = %s and packages.pkgKey = %s.pkgKey", prco, prco, prco, prco, prco, prco, pkgId, prco)
151
for ob in cur.fetchall():
153
version = ob['version']
154
release = ob['release']
157
result[prco].append((name, flags, (epoch, version, release)))
379
self.added[repo].append(datatype)
160
382
# Get all files for a certain pkgId from the filelists.xml metadata
161
def getFiles(self,pkgId):
162
for (rep,cache) in self.filelistsdb.items():
166
cur.execute("select filelist.dirname as dirname,\
167
filelist.filetypes as filetypes,\
168
filelist.filenames as filenames from packages,filelist\
169
where packages.pkgId = %s and packages.pkgKey = filelist.pkgKey", pkgId)
170
for ob in cur.fetchall():
172
dirname = ob['dirname']
173
filetypes = decodefiletypelist(ob['filetypes'])
174
filenames = decodefilenamelist(ob['filenames'])
177
filename = dirname+'/'+filenames.pop()
179
filename = filenames.pop()
180
filetype = filetypes.pop()
181
result.setdefault(filetype,[]).append(filename)
186
383
# Search packages that either provide something containing name
187
384
# or provide a file containing name
188
def searchAll(self,name):
189
# This should never be called with a name containing a %
190
assert(name.find('%') == -1)
385
def searchAll(self, name, query_type='like'):
    """Search everywhere for *name*; this reduces to a provides search.

    query_type is accepted for API compatibility and is not used.
    """
    # this function is just silly and it reduces down to just this
    return self.searchPrco(name, 'provides')
389
def _sql_pkgKey2po(self, repo, cur, pkgs=None):
    """ Takes a cursor and maps the pkgKey rows into a list of packages. """
    if pkgs is None: pkgs = []
    for ob in cur:
        # Skip anything the user/repo config has excluded.
        if self._pkgKeyExcluded(repo, ob['pkgKey']):
            continue
        pkgs.append(self._packageByKey(repo, ob['pkgKey']))
    return pkgs
398
@catchSqliteException
399
def searchFiles(self, name, strict=False):
400
"""search primary if file will be in there, if not, search filelists, use globs, if possible"""
403
# if it is not glob, then see if it is in the primary.xml filelists,
404
# if so, just use those for the lookup
408
if strict or not re.match('.*[\*\?\[\]].*', name):
412
# Take off the trailing slash to act like rpm
417
if len(self.filelistsdb) == 0:
418
# grab repo object from primarydb and force filelists population in this sack using repo
419
# sack.populate(repo, mdtype, callback, cacheonly)
420
for (repo,cache) in self.primarydb.items():
421
self.populate(repo, mdtype='filelists')
423
# Check to make sure the DB data matches, this should always pass but
424
# we've had weird errors. So check it for a bit.
425
for repo in self.filelistsdb:
426
pri_pkgs = self._sql_MD_pkg_num('primary', repo)
427
fil_pkgs = self._sql_MD_pkg_num('filelists', repo)
428
if pri_pkgs != fil_pkgs:
429
raise Errors.RepoError
431
for (rep,cache) in self.filelistsdb.items():
437
dirname = os.path.dirname(name)
438
dirname_check = "dirname = \"%s\" and " % dirname
440
# grab the entries that are a single file in the
441
# filenames section, use sqlites globbing if it is a glob
442
executeSQL(cur, "select pkgKey from filelist where \
443
%s length(filetypes) = 1 and \
444
dirname || ? || filenames \
445
%s ?" % (dirname_check, querytype), ('/', name))
446
self._sql_pkgKey2po(rep, cur, pkgs)
448
def filelist_globber(dirname, filenames):
449
files = filenames.split('/')
450
fns = map(lambda f: '%s/%s' % (dirname, f), files)
452
matches = fnmatch.filter(fns, name)
454
matches = filter(lambda x: name==x, fns)
457
cache.create_function("filelist_globber", 2, filelist_globber)
458
# for all the ones where filenames is multiple files,
459
# make the files up whole and use python's globbing method
460
executeSQL(cur, "select pkgKey from filelist where \
461
%s length(filetypes) > 1 \
462
and filelist_globber(dirname,filenames)" % dirname_check)
464
self._sql_pkgKey2po(rep, cur, pkgs)
466
pkgs = misc.unique(pkgs)
469
@catchSqliteException
470
def searchPrimaryFields(self, fields, searchstring):
471
"""search arbitrary fields from the primarydb for a string"""
192
quotename = name.replace("'","''")
193
for (rep,cache) in self.primarydb.items():
195
cur.execute("select DISTINCT packages.pkgId as pkgId from provides,packages where provides.name LIKE '%%%s%%' AND provides.pkgKey = packages.pkgKey" % quotename)
196
for ob in cur.fetchall():
197
if (self.excludes[rep].has_key(ob['pkgId'])):
199
pkg = self.getPackageDetails(ob['pkgId'])
200
result.append((self.pc(pkg,rep)))
202
for (rep,cache) in self.filelistsdb.items():
204
(dir,filename) = os.path.split(quotename)
206
# Either name is a substring of dirname or the directory part
207
# in name is a substring of dirname and the file part is part
209
cur.execute("select packages.pkgId as pkgId,\
210
filelist.dirname as dirname,\
211
filelist.filetypes as filetypes,\
212
filelist.filenames as filenames \
213
from packages,filelist where \
214
(filelist.dirname LIKE '%%%s%%' \
215
OR (filelist.dirname LIKE '%%%s%%' AND\
216
filelist.filenames LIKE '%%%s%%'))\
217
AND (filelist.pkgKey = packages.pkgKey)" % (quotename,dir,filename))
218
for ob in cur.fetchall():
219
# Check if it is an actual match
220
# The query above can give false positives, when
221
# a package provides /foo/aaabar it will also match /foo/bar
222
if (self.excludes[rep].has_key(ob['pkgId'])):
225
for filename in decodefilenamelist(ob['filenames']):
226
if (ob['dirname']+'/'+filename).find(name) != -1:
230
pkg = self.getPackageDetails(ob['pkgId'])
231
result.append((self.pc(pkg,rep)))
234
def returnObsoletes(self):
476
basestring="select DISTINCT pkgKey from packages where %s like '%%%s%%' " % (fields[0], searchstring)
479
basestring = "%s or %s like '%%%s%%' " % (basestring, f, searchstring)
481
for (rep,cache) in self.primarydb.items():
483
executeSQL(cur, basestring)
484
self._sql_pkgKey2po(rep, cur, result)
487
@catchSqliteException
488
def searchPrimaryFieldsMultipleStrings(self, fields, searchstrings):
489
"""search arbitrary fields from the primarydb for a multiple strings
490
return packages, number of items it matched as a list of tuples"""
492
result = [] # (pkg, num matches)
497
unionstring = "select pkgKey, SUM(cumul) AS total from ( "
498
endunionstring = ")GROUP BY pkgKey ORDER BY total DESC"
500
#SELECT pkgkey, SUM(cumul) AS total FROM (SELECT pkgkey, 1
501
#AS cumul FROM packages WHERE description LIKE '%foo%' UNION ... )
502
#GROUP BY pkgkey ORDER BY total DESC;
505
# select pkgKey, 1 AS cumul from packages where description
506
# like '%devel%' or description like '%python%' or description like '%ssh%'
508
# basestring = "select pkgKey, 1 AS cumul from packages where %s like '%%%s%%' " % (f,searchstrings[0])
509
# for s in searchstrings[1:]:
510
# basestring = "%s or %s like '%%%s%%' " % (basestring, f, s)
511
# selects.append(basestring)
513
for s in searchstrings:
514
basestring="select pkgKey,1 AS cumul from packages where %s like '%%%s%%' " % (fields[0], s)
516
basestring = "%s or %s like '%%%s%%' " % (basestring, f, s)
517
selects.append(basestring)
519
totalstring = unionstring + " UNION ALL ".join(selects) + endunionstring
522
for (rep,cache) in self.primarydb.items():
524
executeSQL(cur, totalstring)
526
if self._pkgKeyExcluded(rep, ob['pkgKey']):
528
result.append((self._packageByKey(rep, ob['pkgKey']), ob['total']))
531
@catchSqliteException
532
def returnObsoletes(self, newest=False):
534
raise NotImplementedError()
236
537
for (rep,cache) in self.primarydb.items():
237
538
cur = cache.cursor()
238
cur.execute("select packages.name as name,\
239
packages.pkgId as pkgId,\
539
executeSQL(cur, "select packages.name as name,\
540
packages.pkgKey as pkgKey,\
240
541
packages.arch as arch, packages.epoch as epoch,\
241
542
packages.release as release, packages.version as version,\
242
543
obsoletes.name as oname, obsoletes.epoch as oepoch,\
243
544
obsoletes.release as orelease, obsoletes.version as oversion,\
244
545
obsoletes.flags as oflags\
245
546
from obsoletes,packages where obsoletes.pkgKey = packages.pkgKey")
246
for ob in cur.fetchall():
247
548
# If the package that is causing the obsoletes is excluded
248
549
# continue without processing the obsoletes
249
if (self.excludes[rep].has_key(ob['pkgId'])):
550
if self._pkgKeyExcluded(rep, ob['pkgKey']):
251
key = ( ob['name'],ob['arch'],
252
ob['epoch'],ob['version'],
254
(n,f,e,v,r) = ( ob['oname'],ob['oflags'],
255
ob['oepoch'],ob['oversion'],
553
key = ( _share_data(ob['name']), _share_data(ob['arch']),
554
_share_data(ob['epoch']), _share_data(ob['version']),
555
_share_data(ob['release']))
556
(n,f,e,v,r) = ( _share_data(ob['oname']),
557
_share_data(ob['oflags']),
558
_share_data(ob['oepoch']),
559
_share_data(ob['oversion']),
560
_share_data(ob['orelease']))
258
obsoletes.setdefault(key,[]).append((n,f,(e,v,r)))
562
key = _share_data(key)
563
val = _share_data((n,f,(e,v,r)))
564
obsoletes.setdefault(key,[]).append(val)
568
@catchSqliteException
262
569
def getPackageDetails(self,pkgId):
263
570
for (rep,cache) in self.primarydb.items():
264
571
cur = cache.cursor()
265
cur.execute("select * from packages where pkgId = %s",pkgId)
266
for ob in cur.fetchall():
267
pkg = self.db2class(ob)
572
executeSQL(cur, "select * from packages where pkgId = ?", (pkgId,))
576
@catchSqliteException
577
def _getListofPackageDetails(self, pkgId_list):
579
if len(pkgId_list) == 0:
581
pkgid_query = str(tuple(pkgId_list))
583
for (rep,cache) in self.primarydb.items():
585
executeSQL(cur, "select * from packages where pkgId in %s" %(pkgid_query,))
586
#executeSQL(cur, "select * from packages where pkgId in %s" %(pkgid_query,))
593
@catchSqliteException
594
def _search(self, prcotype, name, flags, version):
597
if type(version) in (str, type(None), unicode):
598
req = (name, flags, rpmUtils.miscutils.stringToVersion(
600
elif type(version) in (tuple, list): # would this ever be a list?
601
req = (name, flags, version)
603
if self._search_cache[prcotype].has_key(req):
604
return self._search_cache[prcotype][req]
608
for (rep,cache) in self.primarydb.items():
610
executeSQL(cur, "select * from %s where name=?" % prcotype,
614
val = (_share_data(x['name']), _share_data(x['flags']),
615
(_share_data(x['epoch']), _share_data(x['version']),
616
_share_data(x['release'])))
617
val = _share_data(val)
618
if rpmUtils.miscutils.rangeCompare(req, val):
619
tmp.setdefault(x['pkgKey'], []).append(val)
620
for pkgKey, hits in tmp.iteritems():
621
if self._pkgKeyExcluded(rep, pkgKey):
623
result[self._packageByKey(rep, pkgKey)] = hits
625
if prcotype != 'provides' or name[0] != '/':
626
self._search_cache[prcotype][req] = result
630
globs = ['.*bin\/.*', '^\/etc\/.*', '^\/usr\/lib\/sendmail$']
631
for thisglob in globs:
632
globc = re.compile(thisglob)
633
if globc.match(name):
636
if not matched: # if its not in the primary.xml files
637
# search the files.xml file info
638
for pkg in self.searchFiles(name, strict=True):
639
result[pkg] = [(name, None, None)]
640
self._search_cache[prcotype][req] = result
643
# If it is a filename, search the primary.xml file info
644
for (rep,cache) in self.primarydb.items():
646
executeSQL(cur, "select DISTINCT pkgKey from files where name = ?", (name,))
648
if self._pkgKeyExcluded(rep, ob['pkgKey']):
650
result[self._packageByKey(rep, ob['pkgKey'])] = [(name, None, None)]
651
self._search_cache[prcotype][req] = result
654
def getProvides(self, name, flags=None, version=(None, None, None)):
    """Return the packages providing name/flags/version, via _search."""
    prcotype = "provides"
    return self._search(prcotype, name, flags, version)
657
def getRequires(self, name, flags=None, version=(None, None, None)):
    """Return the packages requiring name/flags/version, via _search."""
    prcotype = "requires"
    return self._search(prcotype, name, flags, version)
661
@catchSqliteException
270
662
def searchPrco(self, name, prcotype):
271
663
"""return list of packages having prcotype name (any evr and flag)"""
666
if not re.match('.*[\*\?\[\]].*', name):
273
671
for (rep,cache) in self.primarydb.items():
274
672
cur = cache.cursor()
275
cur.execute("select * from %s where name = %s" , (prcotype, name))
276
prcos = cur.fetchall()
278
cur.execute("select * from packages where pkgKey = %s" , (res['pkgKey']))
279
for x in cur.fetchall():
280
pkg = self.db2class(x)
281
if (self.excludes[rep].has_key(pkg.pkgId)):
284
# Add this provides to prco otherwise yum doesn't understand
286
pkg.prco = {prcotype:
296
results.append(self.pc(pkg,rep))
673
executeSQL(cur, "select DISTINCT pkgKey from %s where name %s ?" % (prcotype,querytype), (name,))
674
self._sql_pkgKey2po(rep, cur, results)
299
676
# If it's not a provides or a filename, we are done
300
if (prcotype != "provides" or name.find('/') != 0):
677
if prcotype != "provides" or name[0] != '/':
303
681
# If it is a filename, search the primary.xml file info
304
682
for (rep,cache) in self.primarydb.items():
305
683
cur = cache.cursor()
306
cur.execute("select * from files where name = %s" , (name))
307
files = cur.fetchall()
309
cur.execute("select * from packages where pkgKey = %s" , (res['pkgKey']))
310
for x in cur.fetchall():
311
pkg = self.db2class(x)
312
if (self.excludes[rep].has_key(pkg.pkgId)):
315
pkg.files = {name: res['type']}
316
results.append(self.pc(pkg,rep))
684
executeSQL(cur, "select DISTINCT pkgKey from files where name %s ?" % querytype, (name,))
685
self._sql_pkgKey2po(rep, cur, results)
688
globs = ['.*bin\/.*', '^\/etc\/.*', '^\/usr\/lib\/sendmail$']
689
for thisglob in globs:
690
globc = re.compile(thisglob)
691
if globc.match(name):
694
if matched and not glob: # if its in the primary.xml files then skip the other check
695
return misc.unique(results)
318
697
# If it is a filename, search the files.xml file info
319
for (rep,cache) in self.filelistsdb.items():
321
(dirname,filename) = os.path.split(name)
322
cur.execute("select packages.pkgId as pkgId,\
323
filelist.dirname as dirname,\
324
filelist.filetypes as filetypes,\
325
filelist.filenames as filenames \
326
from filelist,packages where dirname = %s AND filelist.pkgKey = packages.pkgKey" , (dirname))
327
files = cur.fetchall()
329
if (self.excludes[rep].has_key(res['pkgId'])):
332
# If it matches the dirname, that doesnt mean it matches
333
# the filename, check if it does
335
not filename in res['filenames'].split('/'):
337
# If it matches we only know the packageId
338
pkg = self.getPackageDetails(res['pkgId'])
339
results.append(self.pc(pkg,rep))
698
results.extend(self.searchFiles(name))
699
return misc.unique(results)
702
#~ #FIXME - comment this all out below here
703
#~ for (rep,cache) in self.filelistsdb.items():
704
#~ cur = cache.cursor()
705
#~ (dirname,filename) = os.path.split(name)
706
#~ # FIXME: why doesn't this work???
707
#~ if 0: # name.find('%') == -1: # no %'s in the thing safe to LIKE
708
#~ executeSQL(cur, "select packages.pkgId as pkgId,\
709
#~ filelist.dirname as dirname,\
710
#~ filelist.filetypes as filetypes,\
711
#~ filelist.filenames as filenames \
712
#~ from packages,filelist where \
713
#~ (filelist.dirname LIKE ? \
714
#~ OR (filelist.dirname LIKE ? AND\
715
#~ filelist.filenames LIKE ?))\
716
#~ AND (filelist.pkgKey = packages.pkgKey)", (name,dirname,filename))
718
#~ executeSQL(cur, "select packages.pkgId as pkgId,\
719
#~ filelist.dirname as dirname,\
720
#~ filelist.filetypes as filetypes,\
721
#~ filelist.filenames as filenames \
722
#~ from filelist,packages where dirname = ? AND filelist.pkgKey = packages.pkgKey" , (dirname,))
726
#~ if self._excluded(rep, res['pkgId']):
729
#~ #FIXME - optimize the look up here by checking for single-entry filenames
731
#~ for fn in decodefilenamelist(res['filenames']):
732
#~ quicklookup[fn] = 1
734
#~ # If it matches the dirname, that doesnt mean it matches
735
#~ # the filename, check if it does
736
#~ if filename and not quicklookup.has_key(filename):
739
#~ matching_ids.append(str(res['pkgId']))
742
#~ pkgs = self._getListofPackageDetails(matching_ids)
744
#~ results.append(self.pc(rep,pkg))
342
748
def searchProvides(self, name):
343
749
"""return list of packages providing name (any evr and flag)"""
355
761
"""return list of packages conflicting with name (any evr and flag)"""
356
762
return self.searchPrco(name, "conflicts")
358
# TODO this seems a bit ugly and hackish
359
def db2class(self,db,nevra_only=False):
765
def db2class(self, db, nevra_only=False):
766
print 'die die die die die db2class'
363
y.nevra = (db.name,db.epoch,db.version,db.release,db.arch)
772
y.nevra = (db['name'],db['epoch'],db['version'],db['release'],db['arch'])
774
y.pkgId = db['pkgId']
778
y.hdrange = {'start': db['rpm_header_start'],'end': db['rpm_header_end']}
779
y.location = {'href': db['location_href'],'value': '', 'base': db['location_base']}
780
y.checksum = {'pkgid': 'YES','type': db['checksum_type'],
781
'value': db['pkgId'] }
782
y.time = {'build': db['time_build'], 'file': db['time_file'] }
783
y.size = {'package': db['size_package'], 'archive': db['size_archive'], 'installed': db['size_installed'] }
784
y.info = {'summary': db['summary'], 'description': db['description'],
785
'packager': db['rpm_packager'], 'group': db['rpm_group'],
786
'buildhost': db['rpm_buildhost'], 'sourcerpm': db['rpm_sourcerpm'],
787
'url': db['url'], 'vendor': db['rpm_vendor'], 'license': db['rpm_license'] }
368
y.hdrange = {'start': db.rpm_header_start,'end': db.rpm_header_end}
369
y.location = {'href': db.location_href,'value':''}
370
y.checksum = {'pkgid': 'YES','type': db.checksum_type,
371
'value': db.checksum_value }
372
y.time = {'build': db.time_build, 'file': db.time_file }
373
y.size = {'package': db.size_package, 'archive': db.size_archive, 'installed': db.size_installed }
374
y.info = {'summary': db.summary, 'description': db['description'],
375
'packager': db.rpm_packager, 'group': db.rpm_group,
376
'buildhost': db.rpm_buildhost, 'sourcerpm': db.rpm_sourcerpm,
377
'url': db.url, 'vendor': db.rpm_vendor, 'license': db.rpm_license }
380
def simplePkgList(self, repoid=None):
381
"""returns a list of pkg tuples (n, a, e, v, r) optionally from a single repoid"""
383
for (rep,cache) in self.primarydb.items():
384
if (repoid == None or repoid == rep):
386
cur.execute("select pkgId,name,epoch,version,release,arch from packages")
387
for pkg in cur.fetchall():
388
if (self.excludes[rep].has_key(pkg.pkgId)):
390
simplelist.append((pkg.name, pkg.arch, pkg.epoch, pkg.version, pkg.release))
394
def returnNewestByNameArch(self, naTup=None):
790
@catchSqliteException
791
def returnNewestByNameArch(self, naTup=None, patterns=None):
395
793
# If naTup is set do it from the database otherwise use our parent's
396
794
# returnNewestByNameArch
398
# TODO process excludes here
399
return repos.YumPackageSack.returnNewestByNameArch(self, naTup)
796
return yumRepo.YumPackageSack.returnNewestByNameArch(self, naTup,
401
799
# First find all packages that fulfill naTup
403
801
for (rep,cache) in self.primarydb.items():
404
802
cur = cache.cursor()
405
cur.execute("select pkgId,name,epoch,version,release,arch from packages where name=%s and arch=%s",naTup)
406
for x in cur.fetchall():
407
if (self.excludes[rep].has_key(x.pkgId)):
409
allpkg.append(self.pc(self.db2class(x,True),rep))
411
# if we've got zilch then raise
413
raise mdErrors.PackageSackError, 'No Package Matching %s.%s' % naTup
414
# Now find the newest one
415
newest = allpkg.pop()
417
(e2, v2, r2) = newest.returnEVR()
418
(e,v,r) = pkg.returnEVR()
419
rc = mdUtils.compareEVR((e,v,r), (e2, v2, r2))
424
def returnPackages(self, repoid=None):
425
"""Returns a list of packages, only containing nevra information """
803
executeSQL(cur, "select pkgKey from packages where name=? and arch=?",naTup)
804
self._sql_pkgKey2po(rep, cur, allpkg)
806
# if we've got zilch then raise
808
raise Errors.PackageSackError, 'No Package Matching %s.%s' % naTup
809
return misc.newestInList(allpkg)
811
@catchSqliteException
812
def returnNewestByName(self, name=None):
813
# If name is set do it from the database otherwise use our parent's
816
return yumRepo.YumPackageSack.returnNewestByName(self, name)
818
# First find all packages that fulfill name
820
for (rep,cache) in self.primarydb.items():
822
executeSQL(cur, "select pkgKey from packages where name=?", (name,))
823
self._sql_pkgKey2po(rep, cur, allpkg)
825
# if we've got zilch then raise
827
raise Errors.PackageSackError, 'No Package Matching %s' % name
828
return misc.newestInList(allpkg)
830
# Do what packages.matchPackageNames does, but query the DB directly
@catchSqliteException
def matchPackageNames(self, pkgspecs):
    """Match a list of package spec strings against the sack.

    Specs containing glob characters (*, ?, [, ]) are matched with the
    SQL GLOB operator, everything else with equality (via the
    module-level PARSE_QUERY template).

    Returns a (exactmatch, matched, unmatched) tuple of unique lists.
    """
    # NOTE(review): reconstructed from a corrupted interleave of two
    # versions of this method.
    # NOTE(review): PARSE_QUERY %-interpolates the raw spec into SQL;
    # specs normally come from the command line, so a malformed spec can
    # break the query — parameter binding would be safer.
    matched = []
    exactmatch = []
    unmatched = list(pkgspecs)

    for p in pkgspecs:
        if re.match(r'.*[\*\?\[\]].*', p):
            query = PARSE_QUERY % ({"op": "glob", "q": p})
            matchres = matched
        else:
            query = PARSE_QUERY % ({"op": "=", "q": p})
            matchres = exactmatch

        for (rep, db) in self.primarydb.items():
            cur = db.cursor()
            executeSQL(cur, query)
            pmatches = self._sql_pkgKey2po(rep, cur)
            if pmatches and p in unmatched:
                # at least one repo matched the spec, so it is no
                # longer unmatched (guard avoids double-remove when
                # several repos match the same spec)
                unmatched.remove(p)
            matchres.extend(pmatches)

    exactmatch = misc.unique(exactmatch)
    matched = misc.unique(matched)
    unmatched = misc.unique(unmatched)
    return exactmatch, matched, unmatched
858
@catchSqliteException
def _buildPkgObjList(self, repoid=None, patterns=None):
    """Builds a list of packages, only containing nevra information. No
       excludes are done at this stage. """
    # NOTE(review): reconstructed from a corrupted interleave of two
    # versions of this method; this is the newer pkgKey-cache variant.
    returnList = []
    for (repo, cache) in self.primarydb.items():
        # honour an explicit repoid filter, otherwise walk every repo
        if repoid is not None and repoid != repo.id:
            continue
        cur = cache.cursor()

        qsql = """select pkgId, pkgKey, name,epoch,version,release,arch
                  from packages"""

        pat_sqls = []
        pat_data = []
        if patterns:
            # Push the glob patterns down into SQL: match each pattern
            # against every precomputed nevra-style column.
            for pattern in patterns:
                for field in ['name', 'sql_nameArch', 'sql_nameVerRelArch',
                              'sql_nameVer', 'sql_nameVerRel',
                              'sql_envra', 'sql_nevra']:
                    pat_sqls.append("%s GLOB ?" % field)
                    pat_data.append(pattern)
            qsql = _FULL_PARSE_QUERY_BEG + " OR ".join(pat_sqls)
        executeSQL(cur, qsql, pat_data)
        for x in cur:
            # Reuse the cached package object for this pkgKey if we have
            # one, otherwise create it and remember it.
            if x['pkgKey'] in self._key2pkg.get(repo, {}):
                po = self._key2pkg[repo][x['pkgKey']]
            else:
                po = self.pc(repo, x)
                self._key2pkg.setdefault(repo, {})[po.pkgKey] = po
            returnList.append(po)
    if not patterns:
        # Only cache the complete, unfiltered list — a pattern-filtered
        # result must not shadow later full queries.
        self.pkgobjlist = returnList
    return returnList
896
def returnPackages(self, repoid=None, patterns=None):
    """Returns a list of packages, only containing nevra information. The
       packages are processed for excludes. Note that patterns is just
       a hint, we are free to ignore it. """
    # NOTE(review): reconstructed from a corrupted interleave — the old
    # cursor/db2class loop body was embedded inside this newer method
    # and has been dropped.
    # Skip unused repos completely, Eg. *-source
    skip_all = True
    for repo in self.added:
        if repo not in self._all_excludes:
            skip_all = False
            break
    if skip_all:
        return []

    # Use the cached full object list when we have one, otherwise
    # build it (possibly pattern-filtered).
    if hasattr(self, 'pkgobjlist'):
        pkgobjlist = self.pkgobjlist
    else:
        pkgobjlist = self._buildPkgObjList(repoid, patterns)

    returnList = []
    for po in pkgobjlist:
        if self._pkgExcluded(po):
            continue
        returnList.append(po)

    return returnList
923
@catchSqliteException
def searchNevra(self, name=None, epoch=None, ver=None, rel=None, arch=None):
    """return list of pkgobjects matching the nevra requested"""
    # NOTE(review): reconstructed from a corrupted interleave of two
    # versions; this is the newer dynamic-WHERE executeSQL variant.
    returnList = []

    # make sure some dumbass didn't pass us NOTHING to search on
    empty = True
    for arg in (name, epoch, ver, rel, arch):
        if arg:
            empty = False
            break
    if empty:
        return returnList

    # make up our execute string, adding one clause per supplied field
    # NOTE(review): values are %-interpolated straight into the SQL;
    # they come from internal nevra data, but parameter binding ("?")
    # would be safer.
    q = "select pkgKey from packages WHERE"
    for (col, var) in [('name', name), ('epoch', epoch), ('version', ver),
                       ('arch', arch), ('release', rel)]:
        if var:
            if q[-5:] != 'WHERE':
                q = q + ' AND %s = "%s"' % (col, var)
            else:
                q = q + ' %s = "%s"' % (col, var)

    # Search all repositories
    for (rep, cache) in self.primarydb.items():
        cur = cache.cursor()
        executeSQL(cur, q)
        self._sql_pkgKey2po(rep, cur, returnList)

    return returnList
953
@catchSqliteException
def excludeArchs(self, archlist):
    """excludes incompatible arches - archlist is a list of compat arches"""
    # NOTE(review): reconstructed from a corrupted interleave — the old
    # 'arch != "x" AND ...' query builder was dropped in favour of the
    # newer 'arch not in (...)' variant with the all-excluded guard.
    sarchlist = ["'%s'" % x for x in archlist]
    arch_query = ",".join(sarchlist)

    for (rep, cache) in self.primarydb.items():
        cur = cache.cursor()

        # First of all, make sure this isn't a *-source repo or something
        # where we'll be excluding everything.
        has_arch = False
        executeSQL(cur, "SELECT DISTINCT arch FROM packages")
        for row in cur:
            if row[0] in archlist:
                has_arch = True
                break
        if not has_arch:
            # No compatible arch at all in this repo: drop the whole
            # repo's packages in one go and move to the next repo.
            self._delAllPackages(rep)
            continue

        # Delete every package whose arch is not in the compat list.
        myq = "select pkgId, pkgKey from packages where arch not in (%s)" % arch_query
        executeSQL(cur, myq)
        for row in cur:
            obj = self.pc(rep, row)
            self.delPackage(obj)
465
981
# Simple helper functions