1
import metakit, re, pickle, string, os.path
3
import gourmet.gglobals as gglobals
4
from gourmet import keymanager,shopping,convert
5
from gourmet.defaults import lang as defaults
6
from gourmet.gdebug import *
7
from gettext import gettext as _
11
class RecData (rdatabase.RecData):
13
"""A class to keep recipe data in. This class basically is a wrapper for interactions
14
with metakit (stored in self.db). Ideally, interactions with metakit should be abstracted
15
through this class so that I could easily change to a different database backend."""
17
database_change_title = _('Database format has changed')
18
database_change_message = _('%(progname)s %(version)s has changed the format of its database. Your database will no longer work with older versions of %(progname)s. A backup has been saved in %(backupfile)s')%{
19
'version':gourmet.version.version,
20
'progname':gourmet.version.appname,
21
'backupfile':"%(backupfile)s"}
24
def __init__ (self, file=os.path.join(gglobals.gourmetdir,'recipes.mk')):
25
debug('RecData.__init__()',3)
28
self.backed_up = False
29
self.import_when_done = None
30
rdatabase.RecData.__init__(self)
34
def initialize_connection (self):
35
debug('using file: %s'%self.file,1)
36
self.file = os.path.expandvars(os.path.expanduser(self.file))
37
mydir = os.path.split(self.file)[0]
38
# create the directory if it does not yet exist
39
if not os.path.exists(mydir):
41
self.db = metakit.storage(str(self.file),1) #filename must be string, not unicode
42
self.contentview=self.db.contents()
46
def setup_tables (self):
48
# This is unique to metakit and not part of the normal setup_tables routine
49
# since other DBs will presumably have auto-increment built into them.
50
self.increment_vw = self.db.getas('incrementer[view:S,field:S,n:I]')
51
self.increment_vw = self.increment_vw.ordered() #ordered vw
53
# we check for old, incompatible table names
54
# and fix them before calling our regular setup stuff
55
debug('setup_tables called!',3)
56
self.move_old_tables()
57
debug('Setup tables',3)
58
rdatabase.RecData.setup_tables(self)
59
# If we've dumped our data, we want to re-import it!
60
if self.import_when_done:
61
debug('Do import of old recipes',3)
62
old_db,ifi = self.import_when_done
63
from gourmet.importers.gxml2_importer import converter
65
ifi, self, threaded=False,
66
progress=lambda p,m: self.pd.set_progress(p*0.5+0.5,m)
70
for tabl,desc in [('shopcats_table',self.SHOPCATS_TABLE_DESC),
71
('shopcatsorder_table',self.SHOPCATSORDER_TABLE_DESC),
72
('pantry_table',self.PANTRY_TABLE_DESC)]:
76
table_cols=[i[0] for i in desc[1]],
77
prog=lambda p,m: self.pd.set_progress(p/tot+(n*p/tot),m),
81
self.pd.set_progress(1.0,'Database successfully converted!')
82
debug('Delete reference to old database',3)
86
def setup_table (self, name, data, key=None):
87
"""Setup a metakit view (table) for generic table description (see superclass rdatabase)."""
88
debug('setup_table(name=%(name)s,data=%(data)s,key=%(key)s)'%locals(),1)
89
getstring = name + "["
90
# We want to make our "key" the first item in the database
92
key_index = [x[0] for x in data].index(key)
93
data = [data[key_index]] + data[0:key_index] + data[key_index+1:]
94
for col,typ,flags in data:
95
if 'AUTOINCREMENT' in flags:
96
debug('Setup autoincrement for %s'%name,3)
97
row = self.fetch_one(self.increment_vw,**{'view':name,
100
debug('Looked up autoincrement row',3)
102
debug('Add new autoincrement row',3)
103
self.increment_vw.append(view=name,field=col,n=1)
104
debug('Building metakit getstring %s'%getstring,3)
105
getstring += "%s:%s,"%(col,self.type_to_metakit_type(typ))
107
# Hack to allow sorting to work...
108
getstring = getstring+'categoryname:S,'
109
getstring = getstring[0:-1] + "]"
110
debug('Metakit: getting view: %s'%getstring,5)
111
vw = self.db.getas(getstring)
114
if data[key_index][1]=='int': #if typ of key is int
115
debug('Make ordered',3)
117
debug('Made ordered',3)
119
#debug('Make hash',3)
120
rhsh = self.db.getas("__%s_hash__[_H:I,_R:I]"%name)
122
#debug('Made hash!',3)
123
# Make sure our increment fields are right...
124
self.vw_to_name[vw]=name
125
debug('Investigate increment rows',3)
126
increment_rows = self.increment_vw.select(view=name)
128
#for field,row in self.increment_dict[name].items():
129
for dbrow in self.increment_vw.select(view=name):
131
debug("look at row for field:%s"%field,3)
132
svw=vw.sort(getattr(vw,field))
135
if tot>getattr(svw[-1],field):
136
print """WTF: increment dicts are foobared. If you see this message, please
137
submit a bug report with the terminal output included.
141
# Setting increment row's n to the highest number in our DB
142
dbrow.n = getattr(svw[-1],field)
143
debug('setup_table done!',2)
147
def type_to_metakit_type (self, typ):
148
"""Convert a generic database type to a metakit property description."""
149
if typ.find('char') >= 0:
151
if typ.find('text') >= 0:
153
if typ.find('bool') >= 0:
155
if typ == 'unicode': return 's'
156
if typ == 'float': return "F"
157
if typ == 'int': return "I"
158
if typ == 'binary': return 'B'
160
raise "Can't Understand TYPE %s"%typ
163
"""Commit our metakit database to file."""
164
debug('saving database to file %s'%self.file,0)
165
debug('there are %s recipes in the database'%len(self.recipe_table),0)
170
def load (self, file=None):
173
debug('loading database from file %s'%self.file,0)
174
fo = open(self.file,'rb')
181
def search (self, table, colname, regexp, exact=0, use_regexp=True, recurse=True):
182
"""Handed a table, a column name, and a regular expression, search
183
for an item. Alternatively, the regular expression can just be a value."""
184
debug('search %(table)s, %(colname)s, %(regexp)s, %(exact)s, %(use_regexp)s, %(recurse)s'%locals(),5)
187
self.normalizations.has_key(colname)
189
isinstance(table,rdatabase.NormalizedView)
191
nsrch = self.search(self.normalizations[colname],colname,regexp,exact,use_regexp,recurse=False)
192
if not nsrch: return []
193
nsrch = nsrch.rename(colname,'text')
194
nsrch = nsrch.rename('id',colname)
195
rvw = table.join(nsrch,getattr(table.__view__,colname))
197
if type(regexp)==type(""):
199
if exact and not use_regexp: return table.select(**{colname:regexp})
200
if not use_regexp: regexp = re.escape(regexp)
202
indexvw = table.filter(lambda r: re.match(regexp, "%s"%getattr(r,colname)))
204
indexvw = table.filter(lambda r: re.search(regexp,"%s"%getattr(r,colname),re.I))
206
resultvw = table.remapwith(indexvw)
207
resultvw = resultvw.unique()
211
def ings_search (self, ings, keyed=None, recipe_table=None, use_regexp=True, exact=False):
212
"""Handed a list of regexps, return a list of recipes containing all
215
recipe_table = self.ing_search(i,keyed=keyed,recipe_table=recipe_table,exact=exact,use_regexp=use_regexp)
218
def joined_search (self, table1, table2, search_by, search_str,
219
use_regexp=True, exact=False, join_on='id'):
220
table2 = table2.join(table1,getattr(table1,join_on))
221
vw = self.search(table2, search_by, search_str, use_regexp=use_regexp, exact=exact)
223
result_ids = vw.counts(getattr(vw,join_on),
225
props = result_ids.properties()
226
#del props['joinedcount']
227
result_ids = result_ids.project(props[join_on])
228
return table1.join(result_ids,getattr(result_ids,join_on))
230
def filter (self, table, func):
231
ivw = table.filter(func)
233
return table.remapwith(ivw)
237
# convenience function
238
def delete_by_criteria (self, table, criteria):
239
"""Delete table by criteria"""
240
cur = table.select(**criteria)
242
table.remove(table.indices(cur))
244
# Our versions of convenience functions for adding/modifying
247
def do_add_rec (self, rdict):
248
self.remove_unicode(rdict)
249
debug('adding recipe: %s'%rdict,5)
250
r=rdatabase.RecData.do_add_rec(self,rdict)
254
def do_modify_rec (self, rec, dic):
255
if not rec or not dic: return
256
# This is a bit ugly, but we need to grab the rec object
257
# afresh for changes to "stick".
259
rec = self.get_rec(rid)
261
print 'Odd: we find no recipe for ID ',rid
262
print 'We cannot modify it with: ',dic
264
for k,v in dic.items():
267
debug('do_modify_rec: setattr %s %s->%s'%(rec,k,v),10)
270
debug("Warning: rec has no attribute %s (tried to set to %s)" %(k,v),1)
271
debug('running hooks',3)
272
self.run_hooks(self.modify_hooks,rec)
274
## delete this code when we've figured out wtf is going on with this not sticking
275
#for attr in dic.keys():
276
# debug('modified recipe %s->%s'%(attr,getattr(rec,attr)),1)
279
def do_add_ing (self, ingdic):
280
"""Add ingredient to ingredients_table based on ingdict and return
281
ingredient object. Ingdict contains:
285
key: keyed descriptor
286
alternative: not yet implemented (alternative)
288
optional: True|False (boolean)
289
position: INTEGER [position in list]
290
refid: id of reference recipe. If ref is provided, everything
291
else is irrelevant except for amount.
293
self.remove_unicode(ingdic)
294
if ingdic.has_key('amount') and not ingdic['amount']: del ingdic['amount']
295
self.ingredients_table.append(ingdic)
296
if self.add_ing_hooks: self.run_hooks(self.add_ing_hooks, self.ingredients_table[-1])
298
return self.ingredients_table[-1]
300
def delete_ing (self, ing):
301
self.ingredients_table.delete(ing.__index__)
304
# Convenience functions
305
def fetch_one (self, table, *args, **kwargs):
307
indx,cnt=table.locate(*args,**kwargs)
312
new_indx = table.find(*args,**kwargs)
314
return table[new_indx]
316
rows = table.select(*args,**kwargs)
320
def remove_unicode (self, mydict):
321
for k,v in mydict.items():
322
if v.__class__ == u'hello'.__class__:
323
mydict[k]=v.encode('utf8','replace')
324
if k.__class__ == u'hello'.__class__:
326
mydict.__delitem__(k)
327
mydict[k.encode('utf8','replace')] = v
329
def increment_field (self, table, field):
332
table = self.vw_to_name[table]
335
table = self.vw_to_name[table.__view__]
337
print "I don't know about the table ",table,'(',field,')'
339
row = self.fetch_one(self.increment_vw,
343
print 'Here are the guts of increment_vw:'
344
metakit.dump(self.increment_vw)
345
raise 'Very odd: we find no row for table: %s, field: %s'%(table,field)
349
# Backup / Restructuring methods -- these are special methods to
350
# help us with our restructuring of the database - updating from
353
def move_old_tables (self):
354
self._move_row(table='ingredients',old=('group','text'),new=('inggroup','text'))
355
self._move_row(table='ingredients',old=('key','char(200)'),new=('ingkey','char(200)'))
356
self._move_row(table='shopcats',old=('key','char(50)'),new=('shopkey','char(50)'))
357
self._move_row(table='density',old=('key','char(50)'),new=('dkey','char(50)'))
358
self._move_row(table='convtable',old=('key','char(50)'),new=('ckey','char(50)'))
359
self._move_row(table='crossunitdict',old=('key','char(50)'),new=('cukey','char(50)'))
360
self._move_row(table='unitdict',old=('key','char(50)'),new=('ukey','char(50)'))
361
# If our recipe or ingredient tables are of the wrong type,
362
# we're simply going to dump the recipe portion of our DB into
363
# a file and reload it. This is ugly, but changing row types
364
# is simply too tricky in metakit and I've had it with
365
# segfaults and data corruption! At the very least, this
366
# method ensures the user has an XML copy of their data if the
367
# DB gets screwed up.
369
(hasattr(self.contentview[0],'ingredients') and
370
(self._row_type('ingredients','optional') != 'i' or
371
self._row_type('ingredients','id')!='i' or
372
self._row_type('ingredients','refid')!='i')
375
(hasattr(self.contentview[0],'recipe') and
376
(self._row_type('recipe','id')!='i'
377
or self._row_type('recipe','rating')!='i'
378
or self._row_type('recipe','cooktime')!='i' # To be implemented in the future
379
or self._row_type('recipe','preptime')!='i'
380
or self._row_type('recipe','cuisine')!='i' # of if we're not normalized
381
or (hasattr(self.contentview[0],'recipe') and # of ir we still have 1-category-only
382
hasattr(self.contentview[0].recipe,'category')
387
debug('cleaning rec table and dumping data',1)
388
self.clean_recs_table_and_dump_data()
390
def copy_table (self, old_db, table_name, table_cols,
391
prog=None,convert_pickles=False):
392
"""Copy columns of table from old database to ourselves.
394
Old database should be an instance of RecDataOld. Any format
395
changes that need to happen must happen magically, not here."""
396
oldvw = getattr(old_db,table_name)
398
newvw = getattr(self,table_name)
402
try: return pickle.loads(o)
404
for n,row in enumerate(oldvw):
405
if n % 10: prog(float(n)/tot, "Copying other data...%s"%table_name)
408
dict([(col,unpickle(getattr(row,col))) for col in table_cols])
412
dict([(col,getattr(row,col)) for col in table_cols])
415
def clean_recs_table_and_dump_data (self):
416
"""Clean out our recipe table and dump our data for later recovery.
417
We return an old version of the DB and a file with our recipes.
420
self._backup_database_file()
421
# Get ourselves out of memory
422
subrm = RecDataOldDB(self.backupfile)
423
from gourmet.exporters import gxml2_exporter
424
# dump our recipe db to a backup file
425
dumpfile = os.path.join(
426
os.path.split(self.file)[0],
427
'recipe_backup_%s.grmt'%(time.strftime('%m-%d-%y'))
429
self._backup_database_and_make_progress_dialog(dumpfile)
430
ofi = file(dumpfile,'w')
431
gxml2_exporter.recipe_table_to_xml(
436
progress_func=lambda p,m: self.pd.set_progress(p*0.5,m)
439
# Now we drop our tables...
440
#self.db.getas('ingredients')
441
#self.db.getas('recipe')
445
# Move our current file out of the way so we can start a new
446
# one, deleting reference to the file so Windows won't
449
shutil.move(self.file,self.file+'.trash')
450
# Restart our connection
451
self.initialize_connection()
452
# and later on, we'd better import file
453
self.import_when_done = subrm,dumpfile
455
def _change_row_types (self, changes):
456
"""Change row named 'name' from type 'old' to type 'new'
458
changes = [(table, name, old, new, converter),...]
460
We have to make all changes at once or bad things will happen.
462
# This is quite a complex little problem in pymetakit. I got
463
# help from Brian Kelley <fustigator@gmail.com> over the
464
# metakit mailing list on this one. Basically, in order to
465
# change the datatype of a column we have to add a dummy
466
# column, drop the old column, add the old column back, and
467
# copy the information over from our dummy. This is made even
468
# stranger by the fact that to drop a column in metakit, you
469
# have to call getas() without the column and then call
470
# commit() and nuke any reference to the db.
472
# if we don't have this table yet, then we don't need to do anything
477
for table,name,old,new,converter in changes:
478
if not hasattr(self.contentview[0],table):
480
if not default_descs.has_key(table):
481
default_descs[table]=self.db.description(table)
482
if not dummy_descs.has_key(table):
483
dummy_descs[table]=default_descs[table]
484
if self._row_type(table,name) == self.type_to_metakit_type(new).lower():
486
self._backup_database()
487
# we create a view with our old self and a new temporary self
488
DUMMIES[name] = 'TMP%s'%name
489
if not change_dic.has_key(table):
491
change_dic[table][name]={}
492
change_dic[table][name]['old']=old
493
change_dic[table][name]['new']=new
494
change_dic[table][name]['converter']=converter
495
self._move_row(table,(name,old),(DUMMIES[name],old))
496
# We're going to modify our default setup arguments to
497
# drop the proper column and add the new one in its sted
498
# before committing. This allows us to "drop" the old
499
# version of column 'name'
500
dummy_descs[table] = re.sub("(,|^)%s:"%re.escape(name),
501
r"\1%s:"%re.escape(DUMMIES[name]),
504
# Drop our old columns...
505
for table,dummy_desc in dummy_descs.items():
506
self.db.getas("%s[%s]"%(table,dummy_desc))
507
debug('dropping columns by committing database',3)
509
debug('deleting reference to our db',3)
511
debug('reinitialize our connection to a new db',3)
512
#self.initialize_connection() # reinitialize ourselves
513
#with our new DUMMYNAME column
514
self.db = metakit.storage(self.file,1)
515
self.contentview = self.db.contents()
516
# now we get our new self as a new datatype and copy our
517
# new information over...
518
# Loop through the changes we have to make
519
for table,cd in change_dic.items():
520
for name,change in cd.items():
521
newvw = self.setup_table(table,[(name,change['new']),(DUMMIES[name],change['old'])])
522
vw = newvw.filter(lambda x: getattr(x,DUMMIES[name]))
523
to_move_vw = newvw.remapwith(vw)
524
debug('converting attributes',4)
525
for n,r in enumerate(to_move_vw):
526
# convert our attributes
527
setattr(r,name,change['converter'](getattr(r,DUMMIES[name])))
528
# erase our temporary/holder attribute
529
setattr(r,DUMMIES[name],None)
530
debug('moved attribute %s times'%n,3)
531
default_descs[table] = re.sub(
532
"(,|^)%s:%s"%(name,self.type_to_metakit_type(change['old'])),
533
r"\1%s:%s"%(name,self.type_to_metakit_type(change['new'])),
536
for table,finished_desc in default_descs.items():
537
self.db.getas("%s[%s]"%(table,finished_desc)) #setup our table with the right attrs
538
self.db.commit() # and drop our dummy column
540
def _move_row (self, table, old, new, converter=None):
541
"""Move data from old (propertyname, type) to new (propertyname, type).
543
This is designed for backwards compatability (to allow
544
for other database backends)."""
545
debug('_move_row(table=%(table)s old=%(old)s new=%(new)s converter=%(converter)s'%locals(),1)
546
if not hasattr(self.contentview[0],table) or not hasattr(getattr(self.contentview[0],table),old[0]):
547
debug('Old property %s doesn\'t exist'%old[0],9)
549
tmpantry_table = self.setup_table(table, [new,old])
550
vw = tmpantry_table.filter(lambda x: getattr(x,old[0]))
551
to_move_vw = tmpantry_table.remapwith(vw)
552
to_move = len(to_move_vw)
554
self._backup_database()
557
setattr(r,new[0],converter(getattr(r,old[0])))
559
setattr(r,new[0],getattr(r,old[0]))
560
setattr(r,old[0],None)
562
def _backup_database_file (self):
563
"""Create a backup copy of our database tagged with ".OLD" in case we muck things up."""
564
if not self.backed_up:
565
backupfile = self.file + '.OLD'
567
while os.path.exists(backupfile):
568
backupfile = re.split('[0-9]*[.]OLD',backupfile)[0]
569
backupfile = backupfile + str(n) + '.OLD'
571
shutil.copy(self.file, backupfile)
572
self.backupfile = backupfile
573
self.backed_up = True
575
def _backup_database (self):
576
if not self.backed_up:
577
self._backup_database_file()
579
Your database will not work with older
581
A backup has been saved in %s"""%self.backupfile
582
import gourmet.dialog_extras, gourmet.version
583
gourmet.dialog_extras.show_message(
584
label=self.database_change_title,
585
sublabel=self.database_change_message%{'backupfile':self.backupfile},
588
def _backup_database_and_make_progress_dialog (self, xmlbackup):
589
self._backup_database_file()
590
from gourmet.dialog_extras import ProgressDialog
591
subl = self.database_change_message%{'backupfile':self.backupfile}
593
subl += _("In case anything goes wrong, a backup copy of your recipe database is being exported to %s")%xmlbackup
594
self.pd = ProgressDialog(
595
title=_("Transferring data to new database format"),
596
label=self.database_change_title,
602
def _row_type (self, table, name):
603
"""Return the row type for the table named name"""
604
if not hasattr(self.contentview[0],table): return
605
prop = getattr(getattr(self.contentview[0],table),name) # get ourselves the property object
606
return prop.type.lower()
608
class RecDataOldDB (RecData):
609
"""We initialize our database with whatever the old settings were.
611
This is done to simplify database transitions, or rather, to move
612
the onus of db transitions onto our gxml exporters. Those
613
exporters are now responsible for exporting
618
def initialize_connection (self):
619
# identical to parent class, except that we open the DB read-only
620
debug('using file: %s'%self.file,1)
621
self.file = str(os.path.expandvars(os.path.expanduser(self.file)))
622
mydir = os.path.split(self.file)[0]
623
# create the directory if it does not yet exist
624
if not os.path.exists(mydir):
626
self.db = metakit.storage(self.file,0) #filename must be string, not unicode
627
self.contentview=self.db.contents()
629
def setup_tables (self):
630
self.NORMALIZED_TABLES = []
631
rdatabase.RecData.setup_tables(self)
632
# We have some columns that need renaming...
633
for table,old,new in [('shopcats_table','shopkey','ingkey'),
634
('shopcats_table','category','shopcategory'),
635
('shopcatsorder_table','category','shopcategory'),
636
('pantry_table','itm','ingkey'),]:
637
if hasattr(getattr(self,table,),old):
638
setattr(self,table,getattr(self,table).rename(old,new))
640
def setup_table (self, name, data, key=None):
642
desc = self.db.description(name)
645
getstring = name+'['+desc+']'
646
db = self.db.getas(getstring)
648
rhsh = self.db.getas("__%s_hash__[_H:I,_R:I]"%name)
652
class RecipeManager (RecData,rdatabase.RecipeManager):
653
def __init__ (self, file=os.path.join(gglobals.gourmetdir,'recipes.mk')):
654
RecData.__init__(self,file)
655
self.km = keymanager.KeyManager(rm=self)
658
dbDic = rdatabase.dbDic
660
class MetakitUnitTest (rdatabase.DatabaseUnitTest):
    """Run the generic database unit-test suite against the metakit backend."""
    # Backend class under test and the kwargs used to construct it.
    db_class = RecipeManager
    db_kwargs = {'file':'/tmp/test3.mk'}
664
if __name__ == '__main__':
667
fi = '/tmp/fooeybooey'
669
#while os.path.exists(fi+str(n)+'.mk'):
671
#MetakitUnitTest.db_kwargs['file']=fi+str(n)+'.mk'
678
db = RecipeManager(MetakitUnitTest.db_kwargs['file'])
679
rdatabase.test_db(db)