1
# -*- test-case-name: axiom.test -*-
4
from twisted.python.reflect import qual
6
from axiom import slotmachine, _schema
8
from axiom.attributes import SQLAttribute, ColumnComparer, inmemory
# Global registry mapping each Atop typeName string to the most recently
# defined (i.e. current) Item subclass carrying that name.  Maintained by
# MetaItem.__new__ and consulted by upgradeVersion/dummyItemSubclass.
_typeNameToMostRecentClass = {}
class NoInheritance(RuntimeError):
    """
    Inheritance is as-yet unsupported by XAtop.
    """
class NotInStore(RuntimeError):
    """
    Raised when an operation that requires a Store (such as checkpointing)
    is attempted on an Item that has not been added to one.

    NOTE(review): the original docstring was lost in extraction; this text
    is reconstructed from the exception's use in checkpoint().
    """
class MetaItem(slotmachine.SchemaMetaMachine):
    """Simple metaclass for Item that adds Item (and its subclasses) to
    _typeNameToMostRecentClass mapping.
    """

    def __new__(meta, name, bases, dictionary):
        T = slotmachine.SchemaMetaMachine.__new__(meta, name, bases, dictionary)
        if T.__name__ == 'Item' and T.__module__ == __name__:
            # The base Item class itself is not registered.
            return T
        T.__already_inherited__ += 1
        if T.__already_inherited__ >= 2:
            raise NoInheritance("already inherited from item once: "
                                "in-database inheritance not yet supported")
        if T.typeName is None:
            raise NotImplementedError(
                "%s did not specify a typeName attribute" % (qual(T),))
        if T.schemaVersion is None:
            raise NotImplementedError(
                "%s did not specify a schemaVersion attribute" % (qual(T),))
        if T.typeName in _typeNameToMostRecentClass:
            # NOTE(review): two lines were lost here in extraction; legacy
            # (dummy) classes must bypass the duplicate-typeName check,
            # since dummyItemSubclass registers under an existing typeName.
            # Confirm against upstream history.
            if T.__legacy__:
                return T
            raise RuntimeError("2 definitions of atop typename %r: %r %r" % (
                T.typeName, T, _typeNameToMostRecentClass[T.typeName]))
        _typeNameToMostRecentClass[T.typeName] = T
        return T
def TABLE_NAME(typename, version):
    """
    Return the SQL table name used for a given item typeName at a given
    schema version, e.g. TABLE_NAME('account', 3) -> 'item_account_v3'.
    """
    return "item_%s_v%d" % (typename, version)
class _SpecialStoreIDAttribute(slotmachine.SetOnce):
    """
    Descriptor for Item.storeID: class-level access yields a ColumnComparer
    so storeID can participate in query comparisons, while instance-level
    access behaves like an ordinary set-once attribute.
    """

    def __get__(self, oself, type=None):
        # Accessed on the class (oself is None): produce a comparer for
        # query construction instead of a value.
        if type is not None and oself is None:
            return ColumnComparer(self, type)
        return super(_SpecialStoreIDAttribute, self).__get__(oself, type)

    # attributes required by ColumnComparer
    # NOTE(review): this assignment and the infilter body were lost in
    # extraction; 'oid' is the storeID column used by the generated SQL in
    # this module — confirm against upstream history.
    columnName = attrname = 'oid'

    def infilter(self, pyval, oself):
        # storeID values pass through to SQL unchanged.
        return pyval
# Python-Special Attributes
__metaclass__ = MetaItem

# Atop-Special Attributes
__dirty__ = inmemory()
# NOTE(review): a line was lost here in extraction; __legacy__ must
# default to False (MetaItem and checkpoint both consult it) — confirm
# against upstream history.
__legacy__ = False
__already_inherited__ = 0

__store = inmemory()          # underlying reference to the store.

__everInserted = inmemory()   # has this object ever been inserted into the
                              # database?

__justCreated = inmemory()    # was this object just created, i.e. is there
                              # no committed database representation of it
                              # yet?

__deleting = inmemory()       # has this been marked for deletion at
                              # checkpoint?

__deletingObject = inmemory() # being marked for deletion at checkpoint,
                              # are we also deleting the central object row
                              # (True: as in an actual delete) or are we
                              # simply deleting the data row (False: as in
                              # an upgrade? -- tail of original comment
                              # lost in extraction)

storeID = _SpecialStoreIDAttribute(default=None)
def store():
    # NOTE(review): several lines of this nested-accessor property were
    # lost in extraction (the outer def, the getter, and the autocommit
    # hook); they are reconstructed here — confirm against upstream
    # history.  The structure is grounded by the surviving
    # `store = property(*store())` line below.
    def get(self):
        return self.__store
    def set(self, store):
        if self.__store is not None:
            raise AttributeError(
                "Store already set - can't move between stores")
        self.__store = store
        # Allocate this item's oid row in the central object table.
        oid = self.storeID = self.store.executeSQL(
            _schema.CREATE_OBJECT, [self.store.getTypeID(type(self))])
        store.objectCache.cache(oid, self)
        if store.autocommit:
            self.checkpoint()
    doc = """
    A reference to a Store; when set for the first time, inserts this object
    into that store.  Cannot be set twice; once inserted, objects are
    'stuck' to a particular store and must be copied by creating a new
    Item.
    """
    return get, set, doc
store = property(*store())
# XXX: Think about how to do this _safely_ (e.g. not recursing infinitely
# through circular references) before turning it on
# (NOTE: this commented-out sketch is incomplete; several of its lines were
# lost in extraction)
# def __repr__(self):
#     L = [self.__name__]
#     for nam, atr in self.getSchema():
#         val = atr.__get__(self)
#         traceback.print_exc(file=sys.stdout)
#         A.append('%s=%s' % (nam, V))
#     A.append('storeID=' + str(self.storeID))
#     L.append(', '.join(A))
#     L.append('@' + str(id(self)))
def __subinit__(self, **kw):
    """
    Initializer called regardless of whether this object was created by
    instantiation or loading from the database.
    """
    # NOTE(review): this line reconstructs a gap in the extracted source;
    # __dirty__ must start out empty (checkpoint and revert rely on it).
    self.__dirty__ = {}
    to__store = kw.pop('__store', None)
    to__everInserted = kw.pop('__everInserted', False)
    self.__store = to__store
    self.__everInserted = to__everInserted
    self.__deletingObject = False
    self.__deleting = False
    tostore = kw.pop('store',None)
    # NOTE(review): the two lines below reconstruct a gap; assigning the
    # store property allocates this item's storeID row — confirm ordering
    # against upstream history.
    if tostore is not None:
        self.store = tostore

    for k, v in kw.iteritems():
        setattr(self, k, v)
def __init__(self, **kw):
    """
    Create a brand-new item, marking it as having no committed database
    representation yet, then run the shared initializer with the given
    keyword arguments (attribute values, plus optionally 'store').
    """
    self.__justCreated = True
    self.__subinit__(**kw)
def __finalizer__(self):
    # NOTE(review): the original body was lost in extraction; reconstructed
    # as a no-op finalizer callable (presumably invoked when the item falls
    # out of the store's object cache) — confirm against upstream history.
    return lambda: None
def existingInStore(cls, store, storeID, attrs):
    """
    Alternate constructor: build an instance for a row that already exists
    in the database, bypassing __init__ (so no INSERT is issued) and
    loading each schema attribute from the given row data.

    NOTE(review): the keyword continuation lines and the trailing return
    were lost in extraction and are reconstructed here (grounded by the
    pops in __subinit__).
    """
    self = cls.__new__(cls)
    self.__justCreated = False
    self.__subinit__(__store=store,
                     storeID=storeID,
                     __everInserted=True)
    scm = list(self.getSchema())
    assert len(scm) == len(attrs), "invalid number of attributes"
    for data, (nam, atr) in zip(attrs, scm):
        atr.loaded(self, data)
    return self

existingInStore = classmethod(existingInStore)
def getSchema(cls):
    """
    return all persistent class attributes
    """
    for nam, atr in cls.__attributes__:
        if isinstance(atr, SQLAttribute):
            # NOTE(review): the yield line reconstructs a gap in the
            # extracted source; getSchema is consumed elsewhere in this
            # module as an iterable of (name, attribute) pairs.
            yield (nam, atr)

getSchema = classmethod(getSchema)
def touch(self):
    # NOTE(review): the method name and early-return line were lost in
    # extraction; reconstructed as touch(), which registers this item with
    # the active transaction so it is checkpointed at commit.  Confirm the
    # name against upstream history.
    if self.store is None or self.store.transaction is None:
        return
    self.store.transaction.add(self)
def revert(self):
    # NOTE(review): the def line, the early return, and the query
    # continuation lines were lost in extraction; reconstructed as
    # revert(), which discards in-memory modifications by reloading
    # attribute values from the database (or, for a never-committed item,
    # simply dropping it from the cache).  Confirm against upstream.
    if self.__justCreated:
        # The SQL revert has already been taken care of.
        self.store.objectCache.uncache(self.storeID)
        return
    self.__dirty__.clear()
    dbattrs = self.store.querySQL(
        self.store.getTableQuery(self.typeName,
                                 self.schemaVersion),
        [self.storeID])[0]
    for data, (nam, atr) in zip(dbattrs, self.getSchema()):
        atr.loaded(self, data)
    self.__deleting = False
    self.__deletingObject = False
"""User-definable callback that is invoked when an object is well and truly
225
gone from the database; the transaction which deleted it has been
231
def committed(self):
    """
    Called after the database is brought into a consistent state with this
    object.
    """
    # NOTE(review): the deletion-bookkeeping lines were partially lost in
    # extraction; reconstructed from the surviving uncache call — confirm
    # against upstream history.
    if self.__deleting:
        self.deleted()
        self.store.objectCache.uncache(self.storeID)
        self.__store = None
    self.__justCreated = False
def checkpoint(self):
    """ Update the database to reflect in-memory changes made to this item; for
    example, to make it show up in store.query() calls where it is now
    valid, but was not the last time it was persisted to the database.

    This is called automatically when in 'autocommit mode' (i.e. not in a
    transaction) and at the end of each transaction for every object that
    has been changed.
    """
    if self.store is None:
        raise NotInStore("You can't checkpoint %r: not in a store" % (self,))

    if self.__deleting:
        self.store.executeSQL(self._baseDeleteSQL(), [self.storeID])
        if self.__deletingObject:
            # A real deletion: remove the central object row too.
            self.store.executeSQL(_schema.DELETE_OBJECT, [self.storeID])
        else:
            # Only the per-type data row goes away; that is only legitimate
            # for legacy (upgrade) classes.
            assert self.__legacy__

    if self.__everInserted:
        if not self.__dirty__:
            # we might have been checkpointed twice within the same
            # transaction; just don't do anything.
            return
        self.store.executeSQL(*self._updateSQL())
    else:
        # we are in the middle of creating the object.
        attrs = self.getSchema()
        # XXX this isn't atomic, gross.
        # NOTE(review): the leading [self.storeID] element reconstructs a
        # lost line; _baseInsertSQL emits len(attrs)+1 placeholders, so
        # exactly one extra leading value is required.
        self.store.executeSQL(self._baseInsertSQL(),
                              [self.storeID] +
                              [self.__dirty__.get(a[1].attrname) for a in attrs])
        self.__everInserted = True

    if self.store.autocommit:
        self.committed()
def upgradeVersion(self, typename, oldversion, newversion):
    """
    Migrate this item's row one schema version forward: locate the class
    for (typename, newversion), create an instance of it sharing this
    item's storeID, re-point the central object row at the new type, and
    retire this item's old data row.
    """
    # right now there is only ever one acceptable series of arguments here
    # but it is useful to pass them anyway to make sure the code is
    # functioning as expected
    assert typename == self.typeName
    assert oldversion == self.schemaVersion
    assert newversion == oldversion + 1
    key = typename, newversion
    T = None
    if key in _legacyTypes:
        T = _legacyTypes[key]
    elif typename in _typeNameToMostRecentClass:
        mostRecent = _typeNameToMostRecentClass[typename]
        if mostRecent.schemaVersion == newversion:
            T = mostRecent
    if T is None:
        raise RuntimeError("don't know about type/version pair %s:%d" % (
            typename, newversion))
    newTypeID = self.store.getTypeID(T) # call first to make sure the table
                                        # exists for doInsert below

    # set store privately so we don't hit the CREATE_OBJECT logic in
    # store's set() above; set storeID because it's already been allocated;
    # don't set __everInserted to True because we want to run insert logic
    new = T(__store=self.store,
            storeID=self.storeID)

    # AAAAA crap; this needs to be forced to fall out of cache in the case
    # of an in memory revert (not implemented yet)
    self.store.objectCache.cache(self.storeID, new)

    self.store.executeSQL(_schema.CHANGE_TYPE,
                          [newTypeID, self.storeID])
    # Drop the old-version data row but keep the central object row.
    self.deleteFromStore(False)
    # NOTE(review): the trailing lines were lost in extraction; returning
    # the upgraded item is the natural contract — confirm upstream.
    return new
def deleteFromStore(self, deleteObject=True):
    """
    Mark this item for deletion at the next checkpoint.  If deleteObject
    is True the central object row is removed as well; if False only the
    per-type data row goes away (as during an upgrade).
    """
    self.__deleting = True
    self.__deletingObject = deleteObject

    if self.store.autocommit:
        self.checkpoint()
# You _MUST_ specify version in subclasses
# NOTE(review): the two assignments below reconstruct lines lost in
# extraction; MetaItem raises NotImplementedError for classes that leave
# either of them None.
schemaVersion = None
typeName = None

###### SQL generation ######

def getTableName(cls):
    """
    Return the SQL table name holding this item type's data rows.
    """
    return TABLE_NAME(cls.typeName, cls.schemaVersion)

getTableName = classmethod(getTableName)
_cachedInsertSQL = None

def _baseInsertSQL(cls):
    """
    Return (caching per class) the INSERT statement that creates this
    item's data row: one leading oid column plus one column per schema
    attribute, all as '?' placeholders.
    """
    if cls._cachedInsertSQL is None:
        attrs = list(cls.getSchema())
        qs = ', '.join((['?']*(len(attrs)+1)))
        # NOTE(review): the ['oid'] head reconstructs a line lost in
        # extraction; the placeholder count above requires exactly one
        # extra leading column.
        cls._cachedInsertSQL = ('INSERT INTO '+
                                cls.getTableName()+' (' + ', '.join(
                                    ['oid'] +
                                    [a[1].attrname for a in attrs]) +
                                ') VALUES (' + qs + ')')
    return cls._cachedInsertSQL

_baseInsertSQL = classmethod(_baseInsertSQL)
_cachedDeleteSQL = None

def _baseDeleteSQL(cls):
    """
    Return (caching per class) the DELETE statement that removes this
    item's data row, addressed by oid.
    """
    if cls._cachedDeleteSQL is None:
        # NOTE(review): the tail of this statement was lost in extraction;
        # 'WHERE oid = ?' mirrors _updateSQL's row addressing — confirm
        # against upstream history.
        stmt = ' '.join(['DELETE FROM',
                         cls.getTableName(),
                         'WHERE oid = ?'])
        cls._cachedDeleteSQL = stmt
    return cls._cachedDeleteSQL

_baseDeleteSQL = classmethod(_baseDeleteSQL)
def _updateSQL(self):
    """
    Build an (UPDATE statement, argument list) pair writing every dirty
    attribute of this item back to its data row, addressed by oid.
    """
    # XXX no point in caching for every possible combination of attribute
    # values - probably.  check out how prepared statements are used in
    # python sometime.
    dirty = self.__dirty__.items()
    if not dirty:
        raise RuntimeError("Non-dirty item trying to generate SQL.")
    # NOTE(review): the join/WHERE lines reconstruct gaps in the extracted
    # source — confirm against upstream history.
    stmt = ' '.join([
        'UPDATE', self.getTableName(), 'SET',
        ( ', '.join(['%s = ?'] * len(dirty)) %
          tuple([d[0] for d in dirty])),
        'WHERE oid = ?'])
    args = [d[1] for d in dirty]
    args.append(self.storeID)
    return stmt, args
# Registry of generated legacy stand-in classes, keyed by the tuple
# (typeName, schemaVersion); populated by dummyItemSubclass below.
_legacyTypes = {} # map (typeName, schemaVersion) to dummy class
def dummyItemSubclass(typeName, schemaVersion, attributes, dummyBases):
    """
    Generate a dummy subclass of Item that will have the given attributes,
    and the base Item methods, but no methods of its own.  This is for use
    with upgrading.

    @param typeName: a string, the Atop TypeName to have attributes for.

    @param schemaVersion: an int, the (old) version of the schema this is a proxy
    for.

    @param attributes: a dict mapping {columnName: attr instance}

    @param dummyBases: a sequence of 4-tuples of (baseTypeName,
    baseSchemaVersion, baseAttributes, baseBases) representing the dummy bases
    of this legacy class.
    """
    if (typeName, schemaVersion) in _legacyTypes:
        return _legacyTypes[typeName, schemaVersion]
    # NOTE(review): the base-selection and type() argument lines were
    # partially lost in extraction; falling back to Item when no dummy
    # bases are given — confirm against upstream history.
    if dummyBases:
        realBases = [dummyItemSubclass(*A) for A in dummyBases]
    else:
        realBases = [Item]
    attributes = attributes.copy()
    attributes['__module__'] = 'item_dummy'
    attributes['__legacy__'] = True
    attributes['typeName'] = typeName
    attributes['schemaVersion'] = schemaVersion
    result = type(str('DummyItem<%s,%d>' % (typeName, schemaVersion)),
                  tuple(realBases),
                  attributes)
    assert result is not None, 'wtf, %r' % (type,)
    _legacyTypes[(typeName, schemaVersion)] = result