from unittest import defaultTestLoader

from storm.properties import Int
from storm.info import get_obj_info
from storm.cache import Cache, GenerationalCache

from tests.helper import TestHelper


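# StubObjectInfo stands in for Storm's real obj_info objects.  It records
# whether __hash__ has been called (self.hashed), which the size-zero tests
# below use to check that a disabled cache never even hashes the object.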
class StubObjectInfo(object):

    def __init__(self, id):
        self.id = id
        self.hashed = False

    def get_obj(self):
        return str(self.id)

    def __repr__(self):
        return "%s(%s)" % (self.__class__.__name__, self.id)

    def __hash__(self):
        self.hashed = True
        return self.id

    def __lt__(self, other):
        return self.id < other.id


class StubClass(object):

    __storm_table__ = "stub_class"

    id = Int(primary=True)


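# BaseCacheTest holds the behaviour shared by both cache implementations.
# The Cache class attribute selects the implementation under test;
# TestGenerationalCache below overrides it with GenerationalCache so the
# same tests run against both caches.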
class BaseCacheTest(TestHelper):

    Cache = Cache

    def setUp(self):
        super(BaseCacheTest, self).setUp()
        self.obj_infos = [StubObjectInfo(i) for i in range(10)]
        for i in range(len(self.obj_infos)):
            setattr(self, "obj%d" % (i+1), self.obj_infos[i])

    def clear_hashed(self):
        for obj_info in self.obj_infos:
            obj_info.hashed = False

    def test_initially_empty(self):
        cache = self.Cache()
        self.assertEqual(cache.get_cached(), [])

    def test_add(self):
        cache = self.Cache(5)
        cache.add(self.obj1)
        cache.add(self.obj2)
        cache.add(self.obj3)
        self.assertEquals(sorted(cache.get_cached()),
                          [self.obj1, self.obj2, self.obj3])

    def test_adding_similar_obj_infos(self):
        """If __eq__ is broken, this fails."""
        obj_info1 = get_obj_info(StubClass())
        obj_info2 = get_obj_info(StubClass())
        cache = self.Cache(5)
        cache.add(obj_info1)
        cache.add(obj_info2)
        cache.add(obj_info2)
        cache.add(obj_info1)
        self.assertEquals([hash(obj_info) for obj_info in cache.get_cached()],
                          [hash(obj_info1), hash(obj_info2)])

    def test_remove(self):
        cache = self.Cache(5)
        cache.add(self.obj1)
        cache.add(self.obj2)
        cache.add(self.obj3)
        cache.remove(self.obj2)
        self.assertEquals(sorted(cache.get_cached()),
                          [self.obj1, self.obj3])

    def test_add_existing(self):
        cache = self.Cache(5)
        cache.add(self.obj1)
        cache.add(self.obj2)
        cache.add(self.obj3)
        cache.add(self.obj2)
        self.assertEquals(sorted(cache.get_cached()),
                          [self.obj1, self.obj2, self.obj3])

    def test_add_with_size_zero(self):
        """Cache is disabled entirely on add() if size is 0."""
        cache = self.Cache(0)
        cache.add(self.obj1)
        # Ensure that we don't even check if obj_info is in the
        # cache, by testing if it was hashed. Hopefully, that means
        # we got a faster path.
        self.assertEquals(self.obj1.hashed, False)

    def test_remove_with_size_zero(self):
        """Cache is disabled entirely on remove() if size is 0."""
        cache = self.Cache(0)
        cache.remove(self.obj1)

    def test_clear(self):
        """The clear method empties the cache."""
        cache = self.Cache(5)
        for obj_info in self.obj_infos:
            cache.add(obj_info)
        cache.clear()
        self.assertEquals(cache.get_cached(), [])

        # Just an additional check ensuring that any additional structures
        # which may be used were cleaned properly as well.
        for obj_info in self.obj_infos:
            self.assertEquals(cache.remove(obj_info), False)

    def test_set_zero_size(self):
        """
        Setting a cache's size to zero clears the cache.
        """
        cache = self.Cache()
        cache.add(self.obj1)
        cache.add(self.obj2)
        cache.set_size(0)
        self.assertEquals(cache.get_cached(), [])

    def test_fit_size(self):
        """
        A cache of size n can hold at least n objects.
        """
        size = 10
        cache = self.Cache(size)
        for value in xrange(size):
            cache.add(StubObjectInfo(value))
        self.assertEqual(len(cache.get_cached()), size)


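# CacheTest runs the shared tests against the plain strong-reference Cache
# and adds checks for its FIFO-style eviction order and for resizing via
# set_size().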
class CacheTest(BaseCacheTest):

    def test_size_and_fifo_behaviour(self):
        cache = Cache(5)
        for obj_info in self.obj_infos:
            cache.add(obj_info)
        self.assertEquals([obj_info.id for obj_info in cache.get_cached()],
                          [9, 8, 7, 6, 5])

    def test_reduce_max_size_to_zero(self):
        """When setting the size to zero, there's an optimization."""
        cache = Cache(5)
        obj_info = self.obj_infos[0]
        cache.add(obj_info)
        obj_info.hashed = False
        cache.set_size(0)
        self.assertEquals(cache.get_cached(), [])
        # Ensure that we don't even check if obj_info is in the
        # cache, by testing if it was hashed. Hopefully, that means
        # we got a faster path.
        self.assertEquals(obj_info.hashed, False)

    def test_reduce_max_size(self):
        cache = Cache(5)
        for obj_info in self.obj_infos:
            cache.add(obj_info)
        cache.set_size(3)
        self.assertEquals([obj_info.id for obj_info in cache.get_cached()],
                          [9, 8, 7])

        # Adding items past the new maximum size should drop older ones.
        for obj_info in self.obj_infos[:2]:
            cache.add(obj_info)
        self.assertEquals([obj_info.id for obj_info in cache.get_cached()],
                          [1, 0, 9])

    def test_increase_max_size(self):
        cache = Cache(5)
        for obj_info in self.obj_infos:
            cache.add(obj_info)
        cache.set_size(10)
        self.assertEquals([obj_info.id for obj_info in cache.get_cached()],
                          [9, 8, 7, 6, 5])

        # Adding items past the new maximum size should drop older ones.
        for obj_info in self.obj_infos[:6]:
            cache.add(obj_info)
        self.assertEquals([obj_info.id for obj_info in cache.get_cached()],
                          [5, 4, 3, 2, 1, 0, 9, 8, 7, 6])


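# TestGenerationalCache runs the shared tests against GenerationalCache.
# As the docstrings below describe, objects are kept in two generations
# ("new" and "old"); when the new generation fills up it becomes the old
# one, so the cache may hold up to twice its nominal size at any moment.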
class TestGenerationalCache(BaseCacheTest):

    Cache = GenerationalCache

    def setUp(self):
        super(TestGenerationalCache, self).setUp()
        self.obj1 = StubObjectInfo(1)
        self.obj2 = StubObjectInfo(2)
        self.obj3 = StubObjectInfo(3)
        self.obj4 = StubObjectInfo(4)

    def test_initially_empty(self):
        cache = GenerationalCache()
        self.assertEqual(cache.get_cached(), [])

    def test_cache_one_object(self):
        cache = GenerationalCache()
        cache.add(self.obj1)
        self.assertEqual(cache.get_cached(), [self.obj1])

    def test_cache_multiple_objects(self):
        cache = GenerationalCache()
        cache.add(self.obj1)
        cache.add(self.obj2)
        self.assertEqual(sorted(cache.get_cached()), [self.obj1, self.obj2])

    def test_clear_cache(self):
        cache = GenerationalCache()
        cache.add(self.obj1)
        cache.clear()
        self.assertEqual(cache.get_cached(), [])

    def test_clear_cache_clears_the_second_generation(self):
        cache = GenerationalCache(1)
        cache.add(self.obj1)
        cache.add(self.obj2)
        cache.clear()
        self.assertEqual(cache.get_cached(), [])

    def test_remove_object(self):
        cache = GenerationalCache()
        cache.add(self.obj1)
        cache.add(self.obj2)
        cache.add(self.obj3)

        present = cache.remove(self.obj2)
        self.assertTrue(present)
        self.assertEqual(sorted(cache.get_cached()), [self.obj1, self.obj3])

    def test_remove_nothing(self):
        cache = GenerationalCache()
        cache.add(self.obj1)

        present = cache.remove(self.obj2)
        self.assertFalse(present)
        self.assertEqual(cache.get_cached(), [self.obj1])

    def test_size_limit(self):
        """
        A cache will never hold more than twice its size in objects. The
        generational system is what prevents it from holding exactly the
        requested number of objects.
        """
        size = 10
        cache = GenerationalCache(size)
        for value in xrange(5 * size):
            cache.add(StubObjectInfo(value))
        self.assertEquals(len(cache.get_cached()), size * 2)

    def test_set_size_smaller_than_current_size(self):
        """
        Setting the size to a smaller size than the number of objects
        currently cached will drop some of the extra content. Note that
        because of the generation system, it can actually hold two times
        the size requested in edge cases.
        """
        cache = GenerationalCache(150)
        for i in range(250):
            cache.add(StubObjectInfo(i))
        cache.set_size(100)
        cached = cache.get_cached()
        self.assertEquals(len(cached), 100)
        for obj_info in cache.get_cached():
            self.assertTrue(obj_info.id >= 100)

    def test_set_size_larger_than_current_size(self):
        """
        Setting the cache size to something more than the number of
        objects in the cache does not affect its current contents,
        and will merge any elements from the second generation into
        the first one.
        """
        cache = GenerationalCache(1)
        cache.add(self.obj1)  # new=[1] old=[]
        cache.add(self.obj2)  # new=[2] old=[1]
        cache.set_size(2)     # new=[1, 2] old=[]
        cache.add(self.obj3)  # new=[3] old=[1, 2]
        self.assertEqual(sorted(cache.get_cached()),
                         [self.obj1, self.obj2, self.obj3])

    def test_set_size_limit(self):
        """
        Setting the size limits the cache's size just like passing an
        initial size would.
        """
        size = 10
        cache = GenerationalCache(size * 100)
        cache.set_size(size)
        for value in xrange(size * 10):
            cache.add(StubObjectInfo(value))
        self.assertEquals(len(cache.get_cached()), size * 2)

    def test_two_generations(self):
        """
        Inserting more objects than the cache's size causes the cache
        to contain two generations, each holding up to <size> objects.
        """
        cache = GenerationalCache(1)
        cache.add(self.obj1)
        cache.add(self.obj2)

        self.assertEqual(sorted(cache.get_cached()), [self.obj1, self.obj2])

    def test_three_generations(self):
        """
        If more than 2*<size> objects come along, only 2*<size>
        objects are retained.
        """
        cache = GenerationalCache(1)
        cache.add(self.obj1)
        cache.add(self.obj2)
        cache.add(self.obj3)

        self.assertEqual(sorted(cache.get_cached()), [self.obj2, self.obj3])

    def test_generational_overlap(self):
        """
        An object that is both in the primary and the secondary
        generation is listed only once in the cache's contents.
        """
        cache = GenerationalCache(2)
        cache.add(self.obj1)  # new=[1] old=[]
        cache.add(self.obj2)  # new=[1, 2] old=[]
        cache.add(self.obj3)  # new=[3] old=[1, 2]
        cache.add(self.obj1)  # new=[3, 1] old=[1, 2]

        self.assertEqual(sorted(cache.get_cached()),
                         [self.obj1, self.obj2, self.obj3])

    def test_remove_from_overlap(self):
        """
        Removing an object from the cache removes it from both its
        primary and secondary generations.
        """
        cache = GenerationalCache(2)
        cache.add(self.obj1)  # new=[1] old=[]
        cache.add(self.obj2)  # new=[1, 2] old=[]
        cache.add(self.obj3)  # new=[3] old=[1, 2]
        cache.add(self.obj1)  # new=[3, 1] old=[1, 2]

        present = cache.remove(self.obj1)
        self.assertTrue(present)
        self.assertEqual(sorted(cache.get_cached()), [self.obj2, self.obj3])

    def test_evict_oldest(self):
        """The "oldest" object is the first to be evicted."""
        cache = GenerationalCache(1)
        cache.add(self.obj1)
        cache.add(self.obj2)
        cache.add(self.obj3)

        self.assertEqual(sorted(cache.get_cached()), [self.obj2, self.obj3])

    def test_evict_LRU(self):
        """
        Actually, it's not the oldest but the LRU object that is first
        to be evicted.  Re-adding the oldest object makes it not be
        the LRU.
        """
        cache = GenerationalCache(1)
        cache.add(self.obj1)
        cache.add(self.obj2)

        # This "refreshes" the oldest object in the cache.
        cache.add(self.obj1)

        cache.add(self.obj3)

        self.assertEqual(sorted(cache.get_cached()), [self.obj1, self.obj3])


def test_suite():
    return defaultTestLoader.loadTestsFromName(__name__)