"File-based cache backend"

import os
import time
try:
    import cPickle as pickle
except ImportError:
    import pickle

from django.core.cache.backends.base import BaseCache
from django.utils.hashcompat import md5_constructor

class CacheClass(BaseCache):
    """
    File-based cache backend.

    Each cache entry lives in its own file containing two consecutive
    pickles: the expiry timestamp followed by the cached value.  Keys are
    hashed with md5 and spread across two levels of subdirectories so no
    single directory accumulates too many files.
    """

    def __init__(self, dir, params):
        """
        Set up the backend.

        ``dir`` is the root cache directory (created on demand).
        ``params`` may carry ``max_entries`` (default 300) and
        ``cull_frequency`` (default 3) in addition to whatever
        ``BaseCache`` itself consumes.
        """
        BaseCache.__init__(self, params)

        # Fall back to the defaults when an option is missing or not
        # convertible to an int.
        max_entries = params.get('max_entries', 300)
        try:
            self._max_entries = int(max_entries)
        except (ValueError, TypeError):
            self._max_entries = 300

        cull_frequency = params.get('cull_frequency', 3)
        try:
            self._cull_frequency = int(cull_frequency)
        except (ValueError, TypeError):
            self._cull_frequency = 3

        self._dir = dir
        if not os.path.exists(self._dir):
            self._createdir()

    def add(self, key, value, timeout=None):
        """Store ``value`` under ``key`` only if ``key`` is not cached yet."""
        if self.has_key(key):
            return None
        self.set(key, value, timeout)

    def get(self, key, default=None):
        """
        Return the cached value for ``key``, or ``default`` on a miss.

        An expired entry is deleted on access; any I/O or unpickling
        failure is treated as a cache miss rather than an error.
        """
        fname = self._key_to_file(key)
        try:
            f = open(fname, 'rb')
            exp = pickle.load(f)
            now = time.time()
            if exp < now:
                f.close()
                self._delete(fname)
            else:
                return pickle.load(f)
        except (IOError, OSError, EOFError, pickle.PickleError):
            pass
        return default

    def set(self, key, value, timeout=None):
        """
        Write ``value`` under ``key``, expiring after ``timeout`` seconds
        (``None`` means the backend's default timeout).
        """
        fname = self._key_to_file(key)
        dirname = os.path.dirname(fname)

        if timeout is None:
            timeout = self.default_timeout

        # Evict old entries (if over capacity) before adding a new one.
        self._cull()

        try:
            if not os.path.exists(dirname):
                os.makedirs(dirname)

            f = open(fname, 'wb')
            now = time.time()
            # Expiry timestamp is written first so readers can check
            # freshness without unpickling the payload.
            pickle.dump(now + timeout, f, pickle.HIGHEST_PROTOCOL)
            pickle.dump(value, f, pickle.HIGHEST_PROTOCOL)
        except (IOError, OSError):
            # Caching is best-effort; a failed write is silently ignored.
            pass

    def delete(self, key):
        """Remove ``key`` from the cache; a missing entry is not an error."""
        try:
            self._delete(self._key_to_file(key))
        except (IOError, OSError):
            pass

    def _delete(self, fname):
        # Remove the entry file, then opportunistically prune its two
        # hash-prefix parent directories when they become empty.
        os.remove(fname)
        try:
            # Remove the 2 subdirs if they're empty
            dirname = os.path.dirname(fname)
            os.rmdir(dirname)
            os.rmdir(os.path.dirname(dirname))
        except (IOError, OSError):
            pass

    def has_key(self, key):
        """Return True if ``key`` is cached and not yet expired."""
        fname = self._key_to_file(key)
        try:
            f = open(fname, 'rb')
            exp = pickle.load(f)
            now = time.time()
            if exp < now:
                f.close()
                self._delete(fname)
                return False
            else:
                return True
        except (IOError, OSError, EOFError, pickle.PickleError):
            return False

    def _cull(self):
        # When the cache is at capacity, delete roughly one entry in
        # ``cull_frequency`` (or every entry when cull_frequency is 0).
        if int(self._num_entries) < self._max_entries:
            return

        try:
            filelist = os.listdir(self._dir)
        except (IOError, OSError):
            return

        if self._cull_frequency == 0:
            doomed = [os.path.join(self._dir, k) for k in filelist]
        else:
            doomed = [os.path.join(self._dir, k) for (i, k) in enumerate(filelist) if i % self._cull_frequency == 0]

        for topdir in doomed:
            try:
                for root, _, files in os.walk(topdir):
                    for f in files:
                        self._delete(os.path.join(root, f))
            except (IOError, OSError):
                pass

    def _createdir(self):
        # Create the root cache directory lazily; surface a clear error
        # when the filesystem refuses.
        try:
            os.makedirs(self._dir)
        except OSError:
            raise EnvironmentError("Cache directory '%s' does not exist and could not be created" % self._dir)

    def _key_to_file(self, key):
        """
        Convert the cache key into an md5-based file path. We'll turn the
        first couple bits of the path into directory prefixes to be nice
        to filesystems that have problems with large numbers of files in
        a directory.

        Thus, a cache key of "foo" gets turned into a file named
        ``{cache-dir}ac/bd/18db4cc2f85cedef654fccc4a4d8``.
        """
        path = md5_constructor(key.encode('utf-8')).hexdigest()
        path = os.path.join(path[:2], path[2:4], path[4:])
        return os.path.join(self._dir, path)

    def _get_num_entries(self):
        # Count every file in the two-level cache directory tree.
        count = 0
        for _, _, files in os.walk(self._dir):
            count += len(files)
        return count
    _num_entries = property(_get_num_entries)