
Ticket #13795: cache_key_prefix_13448.diff

File cache_key_prefix_13448.diff, 17.2 KB (added by bruth, 4 years ago)
Index: django/conf/global_settings.py
===================================================================
--- django/conf/global_settings.py      (revision 13448)
+++ django/conf/global_settings.py      (working copy)
@@ -427,6 +427,7 @@
 # The cache backend to use.  See the docstring in django.core.cache for the
 # possible values.
 CACHE_BACKEND = 'locmem://'
+CACHE_KEY_PREFIX = ''
 CACHE_MIDDLEWARE_KEY_PREFIX = ''
 CACHE_MIDDLEWARE_SECONDS = 600
 
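A minimal sketch of how the new setting might appear in a project's settings module (the backend URI and prefix value here are only illustrative):

    # settings.py -- illustrative values
    CACHE_BACKEND = 'memcached://127.0.0.1:11211/'
    CACHE_KEY_PREFIX = 'myproject_'   # prepended to every key this project stores
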
Index: django/core/cache/__init__.py
===================================================================
--- django/core/cache/__init__.py       (revision 13448)
+++ django/core/cache/__init__.py       (working copy)
@@ -56,18 +56,18 @@
 
     return scheme, host, params
 
-def get_cache(backend_uri):
+def get_cache(backend_uri, key_prefix=settings.CACHE_KEY_PREFIX):
     scheme, host, params = parse_backend_uri(backend_uri)
     if scheme in BACKENDS:
         name = 'django.core.cache.backends.%s' % BACKENDS[scheme]
     else:
         name = scheme
     module = importlib.import_module(name)
-    return getattr(module, 'CacheClass')(host, params)
+    return getattr(module, 'CacheClass')(host, params, key_prefix)
 
 cache = get_cache(settings.CACHE_BACKEND)
 
-# Some caches -- pythont-memcached in particular -- need to do a cleanup at the
+# Some caches -- python-memcached in particular -- need to do a cleanup at the
 # end of a request cycle. If the cache provides a close() method, wire it up
 # here.
 if hasattr(cache, 'close'):
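
With this change, get_cache() takes an optional second argument that overrides the settings-level default for a single cache object. A short usage sketch (the prefix string is just an example):

    >>> from django.core.cache import get_cache
    >>> default_cache = get_cache('locmem://')             # uses settings.CACHE_KEY_PREFIX
    >>> scoped_cache = get_cache('locmem://', 'sitetwo_')  # explicit per-instance prefix
    >>> scoped_cache.set('greeting', 'hello')              # stored under 'sitetwo_greeting'
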
Index: django/core/cache/backends/base.py
===================================================================
--- django/core/cache/backends/base.py  (revision 13448)
+++ django/core/cache/backends/base.py  (working copy)
@@ -1,19 +1,28 @@
 "Base Cache class."
 
 from django.core.exceptions import ImproperlyConfigured
+from django.utils.encoding import smart_str
 
 class InvalidCacheBackendError(ImproperlyConfigured):
     pass
 
 class BaseCache(object):
-    def __init__(self, params):
+    def __init__(self, params, key_prefix=''):
         timeout = params.get('timeout', 300)
         try:
             timeout = int(timeout)
         except (ValueError, TypeError):
             timeout = 300
         self.default_timeout = timeout
+        self.key_prefix = smart_str(key_prefix)
 
+    def make_key(self, key):
+        """Constructs the key used by all other methods. By default it prepends
+        the ``key_prefix``. Cache backend subclasses can override this to
+        provide custom key-making behavior.
+        """
+        return self.key_prefix + smart_str(key)
+
     def add(self, key, value, timeout=None):
         """
         Set a value in the cache if the key does not already exist. If
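
Because every backend routes its keys through make_key(), a custom backend only needs to override that one method to change the keying scheme. A hypothetical sketch (the subclass and its versioning scheme are made up for illustration):

    from django.core.cache.backends import locmem
    from django.utils.encoding import smart_str

    class CacheClass(locmem.CacheClass):
        """Hypothetical backend that also namespaces keys by a version tag."""
        def make_key(self, key):
            # Replaces BaseCache's default "key_prefix + key" behavior.
            return smart_str('%sv2:%s' % (self.key_prefix, key))
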
Index: django/core/cache/backends/locmem.py
===================================================================
--- django/core/cache/backends/locmem.py        (revision 13448)
+++ django/core/cache/backends/locmem.py        (working copy)
@@ -10,8 +10,8 @@
 from django.utils.synch import RWLock
 
 class CacheClass(BaseCache):
-    def __init__(self, _, params):
-        BaseCache.__init__(self, params)
+    def __init__(self, _, params, key_prefix=''):
+        BaseCache.__init__(self, params, key_prefix)
         self._cache = {}
         self._expire_info = {}
 
@@ -30,6 +30,7 @@
         self._lock = RWLock()
 
     def add(self, key, value, timeout=None):
+        key = self.make_key(key)
         self._lock.writer_enters()
         try:
             exp = self._expire_info.get(key)
@@ -44,6 +45,7 @@
             self._lock.writer_leaves()
 
     def get(self, key, default=None):
+        key = self.make_key(key)
         self._lock.reader_enters()
         try:
             exp = self._expire_info.get(key)
@@ -76,6 +78,7 @@
         self._expire_info[key] = time.time() + timeout
 
     def set(self, key, value, timeout=None):
+        key = self.make_key(key)
         self._lock.writer_enters()
         # Python 2.4 doesn't allow combined try-except-finally blocks.
         try:
@@ -87,6 +90,7 @@
             self._lock.writer_leaves()
 
     def has_key(self, key):
+        key = self.make_key(key)
         self._lock.reader_enters()
         try:
             exp = self._expire_info.get(key)
@@ -127,6 +131,7 @@
             pass
 
     def delete(self, key):
+        key = self.make_key(key)
         self._lock.writer_enters()
         try:
             self._delete(key)
Index: django/core/cache/backends/filebased.py
===================================================================
--- django/core/cache/backends/filebased.py     (revision 13448)
+++ django/core/cache/backends/filebased.py     (working copy)
@@ -12,8 +12,8 @@
 from django.utils.hashcompat import md5_constructor
 
 class CacheClass(BaseCache):
-    def __init__(self, dir, params):
-        BaseCache.__init__(self, params)
+    def __init__(self, dir, params, key_prefix=''):
+        BaseCache.__init__(self, params, key_prefix)
 
         max_entries = params.get('max_entries', 300)
         try:
@@ -32,6 +32,7 @@
             self._createdir()
 
     def add(self, key, value, timeout=None):
+        key = self.make_key(key)
         if self.has_key(key):
             return False
 
@@ -39,6 +40,7 @@
         return True
 
     def get(self, key, default=None):
+        key = self.make_key(key)
         fname = self._key_to_file(key)
         try:
             f = open(fname, 'rb')
@@ -56,6 +58,7 @@
         return default
 
     def set(self, key, value, timeout=None):
+        key = self.make_key(key)
         fname = self._key_to_file(key)
         dirname = os.path.dirname(fname)
 
@@ -79,6 +82,7 @@
             pass
 
     def delete(self, key):
+        key = self.make_key(key)
         try:
             self._delete(self._key_to_file(key))
         except (IOError, OSError):
@@ -95,6 +99,7 @@
             pass
 
     def has_key(self, key):
+        key = self.make_key(key)
         fname = self._key_to_file(key)
         try:
             f = open(fname, 'rb')
@@ -148,7 +153,8 @@
         Thus, a cache key of "foo" gets turnned into a file named
         ``{cache-dir}ac/bd/18db4cc2f85cedef654fccc4a4d8``.
         """
-        path = md5_constructor(key.encode('utf-8')).hexdigest()
+        key = self.make_key(key)
+        path = md5_constructor(key).hexdigest()
         path = os.path.join(path[:2], path[2:4], path[4:])
         return os.path.join(self._dir, path)
 
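The hashing step in _key_to_file() above is easy to reproduce; a minimal sketch of how the key string it receives is mapped to a nested path under the cache directory (the function name and example values are only illustrative):

    import os
    from django.utils.hashcompat import md5_constructor

    def key_to_path(cache_dir, key):
        # Same layout as _key_to_file(): md5 hex digest split as 2/2/28.
        digest = md5_constructor(key).hexdigest()
        return os.path.join(cache_dir, digest[:2], digest[2:4], digest[4:])

    # key_to_path('/tmp/django_cache', 'myproject_foo')
    # -> '/tmp/django_cache/<2 hex chars>/<2 hex chars>/<remaining 28 hex chars>'
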
Index: django/core/cache/backends/db.py
===================================================================
--- django/core/cache/backends/db.py    (revision 13448)
+++ django/core/cache/backends/db.py    (working copy)
@@ -10,8 +10,8 @@
     import pickle
 
 class CacheClass(BaseCache):
-    def __init__(self, table, params):
-        BaseCache.__init__(self, params)
+    def __init__(self, table, params, key_prefix=''):
+        BaseCache.__init__(self, params, key_prefix)
         self._table = connection.ops.quote_name(table)
         max_entries = params.get('max_entries', 300)
         try:
@@ -25,6 +25,7 @@
             self._cull_frequency = 3
 
     def get(self, key, default=None):
+        key = self.make_key(key)
         cursor = connection.cursor()
         cursor.execute("SELECT cache_key, value, expires FROM %s WHERE cache_key = %%s" % self._table, [key])
         row = cursor.fetchone()
@@ -45,6 +46,7 @@
         return self._base_set('add', key, value, timeout)
 
     def _base_set(self, mode, key, value, timeout=None):
+        key = self.make_key(key)
         if timeout is None:
             timeout = self.default_timeout
         cursor = connection.cursor()
@@ -74,11 +76,13 @@
             return True
 
     def delete(self, key):
+        key = self.make_key(key)
         cursor = connection.cursor()
         cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % self._table, [key])
         transaction.commit_unless_managed()
 
     def has_key(self, key):
+        key = self.make_key(key)
         now = datetime.now().replace(microsecond=0)
         cursor = connection.cursor()
         cursor.execute("SELECT cache_key FROM %s WHERE cache_key = %%s and expires > %%s" % self._table,
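
For the database backend, the prefixed key is what ends up in the cache_key column, which is how two projects sharing one cache table stay separated. A rough sketch, assuming a table created with manage.py createcachetable my_cache_table and an illustrative prefix:

    >>> from django.core.cache import get_cache
    >>> cache = get_cache('db://my_cache_table', 'myproject_')
    >>> cache.set('somekey', 'value')
    # The stored row now has cache_key = 'myproject_somekey', so another site
    # using the same table with a different prefix (or none) will not see it.
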
Index: django/core/cache/backends/memcached.py
===================================================================
--- django/core/cache/backends/memcached.py     (revision 13448)
+++ django/core/cache/backends/memcached.py     (working copy)
@@ -3,7 +3,6 @@
 import time
 
 from django.core.cache.backends.base import BaseCache, InvalidCacheBackendError
-from django.utils.encoding import smart_unicode, smart_str
 
 try:
     import cmemcache as memcache
@@ -19,8 +18,8 @@
         raise InvalidCacheBackendError("Memcached cache backend requires either the 'memcache' or 'cmemcache' library")
 
 class CacheClass(BaseCache):
-    def __init__(self, server, params):
-        BaseCache.__init__(self, params)
+    def __init__(self, server, params, key_prefix=''):
+        BaseCache.__init__(self, params, key_prefix)
         self._cache = memcache.Client(server.split(';'))
 
     def _get_memcache_timeout(self, timeout):
@@ -40,29 +39,35 @@
         return timeout
 
     def add(self, key, value, timeout=0):
+        key = self.make_key(key)
         if isinstance(value, unicode):
             value = value.encode('utf-8')
-        return self._cache.add(smart_str(key), value, self._get_memcache_timeout(timeout))
+        return self._cache.add(key, value, self._get_memcache_timeout(timeout))
 
     def get(self, key, default=None):
-        val = self._cache.get(smart_str(key))
+        key = self.make_key(key)
+        val = self._cache.get(key)
         if val is None:
             return default
         return val
 
     def set(self, key, value, timeout=0):
-        self._cache.set(smart_str(key), value, self._get_memcache_timeout(timeout))
+        key = self.make_key(key)
+        self._cache.set(key, value, self._get_memcache_timeout(timeout))
 
     def delete(self, key):
-        self._cache.delete(smart_str(key))
+        key = self.make_key(key)
+        self._cache.delete(key)
 
     def get_many(self, keys):
-        return self._cache.get_multi(map(smart_str,keys))
+        keys = map(self.make_key, keys)
+        return self._cache.get_multi(keys)
 
     def close(self, **kwargs):
        self._cache.disconnect_all()
 
     def incr(self, key, delta=1):
+        key = self.make_key(key)
         try:
             val = self._cache.incr(key, delta)
 
@@ -77,6 +82,7 @@
         return val
 
     def decr(self, key, delta=1):
+        key = self.make_key(key)
         try:
             val = self._cache.decr(key, delta)
 
@@ -92,13 +98,15 @@
     def set_many(self, data, timeout=0):
         safe_data = {}
         for key, value in data.items():
+            key = self.make_key(key)
             if isinstance(value, unicode):
                 value = value.encode('utf-8')
-            safe_data[smart_str(key)] = value
+            safe_data[key] = value
         self._cache.set_multi(safe_data, self._get_memcache_timeout(timeout))
 
     def delete_many(self, keys):
-        self._cache.delete_multi(map(smart_str, keys))
+        keys = map(self.make_key, keys)
+        self._cache.delete_multi(keys)
 
     def clear(self):
         self._cache.flush_all()
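
One observable effect of rewriting keys at this layer, at least with python-memcached, is that get_many() returns its results keyed by the prefixed names, since keys are translated on the way in but not on the way out. A sketch, assuming a local memcached instance and an illustrative prefix:

    >>> from django.core.cache import get_cache
    >>> cache = get_cache('memcached://127.0.0.1:11211/', 'myproject_')
    >>> cache.set_many({'a': 1, 'b': 2})
    >>> cache.get_many(['a', 'b'])
    {'myproject_a': 1, 'myproject_b': 2}
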
Index: tests/regressiontests/cache/tests.py
===================================================================
--- tests/regressiontests/cache/tests.py        (revision 13448)
+++ tests/regressiontests/cache/tests.py        (working copy)
@@ -143,7 +143,6 @@
         "clear does nothing for the dummy cache backend"
         self.cache.clear()
 
-
 class BaseCacheTests(object):
     # A common set of tests to apply to all cache backends
     def tearDown(self):
@@ -161,6 +160,18 @@
         self.assertEqual(result, False)
         self.assertEqual(self.cache.get("addkey1"), "value")
 
+    def test_prefix(self):
+        # A prefixed and a non-prefixed cache sharing a backend should not collide
+        self.cache.set('somekey', 'value')
+
+        # should not be set in the prefixed cache
+        self.assertFalse(self.pfx_cache.has_key('somekey'))
+
+        self.pfx_cache.set('somekey', 'value2')
+
+        self.assertEqual(self.cache.get('somekey'), 'value')
+        self.assertEqual(self.pfx_cache.get('somekey'), 'value2')
+
     def test_non_existent(self):
         # Non-existent cache keys return as None/default
         # get with non-existent keys
@@ -358,6 +369,7 @@
         self._table_name = 'test cache table'
         management.call_command('createcachetable', self._table_name, verbosity=0, interactive=False)
         self.cache = get_cache('db://%s' % self._table_name)
+        self.pfx_cache = get_cache('db://%s' % self._table_name, 'cacheprefix')
 
     def tearDown(self):
         from django.db import connection
@@ -367,6 +379,7 @@
 class LocMemCacheTests(unittest.TestCase, BaseCacheTests):
     def setUp(self):
         self.cache = get_cache('locmem://')
+        self.pfx_cache = get_cache('locmem://', 'cacheprefix')
 
 # memcached backend isn't guaranteed to be available.
 # To check the memcached backend, the test settings file will
@@ -376,6 +389,7 @@
     class MemcachedCacheTests(unittest.TestCase, BaseCacheTests):
         def setUp(self):
             self.cache = get_cache(settings.CACHE_BACKEND)
+            self.pfx_cache = get_cache(settings.CACHE_BACKEND, 'cacheprefix')
 
 class FileBasedCacheTests(unittest.TestCase, BaseCacheTests):
     """
@@ -384,6 +398,7 @@
     def setUp(self):
         self.dirname = tempfile.mkdtemp()
         self.cache = get_cache('file://%s' % self.dirname)
+        self.pfx_cache = get_cache('file://%s' % self.dirname, 'cacheprefix')
 
     def test_hashing(self):
         """Test that keys are hashed into subdirectories correctly"""
@@ -411,16 +426,16 @@
 
     def setUp(self):
         self.path = '/cache/test/'
-        self.old_settings_key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
-        self.old_middleware_seconds = settings.CACHE_MIDDLEWARE_SECONDS
+        self.old_cache_middleware_key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
+        self.old_cache_middleware_seconds = settings.CACHE_MIDDLEWARE_SECONDS
         self.orig_use_i18n = settings.USE_I18N
         settings.CACHE_MIDDLEWARE_KEY_PREFIX = 'settingsprefix'
         settings.CACHE_MIDDLEWARE_SECONDS = 1
         settings.USE_I18N = False
 
     def tearDown(self):
-        settings.CACHE_MIDDLEWARE_KEY_PREFIX = self.old_settings_key_prefix
-        settings.CACHE_MIDDLEWARE_SECONDS = self.old_middleware_seconds
+        settings.CACHE_MIDDLEWARE_KEY_PREFIX = self.old_cache_middleware_key_prefix
+        settings.CACHE_MIDDLEWARE_SECONDS = self.old_cache_middleware_seconds
        settings.USE_I18N = self.orig_use_i18n
 
     def _get_request(self, path):
@@ -473,6 +488,16 @@
         learn_cache_key(request, response)
         self.assertEqual(get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.a8c87a3d8c44853d7f79474f7ffe4ad5.d41d8cd98f00b204e9800998ecf8427e')
 
+class PrefixedCacheUtils(CacheUtils):
+    def setUp(self):
+        super(PrefixedCacheUtils, self).setUp()
+        self.old_cache_key_prefix = settings.CACHE_KEY_PREFIX
+        settings.CACHE_KEY_PREFIX = 'cacheprefix'
+
+    def tearDown(self):
+        super(PrefixedCacheUtils, self).tearDown()
+        settings.CACHE_KEY_PREFIX = self.old_cache_key_prefix
+
 class CacheI18nTest(unittest.TestCase):
 
     def setUp(self):
@@ -568,5 +593,15 @@
         get_cache_data = FetchFromCacheMiddleware().process_request(request)
         self.assertEqual(get_cache_data.content, es_message)
 
+class PrefixedCacheI18nTest(CacheI18nTest):
+    def setUp(self):
+        super(PrefixedCacheI18nTest, self).setUp()
+        self.old_cache_key_prefix = settings.CACHE_KEY_PREFIX
+        settings.CACHE_KEY_PREFIX = 'cacheprefix'
+
+    def tearDown(self):
+        super(PrefixedCacheI18nTest, self).tearDown()
+        settings.CACHE_KEY_PREFIX = self.old_cache_key_prefix
+
 if __name__ == '__main__':
     unittest.main()
Index: docs/topics/cache.txt
===================================================================
--- docs/topics/cache.txt       (revision 13448)
+++ docs/topics/cache.txt       (working copy)
@@ -600,6 +600,34 @@
     However, if the backend doesn't natively provide an increment/decrement
     operation, it will be implemented using a two-step retrieve/update.
 
+CACHE_KEY_PREFIX
+----------------
+
+It is common to run a shared cache instance on a development or production
+server and use it across multiple projects -- for example, several sites all
+pointing at the memcached instance on port 11211. In that situation, cache key
+collisions can occur, and data from one site may be served to another.
+
+To prevent this, set ``CACHE_KEY_PREFIX``. Its value is transparently prepended
+to every key used with the cache backend::
+
+    # In settings: CACHE_KEY_PREFIX = 'myproject_'
+
+    >>> cache.set('my_key', 'hello world!') # stored under 'myproject_my_key'
+    >>> cache.get('my_key') # fetched using 'myproject_my_key'
+    'hello world!'
+
+Sites that *do* share content can simply set ``CACHE_KEY_PREFIX`` to the same
+value on both sites. The default value of ``CACHE_KEY_PREFIX`` is the empty
+string ``''``.
+
+.. note::
+
+   This setting does *not* conflict with ``CACHE_MIDDLEWARE_KEY_PREFIX`` and can
+   be used together with it. ``CACHE_KEY_PREFIX`` acts as a global prefix for a
+   particular cache instance, so it is also prepended, transparently, to keys
+   that already include ``CACHE_MIDDLEWARE_KEY_PREFIX``.
+
 Upstream caches
 ===============
 
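
As a concrete illustration of the note above: the cache middleware builds its key using CACHE_MIDDLEWARE_KEY_PREFIX, and BaseCache.make_key() then prepends CACHE_KEY_PREFIX when that key is stored, so the two prefixes simply nest. Using the key from the middleware test earlier in this patch and an assumed CACHE_KEY_PREFIX of 'myproject_':

    # Key built by the middleware (CACHE_MIDDLEWARE_KEY_PREFIX = 'settingsprefix'):
    #   views.decorators.cache.cache_page.settingsprefix.a8c87a3d8c44853d7f79474f7ffe4ad5.d41d8cd98f00b204e9800998ecf8427e
    # Key actually stored by the backend (CACHE_KEY_PREFIX = 'myproject_'):
    #   myproject_views.decorators.cache.cache_page.settingsprefix.a8c87a3d8c44853d7f79474f7ffe4ad5.d41d8cd98f00b204e9800998ecf8427e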