diff --git a/django/core/cache/backends/filebased.py b/django/core/cache/backends/filebased.py
index 72cb877..1464901 100644
|
a
|
b
|
|
| 1 | 1 | "File-based cache backend" |
| 2 | 2 | |
| | 3 | import md5 |
| 3 | 4 | import os, time |
| 4 | 5 | try: |
| 5 | 6 | import cPickle as pickle |
| 6 | 7 | except ImportError: |
| 7 | 8 | import pickle |
| 8 | 9 | from django.core.cache.backends.base import BaseCache |
| 9 | | from django.utils.http import urlquote_plus |
| | 10 | |
| | 11 | class EntryCount: |
| | 12 | #TODO: I assume it would be a worthwhile performance improvement (on large caches) |
| | 13 | #to keep a count in a file, and use that here instead of recounting every time. |
| | 14 | # |
| | 15 | #The hooks are all in place below to allow that; just implement |
| | 16 | #increment/decrement/__int__ here, as you see fit. |
| | 17 | def __init__(self, dir): |
| | 18 | self._dir = dir |
| | 19 | |
| | 20 | def increment(self): |
| | 21 | pass |
| | 22 | def decrement(self): |
| | 23 | pass |
| | 24 | def __int__(self): |
| | 25 | count = 0 |
| | 26 | for _,_,files in os.walk(self._dir): |
| | 27 | count += len(files) |
| | 28 | return count |
| | 29 | |
| 10 | 30 | |
| 11 | 31 | class CacheClass(BaseCache): |
| 12 | 32 | def __init__(self, dir, params): |
| … |
… |
class CacheClass(BaseCache):
|
| 27 | 47 | self._dir = dir |
| 28 | 48 | if not os.path.exists(self._dir): |
| 29 | 49 | self._createdir() |
| | 50 | |
| | 51 | self._num_entries = EntryCount(self._dir) |
| 30 | 52 | |
| 31 | 53 | def add(self, key, value, timeout=None): |
| 32 | | fname = self._key_to_file(key) |
| 33 | | if timeout is None: |
| 34 | | timeout = self.default_timeout |
| 35 | | try: |
| 36 | | filelist = os.listdir(self._dir) |
| 37 | | except (IOError, OSError): |
| 38 | | self._createdir() |
| 39 | | filelist = [] |
| 40 | | if len(filelist) > self._max_entries: |
| 41 | | self._cull(filelist) |
| 42 | | if os.path.basename(fname) not in filelist: |
| 43 | | try: |
| 44 | | f = open(fname, 'wb') |
| 45 | | now = time.time() |
| 46 | | pickle.dump(now + timeout, f, 2) |
| 47 | | pickle.dump(value, f, 2) |
| 48 | | except (IOError, OSError): |
| 49 | | pass |
| | 54 | if self.has_key(key): |
| | 55 | return None |
| | 56 | |
| | 57 | self.set(key, value, timeout) |
| 50 | 58 | |
| 51 | 59 | def get(self, key, default=None): |
| 52 | 60 | fname = self._key_to_file(key) |
| … |
… |
class CacheClass(BaseCache):
|
| 56 | 64 | now = time.time() |
| 57 | 65 | if exp < now: |
| 58 | 66 | f.close() |
| 59 | | os.remove(fname) |
| | 67 | self._delete(fname) |
| 60 | 68 | else: |
| 61 | 69 | return pickle.load(f) |
| 62 | 70 | except (IOError, OSError, EOFError, pickle.PickleError): |
| … |
… |
class CacheClass(BaseCache):
|
| 65 | 73 | |
| 66 | 74 | def set(self, key, value, timeout=None): |
| 67 | 75 | fname = self._key_to_file(key) |
| | 76 | dir = os.path.dirname(fname) |
| | 77 | |
| 68 | 78 | if timeout is None: |
| 69 | 79 | timeout = self.default_timeout |
| | 80 | |
| | 81 | self._cull() |
| | 82 | |
| 70 | 83 | try: |
| 71 | | filelist = os.listdir(self._dir) |
| 72 | | except (IOError, OSError): |
| 73 | | self._createdir() |
| 74 | | filelist = [] |
| 75 | | if len(filelist) > self._max_entries: |
| 76 | | self._cull(filelist) |
| 77 | | try: |
| | 84 | if not os.path.exists(dir): |
| | 85 | os.makedirs(dir) |
| | 86 | |
| 78 | 87 | f = open(fname, 'wb') |
| 79 | 88 | now = time.time() |
| 80 | 89 | pickle.dump(now + timeout, f, 2) |
| 81 | 90 | pickle.dump(value, f, 2) |
| | 91 | self._num_entries.increment() |
| 82 | 92 | except (IOError, OSError): |
| 83 | 93 | pass |
| 84 | 94 | |
| 85 | 95 | def delete(self, key): |
| 86 | 96 | try: |
| 87 | | os.remove(self._key_to_file(key)) |
| | 97 | self._delete(self._key_to_file(key)) |
| 88 | 98 | except (IOError, OSError): |
| 89 | 99 | pass |
| 90 | 100 | |
| | 101 | def _delete(self, file): |
| | 102 | os.remove(file) |
| | 103 | self._num_entries.decrement() |
| | 104 | try: |
| | 105 | #remove the 2 subdirs if they're empty |
| | 106 | dir = os.path.dirname(file) |
| | 107 | os.rmdir(dir) |
| | 108 | os.rmdir(os.path.dirname(dir)) |
| | 109 | except: |
| | 110 | pass |
| | 111 | |
| 91 | 112 | def has_key(self, key): |
| 92 | | return os.path.exists(self._key_to_file(key)) |
| | 113 | fname = self._key_to_file(key) |
| | 114 | try: |
| | 115 | f = open(fname, 'rb') |
| | 116 | exp = pickle.load(f) |
| | 117 | now = time.time() |
| | 118 | if exp < now: |
| | 119 | f.close() |
| | 120 | self._delete(fname) |
| | 121 | return False |
| | 122 | else: |
| | 123 | return True |
| | 124 | except (IOError, OSError, EOFError, pickle.PickleError): |
| | 125 | return False |
| 93 | 126 | |
| 94 | | def _cull(self, filelist): |
| | 127 | def _cull(self): |
| | 128 | if int(self._num_entries) < self._max_entries: |
| | 129 | return |
| | 130 | |
| | 131 | try: |
| | 132 | filelist = os.listdir(self._dir) |
| | 133 | except (IOError, OSError): |
| | 134 | return |
| | 135 | |
| 95 | 136 | if self._cull_frequency == 0: |
| 96 | 137 | doomed = filelist |
| 97 | 138 | else: |
| 98 | | doomed = [k for (i, k) in enumerate(filelist) if i % self._cull_frequency == 0] |
| 99 | | for fname in doomed: |
| | 139 | doomed = [os.path.join(self._dir, k) for (i, k) in enumerate(filelist) if i % self._cull_frequency == 0] |
| | 140 | |
| | 141 | for topdir in doomed: |
| 100 | 142 | try: |
| 101 | | os.remove(os.path.join(self._dir, fname)) |
| | 143 | for root, _, files in os.walk(topdir): |
| | 144 | for file in files: |
| | 145 | self._delete(os.path.join(root,file)) |
| 102 | 146 | except (IOError, OSError): |
| 103 | 147 | pass |
| 104 | | |
| | 148 | |
| 105 | 149 | def _createdir(self): |
| 106 | 150 | try: |
| 107 | 151 | os.makedirs(self._dir) |
| … |
… |
class CacheClass(BaseCache):
|
| 109 | 153 | raise EnvironmentError, "Cache directory '%s' does not exist and could not be created'" % self._dir |
| 110 | 154 | |
| 111 | 155 | def _key_to_file(self, key): |
| 112 | | return os.path.join(self._dir, urlquote_plus(key)) |
| | 156 | path = md5.new(key.encode('utf-8')).hexdigest() |
| | 157 | path = os.path.join(path[:2], path[2:4], path[4:]) |
| | 158 | return os.path.join(self._dir, path) |
diff --git a/tests/regressiontests/cache/tests.py b/tests/regressiontests/cache/tests.py
index 9ac2722..c6b8742 100644
|
a
|
b
|
class Cache(unittest.TestCase):
|
| 24 | 24 | |
| 25 | 25 | def test_add(self): |
| 26 | 26 | # test add (only add if key isn't already in cache) |
| 27 | | cache.add("addkey1", "value") |
| | 27 | cache.set("addkey1", "value") |
| 28 | 28 | cache.add("addkey1", "newvalue") |
| 29 | 29 | self.assertEqual(cache.get("addkey1"), "value") |
| 30 | | |
| | 30 | |
| 31 | 31 | def test_non_existent(self): |
| 32 | 32 | # get with non-existent keys |
| 33 | 33 | self.assertEqual(cache.get("does_not_exist"), None) |
| … |
… |
class Cache(unittest.TestCase):
|
| 77 | 77 | |
| 78 | 78 | def test_expiration(self): |
| 79 | 79 | # expiration |
| 80 | | cache.set('expire', 'very quickly', 1) |
| | 80 | cache.set('expire1', 'very quickly', 1) |
| | 81 | cache.set('expire2', 'very quickly', 1) |
| | 82 | cache.set('expire3', 'very quickly', 1) |
| 81 | 83 | time.sleep(2) |
| 82 | | self.assertEqual(cache.get("expire"), None) |
| | 84 | |
| | 85 | self.assertEqual(cache.get("expire1"), None) |
| | 86 | |
| | 87 | cache.add("expire2", "newvalue") |
| | 88 | self.assertEqual(cache.get("expire2"), "newvalue") |
| | 89 | |
| | 90 | self.assertEqual(cache.has_key("expire3"), False) |
| 83 | 91 | |
| 84 | 92 | def test_unicode(self): |
| 85 | 93 | stuff = { |