Source code for django/core/cache/backends/filebased.py

"File-based cache backend"
import glob
import hashlib
import os
import pickle
import random
import tempfile
import time
import zlib

from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
from django.core.files import locks
from django.core.files.move import file_move_safe


class FileBasedCache(BaseCache):
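    # Cache files are named after the MD5 of the cache key plus this suffix,
    # which lets _list_cache_files() tell entries apart from stray files.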
    cache_suffix = '.djcache'
    pickle_protocol = pickle.HIGHEST_PROTOCOL

    def __init__(self, dir, params):
        super().__init__(params)
        self._dir = os.path.abspath(dir)
        self._createdir()

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        if self.has_key(key, version):
            return False
        self.set(key, value, timeout, version)
        return True

    def get(self, key, default=None, version=None):
        fname = self._key_to_file(key, version)
        try:
            with open(fname, 'rb') as f:
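                # _is_expired() consumes the pickled expiry at the head of the
                # file, leaving f positioned at the zlib-compressed payload.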
                if not self._is_expired(f):
                    return pickle.loads(zlib.decompress(f.read()))
        except FileNotFoundError:
            pass
        return default

    def _write_content(self, file, timeout, value):
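        # Each cache file holds a pickled expiry timestamp (None means
        # "never expire") followed by the zlib-compressed pickled value.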
        expiry = self.get_backend_timeout(timeout)
        file.write(pickle.dumps(expiry, self.pickle_protocol))
        file.write(zlib.compress(pickle.dumps(value, self.pickle_protocol)))

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        self._createdir()  # Cache dir can be deleted at any time.
        fname = self._key_to_file(key, version)
        self._cull()  # make some room if necessary
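        # Write to a temporary file in the same directory, then atomically
        # rename it into place so readers never see a partial entry.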
        fd, tmp_path = tempfile.mkstemp(dir=self._dir)
        renamed = False
        try:
            with open(fd, 'wb') as f:
                self._write_content(f, timeout, value)
            file_move_safe(tmp_path, fname, allow_overwrite=True)
            renamed = True
        finally:
            if not renamed:
                os.remove(tmp_path)

    def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None):
        try:
            with open(self._key_to_file(key, version), 'r+b') as f:
                try:
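                    # An exclusive lock keeps concurrent touch() calls from
                    # interleaving their read-then-rewrite of the file.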
                    locks.lock(f, locks.LOCK_EX)
                    if self._is_expired(f):
                        return False
                    else:
                        previous_value = pickle.loads(zlib.decompress(f.read()))
                        f.seek(0)
                        self._write_content(f, timeout, previous_value)
                        return True
                finally:
                    locks.unlock(f)
        except FileNotFoundError:
            return False

    def delete(self, key, version=None):
        return self._delete(self._key_to_file(key, version))

    def _delete(self, fname):
        if not fname.startswith(self._dir) or not os.path.exists(fname):
            return False
        try:
            os.remove(fname)
        except FileNotFoundError:
            # The file may have been removed by another process.
            return False
        return True

    def has_key(self, key, version=None):
        fname = self._key_to_file(key, version)
        # Open the file directly instead of checking os.path.exists() first,
        # which would race against deletion by another process.
        try:
            with open(fname, 'rb') as f:
                return not self._is_expired(f)
        except FileNotFoundError:
            return False

    def _cull(self):
        """
        Remove random cache entries if max_entries is reached at a ratio
        of num_entries / cull_frequency. A value of 0 for CULL_FREQUENCY means
        that the entire cache will be purged.
        """
        filelist = self._list_cache_files()
        num_entries = len(filelist)
        if num_entries < self._max_entries:
            return  # return early if no culling is required
        if self._cull_frequency == 0:
            return self.clear()  # Clear the cache when CULL_FREQUENCY = 0
        # Delete a random selection of entries
        filelist = random.sample(filelist,
                                 int(num_entries / self._cull_frequency))
        for fname in filelist:
            self._delete(fname)

    def _createdir(self):
        # Set the umask because os.makedirs() doesn't apply the "mode" argument
        # to intermediate-level directories.
        old_umask = os.umask(0o077)
        try:
            os.makedirs(self._dir, 0o700, exist_ok=True)
        finally:
            os.umask(old_umask)

    def _key_to_file(self, key, version=None):
        """
        Convert a key into a cache file path. Basically this is the
        root cache path joined with the md5sum of the key and a suffix.
        """
        key = self.make_key(key, version=version)
        self.validate_key(key)
        return os.path.join(self._dir, ''.join(
            [hashlib.md5(key.encode()).hexdigest(), self.cache_suffix]))

    def clear(self):
        """
        Remove all the cache files.
        """
        for fname in self._list_cache_files():
            self._delete(fname)

    def _is_expired(self, f):
        """
        Take an open cache file `f` and delete it if it's expired.
        """
        try:
            exp = pickle.load(f)
        except EOFError:
            exp = 0  # An empty file is considered expired.
        if exp is not None and exp < time.time():
            f.close()  # On Windows a file has to be closed before deleting
            self._delete(f.name)
            return True
        return False

    def _list_cache_files(self):
        """
        Get a list of paths to all the cache files. These are all the files
        in the root cache dir that end with the cache_suffix.
        """
        return [
            os.path.join(self._dir, fname)
            for fname in glob.glob1(self._dir, '*%s' % self.cache_suffix)
        ]
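
For orientation, here is a minimal sketch of driving this backend through
Django's standard cache API. The CACHES structure and the MAX_ENTRIES /
CULL_FREQUENCY options are standard Django settings; the directory path and
the key/value strings below are placeholders.

# settings.py -- minimal sketch; the LOCATION path is a placeholder.
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
        'LOCATION': '/var/tmp/django_cache',
        'OPTIONS': {
            'MAX_ENTRIES': 300,   # set() culls once this many entries exist
            'CULL_FREQUENCY': 3,  # cull 1/3 of entries; 0 purges everything
        },
    }
}

# Application code -- each call maps onto a method defined above.
from django.core.cache import cache

cache.set('greeting', 'hello', timeout=60)  # atomic tempfile write + rename
cache.get('greeting')                       # 'hello' until the timeout lapses
cache.touch('greeting', timeout=120)        # update the expiry deadline
cache.delete('greeting')                    # unlink the .djcache file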