Do access logging via custom request header instead of global flag
A shared mutable flag on the cache object is racy when requests are handled concurrently; signaling via a per-request header keeps the state scoped to the request itself.
This commit is contained in:
parent
60c46a3fc4
commit
0a78c92407
3 changed files with 13 additions and 21 deletions
|
|
@ -815,9 +815,13 @@ class Server(object):
|
|||
self._logger.info("Preemptively caching {} (plugin {}) for {!r}".format(route, plugin, kwargs))
|
||||
else:
|
||||
self._logger.info("Preemptively caching {} for {!r}".format(route, kwargs))
|
||||
|
||||
headers = kwargs.get("headers", dict())
|
||||
headers["X-Preemptive-Record"] = "no"
|
||||
kwargs["headers"] = headers
|
||||
|
||||
builder = EnvironBuilder(**kwargs)
|
||||
with preemptive_cache.disable_access_logging():
|
||||
app(builder.get_environ(), lambda *a, **kw: None)
|
||||
app(builder.get_environ(), lambda *a, **kw: None)
|
||||
except:
|
||||
self._logger.exception("Error while trying to preemptively cache {} for {!r}".format(route, kwargs))
|
||||
|
||||
|
|
|
|||
|
|
@ -402,9 +402,7 @@ class PreemptiveCache(object):
|
|||
self.cachefile = cachefile
|
||||
|
||||
self._lock = threading.RLock()
|
||||
self._log_lock = threading.RLock()
|
||||
self._logger = logging.getLogger(__name__ + "." + self.__class__.__name__)
|
||||
self._log_access = True
|
||||
|
||||
def record(self, data, unless=None, root=None):
|
||||
if callable(unless) and unless():
|
||||
|
|
@ -438,13 +436,6 @@ class PreemptiveCache(object):
|
|||
|
||||
return False
|
||||
|
||||
@contextlib.contextmanager
def disable_access_logging(self):
    """Context manager that temporarily turns off access logging.

    Sets ``self._log_access`` to ``False`` for the duration of the
    ``with`` block and restores it to ``True`` afterwards. Both flag
    writes are guarded by ``self._log_lock``.

    Fixes over the previous version:
    - the restore now happens in a ``finally`` clause, so an exception
      raised inside the ``with`` body can no longer leave access
      logging permanently disabled;
    - the lock is released before ``yield``, so other threads that
      need ``self._log_lock`` (e.g. to check the flag) are not blocked
      for the entire disabled section.
    """
    with self._log_lock:
        self._log_access = False
    try:
        yield
    finally:
        with self._log_lock:
            self._log_access = True
|
||||
|
||||
def clean_all_data(self, cleanup_function):
|
||||
assert callable(cleanup_function)
|
||||
|
||||
|
|
@ -505,13 +496,6 @@ class PreemptiveCache(object):
|
|||
self.set_all_data(all_data)
|
||||
|
||||
def add_data(self, root, data):
|
||||
with self._log_lock:
|
||||
if not self._log_access:
|
||||
self._logger.debug(
|
||||
"Not updating timestamp and counter for {} and {!r}, currently flagged as disabled".format(root,
|
||||
data))
|
||||
return
|
||||
|
||||
def split_matched_and_unmatched(entry, entries):
|
||||
matched = []
|
||||
unmatched = []
|
||||
|
|
|
|||
|
|
@ -34,9 +34,13 @@ def _preemptive_unless(base_url=None):
|
|||
if base_url is None:
|
||||
base_url = request.url_root
|
||||
|
||||
return not settings().getBoolean(["devel", "cache", "preemptive"]) \
|
||||
or base_url in settings().get(["server", "preemptiveCache", "exceptions"]) \
|
||||
or not (base_url.startswith("http://") or base_url.startswith("https://"))
|
||||
cache_disabled = not settings().getBoolean(["devel", "cache", "preemptive"]) \
|
||||
or base_url in settings().get(["server", "preemptiveCache", "exceptions"]) \
|
||||
or not (base_url.startswith("http://") or base_url.startswith("https://"))
|
||||
|
||||
recording_disabled = request.headers.get("X-Preemptive-Record", "yes") == "no"
|
||||
|
||||
return cache_disabled or recording_disabled
|
||||
|
||||
def _preemptive_data(path=None, base_url=None):
|
||||
if path is None:
|
||||
|
|
|
|||
Loading…
Reference in a new issue