From 8e1ab8657b930f95918483c0b678af3295b58a88 Mon Sep 17 00:00:00 2001 From: Matthias Kestenholz Date: Mon, 9 May 2022 20:39:30 +0200 Subject: [PATCH 01/60] Explicit external link, hopefully fixes the docs linting action --- docs/checks.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/checks.rst b/docs/checks.rst index 1140d6b49..b76f761a0 100644 --- a/docs/checks.rst +++ b/docs/checks.rst @@ -2,8 +2,8 @@ System checks ============= -The following :doc:`system checks ` help verify the Django -Debug Toolbar setup and configuration: +The following :external:doc:`system checks ` help verify the +Django Debug Toolbar setup and configuration: * **debug_toolbar.W001**: ``debug_toolbar.middleware.DebugToolbarMiddleware`` is missing from ``MIDDLEWARE``. From bea1ec2dc67fa34fba4d4f2160cd9910354d26ab Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 9 May 2022 20:55:10 +0200 Subject: [PATCH 02/60] [pre-commit.ci] pre-commit autoupdate (#1617) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v2.32.0 → v2.32.1](https://github.com/asottile/pyupgrade/compare/v2.32.0...v2.32.1) - [github.com/adamchainz/django-upgrade: 1.5.0 → 1.6.1](https://github.com/adamchainz/django-upgrade/compare/1.5.0...1.6.1) - [github.com/pre-commit/mirrors-eslint: v8.14.0 → v8.15.0](https://github.com/pre-commit/mirrors-eslint/compare/v8.14.0...v8.15.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 172c4ddeb..dffb24f6f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -15,12 +15,12 @@ repos: hooks: - id: doc8 - repo: https://github.com/asottile/pyupgrade - rev: v2.32.0 + rev: v2.32.1 hooks: - id: pyupgrade args: [--py37-plus] - repo: https://github.com/adamchainz/django-upgrade - rev: 1.5.0 + rev: 1.6.1 hooks: - id: django-upgrade args: [--target-version, "3.2"] @@ -43,7 +43,7 @@ repos: - id: prettier types_or: [javascript, css] - repo: https://github.com/pre-commit/mirrors-eslint - rev: v8.14.0 + rev: v8.15.0 hooks: - id: eslint files: \.js?$ From 99d488455d1f855a88559680e6474e2a74391b7f Mon Sep 17 00:00:00 2001 From: Matthias Kestenholz Date: Tue, 10 May 2022 08:04:03 +0200 Subject: [PATCH 03/60] Allow easily building the docs locally using 'tox -e docs html' --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index c2f5ad8ee..aa8827cd6 100644 --- a/tox.ini +++ b/tox.ini @@ -66,7 +66,7 @@ setenv = DB_NAME = ":memory:" [testenv:docs] -commands = make -C {toxinidir}/docs spelling +commands = make -C {toxinidir}/docs {posargs:spelling} deps = Sphinx sphinxcontrib-spelling From 091cb14b3f126380e019b84c63b328d4288d820b Mon Sep 17 00:00:00 2001 From: Matthias Kestenholz Date: Wed, 18 May 2022 12:39:40 +0200 Subject: [PATCH 04/60] Add the upcoming Django 4.1 to the CI matrix (#1623) --- tox.ini | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/tox.ini b/tox.ini index aa8827cd6..f2f62a3a1 100644 --- a/tox.ini +++ b/tox.ini @@ -3,12 +3,13 @@ envlist = docs packaging py{37}-dj{32}-{sqlite,postgresql,postgis,mysql} - py{38,39,310}-dj{32,40,main}-{sqlite,postgresql,postgis,mysql} + py{38,39,310}-dj{32,40,41,main}-{sqlite,postgresql,postgis,mysql} [testenv] deps = dj32: 
django~=3.2.9 dj40: django~=4.0.0 + dj41: django>=4.1a1,<4.2 postgresql: psycopg2-binary postgis: psycopg2-binary mysql: mysqlclient @@ -41,25 +42,25 @@ whitelist_externals = make pip_pre = True commands = python -b -W always -m coverage run -m django test -v2 {posargs:tests} -[testenv:py{37,38,39,310}-dj{40,main}-postgresql] +[testenv:py{37,38,39,310}-dj{40,41,main}-postgresql] setenv = {[testenv]setenv} DB_BACKEND = postgresql DB_PORT = {env:DB_PORT:5432} -[testenv:py{37,38,39,310}-dj{32,40,main}-postgis] +[testenv:py{37,38,39,310}-dj{32,40,41,main}-postgis] setenv = {[testenv]setenv} DB_BACKEND = postgis DB_PORT = {env:DB_PORT:5432} -[testenv:py{37,38,39,310}-dj{32,40,main}-mysql] +[testenv:py{37,38,39,310}-dj{32,40,41,main}-mysql] setenv = {[testenv]setenv} DB_BACKEND = mysql DB_PORT = {env:DB_PORT:3306} -[testenv:py{37,38,39,310}-dj{32,40,main}-sqlite] +[testenv:py{37,38,39,310}-dj{32,40,41,main}-sqlite] setenv = {[testenv]setenv} DB_BACKEND = sqlite3 From 8c7f604e5344092749d99f4e2b2d490c4298dc7d Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Wed, 18 May 2022 14:54:26 +0200 Subject: [PATCH 05/60] Remove a couple of archived third-party repos (#1622) --- docs/panels.rst | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/docs/panels.rst b/docs/panels.rst index fc75763f7..ff7f2eb39 100644 --- a/docs/panels.rst +++ b/docs/panels.rst @@ -141,26 +141,6 @@ Third-party panels If you'd like to add a panel to this list, please submit a pull request! -Flamegraph -~~~~~~~~~~ - -URL: https://github.com/23andMe/djdt-flamegraph - -Path: ``djdt_flamegraph.FlamegraphPanel`` - -Generates a flame graph from your current request. - -Haystack -~~~~~~~~ - -URL: https://github.com/streeter/django-haystack-panel - -Path: ``haystack_panel.panel.HaystackDebugPanel`` - -See queries made by your Haystack_ backends. - -.. _Haystack: http://haystacksearch.org/ - HTML Tidy/Validator ~~~~~~~~~~~~~~~~~~~ From 4d5df404aa805057230db3cce6f2a3a61ff06684 Mon Sep 17 00:00:00 2001 From: Matthias Kestenholz Date: Wed, 18 May 2022 15:03:12 +0200 Subject: [PATCH 06/60] Amend the changelog --- docs/changes.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/changes.rst b/docs/changes.rst index bad3bc033..e4256d11d 100644 --- a/docs/changes.rst +++ b/docs/changes.rst @@ -1,6 +1,9 @@ Change log ========== +* Removed third party panels which have been archived on GitHub. +* Added Django 4.1a1 to the CI matrix. + 3.4.0 (2022-05-03) ------------------ From 1be8c45d278054d7ac8309c1dd0ba0c67c01bdb1 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Wed, 18 May 2022 15:24:06 +0300 Subject: [PATCH 07/60] Replace OrderedDict() usage when possible Since Python 3.7, regular dicts preserve insertion order, meaning most usages of OrderedDict can be replaced with regular dicts. Add a comment for the one case that cannot in the DebugToolbar class. 
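The change rests on two facts: plain dicts preserve insertion order on Python 3.7+, and OrderedDict is still required where FIFO removal via popitem(last=False) is needed (the one case kept in DebugToolbar). A minimal sketch of both points, with illustrative names only:

    from collections import OrderedDict

    # Plain dicts keep insertion order on Python 3.7+.
    counts = {"add": 0, "get": 0, "set": 0}
    assert list(counts) == ["add", "get", "set"]

    # dict.popitem() only removes the most recently inserted item (LIFO)...
    plain = {"a": 1, "b": 2}
    assert plain.popitem() == ("b", 2)

    # ...so OrderedDict.popitem(last=False) is still needed for FIFO eviction.
    ordered = OrderedDict([("a", 1), ("b", 2)])
    assert ordered.popitem(last=False) == ("a", 1)
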
--- debug_toolbar/panels/cache.py | 37 ++++++++++++------------- debug_toolbar/panels/headers.py | 14 ++++------ debug_toolbar/panels/history/panel.py | 3 +- debug_toolbar/panels/settings.py | 8 +----- debug_toolbar/panels/staticfiles.py | 3 +- debug_toolbar/panels/templates/panel.py | 3 +- debug_toolbar/toolbar.py | 3 ++ 7 files changed, 29 insertions(+), 42 deletions(-) diff --git a/debug_toolbar/panels/cache.py b/debug_toolbar/panels/cache.py index 74b2f3ab6..862515d8b 100644 --- a/debug_toolbar/panels/cache.py +++ b/debug_toolbar/panels/cache.py @@ -1,7 +1,6 @@ import inspect import sys import time -from collections import OrderedDict try: from django.utils.connection import ConnectionProxy @@ -168,25 +167,23 @@ def __init__(self, *args, **kwargs): self.hits = 0 self.misses = 0 self.calls = [] - self.counts = OrderedDict( - ( - ("add", 0), - ("get", 0), - ("set", 0), - ("get_or_set", 0), - ("touch", 0), - ("delete", 0), - ("clear", 0), - ("get_many", 0), - ("set_many", 0), - ("delete_many", 0), - ("has_key", 0), - ("incr", 0), - ("decr", 0), - ("incr_version", 0), - ("decr_version", 0), - ) - ) + self.counts = { + "add": 0, + "get": 0, + "set": 0, + "get_or_set": 0, + "touch": 0, + "delete": 0, + "clear": 0, + "get_many": 0, + "set_many": 0, + "delete_many": 0, + "has_key": 0, + "incr": 0, + "decr": 0, + "incr_version": 0, + "decr_version": 0, + } cache_called.connect(self._store_call_info) def _store_call_info( diff --git a/debug_toolbar/panels/headers.py b/debug_toolbar/panels/headers.py index 280cc5df0..ed20d6178 100644 --- a/debug_toolbar/panels/headers.py +++ b/debug_toolbar/panels/headers.py @@ -1,5 +1,3 @@ -from collections import OrderedDict - from django.utils.translation import gettext_lazy as _ from debug_toolbar.panels import Panel @@ -36,21 +34,19 @@ class HeadersPanel(Panel): def process_request(self, request): wsgi_env = list(sorted(request.META.items())) - self.request_headers = OrderedDict( - (unmangle(k), v) for (k, v) in wsgi_env if is_http_header(k) - ) + self.request_headers = { + unmangle(k): v for (k, v) in wsgi_env if is_http_header(k) + } if "Cookie" in self.request_headers: self.request_headers["Cookie"] = "=> see Request panel" - self.environ = OrderedDict( - (k, v) for (k, v) in wsgi_env if k in self.ENVIRON_FILTER - ) + self.environ = {k: v for (k, v) in wsgi_env if k in self.ENVIRON_FILTER} self.record_stats( {"request_headers": self.request_headers, "environ": self.environ} ) return super().process_request(request) def generate_stats(self, request, response): - self.response_headers = OrderedDict(sorted(response.items())) + self.response_headers = dict(sorted(response.items())) self.record_stats({"response_headers": self.response_headers}) diff --git a/debug_toolbar/panels/history/panel.py b/debug_toolbar/panels/history/panel.py index 00b350b3c..596bcfb4a 100644 --- a/debug_toolbar/panels/history/panel.py +++ b/debug_toolbar/panels/history/panel.py @@ -1,5 +1,4 @@ import json -from collections import OrderedDict from django.http.request import RawPostDataException from django.template.loader import render_to_string @@ -87,7 +86,7 @@ def content(self): Fetch every store for the toolbar and include it in the template. 
""" - stores = OrderedDict() + stores = {} for id, toolbar in reversed(self.toolbar._store.items()): stores[id] = { "toolbar": toolbar, diff --git a/debug_toolbar/panels/settings.py b/debug_toolbar/panels/settings.py index 37bba8727..6b7715da7 100644 --- a/debug_toolbar/panels/settings.py +++ b/debug_toolbar/panels/settings.py @@ -1,5 +1,3 @@ -from collections import OrderedDict - from django.conf import settings from django.utils.translation import gettext_lazy as _ from django.views.debug import get_default_exception_reporter_filter @@ -23,9 +21,5 @@ def title(self): def generate_stats(self, request, response): self.record_stats( - { - "settings": OrderedDict( - sorted(get_safe_settings().items(), key=lambda s: s[0]) - ) - } + {"settings": dict(sorted(get_safe_settings().items(), key=lambda s: s[0]))} ) diff --git a/debug_toolbar/panels/staticfiles.py b/debug_toolbar/panels/staticfiles.py index d90b6501a..c386ee145 100644 --- a/debug_toolbar/panels/staticfiles.py +++ b/debug_toolbar/panels/staticfiles.py @@ -1,4 +1,3 @@ -from collections import OrderedDict from os.path import join, normpath from django.conf import settings @@ -137,7 +136,7 @@ def get_staticfiles_finders(self): of relative and file system paths which that finder was able to find. """ - finders_mapping = OrderedDict() + finders_mapping = {} for finder in finders.get_finders(): try: for path, finder_storage in finder.list([]): diff --git a/debug_toolbar/panels/templates/panel.py b/debug_toolbar/panels/templates/panel.py index 1c2c96e09..35d5b5191 100644 --- a/debug_toolbar/panels/templates/panel.py +++ b/debug_toolbar/panels/templates/panel.py @@ -1,4 +1,3 @@ -from collections import OrderedDict from contextlib import contextmanager from os.path import normpath from pprint import pformat, saferepr @@ -39,7 +38,7 @@ def _request_context_bind_template(self, template): self.template = template # Set context processors according to the template engine's settings. processors = template.engine.template_context_processors + self._processors - self.context_processors = OrderedDict() + self.context_processors = {} updates = {} for processor in processors: name = f"{processor.__module__}.{processor.__name__}" diff --git a/debug_toolbar/toolbar.py b/debug_toolbar/toolbar.py index 79e5ac1c7..a7af36013 100644 --- a/debug_toolbar/toolbar.py +++ b/debug_toolbar/toolbar.py @@ -28,6 +28,9 @@ def __init__(self, request, get_response): if panel.enabled: get_response = panel.process_request self.process_request = get_response + # Use OrderedDict for the _panels attribute so that items can be efficiently + # removed using FIFO order in the DebugToolbar.store() method. The .popitem() + # method of Python's built-in dict only supports LIFO removal. self._panels = OrderedDict() while panels: panel = panels.pop() From 145c828b811422c04cb41f0b3a5a02e16c4f0b13 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Wed, 18 May 2022 15:47:49 +0300 Subject: [PATCH 08/60] Remove unnecessary sorting in SignedDataForm The signature does not depend on a canonical representation of the data, so don't bother sorting it. Tweak the data for the tests as a consequence. However, note that the tests are still deterministic since as of version 3.7, Python guarantees dictionary order. 
--- debug_toolbar/forms.py | 3 +-- tests/test_forms.py | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/debug_toolbar/forms.py b/debug_toolbar/forms.py index 2b4e048b4..3c7a45a07 100644 --- a/debug_toolbar/forms.py +++ b/debug_toolbar/forms.py @@ -47,7 +47,6 @@ def verified_data(self): @classmethod def sign(cls, data): - items = sorted(data.items(), key=lambda item: item[0]) return signing.Signer(salt=cls.salt).sign( - json.dumps({key: force_str(value) for key, value in items}) + json.dumps({key: force_str(value) for key, value in data.items()}) ) diff --git a/tests/test_forms.py b/tests/test_forms.py index da144e108..a619ae89d 100644 --- a/tests/test_forms.py +++ b/tests/test_forms.py @@ -7,7 +7,7 @@ SIGNATURE = "-WiogJKyy4E8Om00CrFSy0T6XHObwBa6Zb46u-vmeYE" -DATA = {"value": "foo", "date": datetime(2020, 1, 1, tzinfo=timezone.utc)} +DATA = {"date": datetime(2020, 1, 1, tzinfo=timezone.utc), "value": "foo"} SIGNED_DATA = f'{{"date": "2020-01-01 00:00:00+00:00", "value": "foo"}}:{SIGNATURE}' From f071cff70f8509fc5a8df30b117ad09db8b88f74 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Wed, 18 May 2022 15:38:11 +0300 Subject: [PATCH 09/60] Drop unneeded key arguments for sorting dict items Passing a key argument of the form `key=lambda item: item[0]` to sorted() when sorting dict items is unneeded, because tuples already compare element-wise, and the first elements are known to be unique since they are dictionary keys. --- debug_toolbar/panels/settings.py | 4 +--- debug_toolbar/panels/signals.py | 2 +- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/debug_toolbar/panels/settings.py b/debug_toolbar/panels/settings.py index 6b7715da7..7b27c6243 100644 --- a/debug_toolbar/panels/settings.py +++ b/debug_toolbar/panels/settings.py @@ -20,6 +20,4 @@ def title(self): return _("Settings from %s") % settings.SETTINGS_MODULE def generate_stats(self, request, response): - self.record_stats( - {"settings": dict(sorted(get_safe_settings().items(), key=lambda s: s[0]))} - ) + self.record_stats({"settings": dict(sorted(get_safe_settings().items()))}) diff --git a/debug_toolbar/panels/signals.py b/debug_toolbar/panels/signals.py index 41f669f2c..574948d6e 100644 --- a/debug_toolbar/panels/signals.py +++ b/debug_toolbar/panels/signals.py @@ -76,7 +76,7 @@ def signals(self): def generate_stats(self, request, response): signals = [] - for name, signal in sorted(self.signals.items(), key=lambda x: x[0]): + for name, signal in sorted(self.signals.items()): receivers = [] for receiver in signal.receivers: receiver = receiver[1] From d61b4d45f6d3071d11297bf893723c4f5f4b7529 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 16 May 2022 18:25:39 +0000 Subject: [PATCH 10/60] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/adamchainz/django-upgrade: 1.6.1 → 1.7.0](https://github.com/adamchainz/django-upgrade/compare/1.6.1...1.7.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index dffb24f6f..71967ab0e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,7 +20,7 @@ repos: - id: pyupgrade args: [--py37-plus] - repo: https://github.com/adamchainz/django-upgrade - rev: 1.6.1 + rev: 1.7.0 hooks: - id: django-upgrade args: [--target-version, "3.2"] From c5f691f2e1c376cf2220ff2a30267e22d084bea6 Mon Sep 
17 00:00:00 2001 From: Matthias Kestenholz Date: Wed, 18 May 2022 12:19:56 +0200 Subject: [PATCH 11/60] Fix #1621: Do not crash when encountering unexpected data in the request --- debug_toolbar/panels/request.py | 10 ++++++---- debug_toolbar/static/debug_toolbar/css/toolbar.css | 7 +++++++ .../templates/debug_toolbar/panels/request.html | 8 ++++---- .../debug_toolbar/panels/request_variables.html | 6 +++++- debug_toolbar/utils.py | 14 +++++++++----- docs/changes.rst | 4 ++++ tests/panels/test_request.py | 13 +++++++++++++ 7 files changed, 48 insertions(+), 14 deletions(-) diff --git a/debug_toolbar/panels/request.py b/debug_toolbar/panels/request.py index 5255624b2..966301d97 100644 --- a/debug_toolbar/panels/request.py +++ b/debug_toolbar/panels/request.py @@ -61,9 +61,11 @@ def generate_stats(self, request, response): if hasattr(request, "session"): self.record_stats( { - "session": [ - (k, request.session.get(k)) - for k in sorted(request.session.keys()) - ] + "session": { + "list": [ + (k, request.session.get(k)) + for k in sorted(request.session.keys()) + ] + } } ) diff --git a/debug_toolbar/static/debug_toolbar/css/toolbar.css b/debug_toolbar/static/debug_toolbar/css/toolbar.css index 2d36049f1..a105bfd11 100644 --- a/debug_toolbar/static/debug_toolbar/css/toolbar.css +++ b/debug_toolbar/static/debug_toolbar/css/toolbar.css @@ -591,6 +591,13 @@ #djDebug .djdt-stack pre.djdt-locals { margin: 0 27px 27px 27px; } +#djDebug .djdt-raw { + background-color: #fff; + border: 1px solid #ccc; + margin-top: 0.8em; + padding: 5px; + white-space: pre-wrap; +} #djDebug .djdt-width-20 { width: 20%; diff --git a/debug_toolbar/templates/debug_toolbar/panels/request.html b/debug_toolbar/templates/debug_toolbar/panels/request.html index 3f9b068be..076d5f74f 100644 --- a/debug_toolbar/templates/debug_toolbar/panels/request.html +++ b/debug_toolbar/templates/debug_toolbar/panels/request.html @@ -20,28 +20,28 @@

{% trans "View information" %}

-{% if cookies %} +{% if cookies.list or cookies.raw %}

{% trans "Cookies" %}

{% include 'debug_toolbar/panels/request_variables.html' with variables=cookies %} {% else %}

{% trans "No cookies" %}

{% endif %} -{% if session %} +{% if session.list or session.raw %}

{% trans "Session data" %}

{% include 'debug_toolbar/panels/request_variables.html' with variables=session %} {% else %}

{% trans "No session data" %}

{% endif %} -{% if get %} +{% if get.list or get.raw %}

{% trans "GET data" %}

{% include 'debug_toolbar/panels/request_variables.html' with variables=get %} {% else %}

{% trans "No GET data" %}

{% endif %} -{% if post %} +{% if post.list or post.raw %}

{% trans "POST data" %}

{% include 'debug_toolbar/panels/request_variables.html' with variables=post %} {% else %} diff --git a/debug_toolbar/templates/debug_toolbar/panels/request_variables.html b/debug_toolbar/templates/debug_toolbar/panels/request_variables.html index 7e9118c7d..92200f867 100644 --- a/debug_toolbar/templates/debug_toolbar/panels/request_variables.html +++ b/debug_toolbar/templates/debug_toolbar/panels/request_variables.html @@ -1,5 +1,6 @@ {% load i18n %} +{% if variables.list %} @@ -12,7 +13,7 @@ - {% for key, value in variables %} + {% for key, value in variables.list %} @@ -20,3 +21,6 @@ {% endfor %}
{{ key|pprint }} {{ value|pprint }}
+{% elif variables.raw %} +{{ variables.raw|pprint }} +{% endif %} diff --git a/debug_toolbar/utils.py b/debug_toolbar/utils.py index 6b80c5af0..cc90cf4db 100644 --- a/debug_toolbar/utils.py +++ b/debug_toolbar/utils.py @@ -231,12 +231,16 @@ def getframeinfo(frame, context=1): def get_sorted_request_variable(variable): """ - Get a sorted list of variables from the request data. + Get a data structure for showing a sorted list of variables from the + request data. """ - if isinstance(variable, dict): - return [(k, variable.get(k)) for k in sorted(variable)] - else: - return [(k, variable.getlist(k)) for k in sorted(variable)] + try: + if isinstance(variable, dict): + return {"list": [(k, variable.get(k)) for k in sorted(variable)]} + else: + return {"list": [(k, variable.getlist(k)) for k in sorted(variable)]} + except TypeError: + return {"raw": variable} def get_stack(context=1): diff --git a/docs/changes.rst b/docs/changes.rst index e4256d11d..83e8fe9aa 100644 --- a/docs/changes.rst +++ b/docs/changes.rst @@ -3,6 +3,10 @@ Change log * Removed third party panels which have been archived on GitHub. * Added Django 4.1a1 to the CI matrix. +* Stopped crashing when ``request.GET`` and ``request.POST`` are neither + dictionaries nor ``QueryDict`` instances. Using anything but ``QueryDict`` + instances isn't a valid use of Django but, again, django-debug-toolbar + shouldn't crash. 3.4.0 (2022-05-03) ------------------ diff --git a/tests/panels/test_request.py b/tests/panels/test_request.py index 1d2a33c56..8087203c3 100644 --- a/tests/panels/test_request.py +++ b/tests/panels/test_request.py @@ -85,6 +85,19 @@ def test_dict_for_request_in_method_post(self): self.assertIn("foo", content) self.assertIn("bar", content) + def test_list_for_request_in_method_post(self): + """ + Verify that the toolbar doesn't crash if request.POST contains unexpected data. + + See https://github.com/jazzband/django-debug-toolbar/issues/1621 + """ + self.request.POST = [{"a": 1}, {"b": 2}] + response = self.panel.process_request(self.request) + self.panel.generate_stats(self.request, response) + # ensure the panel POST request data is processed correctly. + content = self.panel.content + self.assertIn("[{'a': 1}, {'b': 2}]", content) + def test_namespaced_url(self): self.request.path = "/admin/login/" response = self.panel.process_request(self.request) From 84c624d1249011c78b824d44cc8435fe62d8831c Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Sun, 22 May 2022 14:37:19 +0300 Subject: [PATCH 12/60] Add dependency on asgiref Will be used for asgiref.local.Local, a context-aware threadlocal.local replacement also used internally by Django. Depend on the same version required by the earliest supported version of Django (3.2). --- setup.cfg | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.cfg b/setup.cfg index b984e23cc..f8850621b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -31,6 +31,7 @@ classifiers = [options] python_requires = >=3.7 install_requires = + asgiref >= 3.3.2, < 4 Django >= 3.2 sqlparse >= 0.2.0 packages = find: From 7366d2b31d8ef1223eaef9eef26f28ae011fedf8 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Sun, 22 May 2022 16:47:11 +0300 Subject: [PATCH 13/60] Allow integration tests to access the toolbar Add a mechanism for integration tests to access the toolbar used for a particular request: Update DebugToolbar to emit a signal on creation referencing itself. 
Then create and use a custom Django test client that connects to this signal to capture the toolbar that was created while the request was being processed, and to store the toolbar on the response for use by tests. --- debug_toolbar/toolbar.py | 5 +++++ tests/base.py | 26 +++++++++++++++++++++++++- 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/debug_toolbar/toolbar.py b/debug_toolbar/toolbar.py index a7af36013..419c67418 100644 --- a/debug_toolbar/toolbar.py +++ b/debug_toolbar/toolbar.py @@ -8,6 +8,7 @@ from django.apps import apps from django.core.exceptions import ImproperlyConfigured +from django.dispatch import Signal from django.template import TemplateSyntaxError from django.template.loader import render_to_string from django.urls import path, resolve @@ -18,6 +19,9 @@ class DebugToolbar: + # for internal testing use only + _created = Signal() + def __init__(self, request, get_response): self.request = request self.config = dt_settings.get_config().copy() @@ -38,6 +42,7 @@ def __init__(self, request, get_response): self.stats = {} self.server_timing_stats = {} self.store_id = None + self._created.send(request, toolbar=self) # Manage panels diff --git a/tests/base.py b/tests/base.py index 597a74f29..ccd9f053c 100644 --- a/tests/base.py +++ b/tests/base.py @@ -1,13 +1,37 @@ import html5lib +from asgiref.local import Local from django.http import HttpResponse -from django.test import RequestFactory, TestCase +from django.test import Client, RequestFactory, TestCase from debug_toolbar.toolbar import DebugToolbar + +class ToolbarTestClient(Client): + def request(self, **request): + # Use a thread/async task context-local variable to guard against a + # concurrent _created signal from a different thread/task. + data = Local() + data.toolbar = None + + def handle_toolbar_created(sender, toolbar=None, **kwargs): + data.toolbar = toolbar + + DebugToolbar._created.connect(handle_toolbar_created) + try: + response = super().request(**request) + finally: + DebugToolbar._created.disconnect(handle_toolbar_created) + response.toolbar = data.toolbar + + return response + + rf = RequestFactory() class BaseTestCase(TestCase): + client_class = ToolbarTestClient + panel_id = None def setUp(self): From 3803724c921a314a56d05d0dafadaf03abb34de5 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Sun, 22 May 2022 17:00:01 +0300 Subject: [PATCH 14/60] Use request toolbar for cache integration tests For integration tests using the cache panel, check the panel associated with the toolbar created for the request, rather than the toolbar created for the test case. --- tests/test_integration.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/tests/test_integration.py b/tests/test_integration.py index 702fa8141..016b52217 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -109,10 +109,10 @@ def test_cache_page(self): # Clear the cache before testing the views. Other tests that use cached_view # may run earlier and cause fewer cache calls. 
cache.clear() - self.client.get("/cached_view/") - self.assertEqual(len(self.toolbar.get_panel_by_id("CachePanel").calls), 3) - self.client.get("/cached_view/") - self.assertEqual(len(self.toolbar.get_panel_by_id("CachePanel").calls), 5) + response = self.client.get("/cached_view/") + self.assertEqual(len(response.toolbar.get_panel_by_id("CachePanel").calls), 3) + response = self.client.get("/cached_view/") + self.assertEqual(len(response.toolbar.get_panel_by_id("CachePanel").calls), 2) @override_settings(ROOT_URLCONF="tests.urls_use_package_urls") def test_include_package_urls(self): @@ -120,17 +120,17 @@ def test_include_package_urls(self): # Clear the cache before testing the views. Other tests that use cached_view # may run earlier and cause fewer cache calls. cache.clear() - self.client.get("/cached_view/") - self.assertEqual(len(self.toolbar.get_panel_by_id("CachePanel").calls), 3) - self.client.get("/cached_view/") - self.assertEqual(len(self.toolbar.get_panel_by_id("CachePanel").calls), 5) + response = self.client.get("/cached_view/") + self.assertEqual(len(response.toolbar.get_panel_by_id("CachePanel").calls), 3) + response = self.client.get("/cached_view/") + self.assertEqual(len(response.toolbar.get_panel_by_id("CachePanel").calls), 2) def test_low_level_cache_view(self): """Test cases when low level caching API is used within a request.""" - self.client.get("/cached_low_level_view/") - self.assertEqual(len(self.toolbar.get_panel_by_id("CachePanel").calls), 2) - self.client.get("/cached_low_level_view/") - self.assertEqual(len(self.toolbar.get_panel_by_id("CachePanel").calls), 3) + response = self.client.get("/cached_low_level_view/") + self.assertEqual(len(response.toolbar.get_panel_by_id("CachePanel").calls), 2) + response = self.client.get("/cached_low_level_view/") + self.assertEqual(len(response.toolbar.get_panel_by_id("CachePanel").calls), 1) def test_cache_disable_instrumentation(self): """ @@ -139,10 +139,10 @@ def test_cache_disable_instrumentation(self): """ self.assertIsNone(cache.set("UseCacheAfterToolbar.before", None)) self.assertIsNone(cache.set("UseCacheAfterToolbar.after", None)) - self.client.get("/execute_sql/") + response = self.client.get("/execute_sql/") self.assertEqual(cache.get("UseCacheAfterToolbar.before"), 1) self.assertEqual(cache.get("UseCacheAfterToolbar.after"), 1) - self.assertEqual(len(self.toolbar.get_panel_by_id("CachePanel").calls), 0) + self.assertEqual(len(response.toolbar.get_panel_by_id("CachePanel").calls), 0) def test_is_toolbar_request(self): self.request.path = "/__debug__/render_panel/" From a8513e5b4160503cee7e01f3ec18c3dc40cc5a21 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Sun, 22 May 2022 14:33:47 +0300 Subject: [PATCH 15/60] Add a .ready() method to the panel API This method will be called for all installed panels from the DebugToolbarConfig.ready() method during Django initialization to support initialization that needs to happen unconditionally for the panel regardless of whether it is enabled for a particular request. --- debug_toolbar/apps.py | 7 ++++--- debug_toolbar/panels/__init__.py | 13 +++++++++++++ docs/panels.rst | 2 ++ 3 files changed, 19 insertions(+), 3 deletions(-) diff --git a/debug_toolbar/apps.py b/debug_toolbar/apps.py index 2848e72d5..c55b75392 100644 --- a/debug_toolbar/apps.py +++ b/debug_toolbar/apps.py @@ -17,9 +17,10 @@ class DebugToolbarConfig(AppConfig): def ready(self): from debug_toolbar.toolbar import DebugToolbar - # Import the panels when the app is ready. 
This allows panels - # like CachePanel to enable the instrumentation immediately. - DebugToolbar.get_panel_classes() + # Import the panels when the app is ready and call their ready() methods. This + # allows panels like CachePanel to enable their instrumentation immediately. + for cls in DebugToolbar.get_panel_classes(): + cls.ready() def check_template_config(config): diff --git a/debug_toolbar/panels/__init__.py b/debug_toolbar/panels/__init__.py index 168166bc6..ea8ff8e9c 100644 --- a/debug_toolbar/panels/__init__.py +++ b/debug_toolbar/panels/__init__.py @@ -114,6 +114,19 @@ def scripts(self): """ return [] + # Panel early initialization + + @classmethod + def ready(cls): + """ + Perform early initialization for the panel. + + This should only include initialization or instrumentation that needs to + be done unconditionally for the panel regardless of whether it is + enabled for a particular request. It should be idempotent. + """ + pass + # URLs for panel-specific views @classmethod diff --git a/docs/panels.rst b/docs/panels.rst index ff7f2eb39..4eba8eba7 100644 --- a/docs/panels.rst +++ b/docs/panels.rst @@ -352,6 +352,8 @@ unauthorized access. There is no public CSS API at this time. .. autoattribute:: debug_toolbar.panels.Panel.scripts + .. automethod:: debug_toolbar.panels.Panel.ready + .. automethod:: debug_toolbar.panels.Panel.get_urls .. automethod:: debug_toolbar.panels.Panel.enable_instrumentation From 4b77ec74f2d326013d715453d7a2219e574c3f6a Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Sun, 22 May 2022 14:58:27 +0300 Subject: [PATCH 16/60] Re-architect cache call recording Using signals for monitoring cache calls gives incorrect results in the face of concurrency. If two requests are being processed concurrently in different threads, they will store each other's cache calls because both panels will be subscribed to the same signal. Additionally, rework the enable_instrumentation() mechanism to monkey patch methods on the cache instances directly instead of replacing the cache instances with wrapper classes. This should eliminate the corner cases mentioned in the (now-removed) disable_instrumentation() comments. 
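The wrapping pattern itself is simple; a generic sketch (names are illustrative, not the panel's real API) of timing a method per instance so that concurrent requests never share recorded state:

    import functools
    import time

    def wrap_method(obj, name, record):
        """Replace the named method on obj with a wrapper that times each call."""
        original = getattr(obj, name)

        @functools.wraps(original)
        def wrapper(*args, **kwargs):
            start = time.time()
            try:
                return original(*args, **kwargs)
            finally:
                record(name, time.time() - start)

        wrapper._wrapped = original  # keep a handle so the patch can be undone
        setattr(obj, name, wrapper)

    def unwrap_method(obj, name):
        setattr(obj, name, getattr(obj, name)._wrapped)
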
--- debug_toolbar/panels/cache.py | 328 +++++++++++++++------------------- 1 file changed, 142 insertions(+), 186 deletions(-) diff --git a/debug_toolbar/panels/cache.py b/debug_toolbar/panels/cache.py index 862515d8b..7877ef0ab 100644 --- a/debug_toolbar/panels/cache.py +++ b/debug_toolbar/panels/cache.py @@ -1,18 +1,9 @@ -import inspect -import sys +import functools import time -try: - from django.utils.connection import ConnectionProxy -except ImportError: - ConnectionProxy = None - +from asgiref.local import Local from django.conf import settings -from django.core import cache -from django.core.cache import DEFAULT_CACHE_ALIAS, CacheHandler -from django.core.cache.backends.base import BaseCache -from django.dispatch import Signal -from django.middleware import cache as middleware_cache +from django.core.cache import CacheHandler, caches from django.utils.translation import gettext_lazy as _, ngettext from debug_toolbar import settings as dt_settings @@ -24,134 +15,25 @@ tidy_stacktrace, ) -cache_called = Signal() - - -def send_signal(method): - def wrapped(self, *args, **kwargs): - t = time.time() - value = method(self, *args, **kwargs) - t = time.time() - t - - if dt_settings.get_config()["ENABLE_STACKTRACES"]: - stacktrace = tidy_stacktrace(reversed(get_stack())) - else: - stacktrace = [] - - template_info = get_template_info() - cache_called.send( - sender=self.__class__, - time_taken=t, - name=method.__name__, - return_value=value, - args=args, - kwargs=kwargs, - trace=stacktrace, - template_info=template_info, - backend=self.cache, - ) - return value - - return wrapped - - -class CacheStatTracker(BaseCache): - """A small class used to track cache calls.""" - - def __init__(self, cache): - self.cache = cache - - def __repr__(self): - return "" % repr(self.cache) - - def _get_func_info(self): - frame = sys._getframe(3) - info = inspect.getframeinfo(frame) - return (info[0], info[1], info[2], info[3]) - - def __contains__(self, key): - return self.cache.__contains__(key) - - def __getattr__(self, name): - return getattr(self.cache, name) - - @send_signal - def add(self, *args, **kwargs): - return self.cache.add(*args, **kwargs) - - @send_signal - def get(self, *args, **kwargs): - return self.cache.get(*args, **kwargs) - - @send_signal - def set(self, *args, **kwargs): - return self.cache.set(*args, **kwargs) - - @send_signal - def get_or_set(self, *args, **kwargs): - return self.cache.get_or_set(*args, **kwargs) - - @send_signal - def touch(self, *args, **kwargs): - return self.cache.touch(*args, **kwargs) - - @send_signal - def delete(self, *args, **kwargs): - return self.cache.delete(*args, **kwargs) - - @send_signal - def clear(self, *args, **kwargs): - return self.cache.clear(*args, **kwargs) - - @send_signal - def has_key(self, *args, **kwargs): - # Ignore flake8 rules for has_key since we need to support caches - # that may be using has_key. 
- return self.cache.has_key(*args, **kwargs) # noqa: W601 - - @send_signal - def incr(self, *args, **kwargs): - return self.cache.incr(*args, **kwargs) - - @send_signal - def decr(self, *args, **kwargs): - return self.cache.decr(*args, **kwargs) - - @send_signal - def get_many(self, *args, **kwargs): - return self.cache.get_many(*args, **kwargs) - - @send_signal - def set_many(self, *args, **kwargs): - self.cache.set_many(*args, **kwargs) - - @send_signal - def delete_many(self, *args, **kwargs): - self.cache.delete_many(*args, **kwargs) - - @send_signal - def incr_version(self, *args, **kwargs): - return self.cache.incr_version(*args, **kwargs) - - @send_signal - def decr_version(self, *args, **kwargs): - return self.cache.decr_version(*args, **kwargs) - - -class CacheHandlerPatch(CacheHandler): - def __init__(self, settings=None): - self._djdt_wrap = True - super().__init__(settings=settings) - - def create_connection(self, alias): - actual_cache = super().create_connection(alias) - if self._djdt_wrap: - return CacheStatTracker(actual_cache) - else: - return actual_cache - - -middleware_cache.caches = CacheHandlerPatch() +# The order of the methods in this list determines the order in which they are listed in +# the Commands table in the panel content. +WRAPPED_CACHE_METHODS = [ + "add", + "get", + "set", + "get_or_set", + "touch", + "delete", + "clear", + "get_many", + "set_many", + "delete_many", + "has_key", + "incr", + "decr", + "incr_version", + "decr_version", +] class CachePanel(Panel): @@ -161,43 +43,57 @@ class CachePanel(Panel): template = "debug_toolbar/panels/cache.html" + _context_locals = Local() + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.total_time = 0 self.hits = 0 self.misses = 0 self.calls = [] - self.counts = { - "add": 0, - "get": 0, - "set": 0, - "get_or_set": 0, - "touch": 0, - "delete": 0, - "clear": 0, - "get_many": 0, - "set_many": 0, - "delete_many": 0, - "has_key": 0, - "incr": 0, - "decr": 0, - "incr_version": 0, - "decr_version": 0, - } - cache_called.connect(self._store_call_info) + self.counts = {name: 0 for name in WRAPPED_CACHE_METHODS} + + @classmethod + def current_instance(cls): + """ + Return the currently enabled CachePanel instance or None. + + If a request is in process with a CachePanel enabled, this will return that + panel (based on the current thread or async task). Otherwise it will return + None. + """ + return getattr(cls._context_locals, "current_instance", None) + + @classmethod + def ready(cls): + if not hasattr(CacheHandler, "_djdt_patched"): + # Wrap the CacheHander.create_connection() method to monkey patch any new + # cache connections that are opened while instrumentation is enabled. In + # the interests of thread safety, this is done once at startup time and + # never removed. 
+ original_method = CacheHandler.create_connection + + @functools.wraps(original_method) + def wrapper(self, alias): + cache = original_method(self, alias) + panel = cls.current_instance() + if panel is not None: + panel._monkey_patch_cache(cache) + return cache + + CacheHandler.create_connection = wrapper + CacheHandler._djdt_patched = True def _store_call_info( self, - sender, - name=None, - time_taken=0, - return_value=None, - args=None, - kwargs=None, - trace=None, - template_info=None, - backend=None, - **kw, + name, + time_taken, + return_value, + args, + kwargs, + trace, + template_info, + backend, ): if name == "get" or name == "get_or_set": if return_value is None: @@ -226,6 +122,69 @@ def _store_call_info( } ) + def _record_call(self, cache, name, original_method, args, kwargs): + # Some cache backends implement certain cache methods in terms of other cache + # methods (e.g. get_or_set() in terms of get() and add()). In order to only + # record the calls made directly by the user code, set the _djdt_recording flag + # here to cause the monkey patched cache methods to skip recording additional + # calls made during the course of this call. + cache._djdt_recording = True + t = time.time() + value = original_method(*args, **kwargs) + t = time.time() - t + cache._djdt_recording = False + + if dt_settings.get_config()["ENABLE_STACKTRACES"]: + stacktrace = tidy_stacktrace(reversed(get_stack())) + else: + stacktrace = [] + + self._store_call_info( + name=name, + time_taken=t, + return_value=value, + args=args, + kwargs=kwargs, + trace=stacktrace, + template_info=get_template_info(), + backend=cache, + ) + return value + + def _monkey_patch_method(self, cache, name): + original_method = getattr(cache, name) + + @functools.wraps(original_method) + def wrapper(*args, **kwargs): + # If this call is being made as part of the implementation of another cache + # method, don't record it. + if cache._djdt_recording: + return original_method(*args, **kwargs) + else: + return self._record_call(cache, name, original_method, args, kwargs) + + wrapper._djdt_wrapped = original_method + setattr(cache, name, wrapper) + + def _monkey_patch_cache(self, cache): + if not hasattr(cache, "_djdt_patched"): + for name in WRAPPED_CACHE_METHODS: + self._monkey_patch_method(cache, name) + cache._djdt_patched = True + cache._djdt_recording = False + + @staticmethod + def _unmonkey_patch_cache(cache): + if hasattr(cache, "_djdt_patched"): + for name in WRAPPED_CACHE_METHODS: + original_method = getattr(cache, name)._djdt_wrapped + if original_method.__func__ == getattr(cache.__class__, name): + delattr(cache, name) + else: + setattr(cache, name, original_method) + del cache._djdt_patched + del cache._djdt_recording + # Implement the Panel API nav_title = _("Cache") @@ -249,26 +208,23 @@ def title(self): ) % {"count": count} def enable_instrumentation(self): - for alias in cache.caches: - if not isinstance(cache.caches[alias], CacheStatTracker): - cache.caches[alias] = CacheStatTracker(cache.caches[alias]) - - if not isinstance(middleware_cache.caches, CacheHandlerPatch): - middleware_cache.caches = cache.caches - - # Wrap the patched cache inside Django's ConnectionProxy - if ConnectionProxy: - cache.cache = ConnectionProxy(cache.caches, DEFAULT_CACHE_ALIAS) + # Monkey patch all open cache connections. Django maintains cache connections + # on a per-thread/async task basis, so this will not affect any concurrent + # requests. 
The monkey patch of CacheHander.create_connection() installed in + # the .ready() method will ensure that any new cache connections that get opened + # during this request will also be monkey patched. + for cache in caches.all(initialized_only=True): + self._monkey_patch_cache(cache) + # Mark this panel instance as the current one for the active thread/async task + # context. This will be used by the CacheHander.create_connection() monkey + # patch. + self._context_locals.current_instance = self def disable_instrumentation(self): - for alias in cache.caches: - if isinstance(cache.caches[alias], CacheStatTracker): - cache.caches[alias] = cache.caches[alias].cache - if ConnectionProxy: - cache.cache = ConnectionProxy(cache.caches, DEFAULT_CACHE_ALIAS) - # While it can be restored to the original, any views that were - # wrapped with the cache_page decorator will continue to use a - # monkey patched cache. + if hasattr(self._context_locals, "current_instance"): + del self._context_locals.current_instance + for cache in caches.all(initialized_only=True): + self._unmonkey_patch_cache(cache) def generate_stats(self, request, response): self.record_stats( From 4f1e55370475bd115c2c8262feb0641977f3766a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 23 May 2022 17:40:47 +0000 Subject: [PATCH 17/60] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pycqa/doc8: 0.11.1 → 0.11.2](https://github.com/pycqa/doc8/compare/0.11.1...0.11.2) - [github.com/pre-commit/mirrors-eslint: v8.15.0 → v8.16.0](https://github.com/pre-commit/mirrors-eslint/compare/v8.15.0...v8.16.0) --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 71967ab0e..1fcfa5a1d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,7 +11,7 @@ repos: hooks: - id: flake8 - repo: https://github.com/pycqa/doc8 - rev: 0.11.1 + rev: 0.11.2 hooks: - id: doc8 - repo: https://github.com/asottile/pyupgrade @@ -43,7 +43,7 @@ repos: - id: prettier types_or: [javascript, css] - repo: https://github.com/pre-commit/mirrors-eslint - rev: v8.15.0 + rev: v8.16.0 hooks: - id: eslint files: \.js?$ From 514a980b42b9a5a2f487641f0d8914b5cbd397d8 Mon Sep 17 00:00:00 2001 From: Matthias Kestenholz Date: Mon, 23 May 2022 20:11:53 +0200 Subject: [PATCH 18/60] Drop the explicit asgiref dependency Depending on asgiref via Django is fine. Refs #1626. --- setup.cfg | 1 - 1 file changed, 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index f8850621b..b984e23cc 100644 --- a/setup.cfg +++ b/setup.cfg @@ -31,7 +31,6 @@ classifiers = [options] python_requires = >=3.7 install_requires = - asgiref >= 3.3.2, < 4 Django >= 3.2 sqlparse >= 0.2.0 packages = find: From c5ee3f3b69027a48a498ebac0e2e1fc94c132fa5 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Thu, 28 Apr 2022 15:29:50 +0300 Subject: [PATCH 19/60] Fix tox testenv generative name dj32 was inadvertently removed from the -postgresql testenv section. 
--- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index f2f62a3a1..212e31f39 100644 --- a/tox.ini +++ b/tox.ini @@ -42,7 +42,7 @@ whitelist_externals = make pip_pre = True commands = python -b -W always -m coverage run -m django test -v2 {posargs:tests} -[testenv:py{37,38,39,310}-dj{40,41,main}-postgresql] +[testenv:py{37,38,39,310}-dj{32,40,41,main}-postgresql] setenv = {[testenv]setenv} DB_BACKEND = postgresql From b3e43a06b1a4bf1eb1c81de0a29b2fb6fa7a98d6 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Thu, 28 Apr 2022 14:39:47 +0300 Subject: [PATCH 20/60] Clean up additional connection instrumentation attribute The _djdt_chunked_cursor attribute wasn't being cleaned up in the unwrap_cursor() method. --- debug_toolbar/panels/sql/tracking.py | 1 + 1 file changed, 1 insertion(+) diff --git a/debug_toolbar/panels/sql/tracking.py b/debug_toolbar/panels/sql/tracking.py index 93304b21f..a67727712 100644 --- a/debug_toolbar/panels/sql/tracking.py +++ b/debug_toolbar/panels/sql/tracking.py @@ -61,6 +61,7 @@ def chunked_cursor(*args, **kwargs): def unwrap_cursor(connection): if hasattr(connection, "_djdt_cursor"): del connection._djdt_cursor + del connection._djdt_chunked_cursor del connection.cursor del connection.chunked_cursor From 6ec4b0cd23b097ec45cf55da76f59877fefaa2c6 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Thu, 28 Apr 2022 14:48:11 +0300 Subject: [PATCH 21/60] More accurate cursor unwrapping If the cursor()/chunked_cursor() methods of a connection had already been monkey patched by some other code before wrap_cursor() was called, unwrap_cursor() would undo the previous monkey patch as well as the one performed by wrap_cursor(). This can occur when testing if multiple databases are defined but only some of them are allowed to be used by the tests. [1] Django's SimpleTestCase wraps the connections for any disallowed databases to raise an exception if they are accessed. [2] Without this commit, unwrap_cursor() was undoing Django's monkey patch, resulting in an exception when Django tried to undo its monkey patch which was no longer there. Update unwrap_cursor() to preserve a previous monkey patch if present. [1] https://docs.djangoproject.com/en/stable/topics/testing/tools/#django.test.SimpleTestCase.databases [2] https://github.com/django/django/blob/ce586ed6931092d3a5f06df9031cdeb891793ddb/django/test/testcases.py#L350 --- debug_toolbar/panels/sql/tracking.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/debug_toolbar/panels/sql/tracking.py b/debug_toolbar/panels/sql/tracking.py index a67727712..f3c33e49f 100644 --- a/debug_toolbar/panels/sql/tracking.py +++ b/debug_toolbar/panels/sql/tracking.py @@ -60,10 +60,22 @@ def chunked_cursor(*args, **kwargs): def unwrap_cursor(connection): if hasattr(connection, "_djdt_cursor"): + # Sometimes the cursor()/chunked_cursor() methods of the DatabaseWrapper + # instance are already monkey patched before wrap_cursor() is called. (In + # particular, Django's SimpleTestCase monkey patches those methods for any + # disallowed databases to raise an exception if they are accessed.) Thus only + # delete our monkey patch if the method we saved is the same as the class + # method. Otherwise, restore the prior monkey patch from our saved method. 
+ if connection._djdt_cursor == connection.__class__.cursor: + del connection.cursor + else: + connection.cursor = connection._djdt_cursor del connection._djdt_cursor + if connection._djdt_chunked_cursor == connection.__class__.chunked_cursor: + del connection.chunked_cursor + else: + connection.chunked_cursor = connection._djdt_chunked_cursor del connection._djdt_chunked_cursor - del connection.cursor - del connection.chunked_cursor class BaseCursorWrapper: From b1814f17e9ca494e7430a31da6435983e827c777 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Thu, 28 Apr 2022 15:08:51 +0300 Subject: [PATCH 22/60] Add infrastructure for supporting multi-DB tests --- tests/base.py | 12 ++++++++++-- tests/settings.py | 14 ++++++++++++++ 2 files changed, 24 insertions(+), 2 deletions(-) diff --git a/tests/base.py b/tests/base.py index ccd9f053c..5cc432add 100644 --- a/tests/base.py +++ b/tests/base.py @@ -1,7 +1,7 @@ import html5lib from asgiref.local import Local from django.http import HttpResponse -from django.test import Client, RequestFactory, TestCase +from django.test import Client, RequestFactory, TestCase, TransactionTestCase from debug_toolbar.toolbar import DebugToolbar @@ -29,7 +29,7 @@ def handle_toolbar_created(sender, toolbar=None, **kwargs): rf = RequestFactory() -class BaseTestCase(TestCase): +class BaseMixin: client_class = ToolbarTestClient panel_id = None @@ -67,6 +67,14 @@ def assertValidHTML(self, content): raise self.failureException("\n".join(msg_parts)) +class BaseTestCase(BaseMixin, TestCase): + pass + + +class BaseMultiDBTestCase(BaseMixin, TransactionTestCase): + databases = {"default", "replica"} + + class IntegrationTestCase(TestCase): """Base TestCase for tests involving clients making requests.""" diff --git a/tests/settings.py b/tests/settings.py index da5067fbf..b3c281242 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -104,6 +104,20 @@ "USER": "default_test", }, }, + "replica": { + "ENGINE": "django.{}db.backends.{}".format( + "contrib.gis." if USE_GIS else "", os.getenv("DB_BACKEND", "sqlite3") + ), + "NAME": os.getenv("DB_NAME", ":memory:"), + "USER": os.getenv("DB_USER"), + "PASSWORD": os.getenv("DB_PASSWORD"), + "HOST": os.getenv("DB_HOST", ""), + "PORT": os.getenv("DB_PORT", ""), + "TEST": { + "USER": "default_test", + "MIRROR": "default", + }, + }, } DEFAULT_AUTO_FIELD = "django.db.models.AutoField" From c27ea4f853000895014e6f93a610978bf1d6e983 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Wed, 4 May 2022 17:26:56 +0300 Subject: [PATCH 23/60] Add test for SQL DB alias recording Ensure that queries made to different databases get recorded with the correct alias. 
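Background for the test below: Django only lets a test case touch the database aliases listed in its ``databases`` attribute, and ``QuerySet.using()`` routes a query to a specific alias. A minimal sketch, assuming the "replica" alias defined in the test settings:

    from django.contrib.auth.models import User
    from django.test import TransactionTestCase

    class ReplicaRoutingTest(TransactionTestCase):
        # Aliases not listed here are blocked during the test run.
        databases = {"default", "replica"}

        def test_routing(self):
            list(User.objects.all())                   # hits "default"
            list(User.objects.using("replica").all())  # hits "replica"
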
--- tests/panels/test_sql.py | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/tests/panels/test_sql.py b/tests/panels/test_sql.py index 9824a1bec..154b414bd 100644 --- a/tests/panels/test_sql.py +++ b/tests/panels/test_sql.py @@ -16,7 +16,7 @@ import debug_toolbar.panels.sql.tracking as sql_tracking from debug_toolbar import settings as dt_settings -from ..base import BaseTestCase +from ..base import BaseMultiDBTestCase, BaseTestCase from ..models import PostgresJSON @@ -506,3 +506,24 @@ def test_nested_template_information(self): self.assertEqual(template_name, "included.html") self.assertEqual(template_info["context"][0]["content"].strip(), "{{ users }}") self.assertEqual(template_info["context"][0]["highlight"], True) + + +class SQLPanelMultiDBTestCase(BaseMultiDBTestCase): + panel_id = "SQLPanel" + + def test_aliases(self): + self.assertFalse(self.panel._queries) + + list(User.objects.all()) + list(User.objects.using("replica").all()) + + response = self.panel.process_request(self.request) + self.panel.generate_stats(self.request, response) + + self.assertTrue(self.panel._queries) + + query = self.panel._queries[0] + self.assertEqual(query[0], "default") + + query = self.panel._queries[-1] + self.assertEqual(query[0], "replica") From c7c97ae32a0a7750753f67f11a605fdefde9ed64 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Thu, 28 Apr 2022 17:22:51 +0300 Subject: [PATCH 24/60] Remove unnecessary getattr() call BaseDatabaseWrapper always has an alias attribute. [1] [1] https://github.com/django/django/blob/aa28c392b9491f02330905cb73b7078b1cd18c60/django/db/backends/base/base.py#L70 --- debug_toolbar/panels/sql/tracking.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/debug_toolbar/panels/sql/tracking.py b/debug_toolbar/panels/sql/tracking.py index f3c33e49f..081a8f819 100644 --- a/debug_toolbar/panels/sql/tracking.py +++ b/debug_toolbar/panels/sql/tracking.py @@ -156,7 +156,7 @@ def _record(self, method, sql, params): pass # object not JSON serializable template_info = get_template_info() - alias = getattr(self.db, "alias", "default") + alias = self.db.alias conn = self.db.connection vendor = getattr(conn, "vendor", "unknown") From 6cd5f8ee90e3174caed357328d2558b0c5114cc4 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Thu, 28 Apr 2022 17:38:29 +0300 Subject: [PATCH 25/60] Lookup vendor on Django's connection wrapper The vendor attribute does not exist on the underlying DB connection. 
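A quick way to see the distinction, assuming a configured "default" database (the attributes are Django's; the printed values are illustrative):

    from django.db import connections

    wrapper = connections["default"]    # Django's BaseDatabaseWrapper
    wrapper.ensure_connection()
    print(wrapper.vendor)               # e.g. "postgresql", "mysql", "sqlite"

    raw = wrapper.connection            # the underlying DB-API connection
    print(hasattr(raw, "vendor"))       # False - only the wrapper has .vendor
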
--- debug_toolbar/panels/sql/panel.py | 7 ++++--- debug_toolbar/panels/sql/tracking.py | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/debug_toolbar/panels/sql/panel.py b/debug_toolbar/panels/sql/panel.py index 00737a42d..9e6d913bf 100644 --- a/debug_toolbar/panels/sql/panel.py +++ b/debug_toolbar/panels/sql/panel.py @@ -67,14 +67,15 @@ def __init__(self, *args, **kwargs): def get_transaction_id(self, alias): if alias not in connections: return - conn = connections[alias].connection + connection = connections[alias] + conn = connection.connection if not conn: return - if conn.vendor == "postgresql": + if connection.vendor == "postgresql": cur_status = conn.get_transaction_status() else: - raise ValueError(conn.vendor) + raise ValueError(connection.vendor) last_status = self._transaction_status.get(alias) self._transaction_status[alias] = cur_status diff --git a/debug_toolbar/panels/sql/tracking.py b/debug_toolbar/panels/sql/tracking.py index 081a8f819..e3b905b18 100644 --- a/debug_toolbar/panels/sql/tracking.py +++ b/debug_toolbar/panels/sql/tracking.py @@ -158,7 +158,7 @@ def _record(self, method, sql, params): alias = self.db.alias conn = self.db.connection - vendor = getattr(conn, "vendor", "unknown") + vendor = self.db.vendor # Sql might be an object (such as psycopg Composed). # For logging purposes, make sure it's str. From dacef6bd4ed2de4ac59dc7383629b314a7dd00d3 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Wed, 4 May 2022 16:59:34 +0300 Subject: [PATCH 26/60] Move postgresql connection logic to NormalCursorWrapper._record() Previously, the logic for determining when to switch transaction IDs was contained in SQLPanel.get_transaction_id(). However, this was not sufficient, as it only looked at the transaction status after the query was finished. If two queries were issued consecutively but in separate transactions, such as in the following: with transaction.atomic(): list(User.objects.all()) with transaction.atomic(): list(Group.objects.all()) both queries would be given the same transaction ID since they would both have the same transaction status after being executed (TRANSACTION_STATUS_INTRANS). Instead, move the logic to NormalCursorWrapper._record() and compare the transaction status before the query to the transaction status after the query to determine if a new transaction was started. Also, use the conn.status attribute for determining transaction status instead of conn.get_transaction_status(), since the former is a local connection variable, while the latter requires a call to the server. 
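A condensed sketch of that before/after comparison, using psycopg2's connection-local status attribute (the function and variable names here are illustrative, not the panel's API):

    import uuid
    from psycopg2.extensions import STATUS_IN_TRANSACTION

    def tag_query(conn, cursor, sql, current_trans_id):
        """Run sql and return (trans_id for this query, updated current trans_id)."""
        was_in_transaction = conn.status == STATUS_IN_TRANSACTION
        cursor.execute(sql)
        now_in_transaction = conn.status == STATUS_IN_TRANSACTION

        if not now_in_transaction:
            return None, current_trans_id               # not part of a transaction
        if was_in_transaction and current_trans_id:
            return current_trans_id, current_trans_id   # same transaction continues
        new_id = uuid.uuid4().hex                       # a new transaction started here
        return new_id, new_id
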
--- debug_toolbar/panels/sql/panel.py | 49 ++++++++++++---------------- debug_toolbar/panels/sql/tracking.py | 36 +++++++++++++++++--- 2 files changed, 52 insertions(+), 33 deletions(-) diff --git a/debug_toolbar/panels/sql/panel.py b/debug_toolbar/panels/sql/panel.py index 9e6d913bf..07da639d3 100644 --- a/debug_toolbar/panels/sql/panel.py +++ b/debug_toolbar/panels/sql/panel.py @@ -61,36 +61,29 @@ def __init__(self, *args, **kwargs): self._num_queries = 0 self._queries = [] self._databases = {} - self._transaction_status = {} + # synthetic transaction IDs, keyed by DB alias self._transaction_ids = {} - def get_transaction_id(self, alias): - if alias not in connections: - return - connection = connections[alias] - conn = connection.connection - if not conn: - return - - if connection.vendor == "postgresql": - cur_status = conn.get_transaction_status() - else: - raise ValueError(connection.vendor) - - last_status = self._transaction_status.get(alias) - self._transaction_status[alias] = cur_status - - if not cur_status: - # No available state - return None - - if cur_status != last_status: - if cur_status: - self._transaction_ids[alias] = uuid.uuid4().hex - else: - self._transaction_ids[alias] = None - - return self._transaction_ids[alias] + def new_transaction_id(self, alias): + """ + Generate and return a new synthetic transaction ID for the specified DB alias. + """ + trans_id = uuid.uuid4().hex + self._transaction_ids[alias] = trans_id + return trans_id + + def current_transaction_id(self, alias): + """ + Return the current synthetic transaction ID for the specified DB alias. + """ + trans_id = self._transaction_ids.get(alias) + # Sometimes it is not possible to detect the beginning of the first transaction, + # so current_transaction_id() will be called before new_transaction_id(). In + # that case there won't yet be a transaction ID. so it is necessary to generate + # one using new_transaction_id(). + if trans_id is None: + trans_id = self.new_transaction_id(alias) + return trans_id def record(self, alias, **kwargs): self._queries.append((alias, kwargs)) diff --git a/debug_toolbar/panels/sql/tracking.py b/debug_toolbar/panels/sql/tracking.py index e3b905b18..3fa07ec41 100644 --- a/debug_toolbar/panels/sql/tracking.py +++ b/debug_toolbar/panels/sql/tracking.py @@ -10,8 +10,10 @@ try: from psycopg2._json import Json as PostgresJson + from psycopg2.extensions import STATUS_IN_TRANSACTION except ImportError: PostgresJson = None + STATUS_IN_TRANSACTION = None # Prevents SQL queries from being sent to the DB. It's used # by the TemplatePanel to prevent the toolbar from issuing @@ -139,6 +141,14 @@ def _decode(self, param): return "(encoded string)" def _record(self, method, sql, params): + alias = self.db.alias + vendor = self.db.vendor + + if vendor == "postgresql": + # The underlying DB connection (as opposed to Django's wrapper) + conn = self.db.connection + initial_conn_status = conn.status + start_time = time() try: return method(sql, params) @@ -156,10 +166,6 @@ def _record(self, method, sql, params): pass # object not JSON serializable template_info = get_template_info() - alias = self.db.alias - conn = self.db.connection - vendor = self.db.vendor - # Sql might be an object (such as psycopg Composed). # For logging purposes, make sure it's str. 
sql = str(sql) @@ -190,9 +196,29 @@ def _record(self, method, sql, params): iso_level = conn.isolation_level except conn.InternalError: iso_level = "unknown" + # PostgreSQL does not expose any sort of transaction ID, so it is + # necessary to generate synthetic transaction IDs here. If the + # connection was not in a transaction when the query started, and was + # after the query finished, a new transaction definitely started, so get + # a new transaction ID from logger.new_transaction_id(). If the query + # was in a transaction both before and after executing, make the + # assumption that it is the same transaction and get the current + # transaction ID from logger.current_transaction_id(). There is an edge + # case where Django can start a transaction before the first query + # executes, so in that case logger.current_transaction_id() will + # generate a new transaction ID since one does not already exist. + final_conn_status = conn.status + if final_conn_status == STATUS_IN_TRANSACTION: + if initial_conn_status == STATUS_IN_TRANSACTION: + trans_id = self.logger.current_transaction_id(alias) + else: + trans_id = self.logger.new_transaction_id(alias) + else: + trans_id = None + params.update( { - "trans_id": self.logger.get_transaction_id(alias), + "trans_id": trans_id, "trans_status": conn.get_transaction_status(), "iso_level": iso_level, "encoding": conn.encoding, From c1474806f796bc6ed9d8b7963995672b57fa1327 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Wed, 4 May 2022 17:06:36 +0300 Subject: [PATCH 27/60] Account for DB aliases when recording transaction state The logic in SQLPanel.generate_stats() did not properly account for the DB alias when marking transactions as ended. It would always mark the previous query as ending the transaction even if the previous query was from a different DB alias. Update the code to track the last query by alias so that the correct query can be marked as ending the transaction. --- debug_toolbar/panels/sql/panel.py | 34 ++++++++++++++++++------------- 1 file changed, 20 insertions(+), 14 deletions(-) diff --git a/debug_toolbar/panels/sql/panel.py b/debug_toolbar/panels/sql/panel.py index 07da639d3..23f453567 100644 --- a/debug_toolbar/panels/sql/panel.py +++ b/debug_toolbar/panels/sql/panel.py @@ -178,23 +178,25 @@ def duplicate_key(query): rgb[nn] = nc db["rgb_color"] = rgb - trans_ids = {} - trans_id = None - i = 0 + # the last query recorded for each DB alias + last_by_alias = {} for alias, query in self._queries: query_similar[alias][similar_key(query)] += 1 query_duplicates[alias][duplicate_key(query)] += 1 trans_id = query.get("trans_id") - last_trans_id = trans_ids.get(alias) - - if trans_id != last_trans_id: - if last_trans_id: - self._queries[(i - 1)][1]["ends_trans"] = True - trans_ids[alias] = trans_id - if trans_id: + prev_query = last_by_alias.get(alias, {}) + prev_trans_id = prev_query.get("trans_id") + + # If two consecutive queries for a given DB alias have different + # transaction ID values, a transaction started, finished, or both, so + # annotate the queries as appropriate. 
+ if trans_id != prev_trans_id: + if prev_trans_id is not None: + prev_query["ends_trans"] = True + if trans_id is not None: query["starts_trans"] = True - if trans_id: + if trans_id is not None: query["in_trans"] = True query["alias"] = alias @@ -222,12 +224,16 @@ def duplicate_key(query): query["end_offset"] = query["width_ratio"] + query["start_offset"] width_ratio_tally += query["width_ratio"] query["stacktrace"] = render_stacktrace(query["stacktrace"]) - i += 1 query["trace_color"] = trace_colors[query["stacktrace"]] - if trans_id: - self._queries[(i - 1)][1]["ends_trans"] = True + last_by_alias[alias] = query + + # Close out any transactions that were in progress, since there is no + # explicit way to know when a transaction finishes. + for final_query in last_by_alias.values(): + if final_query.get("trans_id") is not None: + final_query["ends_trans"] = True # Queries are similar / duplicates only if there's as least 2 of them. # Also, to hide queries, we need to give all the duplicate groups an id From 89f815c4da2b02fd15161efcb83d87d91016e738 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Tue, 10 May 2022 15:41:20 +0300 Subject: [PATCH 28/60] Add test for PostgreSQL transaction tracking --- tests/panels/test_sql.py | 92 +++++++++++++++++++++++++++++++++++++++- 1 file changed, 91 insertions(+), 1 deletion(-) diff --git a/tests/panels/test_sql.py b/tests/panels/test_sql.py index 154b414bd..40ec83dbb 100644 --- a/tests/panels/test_sql.py +++ b/tests/panels/test_sql.py @@ -7,7 +7,7 @@ import django from asgiref.sync import sync_to_async from django.contrib.auth.models import User -from django.db import connection +from django.db import connection, transaction from django.db.models import Count from django.db.utils import DatabaseError from django.shortcuts import render @@ -527,3 +527,93 @@ def test_aliases(self): query = self.panel._queries[-1] self.assertEqual(query[0], "replica") + + def test_transaction_status(self): + """ + Test case for tracking the transaction status is properly associated with + queries on PostgreSQL, and that transactions aren't broken on other database + engines. + """ + self.assertEqual(len(self.panel._queries), 0) + + with transaction.atomic(): + list(User.objects.all()) + list(User.objects.using("replica").all()) + + with transaction.atomic(using="replica"): + list(User.objects.all()) + list(User.objects.using("replica").all()) + + with transaction.atomic(): + list(User.objects.all()) + + list(User.objects.using("replica").all()) + + response = self.panel.process_request(self.request) + self.panel.generate_stats(self.request, response) + + if connection.vendor == "postgresql": + # Connection tracking is currently only implemented for PostgreSQL. 
+ self.assertEqual(len(self.panel._queries), 6) + + query = self.panel._queries[0] + self.assertEqual(query[0], "default") + self.assertIsNotNone(query[1]["trans_id"]) + self.assertTrue(query[1]["starts_trans"]) + self.assertTrue(query[1]["in_trans"]) + self.assertFalse("end_trans" in query[1]) + + query = self.panel._queries[-1] + self.assertEqual(query[0], "replica") + self.assertIsNone(query[1]["trans_id"]) + self.assertFalse("starts_trans" in query[1]) + self.assertFalse("in_trans" in query[1]) + self.assertFalse("end_trans" in query[1]) + + query = self.panel._queries[2] + self.assertEqual(query[0], "default") + self.assertIsNotNone(query[1]["trans_id"]) + self.assertEqual( + query[1]["trans_id"], self.panel._queries[0][1]["trans_id"] + ) + self.assertFalse("starts_trans" in query[1]) + self.assertTrue(query[1]["in_trans"]) + self.assertTrue(query[1]["ends_trans"]) + + query = self.panel._queries[3] + self.assertEqual(query[0], "replica") + self.assertIsNotNone(query[1]["trans_id"]) + self.assertNotEqual( + query[1]["trans_id"], self.panel._queries[0][1]["trans_id"] + ) + self.assertTrue(query[1]["starts_trans"]) + self.assertTrue(query[1]["in_trans"]) + self.assertTrue(query[1]["ends_trans"]) + + query = self.panel._queries[4] + self.assertEqual(query[0], "default") + self.assertIsNotNone(query[1]["trans_id"]) + self.assertNotEqual( + query[1]["trans_id"], self.panel._queries[0][1]["trans_id"] + ) + self.assertNotEqual( + query[1]["trans_id"], self.panel._queries[3][1]["trans_id"] + ) + self.assertTrue(query[1]["starts_trans"]) + self.assertTrue(query[1]["in_trans"]) + self.assertTrue(query[1]["ends_trans"]) + + query = self.panel._queries[5] + self.assertEqual(query[0], "replica") + self.assertIsNone(query[1]["trans_id"]) + self.assertFalse("starts_trans" in query[1]) + self.assertFalse("in_trans" in query[1]) + self.assertFalse("end_trans" in query[1]) + else: + # Ensure that nothing was recorded for other database engines. + self.assertTrue(self.panel._queries) + for query in self.panel._queries: + self.assertFalse("trans_id" in query[1]) + self.assertFalse("starts_trans" in query[1]) + self.assertFalse("in_trans" in query[1]) + self.assertFalse("end_trans" in query[1]) From 826915348cdce844022820b1eb9696a656dc428e Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Wed, 4 May 2022 17:46:50 +0300 Subject: [PATCH 29/60] Drop unused query attribute --- debug_toolbar/panels/sql/tracking.py | 1 - 1 file changed, 1 deletion(-) diff --git a/debug_toolbar/panels/sql/tracking.py b/debug_toolbar/panels/sql/tracking.py index 3fa07ec41..c479a8b5d 100644 --- a/debug_toolbar/panels/sql/tracking.py +++ b/debug_toolbar/panels/sql/tracking.py @@ -221,7 +221,6 @@ def _record(self, method, sql, params): "trans_id": trans_id, "trans_status": conn.get_transaction_status(), "iso_level": iso_level, - "encoding": conn.encoding, } ) From 69b1a662982f30cd175581c4064ba4aa8b95c894 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Tue, 24 May 2022 15:36:12 +0300 Subject: [PATCH 30/60] Add pyflame to the list of third-party panels --- docs/changes.rst | 1 + docs/panels.rst | 12 ++++++++++++ docs/spelling_wordlist.txt | 1 + 3 files changed, 14 insertions(+) diff --git a/docs/changes.rst b/docs/changes.rst index 83e8fe9aa..9b50631ba 100644 --- a/docs/changes.rst +++ b/docs/changes.rst @@ -7,6 +7,7 @@ Change log dictionaries nor ``QueryDict`` instances. Using anything but ``QueryDict`` instances isn't a valid use of Django but, again, django-debug-toolbar shouldn't crash. 
+* Added pyflame (for flame graphs) to the list of third-party panels. 3.4.0 (2022-05-03) ------------------ diff --git a/docs/panels.rst b/docs/panels.rst index 4eba8eba7..8e5558aab 100644 --- a/docs/panels.rst +++ b/docs/panels.rst @@ -141,6 +141,18 @@ Third-party panels If you'd like to add a panel to this list, please submit a pull request! +Flame Graphs +~~~~~~~~~~~~ + +URL: https://gitlab.com/living180/pyflame + +Path: ``pyflame.djdt.panel.FlamegraphPanel`` + +Displays a flame graph for visualizing the performance profile of the request, +using Brendan Gregg's `flamegraph.pl script +`_ to perform the +heavy lifting. + HTML Tidy/Validator ~~~~~~~~~~~~~~~~~~~ diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 8eddeba4a..e8933b1dd 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -25,6 +25,7 @@ pre profiler psycopg py +pyflame pylibmc Pympler querysets From 813cfbf33616054d25500c31e69c2ab4dba6891a Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Tue, 24 May 2022 15:47:31 +0300 Subject: [PATCH 31/60] Fix cache panel miss counting The cache panel was not counting misses for the get_many() cache method, because it assumed that all keys would be present in the returned dict (with a value of None if not present in the cache) while in reality only keys present in the cache are present in the returned dict. Correct the miss counting logic, and add a test for tracking hits and misses. --- debug_toolbar/panels/cache.py | 11 ++++++----- docs/changes.rst | 2 ++ tests/panels/test_cache.py | 17 +++++++++++++++++ 3 files changed, 25 insertions(+), 5 deletions(-) diff --git a/debug_toolbar/panels/cache.py b/debug_toolbar/panels/cache.py index 7877ef0ab..8294d7734 100644 --- a/debug_toolbar/panels/cache.py +++ b/debug_toolbar/panels/cache.py @@ -101,11 +101,12 @@ def _store_call_info( else: self.hits += 1 elif name == "get_many": - for key, value in return_value.items(): - if value is None: - self.misses += 1 - else: - self.hits += 1 + if "keys" in kwargs: + keys = kwargs["keys"] + else: + keys = args[0] + self.hits += len(return_value) + self.misses += len(keys) - len(return_value) time_taken *= 1000 self.total_time += time_taken diff --git a/docs/changes.rst b/docs/changes.rst index 9b50631ba..019a6eac3 100644 --- a/docs/changes.rst +++ b/docs/changes.rst @@ -8,6 +8,8 @@ Change log instances isn't a valid use of Django but, again, django-debug-toolbar shouldn't crash. * Added pyflame (for flame graphs) to the list of third-party panels. +* Fixed the cache panel to correctly count cache misses from the get_many() + cache method. 
3.4.0 (2022-05-03) ------------------ diff --git a/tests/panels/test_cache.py b/tests/panels/test_cache.py index d45eabb26..aacf521cb 100644 --- a/tests/panels/test_cache.py +++ b/tests/panels/test_cache.py @@ -26,6 +26,23 @@ def test_recording_caches(self): second_cache.get("foo") self.assertEqual(len(self.panel.calls), 2) + def test_hits_and_misses(self): + cache.cache.clear() + cache.cache.get("foo") + self.assertEqual(self.panel.hits, 0) + self.assertEqual(self.panel.misses, 1) + cache.cache.set("foo", 1) + cache.cache.get("foo") + self.assertEqual(self.panel.hits, 1) + self.assertEqual(self.panel.misses, 1) + cache.cache.get_many(["foo", "bar"]) + self.assertEqual(self.panel.hits, 2) + self.assertEqual(self.panel.misses, 2) + cache.cache.set("bar", 2) + cache.cache.get_many(keys=["foo", "bar"]) + self.assertEqual(self.panel.hits, 4) + self.assertEqual(self.panel.misses, 2) + def test_get_or_set_value(self): cache.cache.get_or_set("baz", "val") self.assertEqual(cache.cache.get("baz"), "val") From eba0e06dae03897c98a713c622dfc2885b1e8540 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Thu, 21 Apr 2022 14:36:34 +0300 Subject: [PATCH 32/60] Update tidy_stacktrace() docstring. --- debug_toolbar/utils.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/debug_toolbar/utils.py b/debug_toolbar/utils.py index cc90cf4db..ea2a1f65a 100644 --- a/debug_toolbar/utils.py +++ b/debug_toolbar/utils.py @@ -47,12 +47,11 @@ def omit_path(path): def tidy_stacktrace(stack): """ - Clean up stacktrace and remove all entries that: - 1. Are part of Django (except contrib apps) - 2. Are part of socketserver (used by Django's dev server) - 3. Are the last entry (which is part of our stacktracing code) + Clean up stacktrace and remove all entries that are excluded by the + HIDE_IN_STACKTRACES setting. - ``stack`` should be a list of frame tuples from ``inspect.stack()`` + ``stack`` should be a list of frame tuples from ``inspect.stack()`` or + ``debug_toolbar.utils.get_stack()``. """ trace = [] for frame, path, line_no, func_name, text in (f[:5] for f in stack): From 927819d899e2452b5d09451da69472441fa3f4b5 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Tue, 19 Apr 2022 17:20:08 +0300 Subject: [PATCH 33/60] Remove obsolete compatibility code inspect.Traceback has existed since Python 2.6. --- debug_toolbar/utils.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/debug_toolbar/utils.py b/debug_toolbar/utils.py index ea2a1f65a..2e5b5f263 100644 --- a/debug_toolbar/utils.py +++ b/debug_toolbar/utils.py @@ -222,10 +222,7 @@ def getframeinfo(frame, context=1): break lines = [line.decode(encoding, "replace") for line in lines] - if hasattr(inspect, "Traceback"): - return inspect.Traceback(filename, lineno, frame.f_code.co_name, lines, index) - else: - return (filename, lineno, frame.f_code.co_name, lines, index) + return inspect.Traceback(filename, lineno, frame.f_code.co_name, lines, index) def get_sorted_request_variable(variable): From 6b9fbea6dfc0dd44183774a5a6ba62ea7e289822 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Tue, 17 May 2022 16:29:27 +0300 Subject: [PATCH 34/60] Remove unnecessary decoding from getframeinfo() linecache uses tokenize.open() to detect and use the encoding of the file since Python 3.4. 
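
A small standalone illustration (not part of the patch) of why the manual PEP 263 decoding is redundant: linecache reads source files through tokenize.open(), which honours coding cookies and BOMs, so the lines it returns are already str.

    import linecache
    import tokenize

    # linecache.getline() returns decoded str lines; tokenize.open() performs
    # the encoding detection internally.
    line = linecache.getline(tokenize.__file__, 1)
    assert isinstance(line, str)
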
--- debug_toolbar/utils.py | 18 ++---------------- 1 file changed, 2 insertions(+), 16 deletions(-) diff --git a/debug_toolbar/utils.py b/debug_toolbar/utils.py index 2e5b5f263..13f7472ad 100644 --- a/debug_toolbar/utils.py +++ b/debug_toolbar/utils.py @@ -1,6 +1,5 @@ import inspect import os.path -import re import sys from importlib import import_module from pprint import pformat @@ -200,27 +199,14 @@ def getframeinfo(frame, context=1): try: lines, lnum = inspect.findsource(frame) except Exception: # findsource raises platform-dependant exceptions - first_lines = lines = index = None + lines = index = None else: start = max(start, 1) start = max(0, min(start, len(lines) - context)) - first_lines = lines[:2] lines = lines[start : (start + context)] index = lineno - 1 - start else: - first_lines = lines = index = None - - # Code taken from Django's ExceptionReporter._get_lines_from_file - if first_lines and isinstance(first_lines[0], bytes): - encoding = "ascii" - for line in first_lines[:2]: - # File coding may be specified. Match pattern from PEP-263 - # (https://www.python.org/dev/peps/pep-0263/) - match = re.search(rb"coding[:=]\s*([-\w.]+)", line) - if match: - encoding = match.group(1).decode("ascii") - break - lines = [line.decode(encoding, "replace") for line in lines] + lines = index = None return inspect.Traceback(filename, lineno, frame.f_code.co_name, lines, index) From 66a767d614b8f2a52c8b08e4b6e99081b1c1c326 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Sun, 29 May 2022 15:40:23 +0300 Subject: [PATCH 35/60] Update change log --- docs/changes.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/changes.rst b/docs/changes.rst index 019a6eac3..7a7cc48fd 100644 --- a/docs/changes.rst +++ b/docs/changes.rst @@ -10,6 +10,7 @@ Change log * Added pyflame (for flame graphs) to the list of third-party panels. * Fixed the cache panel to correctly count cache misses from the get_many() cache method. +* Removed some obsolete compatibility code from the stack trace recording code. 3.4.0 (2022-05-03) ------------------ From 5a8c11dfb7b3f5d3c4a3125956c69bb4e159ca53 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Tue, 17 May 2022 17:54:25 +0300 Subject: [PATCH 36/60] Add new stack trace functionality * Add a private _StackTraceRecorder class to debug_toolbar.utils which implements caching to reduce the overhead of expensive file system operations. This class has a get_stack_trace() method which combines the functionality of the get_stack() and tidy_stacktrace() functions. * Add a new debug_toolbar.utils function, get_stack_trace() which gets or instantiates a thread/async task context-local _StackTraceRecorder instance and returns a stack trace using its get_stack_trace() method. * Add a new debug_toolbar.utils function, clear_stack_trace_caches(), which removes any thread/async task context-local _StackTraceRecorder instance. * Update the DebugToolbarMiddleware to call the clear_stack_trace_caches() function after processing a request to ensure that each subsequent request gets a clean cache. 
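
A hedged usage sketch of the API the bullets above describe (only the two imported names come from the patch; the surrounding helpers are illustrative): panels call get_stack_trace() wherever they previously combined get_stack() and tidy_stacktrace(), and the middleware clears the per-request cache once the response is built.

    from debug_toolbar.utils import clear_stack_trace_caches, get_stack_trace

    def record_call(calls, **info):
        # depth=1 skips this helper's own frame; file/line lookups are cached
        # in the context-local _StackTraceRecorder for the rest of the request.
        info["stacktrace"] = get_stack_trace(depth=1)
        calls.append(info)

    def finish_request():
        # The middleware does the equivalent in a finally block so the next
        # request starts with a fresh recorder.
        clear_stack_trace_caches()
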
--- debug_toolbar/middleware.py | 2 + debug_toolbar/utils.py | 98 +++++++++++++++++++++++++++++++++++++ 2 files changed, 100 insertions(+) diff --git a/debug_toolbar/middleware.py b/debug_toolbar/middleware.py index f131861fc..f62904cf9 100644 --- a/debug_toolbar/middleware.py +++ b/debug_toolbar/middleware.py @@ -10,6 +10,7 @@ from debug_toolbar import settings as dt_settings from debug_toolbar.toolbar import DebugToolbar +from debug_toolbar.utils import clear_stack_trace_caches _HTML_TYPES = ("text/html", "application/xhtml+xml") @@ -56,6 +57,7 @@ def __call__(self, request): # Run panels like Django middleware. response = toolbar.process_request(request) finally: + clear_stack_trace_caches() # Deactivate instrumentation ie. monkey-unpatch. This must run # regardless of the response. Keep 'return' clauses below. for panel in reversed(toolbar.enabled_panels): diff --git a/debug_toolbar/utils.py b/debug_toolbar/utils.py index 13f7472ad..e916b44f6 100644 --- a/debug_toolbar/utils.py +++ b/debug_toolbar/utils.py @@ -1,10 +1,12 @@ import inspect +import linecache import os.path import sys from importlib import import_module from pprint import pformat import django +from asgiref.local import Local from django.core.exceptions import ImproperlyConfigured from django.template import Node from django.utils.html import format_html @@ -18,6 +20,9 @@ threading = None +_local_data = Local() + + # Figure out some paths django_path = os.path.realpath(os.path.dirname(django.__file__)) @@ -242,6 +247,99 @@ def get_stack(context=1): return framelist +def _stack_frames(depth=1): + frame = inspect.currentframe() + while frame is not None: + if depth > 0: + depth -= 1 + else: + yield frame + frame = frame.f_back + + +class _StackTraceRecorder: + def __init__(self, excluded_paths): + self.excluded_paths = excluded_paths + self.filename_cache = {} + self.is_excluded_cache = {} + + def get_source_file(self, frame): + frame_filename = frame.f_code.co_filename + + value = self.filename_cache.get(frame_filename) + if value is None: + filename = inspect.getsourcefile(frame) + if filename is None: + is_source = False + filename = frame_filename + else: + is_source = True + # Ensure linecache validity the first time this recorder + # encounters the filename in this frame. 
+ linecache.checkcache(filename) + value = (filename, is_source) + self.filename_cache[frame_filename] = value + + return value + + def is_excluded_path(self, path): + excluded = self.is_excluded_cache.get(path) + if excluded is None: + resolved_path = os.path.realpath(path) + excluded = any( + resolved_path.startswith(excluded_path) + for excluded_path in self.excluded_paths + ) + self.is_excluded_cache[path] = excluded + return excluded + + def get_stack_trace(self, include_locals=False, depth=1): + trace = [] + for frame in _stack_frames(depth=depth + 1): + filename, is_source = self.get_source_file(frame) + + if self.is_excluded_path(filename): + continue + + line_no = frame.f_lineno + func_name = frame.f_code.co_name + + if is_source: + module = inspect.getmodule(frame, filename) + module_globals = module.__dict__ if module is not None else None + source_line = linecache.getline( + filename, line_no, module_globals + ).strip() + else: + source_line = "" + + frame_locals = frame.f_locals if include_locals else None + + trace.append((filename, line_no, func_name, source_line, frame_locals)) + trace.reverse() + return trace + + +def get_stack_trace(depth=1): + config = dt_settings.get_config() + if config["ENABLE_STACKTRACES"]: + stack_trace_recorder = getattr(_local_data, "stack_trace_recorder", None) + if stack_trace_recorder is None: + stack_trace_recorder = _StackTraceRecorder(hidden_paths) + _local_data.stack_trace_recorder = stack_trace_recorder + return stack_trace_recorder.get_stack_trace( + include_locals=config["ENABLE_STACKTRACES_LOCALS"], + depth=depth, + ) + else: + return [] + + +def clear_stack_trace_caches(): + if hasattr(_local_data, "stack_trace_recorder"): + del _local_data.stack_trace_recorder + + class ThreadCollector: def __init__(self): if threading is None: From 0b4a623c8ba6e059f398c2ccc9f5c993c1bee476 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Wed, 25 May 2022 19:02:23 +0300 Subject: [PATCH 37/60] Use new stack trace functionality for SQLPanel --- debug_toolbar/panels/sql/tracking.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/debug_toolbar/panels/sql/tracking.py b/debug_toolbar/panels/sql/tracking.py index c479a8b5d..8a15977de 100644 --- a/debug_toolbar/panels/sql/tracking.py +++ b/debug_toolbar/panels/sql/tracking.py @@ -6,7 +6,7 @@ from django.utils.encoding import force_str from debug_toolbar import settings as dt_settings -from debug_toolbar.utils import get_stack, get_template_info, tidy_stacktrace +from debug_toolbar.utils import get_stack_trace, get_template_info try: from psycopg2._json import Json as PostgresJson @@ -155,10 +155,6 @@ def _record(self, method, sql, params): finally: stop_time = time() duration = (stop_time - start_time) * 1000 - if dt_settings.get_config()["ENABLE_STACKTRACES"]: - stacktrace = tidy_stacktrace(reversed(get_stack())) - else: - stacktrace = [] _params = "" try: _params = json.dumps(self._decode(params)) @@ -180,7 +176,7 @@ def _record(self, method, sql, params): "raw_sql": sql, "params": _params, "raw_params": params, - "stacktrace": stacktrace, + "stacktrace": get_stack_trace(), "start_time": start_time, "stop_time": stop_time, "is_slow": duration > dt_settings.get_config()["SQL_WARNING_THRESHOLD"], From d70257c65fd53941b4873bba9e662660a9de92a4 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Wed, 25 May 2022 19:14:06 +0300 Subject: [PATCH 38/60] Use new stack trace functionality for CachePanel --- debug_toolbar/panels/cache.py | 15 ++------------- 1 file changed, 2 insertions(+), 
13 deletions(-) diff --git a/debug_toolbar/panels/cache.py b/debug_toolbar/panels/cache.py index 8294d7734..4ca36a387 100644 --- a/debug_toolbar/panels/cache.py +++ b/debug_toolbar/panels/cache.py @@ -6,14 +6,8 @@ from django.core.cache import CacheHandler, caches from django.utils.translation import gettext_lazy as _, ngettext -from debug_toolbar import settings as dt_settings from debug_toolbar.panels import Panel -from debug_toolbar.utils import ( - get_stack, - get_template_info, - render_stacktrace, - tidy_stacktrace, -) +from debug_toolbar.utils import get_stack_trace, get_template_info, render_stacktrace # The order of the methods in this list determines the order in which they are listed in # the Commands table in the panel content. @@ -135,18 +129,13 @@ def _record_call(self, cache, name, original_method, args, kwargs): t = time.time() - t cache._djdt_recording = False - if dt_settings.get_config()["ENABLE_STACKTRACES"]: - stacktrace = tidy_stacktrace(reversed(get_stack())) - else: - stacktrace = [] - self._store_call_info( name=name, time_taken=t, return_value=value, args=args, kwargs=kwargs, - trace=stacktrace, + trace=get_stack_trace(), template_info=get_template_info(), backend=cache, ) From 1ea9dfb32b773c7d55ba076eb3438d29a7bad643 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Sun, 29 May 2022 19:32:14 +0300 Subject: [PATCH 39/60] Add deprecation warnings to old stack trace functions --- debug_toolbar/utils.py | 14 ++++++++++++++ tests/test_utils.py | 17 ++++++++++++++++- 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/debug_toolbar/utils.py b/debug_toolbar/utils.py index e916b44f6..6ae036456 100644 --- a/debug_toolbar/utils.py +++ b/debug_toolbar/utils.py @@ -2,6 +2,7 @@ import linecache import os.path import sys +import warnings from importlib import import_module from pprint import pformat @@ -49,6 +50,15 @@ def omit_path(path): return any(path.startswith(hidden_path) for hidden_path in hidden_paths) +def _stack_trace_deprecation_warning(): + warnings.warn( + "get_stack() and tidy_stacktrace() are deprecated in favor of" + " get_stack_trace()", + DeprecationWarning, + stacklevel=2, + ) + + def tidy_stacktrace(stack): """ Clean up stacktrace and remove all entries that are excluded by the @@ -57,6 +67,8 @@ def tidy_stacktrace(stack): ``stack`` should be a list of frame tuples from ``inspect.stack()`` or ``debug_toolbar.utils.get_stack()``. 
""" + _stack_trace_deprecation_warning() + trace = [] for frame, path, line_no, func_name, text in (f[:5] for f in stack): if omit_path(os.path.realpath(path)): @@ -239,6 +251,8 @@ def get_stack(context=1): Modified version of ``inspect.stack()`` which calls our own ``getframeinfo()`` """ + _stack_trace_deprecation_warning() + frame = sys._getframe(1) framelist = [] while frame: diff --git a/tests/test_utils.py b/tests/test_utils.py index 9cfc33bc7..d884b050a 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,6 +1,11 @@ import unittest -from debug_toolbar.utils import get_name_from_obj, render_stacktrace +from debug_toolbar.utils import ( + get_name_from_obj, + get_stack, + render_stacktrace, + tidy_stacktrace, +) class GetNameFromObjTestCase(unittest.TestCase): @@ -47,3 +52,13 @@ def test_importlib_path_issue_1612(self): '<frozen importlib._bootstrap> in', result, ) + + +class StackTraceTestCase(unittest.TestCase): + def test_deprecated_functions(self): + with self.assertWarns(DeprecationWarning): + stack = get_stack() + self.assertEqual(stack[0][1], __file__) + with self.assertWarns(DeprecationWarning): + stack_trace = tidy_stacktrace(reversed(stack)) + self.assertEqual(stack_trace[-1][0], __file__) From 1c586c964c7656c3423e2fdf49ddce2a7fcb51ad Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Sun, 29 May 2022 15:33:11 +0300 Subject: [PATCH 40/60] Update change log --- docs/changes.rst | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/docs/changes.rst b/docs/changes.rst index 7a7cc48fd..f981b3d61 100644 --- a/docs/changes.rst +++ b/docs/changes.rst @@ -11,6 +11,17 @@ Change log * Fixed the cache panel to correctly count cache misses from the get_many() cache method. * Removed some obsolete compatibility code from the stack trace recording code. +* Added a new mechanism for capturing stack traces which includes per-request + caching to reduce expensive file system operations. Updated the cache and + SQL panels to record stack traces using this new mechanism. + +Deprecated features +~~~~~~~~~~~~~~~~~~~ + +* The ``debug_toolbar.utils.get_stack()`` and + ``debug_toolbar.utils.tidy_stacktrace()`` functions are deprecated in favor + of the new ``debug_toolbar.utils.get_stack_trace()`` function. They will + removed in the next major version of the Debug Toolbar. 3.4.0 (2022-05-03) ------------------ From 3b1231229bf17433924ab4587ca1237131284076 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Sun, 29 May 2022 20:15:44 +0300 Subject: [PATCH 41/60] Increase minimum coverage percentage to 93% --- README.rst | 2 +- setup.cfg | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index d146726d5..2c055ed30 100644 --- a/README.rst +++ b/README.rst @@ -16,7 +16,7 @@ Django Debug Toolbar |latest-version| :target: https://github.com/jazzband/django-debug-toolbar/actions :alt: Build Status -.. |coverage| image:: https://img.shields.io/badge/Coverage-89%25-green +.. 
|coverage| image:: https://img.shields.io/badge/Coverage-93%25-green :target: https://github.com/jazzband/django-debug-toolbar/actions/workflows/test.yml?query=branch%3Amain :alt: Test coverage status diff --git a/setup.cfg b/setup.cfg index b984e23cc..7f720eded 100644 --- a/setup.cfg +++ b/setup.cfg @@ -60,7 +60,7 @@ source = [coverage:report] # Update coverage badge link in README.rst when fail_under changes -fail_under = 89 +fail_under = 93 show_missing = True [flake8] From 234e5253910d5ac455c70ed951f2a92c05c68c33 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Sun, 29 May 2022 21:13:54 +0300 Subject: [PATCH 42/60] Add missing change log entries --- docs/changes.rst | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/docs/changes.rst b/docs/changes.rst index f981b3d61..3b6c68065 100644 --- a/docs/changes.rst +++ b/docs/changes.rst @@ -1,12 +1,21 @@ Change log ========== +* Properly implemented tracking and display of PostgreSQL transactions. * Removed third party panels which have been archived on GitHub. * Added Django 4.1a1 to the CI matrix. * Stopped crashing when ``request.GET`` and ``request.POST`` are neither dictionaries nor ``QueryDict`` instances. Using anything but ``QueryDict`` instances isn't a valid use of Django but, again, django-debug-toolbar shouldn't crash. +* Fixed the cache panel to work correctly in the presence of concurrency by + avoiding the use of signals. +* Reworked the cache panel instrumentation mechanism to monkey patch methods on + the cache instances directly instead of replacing cache instances with + wrapper classes. +* Added a :meth:`debug_toolbar.panels.Panel.ready` class method that panels can + override to perform any initialization or instrumentation that needs to be + done unconditionally at startup time. * Added pyflame (for flame graphs) to the list of third-party panels. * Fixed the cache panel to correctly count cache misses from the get_many() cache method. From c5d95ff1e29d97d243a8238eb5713a529abf549c Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Sun, 29 May 2022 21:14:11 +0300 Subject: [PATCH 43/60] Always use the canonical PyPI URL in the README --- README.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.rst b/README.rst index 2c055ed30..ae062cb0d 100644 --- a/README.rst +++ b/README.rst @@ -5,7 +5,7 @@ Django Debug Toolbar |latest-version| |jazzband| |build-status| |coverage| |docs| |python-support| |django-support| .. |latest-version| image:: https://img.shields.io/pypi/v/django-debug-toolbar.svg - :target: https://pypi.python.org/pypi/django-debug-toolbar + :target: https://pypi.org/project/django-debug-toolbar/ :alt: Latest version on PyPI .. |jazzband| image:: https://jazzband.co/static/img/badge.svg @@ -25,11 +25,11 @@ Django Debug Toolbar |latest-version| :alt: Documentation status .. |python-support| image:: https://img.shields.io/pypi/pyversions/django-debug-toolbar - :target: https://pypi.python.org/pypi/django-debug-toolbar + :target: https://pypi.org/project/django-debug-toolbar/ :alt: Supported Python versions .. 
|django-support| image:: https://img.shields.io/pypi/djversions/django-debug-toolbar - :target: https://pypi.org/project/django-debug-toolbar + :target: https://pypi.org/project/django-debug-toolbar/ :alt: Supported Django versions The Django Debug Toolbar is a configurable set of panels that display various From bcc3c46d26df451ff281d6b9c84a34c5f662ce67 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Thu, 21 Apr 2022 14:45:23 +0300 Subject: [PATCH 44/60] Remove debug_toolbar.utils.django_path variable Not used since commit 03fd1cc81c02a5462aeb4dbce0bfe8a2afdef43d. --- debug_toolbar/utils.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/debug_toolbar/utils.py b/debug_toolbar/utils.py index 6ae036456..7c8381aa9 100644 --- a/debug_toolbar/utils.py +++ b/debug_toolbar/utils.py @@ -6,7 +6,6 @@ from importlib import import_module from pprint import pformat -import django from asgiref.local import Local from django.core.exceptions import ImproperlyConfigured from django.template import Node @@ -24,10 +23,6 @@ _local_data = Local() -# Figure out some paths -django_path = os.path.realpath(os.path.dirname(django.__file__)) - - def get_module_path(module_name): try: module = import_module(module_name) From cd4c7480c52cc90d9fda4d7a66e89a6e75266d33 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Tue, 31 May 2022 00:03:50 +0300 Subject: [PATCH 45/60] Replace path-based stack frame exclusion Prior to this commit, the HIDE_IN_STACKTRACES setting was implemented by importing the modules listed in the setting and recording their file system paths. Then tidy_stacktrace() and _StackRecorder.get_stack_trace() would look up the file system path for each frame's code object and compare that path against the paths for the excluded modules to see if it matched. If so, the frame would be excluded. This was inefficient since it used a file system access, os.path.realpath(), for each frame (although the _StackRecorder implementation included some caching to reduce the cost). It also would not work correctly for namespace packages since they can have multiple file system hierarchies. Replace with a new implementation that instead retrieves the __name__ variable from the frame's f_globals attribute and matches that module name against the list of excluded modules directly. 
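
The matching rule this commit describes is a plain prefix test on dotted module names; a small standalone illustration of its semantics (not the patch code) shows why the trailing dot matters:

    def is_excluded(module_name, excluded_modules):
        # Equality catches the package itself; the "name + '.'" prefix catches
        # its submodules without also matching unrelated modules that merely
        # share a prefix (e.g. "django_extensions" vs "django").
        return any(
            module_name == excluded or module_name.startswith(excluded + ".")
            for excluded in excluded_modules
        )

    assert is_excluded("django.db.models.query", ["django"])
    assert is_excluded("socketserver", ["socketserver"])
    assert not is_excluded("django_extensions.admin", ["django"])
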
--- debug_toolbar/utils.py | 62 ++++++++++++++---------------------------- 1 file changed, 20 insertions(+), 42 deletions(-) diff --git a/debug_toolbar/utils.py b/debug_toolbar/utils.py index 7c8381aa9..5dc9a28bb 100644 --- a/debug_toolbar/utils.py +++ b/debug_toolbar/utils.py @@ -3,11 +3,9 @@ import os.path import sys import warnings -from importlib import import_module from pprint import pformat from asgiref.local import Local -from django.core.exceptions import ImproperlyConfigured from django.template import Node from django.utils.html import format_html from django.utils.safestring import mark_safe @@ -23,26 +21,17 @@ _local_data = Local() -def get_module_path(module_name): - try: - module = import_module(module_name) - except ImportError as e: - raise ImproperlyConfigured(f"Error importing HIDE_IN_STACKTRACES: {e}") - else: - source_path = inspect.getsourcefile(module) - if source_path.endswith("__init__.py"): - source_path = os.path.dirname(source_path) - return os.path.realpath(source_path) - - -hidden_paths = [ - get_module_path(module_name) - for module_name in dt_settings.get_config()["HIDE_IN_STACKTRACES"] -] - - -def omit_path(path): - return any(path.startswith(hidden_path) for hidden_path in hidden_paths) +def _is_excluded_frame(frame, excluded_modules): + if not excluded_modules: + return False + frame_module = frame.f_globals.get("__name__") + if not isinstance(frame_module, str): + return False + return any( + frame_module == excluded_module + or frame_module.startswith(excluded_module + ".") + for excluded_module in excluded_modules + ) def _stack_trace_deprecation_warning(): @@ -65,8 +54,9 @@ def tidy_stacktrace(stack): _stack_trace_deprecation_warning() trace = [] + excluded_modules = dt_settings.get_config()["HIDE_IN_STACKTRACES"] for frame, path, line_no, func_name, text in (f[:5] for f in stack): - if omit_path(os.path.realpath(path)): + if _is_excluded_frame(frame, excluded_modules): continue text = "".join(text).strip() if text else "" frame_locals = ( @@ -267,10 +257,8 @@ def _stack_frames(depth=1): class _StackTraceRecorder: - def __init__(self, excluded_paths): - self.excluded_paths = excluded_paths + def __init__(self): self.filename_cache = {} - self.is_excluded_cache = {} def get_source_file(self, frame): frame_filename = frame.f_code.co_filename @@ -291,25 +279,14 @@ def get_source_file(self, frame): return value - def is_excluded_path(self, path): - excluded = self.is_excluded_cache.get(path) - if excluded is None: - resolved_path = os.path.realpath(path) - excluded = any( - resolved_path.startswith(excluded_path) - for excluded_path in self.excluded_paths - ) - self.is_excluded_cache[path] = excluded - return excluded - - def get_stack_trace(self, include_locals=False, depth=1): + def get_stack_trace(self, excluded_modules=None, include_locals=False, depth=1): trace = [] for frame in _stack_frames(depth=depth + 1): - filename, is_source = self.get_source_file(frame) - - if self.is_excluded_path(filename): + if _is_excluded_frame(frame, excluded_modules): continue + filename, is_source = self.get_source_file(frame) + line_no = frame.f_lineno func_name = frame.f_code.co_name @@ -334,9 +311,10 @@ def get_stack_trace(depth=1): if config["ENABLE_STACKTRACES"]: stack_trace_recorder = getattr(_local_data, "stack_trace_recorder", None) if stack_trace_recorder is None: - stack_trace_recorder = _StackTraceRecorder(hidden_paths) + stack_trace_recorder = _StackTraceRecorder() _local_data.stack_trace_recorder = stack_trace_recorder return 
stack_trace_recorder.get_stack_trace( + excluded_modules=config["HIDE_IN_STACKTRACES"], include_locals=config["ENABLE_STACKTRACES_LOCALS"], depth=depth, ) From 56f397caecad25e57f613ae0677edad088a87663 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Tue, 31 May 2022 17:40:17 +0300 Subject: [PATCH 46/60] Make get_stack_trace() kwarg-only --- debug_toolbar/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/debug_toolbar/utils.py b/debug_toolbar/utils.py index 5dc9a28bb..32aa27420 100644 --- a/debug_toolbar/utils.py +++ b/debug_toolbar/utils.py @@ -279,7 +279,7 @@ def get_source_file(self, frame): return value - def get_stack_trace(self, excluded_modules=None, include_locals=False, depth=1): + def get_stack_trace(self, *, excluded_modules=None, include_locals=False, depth=1): trace = [] for frame in _stack_frames(depth=depth + 1): if _is_excluded_frame(frame, excluded_modules): @@ -306,7 +306,7 @@ def get_stack_trace(self, excluded_modules=None, include_locals=False, depth=1): return trace -def get_stack_trace(depth=1): +def get_stack_trace(*, depth=1): config = dt_settings.get_config() if config["ENABLE_STACKTRACES"]: stack_trace_recorder = getattr(_local_data, "stack_trace_recorder", None) From 344e639d8ca50a2e3082a35516b7944b34ddc70a Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Sun, 29 May 2022 16:59:49 +0300 Subject: [PATCH 47/60] Make query key functions top-level functions Avoid redefining them every time the SQLPanel.generate_stats() method is called. --- debug_toolbar/panels/sql/panel.py | 33 +++++++++++++++---------------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/debug_toolbar/panels/sql/panel.py b/debug_toolbar/panels/sql/panel.py index 23f453567..6de67f4cb 100644 --- a/debug_toolbar/panels/sql/panel.py +++ b/debug_toolbar/panels/sql/panel.py @@ -48,6 +48,18 @@ def get_transaction_status_display(vendor, level): return choices.get(level) +def _similar_query_key(query): + return query["raw_sql"] + + +def _duplicate_query_key(query): + raw_params = () if query["raw_params"] is None else tuple(query["raw_params"]) + # saferepr() avoids problems because of unhashable types + # (e.g. lists) when used as dictionary keys. + # https://github.com/jazzband/django-debug-toolbar/issues/1091 + return (query["raw_sql"], saferepr(raw_params)) + + class SQLPanel(Panel): """ Panel that displays information about the SQL queries run while processing @@ -147,19 +159,6 @@ def generate_stats(self, request, response): query_similar = defaultdict(lambda: defaultdict(int)) query_duplicates = defaultdict(lambda: defaultdict(int)) - # The keys used to determine similar and duplicate queries. - def similar_key(query): - return query["raw_sql"] - - def duplicate_key(query): - raw_params = ( - () if query["raw_params"] is None else tuple(query["raw_params"]) - ) - # saferepr() avoids problems because of unhashable types - # (e.g. lists) when used as dictionary keys. 
- # https://github.com/jazzband/django-debug-toolbar/issues/1091 - return (query["raw_sql"], saferepr(raw_params)) - if self._queries: width_ratio_tally = 0 factor = int(256.0 / (len(self._databases) * 2.5)) @@ -181,8 +180,8 @@ def duplicate_key(query): # the last query recorded for each DB alias last_by_alias = {} for alias, query in self._queries: - query_similar[alias][similar_key(query)] += 1 - query_duplicates[alias][duplicate_key(query)] += 1 + query_similar[alias][_similar_query_key(query)] += 1 + query_duplicates[alias][_duplicate_query_key(query)] += 1 trans_id = query.get("trans_id") prev_query = last_by_alias.get(alias, {}) @@ -259,11 +258,11 @@ def duplicate_key(query): try: (query["similar_count"], query["similar_color"]) = query_similar_colors[ alias - ][similar_key(query)] + ][_similar_query_key(query)] ( query["duplicate_count"], query["duplicate_color"], - ) = query_duplicates_colors[alias][duplicate_key(query)] + ) = query_duplicates_colors[alias][_duplicate_query_key(query)] except KeyError: pass From ca1d31ce4aa2ec6ae19232ae96222376c9032375 Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Sun, 29 May 2022 17:37:30 +0300 Subject: [PATCH 48/60] Simplify similar/duplicate query recording Tweak the data structures to reduce the number of loops required, and pull common code into a function. --- debug_toolbar/panels/sql/panel.py | 74 ++++++++++++------------------- 1 file changed, 28 insertions(+), 46 deletions(-) diff --git a/debug_toolbar/panels/sql/panel.py b/debug_toolbar/panels/sql/panel.py index 6de67f4cb..0ac68d161 100644 --- a/debug_toolbar/panels/sql/panel.py +++ b/debug_toolbar/panels/sql/panel.py @@ -60,6 +60,21 @@ def _duplicate_query_key(query): return (query["raw_sql"], saferepr(raw_params)) +def _process_query_groups(query_groups, databases, colors, name): + counts = defaultdict(int) + for (alias, key), query_group in query_groups.items(): + count = len(query_group) + # Queries are similar / duplicates only if there are at least 2 of them. + if count > 1: + color = next(colors) + for query in query_group: + query[f"{name}_count"] = count + query[f"{name}_color"] = color + counts[alias] += count + for alias, db_info in databases.items(): + db_info[f"{name}_count"] = counts[alias] + + class SQLPanel(Panel): """ Panel that displays information about the SQL queries run while processing @@ -156,8 +171,8 @@ def disable_instrumentation(self): def generate_stats(self, request, response): colors = contrasting_color_generator() trace_colors = defaultdict(lambda: next(colors)) - query_similar = defaultdict(lambda: defaultdict(int)) - query_duplicates = defaultdict(lambda: defaultdict(int)) + similar_query_groups = defaultdict(list) + duplicate_query_groups = defaultdict(list) if self._queries: width_ratio_tally = 0 @@ -180,8 +195,10 @@ def generate_stats(self, request, response): # the last query recorded for each DB alias last_by_alias = {} for alias, query in self._queries: - query_similar[alias][_similar_query_key(query)] += 1 - query_duplicates[alias][_duplicate_query_key(query)] += 1 + similar_query_groups[(alias, _similar_query_key(query))].append(query) + duplicate_query_groups[(alias, _duplicate_query_key(query))].append( + query + ) trans_id = query.get("trans_id") prev_query = last_by_alias.get(alias, {}) @@ -234,48 +251,13 @@ def generate_stats(self, request, response): if final_query.get("trans_id") is not None: final_query["ends_trans"] = True - # Queries are similar / duplicates only if there's as least 2 of them. 
- # Also, to hide queries, we need to give all the duplicate groups an id - query_colors = contrasting_color_generator() - query_similar_colors = { - alias: { - query: (similar_count, next(query_colors)) - for query, similar_count in queries.items() - if similar_count >= 2 - } - for alias, queries in query_similar.items() - } - query_duplicates_colors = { - alias: { - query: (duplicate_count, next(query_colors)) - for query, duplicate_count in queries.items() - if duplicate_count >= 2 - } - for alias, queries in query_duplicates.items() - } - - for alias, query in self._queries: - try: - (query["similar_count"], query["similar_color"]) = query_similar_colors[ - alias - ][_similar_query_key(query)] - ( - query["duplicate_count"], - query["duplicate_color"], - ) = query_duplicates_colors[alias][_duplicate_query_key(query)] - except KeyError: - pass - - for alias, alias_info in self._databases.items(): - try: - alias_info["similar_count"] = sum( - e[0] for e in query_similar_colors[alias].values() - ) - alias_info["duplicate_count"] = sum( - e[0] for e in query_duplicates_colors[alias].values() - ) - except KeyError: - pass + group_colors = contrasting_color_generator() + _process_query_groups( + similar_query_groups, self._databases, group_colors, "similar" + ) + _process_query_groups( + duplicate_query_groups, self._databases, group_colors, "duplicate" + ) self.record_stats( { From dfaa44fe298aa07b806b3a6122882a7fec2803bb Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Mon, 30 May 2022 09:14:19 +0300 Subject: [PATCH 49/60] Avoid unnecessary use of saferepr() saferepr() is only necessary to guard against recursive data structures. However, if you pass a recursive data structure as an SQL query parameter, You're Gonna Have A Bad Time. So just use repr() which is faster. --- debug_toolbar/panels/sql/panel.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/debug_toolbar/panels/sql/panel.py b/debug_toolbar/panels/sql/panel.py index 0ac68d161..fd8a16289 100644 --- a/debug_toolbar/panels/sql/panel.py +++ b/debug_toolbar/panels/sql/panel.py @@ -1,7 +1,6 @@ import uuid from collections import defaultdict from copy import copy -from pprint import saferepr from django.db import connections from django.urls import path @@ -54,10 +53,10 @@ def _similar_query_key(query): def _duplicate_query_key(query): raw_params = () if query["raw_params"] is None else tuple(query["raw_params"]) - # saferepr() avoids problems because of unhashable types + # repr() avoids problems because of unhashable types # (e.g. lists) when used as dictionary keys. 
# https://github.com/jazzband/django-debug-toolbar/issues/1091 - return (query["raw_sql"], saferepr(raw_params)) + return (query["raw_sql"], repr(raw_params)) def _process_query_groups(query_groups, databases, colors, name): From 689723502c4e211ed57dbe50ce85b989f3fc391a Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Wed, 1 Jun 2022 05:34:53 +0300 Subject: [PATCH 50/60] Add a test for similar/duplicate query grouping --- tests/panels/test_sql.py | 57 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 57 insertions(+) diff --git a/tests/panels/test_sql.py b/tests/panels/test_sql.py index 40ec83dbb..55e723a9a 100644 --- a/tests/panels/test_sql.py +++ b/tests/panels/test_sql.py @@ -507,6 +507,63 @@ def test_nested_template_information(self): self.assertEqual(template_info["context"][0]["content"].strip(), "{{ users }}") self.assertEqual(template_info["context"][0]["highlight"], True) + def test_similar_and_duplicate_grouping(self): + self.assertEqual(len(self.panel._queries), 0) + + User.objects.filter(id=1).count() + User.objects.filter(id=1).count() + User.objects.filter(id=2).count() + User.objects.filter(id__lt=10).count() + User.objects.filter(id__lt=20).count() + User.objects.filter(id__gt=10, id__lt=20).count() + + response = self.panel.process_request(self.request) + self.panel.generate_stats(self.request, response) + + self.assertEqual(len(self.panel._queries), 6) + + queries = self.panel._queries + query = queries[0] + self.assertEqual(query[1]["similar_count"], 3) + self.assertEqual(query[1]["duplicate_count"], 2) + + query = queries[1] + self.assertEqual(query[1]["similar_count"], 3) + self.assertEqual(query[1]["duplicate_count"], 2) + + query = queries[2] + self.assertEqual(query[1]["similar_count"], 3) + self.assertTrue("duplicate_count" not in query[1]) + + query = queries[3] + self.assertEqual(query[1]["similar_count"], 2) + self.assertTrue("duplicate_count" not in query[1]) + + query = queries[4] + self.assertEqual(query[1]["similar_count"], 2) + self.assertTrue("duplicate_count" not in query[1]) + + query = queries[5] + self.assertTrue("similar_count" not in query[1]) + self.assertTrue("duplicate_count" not in query[1]) + + self.assertEqual(queries[0][1]["similar_color"], queries[1][1]["similar_color"]) + self.assertEqual(queries[0][1]["similar_color"], queries[2][1]["similar_color"]) + self.assertEqual( + queries[0][1]["duplicate_color"], queries[1][1]["duplicate_color"] + ) + self.assertNotEqual( + queries[0][1]["similar_color"], queries[0][1]["duplicate_color"] + ) + + self.assertEqual(queries[3][1]["similar_color"], queries[4][1]["similar_color"]) + self.assertNotEqual( + queries[0][1]["similar_color"], queries[3][1]["similar_color"] + ) + self.assertNotEqual( + queries[0][1]["duplicate_color"], queries[3][1]["similar_color"] + ) + class SQLPanelMultiDBTestCase(BaseMultiDBTestCase): panel_id = "SQLPanel" From ec4c0c690c993e96bfb08da6147774af18f3e24e Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Tue, 31 May 2022 22:27:11 +0300 Subject: [PATCH 51/60] Simplify SQLPanel._queries data structure Instead of using a list of tuples of (alias, query) pairs, store the alias as a field on the query like everything else and use a list of queries directly. 
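
To make the shape change concrete, a small before/after sketch with made-up values: the alias moves from the tuple position into the query dict itself, so downstream code iterates plain dicts.

    # Illustrative data only, not taken from the patch.
    old_queries = [
        ("default", {"sql": "SELECT ...", "duration": 0.4}),
        ("replica", {"sql": "SELECT ...", "duration": 0.7}),
    ]

    new_queries = [
        {"alias": "default", "sql": "SELECT ...", "duration": 0.4},
        {"alias": "replica", "sql": "SELECT ...", "duration": 0.7},
    ]

    # Grouping by alias no longer needs tuple unpacking.
    by_alias = {}
    for query in new_queries:
        by_alias.setdefault(query["alias"], []).append(query)
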
--- debug_toolbar/panels/sql/panel.py | 12 +- tests/panels/test_sql.py | 190 ++++++++++++++---------------- 2 files changed, 94 insertions(+), 108 deletions(-) diff --git a/debug_toolbar/panels/sql/panel.py b/debug_toolbar/panels/sql/panel.py index fd8a16289..d8099f25b 100644 --- a/debug_toolbar/panels/sql/panel.py +++ b/debug_toolbar/panels/sql/panel.py @@ -111,8 +111,9 @@ def current_transaction_id(self, alias): trans_id = self.new_transaction_id(alias) return trans_id - def record(self, alias, **kwargs): - self._queries.append((alias, kwargs)) + def record(self, **kwargs): + self._queries.append(kwargs) + alias = kwargs["alias"] if alias not in self._databases: self._databases[alias] = { "time_spent": kwargs["duration"], @@ -193,7 +194,9 @@ def generate_stats(self, request, response): # the last query recorded for each DB alias last_by_alias = {} - for alias, query in self._queries: + for query in self._queries: + alias = query["alias"] + similar_query_groups[(alias, _similar_query_key(query))].append(query) duplicate_query_groups[(alias, _duplicate_query_key(query))].append( query @@ -214,7 +217,6 @@ def generate_stats(self, request, response): if trans_id is not None: query["in_trans"] = True - query["alias"] = alias if "iso_level" in query: query["iso_level"] = get_isolation_level_display( query["vendor"], query["iso_level"] @@ -263,7 +265,7 @@ def generate_stats(self, request, response): "databases": sorted( self._databases.items(), key=lambda x: -x[1]["time_spent"] ), - "queries": [q for a, q in self._queries], + "queries": self._queries, "sql_time": self._sql_time, } ) diff --git a/tests/panels/test_sql.py b/tests/panels/test_sql.py index 55e723a9a..f078820c8 100644 --- a/tests/panels/test_sql.py +++ b/tests/panels/test_sql.py @@ -44,13 +44,13 @@ def test_recording(self): # ensure query was logged self.assertEqual(len(self.panel._queries), 1) query = self.panel._queries[0] - self.assertEqual(query[0], "default") - self.assertTrue("sql" in query[1]) - self.assertTrue("duration" in query[1]) - self.assertTrue("stacktrace" in query[1]) + self.assertEqual(query["alias"], "default") + self.assertTrue("sql" in query) + self.assertTrue("duration" in query) + self.assertTrue("stacktrace" in query) # ensure the stacktrace is populated - self.assertTrue(len(query[1]["stacktrace"]) > 0) + self.assertTrue(len(query["stacktrace"]) > 0) @unittest.skipUnless( connection.vendor == "postgresql", "Test valid only on PostgreSQL" @@ -128,7 +128,7 @@ def test_generate_server_timing(self): query = self.panel._queries[0] expected_data = { - "sql_time": {"title": "SQL 1 queries", "value": query[1]["duration"]} + "sql_time": {"title": "SQL 1 queries", "value": query["duration"]} } self.assertEqual(self.panel.get_server_timing_stats(), expected_data) @@ -195,7 +195,7 @@ def test_param_conversion(self): expected_datetime = '["2017-12-22 16:07:01"]' self.assertEqual( - tuple(q[1]["params"] for q in self.panel._queries), + tuple(query["params"] for query in self.panel._queries), ( expected_bools, "[10, 1]", @@ -217,7 +217,7 @@ def test_json_param_conversion(self): # ensure query was logged self.assertEqual(len(self.panel._queries), 1) self.assertEqual( - self.panel._queries[0][1]["params"], + self.panel._queries[0]["params"], '["{\\"foo\\": \\"bar\\"}"]', ) @@ -237,7 +237,7 @@ def test_binary_param_force_text(self): self.assertIn( "SELECT * FROM" " tests_binary WHERE field =", - self.panel._queries[0][1]["sql"], + self.panel._queries[0]["sql"], ) @unittest.skipUnless(connection.vendor != "sqlite", "Test 
invalid for SQLite") @@ -288,7 +288,7 @@ def test_raw_query_param_conversion(self): self.assertEqual(len(self.panel._queries), 2) self.assertEqual( - tuple(q[1]["params"] for q in self.panel._queries), + tuple(query["params"] for query in self.panel._queries), ( '["Foo", true, false, "2017-12-22 16:07:01"]', " ".join( @@ -375,9 +375,9 @@ def test_execute_with_psycopg2_composed_sql(self): self.assertEqual(len(self.panel._queries), 1) query = self.panel._queries[0] - self.assertEqual(query[0], "default") - self.assertTrue("sql" in query[1]) - self.assertEqual(query[1]["sql"], 'select "username" from "auth_user"') + self.assertEqual(query["alias"], "default") + self.assertTrue("sql" in query) + self.assertEqual(query["sql"], 'select "username" from "auth_user"') def test_disable_stacktraces(self): self.assertEqual(len(self.panel._queries), 0) @@ -388,13 +388,13 @@ def test_disable_stacktraces(self): # ensure query was logged self.assertEqual(len(self.panel._queries), 1) query = self.panel._queries[0] - self.assertEqual(query[0], "default") - self.assertTrue("sql" in query[1]) - self.assertTrue("duration" in query[1]) - self.assertTrue("stacktrace" in query[1]) + self.assertEqual(query["alias"], "default") + self.assertTrue("sql" in query) + self.assertTrue("duration" in query) + self.assertTrue("stacktrace" in query) # ensure the stacktrace is empty - self.assertEqual([], query[1]["stacktrace"]) + self.assertEqual([], query["stacktrace"]) @override_settings( DEBUG=True, @@ -418,13 +418,13 @@ def test_regression_infinite_recursion(self): # template is loaded and basic.html extends base.html. self.assertEqual(len(self.panel._queries), 2) query = self.panel._queries[0] - self.assertEqual(query[0], "default") - self.assertTrue("sql" in query[1]) - self.assertTrue("duration" in query[1]) - self.assertTrue("stacktrace" in query[1]) + self.assertEqual(query["alias"], "default") + self.assertTrue("sql" in query) + self.assertTrue("duration" in query) + self.assertTrue("stacktrace" in query) # ensure the stacktrace is populated - self.assertTrue(len(query[1]["stacktrace"]) > 0) + self.assertTrue(len(query["stacktrace"]) > 0) @override_settings( DEBUG_TOOLBAR_CONFIG={"PRETTIFY_SQL": True}, @@ -439,7 +439,7 @@ def test_prettify_sql(self): response = self.panel.process_request(self.request) self.panel.generate_stats(self.request, response) - pretty_sql = self.panel._queries[-1][1]["sql"] + pretty_sql = self.panel._queries[-1]["sql"] self.assertEqual(len(self.panel._queries), 1) # Reset the queries @@ -450,7 +450,7 @@ def test_prettify_sql(self): response = self.panel.process_request(self.request) self.panel.generate_stats(self.request, response) self.assertEqual(len(self.panel._queries), 1) - self.assertNotEqual(pretty_sql, self.panel._queries[-1][1]["sql"]) + self.assertNotEqual(pretty_sql, self.panel._queries[-1]["sql"]) self.panel._queries = [] # Run it again, but with prettyify back on. 
@@ -461,7 +461,7 @@ def test_prettify_sql(self): response = self.panel.process_request(self.request) self.panel.generate_stats(self.request, response) self.assertEqual(len(self.panel._queries), 1) - self.assertEqual(pretty_sql, self.panel._queries[-1][1]["sql"]) + self.assertEqual(pretty_sql, self.panel._queries[-1]["sql"]) @override_settings( DEBUG=True, @@ -479,7 +479,7 @@ def test_flat_template_information(self): self.assertEqual(len(self.panel._queries), 1) query = self.panel._queries[0] - template_info = query[1]["template_info"] + template_info = query["template_info"] template_name = os.path.basename(template_info["name"]) self.assertEqual(template_name, "flat.html") self.assertEqual(template_info["context"][2]["content"].strip(), "{{ users }}") @@ -501,7 +501,7 @@ def test_nested_template_information(self): self.assertEqual(len(self.panel._queries), 1) query = self.panel._queries[0] - template_info = query[1]["template_info"] + template_info = query["template_info"] template_name = os.path.basename(template_info["name"]) self.assertEqual(template_name, "included.html") self.assertEqual(template_info["context"][0]["content"].strip(), "{{ users }}") @@ -524,45 +524,37 @@ def test_similar_and_duplicate_grouping(self): queries = self.panel._queries query = queries[0] - self.assertEqual(query[1]["similar_count"], 3) - self.assertEqual(query[1]["duplicate_count"], 2) + self.assertEqual(query["similar_count"], 3) + self.assertEqual(query["duplicate_count"], 2) query = queries[1] - self.assertEqual(query[1]["similar_count"], 3) - self.assertEqual(query[1]["duplicate_count"], 2) + self.assertEqual(query["similar_count"], 3) + self.assertEqual(query["duplicate_count"], 2) query = queries[2] - self.assertEqual(query[1]["similar_count"], 3) - self.assertTrue("duplicate_count" not in query[1]) + self.assertEqual(query["similar_count"], 3) + self.assertTrue("duplicate_count" not in query) query = queries[3] - self.assertEqual(query[1]["similar_count"], 2) - self.assertTrue("duplicate_count" not in query[1]) + self.assertEqual(query["similar_count"], 2) + self.assertTrue("duplicate_count" not in query) query = queries[4] - self.assertEqual(query[1]["similar_count"], 2) - self.assertTrue("duplicate_count" not in query[1]) + self.assertEqual(query["similar_count"], 2) + self.assertTrue("duplicate_count" not in query) query = queries[5] - self.assertTrue("similar_count" not in query[1]) - self.assertTrue("duplicate_count" not in query[1]) + self.assertTrue("similar_count" not in query) + self.assertTrue("duplicate_count" not in query) - self.assertEqual(queries[0][1]["similar_color"], queries[1][1]["similar_color"]) - self.assertEqual(queries[0][1]["similar_color"], queries[2][1]["similar_color"]) - self.assertEqual( - queries[0][1]["duplicate_color"], queries[1][1]["duplicate_color"] - ) - self.assertNotEqual( - queries[0][1]["similar_color"], queries[0][1]["duplicate_color"] - ) + self.assertEqual(queries[0]["similar_color"], queries[1]["similar_color"]) + self.assertEqual(queries[0]["similar_color"], queries[2]["similar_color"]) + self.assertEqual(queries[0]["duplicate_color"], queries[1]["duplicate_color"]) + self.assertNotEqual(queries[0]["similar_color"], queries[0]["duplicate_color"]) - self.assertEqual(queries[3][1]["similar_color"], queries[4][1]["similar_color"]) - self.assertNotEqual( - queries[0][1]["similar_color"], queries[3][1]["similar_color"] - ) - self.assertNotEqual( - queries[0][1]["duplicate_color"], queries[3][1]["similar_color"] - ) + 
self.assertEqual(queries[3]["similar_color"], queries[4]["similar_color"]) + self.assertNotEqual(queries[0]["similar_color"], queries[3]["similar_color"]) + self.assertNotEqual(queries[0]["duplicate_color"], queries[3]["similar_color"]) class SQLPanelMultiDBTestCase(BaseMultiDBTestCase): @@ -580,10 +572,10 @@ def test_aliases(self): self.assertTrue(self.panel._queries) query = self.panel._queries[0] - self.assertEqual(query[0], "default") + self.assertEqual(query["alias"], "default") query = self.panel._queries[-1] - self.assertEqual(query[0], "replica") + self.assertEqual(query["alias"], "replica") def test_transaction_status(self): """ @@ -614,63 +606,55 @@ def test_transaction_status(self): self.assertEqual(len(self.panel._queries), 6) query = self.panel._queries[0] - self.assertEqual(query[0], "default") - self.assertIsNotNone(query[1]["trans_id"]) - self.assertTrue(query[1]["starts_trans"]) - self.assertTrue(query[1]["in_trans"]) - self.assertFalse("end_trans" in query[1]) + self.assertEqual(query["alias"], "default") + self.assertIsNotNone(query["trans_id"]) + self.assertTrue(query["starts_trans"]) + self.assertTrue(query["in_trans"]) + self.assertFalse("end_trans" in query) query = self.panel._queries[-1] - self.assertEqual(query[0], "replica") - self.assertIsNone(query[1]["trans_id"]) - self.assertFalse("starts_trans" in query[1]) - self.assertFalse("in_trans" in query[1]) - self.assertFalse("end_trans" in query[1]) + self.assertEqual(query["alias"], "replica") + self.assertIsNone(query["trans_id"]) + self.assertFalse("starts_trans" in query) + self.assertFalse("in_trans" in query) + self.assertFalse("end_trans" in query) query = self.panel._queries[2] - self.assertEqual(query[0], "default") - self.assertIsNotNone(query[1]["trans_id"]) - self.assertEqual( - query[1]["trans_id"], self.panel._queries[0][1]["trans_id"] - ) - self.assertFalse("starts_trans" in query[1]) - self.assertTrue(query[1]["in_trans"]) - self.assertTrue(query[1]["ends_trans"]) + self.assertEqual(query["alias"], "default") + self.assertIsNotNone(query["trans_id"]) + self.assertEqual(query["trans_id"], self.panel._queries[0]["trans_id"]) + self.assertFalse("starts_trans" in query) + self.assertTrue(query["in_trans"]) + self.assertTrue(query["ends_trans"]) query = self.panel._queries[3] - self.assertEqual(query[0], "replica") - self.assertIsNotNone(query[1]["trans_id"]) - self.assertNotEqual( - query[1]["trans_id"], self.panel._queries[0][1]["trans_id"] - ) - self.assertTrue(query[1]["starts_trans"]) - self.assertTrue(query[1]["in_trans"]) - self.assertTrue(query[1]["ends_trans"]) + self.assertEqual(query["alias"], "replica") + self.assertIsNotNone(query["trans_id"]) + self.assertNotEqual(query["trans_id"], self.panel._queries[0]["trans_id"]) + self.assertTrue(query["starts_trans"]) + self.assertTrue(query["in_trans"]) + self.assertTrue(query["ends_trans"]) query = self.panel._queries[4] - self.assertEqual(query[0], "default") - self.assertIsNotNone(query[1]["trans_id"]) - self.assertNotEqual( - query[1]["trans_id"], self.panel._queries[0][1]["trans_id"] - ) - self.assertNotEqual( - query[1]["trans_id"], self.panel._queries[3][1]["trans_id"] - ) - self.assertTrue(query[1]["starts_trans"]) - self.assertTrue(query[1]["in_trans"]) - self.assertTrue(query[1]["ends_trans"]) + self.assertEqual(query["alias"], "default") + self.assertIsNotNone(query["trans_id"]) + self.assertNotEqual(query["trans_id"], self.panel._queries[0]["trans_id"]) + self.assertNotEqual(query["trans_id"], self.panel._queries[3]["trans_id"]) + 
self.assertTrue(query["starts_trans"]) + self.assertTrue(query["in_trans"]) + self.assertTrue(query["ends_trans"]) query = self.panel._queries[5] - self.assertEqual(query[0], "replica") - self.assertIsNone(query[1]["trans_id"]) - self.assertFalse("starts_trans" in query[1]) - self.assertFalse("in_trans" in query[1]) - self.assertFalse("end_trans" in query[1]) + self.assertEqual(query["alias"], "replica") + self.assertIsNone(query["trans_id"]) + self.assertFalse("starts_trans" in query) + self.assertFalse("in_trans" in query) + self.assertFalse("end_trans" in query) else: # Ensure that nothing was recorded for other database engines. self.assertTrue(self.panel._queries) for query in self.panel._queries: - self.assertFalse("trans_id" in query[1]) - self.assertFalse("starts_trans" in query[1]) - self.assertFalse("in_trans" in query[1]) - self.assertFalse("end_trans" in query[1]) + self.assertFalse("trans_id" in query) + self.assertFalse("starts_trans" in query) + self.assertFalse("in_trans" in query) + self.assertFalse("end_trans" in query) From ff552eae0cdfe8fc3e256a445cfbd0333375f1be Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Tue, 31 May 2022 14:35:55 +0300 Subject: [PATCH 52/60] Tweak code structure --- debug_toolbar/utils.py | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/debug_toolbar/utils.py b/debug_toolbar/utils.py index 32aa27420..87d7b73fb 100644 --- a/debug_toolbar/utils.py +++ b/debug_toolbar/utils.py @@ -308,18 +308,17 @@ def get_stack_trace(self, *, excluded_modules=None, include_locals=False, depth= def get_stack_trace(*, depth=1): config = dt_settings.get_config() - if config["ENABLE_STACKTRACES"]: - stack_trace_recorder = getattr(_local_data, "stack_trace_recorder", None) - if stack_trace_recorder is None: - stack_trace_recorder = _StackTraceRecorder() - _local_data.stack_trace_recorder = stack_trace_recorder - return stack_trace_recorder.get_stack_trace( - excluded_modules=config["HIDE_IN_STACKTRACES"], - include_locals=config["ENABLE_STACKTRACES_LOCALS"], - depth=depth, - ) - else: + if not config["ENABLE_STACKTRACES"]: return [] + stack_trace_recorder = getattr(_local_data, "stack_trace_recorder", None) + if stack_trace_recorder is None: + stack_trace_recorder = _StackTraceRecorder() + _local_data.stack_trace_recorder = stack_trace_recorder + return stack_trace_recorder.get_stack_trace( + excluded_modules=config["HIDE_IN_STACKTRACES"], + include_locals=config["ENABLE_STACKTRACES_LOCALS"], + depth=depth, + ) def clear_stack_trace_caches(): From 8709f0fe738be09c612a1bae2fd0137cae4e377c Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Tue, 31 May 2022 14:33:01 +0300 Subject: [PATCH 53/60] Tweak the get_stack_trace() API Rename the `depth` argument to `skip` and change the semantics so that now `skip=0` has the same meaning as `depth=1`. --- debug_toolbar/utils.py | 31 ++++++++++++++++++++++++------- tests/test_utils.py | 18 ++++++++++++++++++ 2 files changed, 42 insertions(+), 7 deletions(-) diff --git a/debug_toolbar/utils.py b/debug_toolbar/utils.py index 87d7b73fb..bd74e6eed 100644 --- a/debug_toolbar/utils.py +++ b/debug_toolbar/utils.py @@ -246,11 +246,12 @@ def get_stack(context=1): return framelist -def _stack_frames(depth=1): +def _stack_frames(*, skip=0): + skip += 1 # Skip the frame for this generator. 
frame = inspect.currentframe() while frame is not None: - if depth > 0: - depth -= 1 + if skip > 0: + skip -= 1 else: yield frame frame = frame.f_back @@ -279,9 +280,10 @@ def get_source_file(self, frame): return value - def get_stack_trace(self, *, excluded_modules=None, include_locals=False, depth=1): + def get_stack_trace(self, *, excluded_modules=None, include_locals=False, skip=0): trace = [] - for frame in _stack_frames(depth=depth + 1): + skip += 1 # Skip the frame for this method. + for frame in _stack_frames(skip=skip): if _is_excluded_frame(frame, excluded_modules): continue @@ -306,10 +308,25 @@ def get_stack_trace(self, *, excluded_modules=None, include_locals=False, depth= return trace -def get_stack_trace(*, depth=1): +def get_stack_trace(*, skip=0): + """ + Return a processed stack trace for the current call stack. + + If the ``ENABLE_STACKTRACES`` setting is False, return an empty :class:`list`. + Otherwise return a :class:`list` of processed stack frame tuples (file name, line + number, function name, source line, frame locals) for the current call stack. The + first entry in the list will be for the bottom of the stack and the last entry will + be for the top of the stack. + + ``skip`` is an :class:`int` indicating the number of stack frames above the frame + for this function to omit from the stack trace. The default value of ``0`` means + that the entry for the caller of this function will be the last entry in the + returned stack trace. + """ config = dt_settings.get_config() if not config["ENABLE_STACKTRACES"]: return [] + skip += 1 # Skip the frame for this function. stack_trace_recorder = getattr(_local_data, "stack_trace_recorder", None) if stack_trace_recorder is None: stack_trace_recorder = _StackTraceRecorder() @@ -317,7 +334,7 @@ def get_stack_trace(*, depth=1): return stack_trace_recorder.get_stack_trace( excluded_modules=config["HIDE_IN_STACKTRACES"], include_locals=config["ENABLE_STACKTRACES_LOCALS"], - depth=depth, + skip=skip, ) diff --git a/tests/test_utils.py b/tests/test_utils.py index d884b050a..31a67a6c1 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,8 +1,12 @@ import unittest +from django.test import override_settings + +import debug_toolbar.utils from debug_toolbar.utils import ( get_name_from_obj, get_stack, + get_stack_trace, render_stacktrace, tidy_stacktrace, ) @@ -55,6 +59,20 @@ def test_importlib_path_issue_1612(self): class StackTraceTestCase(unittest.TestCase): + @override_settings(DEBUG_TOOLBAR_CONFIG={"HIDE_IN_STACKTRACES": []}) + def test_get_stack_trace_skip(self): + stack_trace = get_stack_trace(skip=-1) + self.assertTrue(len(stack_trace) > 2) + self.assertEqual(stack_trace[-1][0], debug_toolbar.utils.__file__) + self.assertEqual(stack_trace[-1][2], "get_stack_trace") + self.assertEqual(stack_trace[-2][0], __file__) + self.assertEqual(stack_trace[-2][2], "test_get_stack_trace_skip") + + stack_trace = get_stack_trace() + self.assertTrue(len(stack_trace) > 1) + self.assertEqual(stack_trace[-1][0], __file__) + self.assertEqual(stack_trace[-1][2], "test_get_stack_trace_skip") + def test_deprecated_functions(self): with self.assertWarns(DeprecationWarning): stack = get_stack() From e59a8ec355e328f7ab1a1685183e8489b9453f6f Mon Sep 17 00:00:00 2001 From: Daniel Harding Date: Tue, 31 May 2022 15:50:36 +0300 Subject: [PATCH 54/60] Exclude tracking frames from stack traces Ensure that stack traces do not include any of the cache or SQL panel tracking infrastructure even if HIDE_IN_STACKTRACES is empty. 
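With the `skip` semantics introduced in the previous patch, `get_stack_trace(skip=2)` should omit the two instrumentation frames directly above it, i.e. the recording helper (`_record()` / `_record_call()`) and the wrapped cursor or cache method that calls it, leaving the application code that triggered the call as the last recorded frame. A minimal sketch of that behaviour, with placeholder function names standing in for the real wrappers and assuming a configured Django settings module with `ENABLE_STACKTRACES` left at its default of True:

    from debug_toolbar.utils import get_stack_trace

    def recording_helper():
        # Stands in for _record() / _record_call(); skip=2 omits this helper
        # and instrumented_wrapper() from the returned trace.
        return get_stack_trace(skip=2)

    def instrumented_wrapper():
        # Stands in for the wrapped cursor execute() / cache method.
        return recording_helper()

    def application_code():
        trace = instrumented_wrapper()
        # The last entry now describes application_code(), not the two
        # instrumentation helpers above it.
        return trace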
--- debug_toolbar/panels/cache.py | 2 +- debug_toolbar/panels/sql/tracking.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/debug_toolbar/panels/cache.py b/debug_toolbar/panels/cache.py index 4ca36a387..f5ceea513 100644 --- a/debug_toolbar/panels/cache.py +++ b/debug_toolbar/panels/cache.py @@ -135,7 +135,7 @@ def _record_call(self, cache, name, original_method, args, kwargs): return_value=value, args=args, kwargs=kwargs, - trace=get_stack_trace(), + trace=get_stack_trace(skip=2), template_info=get_template_info(), backend=cache, ) diff --git a/debug_toolbar/panels/sql/tracking.py b/debug_toolbar/panels/sql/tracking.py index 8a15977de..b166e592d 100644 --- a/debug_toolbar/panels/sql/tracking.py +++ b/debug_toolbar/panels/sql/tracking.py @@ -176,7 +176,7 @@ def _record(self, method, sql, params): "raw_sql": sql, "params": _params, "raw_params": params, - "stacktrace": get_stack_trace(), + "stacktrace": get_stack_trace(skip=2), "start_time": start_time, "stop_time": stop_time, "is_slow": duration > dt_settings.get_config()["SQL_WARNING_THRESHOLD"], From 0f1d9d93c36c6da8073ad5e00ea119e6d6f2d3ef Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 13 Jun 2022 18:09:21 +0000 Subject: [PATCH 55/60] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/pre-commit-hooks: v4.2.0 → v4.3.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.2.0...v4.3.0) - [github.com/asottile/pyupgrade: v2.32.1 → v2.34.0](https://github.com/asottile/pyupgrade/compare/v2.32.1...v2.34.0) - [github.com/pre-commit/mirrors-eslint: v8.16.0 → v8.17.0](https://github.com/pre-commit/mirrors-eslint/compare/v8.16.0...v8.17.0) --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1fcfa5a1d..e9a94ae50 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.2.0 + rev: v4.3.0 hooks: - id: check-yaml - id: end-of-file-fixer @@ -15,7 +15,7 @@ repos: hooks: - id: doc8 - repo: https://github.com/asottile/pyupgrade - rev: v2.32.1 + rev: v2.34.0 hooks: - id: pyupgrade args: [--py37-plus] @@ -43,7 +43,7 @@ repos: - id: prettier types_or: [javascript, css] - repo: https://github.com/pre-commit/mirrors-eslint - rev: v8.16.0 + rev: v8.17.0 hooks: - id: eslint files: \.js?$ From f6401921d0259ecea2d9e2e9867f2fde9cd0c89a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 20 Jun 2022 17:47:14 +0000 Subject: [PATCH 56/60] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-prettier: v2.6.2 → v2.7.1](https://github.com/pre-commit/mirrors-prettier/compare/v2.6.2...v2.7.1) - [github.com/pre-commit/mirrors-eslint: v8.17.0 → v8.18.0](https://github.com/pre-commit/mirrors-eslint/compare/v8.17.0...v8.18.0) --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e9a94ae50..2a0e179ad 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -38,12 +38,12 @@ repos: - id: rst-backticks - id: rst-directive-colons - repo: https://github.com/pre-commit/mirrors-prettier - 
rev: v2.6.2 + rev: v2.7.1 hooks: - id: prettier types_or: [javascript, css] - repo: https://github.com/pre-commit/mirrors-eslint - rev: v8.17.0 + rev: v8.18.0 hooks: - id: eslint files: \.js?$ From f01ee7bff17cc55f7e105d9c363d9b2ab0343773 Mon Sep 17 00:00:00 2001 From: Matthias Kestenholz Date: Thu, 23 Jun 2022 18:45:54 +0200 Subject: [PATCH 57/60] Replace Django 4.1a1 with Django 4.1b1 in the CI matrix --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 212e31f39..5485a2742 100644 --- a/tox.ini +++ b/tox.ini @@ -9,7 +9,7 @@ envlist = deps = dj32: django~=3.2.9 dj40: django~=4.0.0 - dj41: django>=4.1a1,<4.2 + dj41: django>=4.1b1,<4.2 postgresql: psycopg2-binary postgis: psycopg2-binary mysql: mysqlclient From 1e6f85b001876de06d26f7d7a7947b2c232edc14 Mon Sep 17 00:00:00 2001 From: Matthias Kestenholz Date: Thu, 23 Jun 2022 18:48:50 +0200 Subject: [PATCH 58/60] Add a few missing entries to the changelog --- docs/changes.rst | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/changes.rst b/docs/changes.rst index 3b6c68065..ccad6b586 100644 --- a/docs/changes.rst +++ b/docs/changes.rst @@ -3,7 +3,7 @@ Change log * Properly implemented tracking and display of PostgreSQL transactions. * Removed third party panels which have been archived on GitHub. -* Added Django 4.1a1 to the CI matrix. +* Added Django 4.1b1 to the CI matrix. * Stopped crashing when ``request.GET`` and ``request.POST`` are neither dictionaries nor ``QueryDict`` instances. Using anything but ``QueryDict`` instances isn't a valid use of Django but, again, django-debug-toolbar @@ -23,6 +23,10 @@ Change log * Added a new mechanism for capturing stack traces which includes per-request caching to reduce expensive file system operations. Updated the cache and SQL panels to record stack traces using this new mechanism. +* Changed the ``docs`` tox environment to allow passing posargs. This allows + e.g. building a HTML version of the docs using ``tox -e docs html``. +* Stayed on top of pre-commit hook updates. +* Replaced ``OrderedDict`` by ``dict`` where possible. Deprecated features ~~~~~~~~~~~~~~~~~~~ From 0441b85f3225ac630c05d416775b6676240b724a Mon Sep 17 00:00:00 2001 From: Matthias Kestenholz Date: Thu, 23 Jun 2022 18:54:44 +0200 Subject: [PATCH 59/60] Avoid the spelling problem detection --- docs/changes.rst | 5 +++-- docs/spelling_wordlist.txt | 1 + 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/changes.rst b/docs/changes.rst index ccad6b586..cb8fa32da 100644 --- a/docs/changes.rst +++ b/docs/changes.rst @@ -23,8 +23,9 @@ Change log * Added a new mechanism for capturing stack traces which includes per-request caching to reduce expensive file system operations. Updated the cache and SQL panels to record stack traces using this new mechanism. -* Changed the ``docs`` tox environment to allow passing posargs. This allows - e.g. building a HTML version of the docs using ``tox -e docs html``. +* Changed the ``docs`` tox environment to allow passing positional arguments. + This allows e.g. building a HTML version of the docs using ``tox -e docs + html``. * Stayed on top of pre-commit hook updates. * Replaced ``OrderedDict`` by ``dict`` where possible. 
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index e8933b1dd..0ca7eb8b0 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -34,6 +34,7 @@ spooler stacktrace stacktraces timeline +tox Transifex unhashable uWSGI From e4f0e580d1cd1c81de0cc6ce5e8f39ac3eaf1a28 Mon Sep 17 00:00:00 2001 From: Matthias Kestenholz Date: Thu, 23 Jun 2022 18:49:16 +0200 Subject: [PATCH 60/60] django-debug-toolbar 3.5 --- README.rst | 2 +- debug_toolbar/__init__.py | 2 +- docs/changes.rst | 3 +++ docs/conf.py | 2 +- setup.cfg | 2 +- 5 files changed, 7 insertions(+), 4 deletions(-) diff --git a/README.rst b/README.rst index ae062cb0d..2c1ba9730 100644 --- a/README.rst +++ b/README.rst @@ -44,7 +44,7 @@ Here's a screenshot of the toolbar in action: In addition to the built-in panels, a number of third-party panels are contributed by the community. -The current stable version of the Debug Toolbar is 3.4.0. It works on +The current stable version of the Debug Toolbar is 3.5.0. It works on Django ≥ 3.2. Documentation, including installation and configuration instructions, is diff --git a/debug_toolbar/__init__.py b/debug_toolbar/__init__.py index e085bea73..c9834b8e3 100644 --- a/debug_toolbar/__init__.py +++ b/debug_toolbar/__init__.py @@ -4,7 +4,7 @@ # Do not use pkg_resources to find the version but set it here directly! # see issue #1446 -VERSION = "3.4.0" +VERSION = "3.5.0" # Code that discovers files or modules in INSTALLED_APPS imports this module. urls = "debug_toolbar.urls", APP_NAME diff --git a/docs/changes.rst b/docs/changes.rst index cb8fa32da..25ef409fc 100644 --- a/docs/changes.rst +++ b/docs/changes.rst @@ -1,6 +1,9 @@ Change log ========== +3.5.0 (2022-06-23) +------------------ + * Properly implemented tracking and display of PostgreSQL transactions. * Removed third party panels which have been archived on GitHub. * Added Django 4.1b1 to the CI matrix. diff --git a/docs/conf.py b/docs/conf.py index 6bf4770dc..374acd1d2 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -25,7 +25,7 @@ copyright = copyright.format(datetime.date.today().year) # The full version, including alpha/beta/rc tags -release = "3.4.0" +release = "3.5.0" # -- General configuration --------------------------------------------------- diff --git a/setup.cfg b/setup.cfg index 7f720eded..2fe12e38d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = django-debug-toolbar -version = 3.4.0 +version = 3.5.0 description = A configurable set of panels that display various debug information about the current request/response. long_description = file: README.rst long_description_content_type = text/x-rst