Björn Tillenius has proposed merging ~bjornt/maas:perf-test-no-custom-decorator into maas:master with ~bjornt/maas:perf-test-dont-measure-setup as a prerequisite.
Commit message: Remove the @perf_test decorator. It's not needed, and it was somewhat broken. It reverted back to a savepoint, unless commit_transaction was true. In that case it simply removed the savepoint. I'm not quite sure what the intent was there. If it was to simulate a commit, it was in the wrong place, since it was outside the block that measures the duration. The transaction is rolled back elsewhere for each test, so no need for a custom decorator. The decorator also had logic for skipping the tests if django wasn't loaded, but I don' Requested reviews: MAAS Maintainers (maas-maintainers) For more details, see: https://code.launchpad.net/~bjornt/maas/+git/maas/+merge/433521 Remove the @perf_test decorator. It's not needed, and it was somewhat broken. It reverted back to a savepoint, unless commit_transaction was true. In that case it simply removed the savepoint. I'm not quite sure what the intent was there. If it was to simulate a commit, it was in the wrong place, since it was outside the block that measures the duration. The transaction is rolled back elsewhere for each test, so no need for a custom decorator. The decorator also had logic for skipping the tests if django wasn't loaded, but I don' -- Your team MAAS Committers is subscribed to branch maas:master.
diff --git a/src/maasperf/tests/cli/test_machines.py b/src/maasperf/tests/cli/test_machines.py index 7d7c01b..95d09bc 100644 --- a/src/maasperf/tests/cli/test_machines.py +++ b/src/maasperf/tests/cli/test_machines.py @@ -4,14 +4,14 @@ from contextlib import contextmanager from httplib2 import Response +import pytest from maascli import api from maascli.config import ProfileConfig from maascli.parser import get_deepest_subparser, prepare_parser -from maastesting.perftest import perf_test -@perf_test() [email protected]_db def test_perf_list_machines_CLI( perf, cli_profile, monkeypatch, cli_machines_api_response ): diff --git a/src/maasperf/tests/maasserver/api/test_machines.py b/src/maasperf/tests/maasserver/api/test_machines.py index c2acf71..a823f68 100644 --- a/src/maasperf/tests/maasserver/api/test_machines.py +++ b/src/maasperf/tests/maasserver/api/test_machines.py @@ -4,10 +4,10 @@ from django.urls import reverse from piston3.emitters import Emitter from piston3.handler import typemapper +import pytest from maasserver.api.machines import MachinesHandler from maastesting.http import make_HttpRequest -from maastesting.perftest import perf_test class DummyEmitter(Emitter): @@ -15,7 +15,7 @@ class DummyEmitter(Emitter): self.construct() -@perf_test() [email protected]_db def test_perf_list_machines_MachineHandler_api_endpoint( perf, admin_api_client ): @@ -23,7 +23,7 @@ def test_perf_list_machines_MachineHandler_api_endpoint( admin_api_client.get(reverse("machines_handler")) -@perf_test(db_only=True) [email protected]_db def test_perf_list_machines_MachinesHander_direct_call(perf, admin): handler = MachinesHandler() request = make_HttpRequest() @@ -40,7 +40,7 @@ def test_perf_list_machines_MachinesHander_direct_call(perf, admin): emitter.render(request) -@perf_test(db_only=True) [email protected]_db def test_perf_list_machines_MachinesHander_only_objects(perf, admin): handler = MachinesHandler() request = make_HttpRequest() diff --git 
a/src/maasperf/tests/maasserver/models/test_machine.py b/src/maasperf/tests/maasserver/models/test_machine.py index 4a183c1..2273294 100644 --- a/src/maasperf/tests/maasserver/models/test_machine.py +++ b/src/maasperf/tests/maasserver/models/test_machine.py @@ -1,19 +1,22 @@ # Copyright 2022 Canonical Ltd. This software is licensed under the # GNU Affero General Public License version 3 (see the file LICENSE). +from django.db import transaction +import pytest + from maasserver.models import Machine -from maastesting.perftest import perf_test -@perf_test(commit_transaction=True, db_only=True) [email protected]_db def test_perf_create_machines(perf, factory): # TODO use create machines script with perf.record("test_perf_create_machines"): - for _ in range(30): - factory.make_Machine() + with transaction.atomic(): + for _ in range(30): + factory.make_Machine() -@perf_test(db_only=True) [email protected]_db def test_perf_list_machines(perf): with perf.record("test_perf_list_machines"): list(Machine.objects.all()) diff --git a/src/maasperf/tests/maasserver/websockets/test_machines.py b/src/maasperf/tests/maasserver/websockets/test_machines.py index a642214..a182a22 100644 --- a/src/maasperf/tests/maasserver/websockets/test_machines.py +++ b/src/maasperf/tests/maasserver/websockets/test_machines.py @@ -1,12 +1,13 @@ # Copyright 2022 Canonical Ltd. This software is licensed under the # GNU Affero General Public License version 3 (see the file LICENSE). +import pytest + from maasserver.models import Machine from maasserver.websockets.handlers.machine import MachineHandler -from maastesting.perftest import perf_test -@perf_test(db_only=True) [email protected]_db def test_perf_list_machines_Websocket_endpoint(perf, admin): # This should test the websocket calls that are used to load # the machine listing page on the initial page load. 
@@ -26,7 +27,7 @@ def test_perf_list_machines_Websocket_endpoint(perf, admin): ws_handler.list(params) -@perf_test(db_only=True) [email protected]_db def test_perf_list_machines_Websocket_endpoint_all(perf, admin): # How long would it take to list all the machines using the # websocket without any pagination. diff --git a/src/maastesting/perftest.py b/src/maastesting/perftest.py index 435b854..d10e3dc 100644 --- a/src/maastesting/perftest.py +++ b/src/maastesting/perftest.py @@ -6,7 +6,6 @@ from contextlib import contextmanager, ExitStack from cProfile import Profile -from functools import wraps import json import os import random @@ -15,7 +14,6 @@ import time from pytest import fixture from pytest import main as pytest_main -from pytest import mark, skip from maastesting.fixtures import MAASDataFixture, MAASRootFixture @@ -89,36 +87,6 @@ class PerfTester: json.dump(self.results, output) -def perf_test(commit_transaction=False, db_only=False): - def inner(fn): - @wraps(fn) - @mark.django_db - def wrapper(*args, **kwargs): - from django.db import transaction - - django_loaded = ( - os.environ.get("DJANGO_SETTINGS_MODULE") is not None - ) - - if db_only and not django_loaded: - skip("skipping database test") - - save_point = None - if django_loaded: - save_point = transaction.savepoint() - - fn(*args, **kwargs) - - if save_point and commit_transaction: - transaction.savepoint_commit(save_point) - elif save_point: - transaction.savepoint_rollback(save_point) - - return wrapper - - return inner - - def run_perf_tests(env): rand_seed = os.environ.get("MAAS_RAND_SEED") random.seed(rand_seed)
-- Mailing list: https://launchpad.net/~sts-sponsors Post to : [email protected] Unsubscribe : https://launchpad.net/~sts-sponsors More help : https://help.launchpad.net/ListHelp

