|
@@ -10,13 +10,14 @@ from sentry.seer.utils import BreakpointData
|
|
|
from sentry.sentry_metrics.use_case_id_registry import UseCaseID
|
|
|
from sentry.snuba.discover import zerofill
|
|
|
from sentry.snuba.metrics.naming_layer.mri import TransactionMRI
|
|
|
-from sentry.statistical_detectors.detector import DetectorPayload
|
|
|
+from sentry.statistical_detectors.detector import DetectorPayload, TrendType
|
|
|
from sentry.tasks.statistical_detectors import (
|
|
|
detect_function_change_points,
|
|
|
detect_function_trends,
|
|
|
detect_transaction_change_points,
|
|
|
detect_transaction_trends,
|
|
|
emit_function_regression_issue,
|
|
|
+ limit_regressions_by_project,
|
|
|
query_functions,
|
|
|
query_transactions,
|
|
|
query_transactions_timeseries,
|
|
@@ -254,7 +255,6 @@ def test_detect_transaction_trends(
|
|
|
query_transactions,
|
|
|
timestamp,
|
|
|
project,
|
|
|
- organization,
|
|
|
):
|
|
|
n = 20
|
|
|
timestamps = [timestamp - timedelta(hours=n - i) for i in range(n)]
|
|
@@ -279,10 +279,126 @@ def test_detect_transaction_trends(
|
|
|
}
|
|
|
), TaskRunner():
|
|
|
for ts in timestamps:
|
|
|
- detect_transaction_trends([organization.id], [project.id], ts)
|
|
|
+ detect_transaction_trends([project.organization.id], [project.id], ts)
|
|
|
assert detect_transaction_change_points.apply_async.called
|
|
|
|
|
|
|
|
|
@pytest.mark.parametrize(
    ["ratelimit", "expected_calls"],
    [(-1, 3), (0, 0), (1, 1), (2, 2), (3, 3)],
)
@mock.patch("sentry.tasks.statistical_detectors.query_transactions")
@mock.patch("sentry.tasks.statistical_detectors.detect_transaction_change_points")
@django_db_all
def test_detect_transaction_trends_ratelimit(
    detect_transaction_change_points,
    query_transactions,
    ratelimit,
    expected_calls,
    timestamp,
    project,
):
    """Verify the EMA ratelimit caps regressions forwarded per project.

    Three transactions all regress at the midpoint of the series with
    strictly increasing magnitudes (301 < 302 < 303), so when the limit
    keeps only the worst ``ratelimit`` regressions the surviving groups
    are the tail of ``["/1", "/2", "/3"]``.  A negative ratelimit means
    "unlimited".
    """
    # NOTE: unlike the non-ratelimit variant, this test never needed the
    # `organization` fixture — organization id comes from `project`.
    n = 20
    timestamps = [timestamp - timedelta(hours=n - i) for i in range(n)]

    # One snapshot per hour; every transaction jumps from 100 to 300+k at
    # the midpoint, where k makes the regression sizes distinct.
    query_transactions.side_effect = [
        [
            DetectorPayload(
                project_id=project.id,
                group=f"/{k}",
                count=100,
                value=100 if i < n / 2 else 300 + k,
                timestamp=ts,
            )
            for k in range(1, 4)
        ]
        for i, ts in enumerate(timestamps)
    ]

    with override_options(
        {
            "statistical_detectors.enable": True,
            "statistical_detectors.enable.projects.performance": [project.id],
            "statistical_detectors.ratelimit.ema": ratelimit,
        }
    ), TaskRunner():
        for ts in timestamps:
            detect_transaction_trends([project.organization.id], [project.id], ts)

    if expected_calls > 0:
        # The worst regressions win, so expect the last `expected_calls`
        # groups in ascending-severity order.
        detect_transaction_change_points.apply_async.assert_has_calls(
            [
                mock.call(
                    args=[
                        [(project.id, "/1"), (project.id, "/2"), (project.id, "/3")][
                            -expected_calls:
                        ],
                        timestamp + timedelta(hours=5),
                    ],
                    countdown=12 * 60 * 60,
                ),
            ],
        )
        assert detect_transaction_change_points.apply_async.call_count == 1
    else:
        assert detect_transaction_change_points.apply_async.call_count == 0
|
|
|
+
|
|
|
+
|
|
|
@pytest.mark.parametrize(
    ["ratelimit", "expected_idx"],
    [
        pytest.param(-1, 4, id="all"),
        pytest.param(0, 0, id="zero per project"),
        pytest.param(1, 2, id="one per project"),
        pytest.param(2, 3, id="two per project"),
        pytest.param(3, 4, id="three per project"),
    ],
)
def test_limit_regressions_by_project(ratelimit, timestamp, expected_idx):
    """Feed a fixed stream of trends through the per-project limiter and
    check that only the expected regressions survive.

    Projects 1..3 contribute 1..3 payloads respectively; only entries
    tagged ``TrendType.Regressed`` are eligible, and a negative ratelimit
    disables the cap entirely.
    """

    def make_payload(project_id, group):
        # Encode (project, group) into a unique integer, e.g. (3, 2) -> 32,
        # so count/value are distinct per payload.
        encoded = int(f"{project_id}_{group}")
        return DetectorPayload(
            project_id=project_id,
            group=f"{project_id}_{group}",
            count=encoded,
            value=encoded,
            timestamp=timestamp,
        )

    payloads = {
        (pid, grp): make_payload(pid, grp)
        for pid in range(1, 4)
        for grp in range(1, pid + 1)
    }

    # (trend type, regression index within the project, payload)
    trend_stream = iter(
        [
            (None, 0, payloads[(1, 1)]),
            (TrendType.Improved, 0, payloads[(2, 1)]),
            (TrendType.Regressed, 0, payloads[(2, 2)]),
            (TrendType.Regressed, 0, payloads[(3, 1)]),
            (TrendType.Regressed, 1, payloads[(3, 2)]),
            (TrendType.Regressed, 2, payloads[(3, 3)]),
        ]
    )

    expected = {
        payloads[key] for key in [(2, 2), (3, 3), (3, 2), (3, 1)][:expected_idx]
    }
    assert set(limit_regressions_by_project(trend_stream, ratelimit)) == expected
|
|
|
+
|
|
|
+
|
|
|
@mock.patch("sentry.tasks.statistical_detectors.query_functions")
|
|
|
@mock.patch("sentry.tasks.statistical_detectors.detect_function_change_points")
|
|
|
@django_db_all
|
|
@@ -319,6 +435,78 @@ def test_detect_function_trends(
|
|
|
assert detect_function_change_points.apply_async.called
|
|
|
|
|
|
|
|
|
@pytest.mark.parametrize(
    ["ratelimit", "expected_calls"],
    [(-1, 3), (0, 0), (1, 1), (2, 2), (3, 3)],
)
@mock.patch("sentry.tasks.statistical_detectors.query_functions")
@mock.patch("sentry.tasks.statistical_detectors.detect_function_change_points")
@django_db_all
def test_detect_function_trends_ratelimit(
    detect_function_change_points,
    query_functions,
    ratelimit,
    expected_calls,
    timestamp,
    project,
):
    """Function-trend counterpart of the transaction ratelimit test.

    All three function fingerprints regress halfway through the series
    with distinct magnitudes (301 < 302 < 303); the EMA ratelimit should
    keep only the worst ``ratelimit`` of them (negative means no cap).
    """
    hours = 20
    timestamps = [timestamp - timedelta(hours=hours - i) for i in range(hours)]

    # One payload per fingerprint per hour; value jumps from 100 to
    # 300 + fingerprint at the midpoint (i.e. 301, 302, 303).
    query_functions.side_effect = [
        [
            DetectorPayload(
                project_id=project.id,
                group=fingerprint,
                count=100,
                value=100 if idx < hours / 2 else 300 + fingerprint,
                timestamp=ts,
            )
            for fingerprint in (1, 2, 3)
        ]
        for idx, ts in enumerate(timestamps)
    ]

    options = {
        "statistical_detectors.enable": True,
        "statistical_detectors.enable.projects.profiling": [project.id],
        "statistical_detectors.ratelimit.ema": ratelimit,
    }
    with override_options(options), TaskRunner():
        for ts in timestamps:
            detect_function_trends([project.id], ts)

    if expected_calls <= 0:
        assert detect_function_change_points.apply_async.call_count == 0
    else:
        # Severity ordering means the surviving fingerprints are the tail
        # of [1, 2, 3].
        surviving = [(project.id, 1), (project.id, 2), (project.id, 3)][
            -expected_calls:
        ]
        detect_function_change_points.apply_async.assert_has_calls(
            [
                mock.call(
                    args=[surviving, timestamp + timedelta(hours=5)],
                    countdown=12 * 60 * 60,
                ),
            ],
        )
        assert detect_function_change_points.apply_async.call_count == 1
|
|
|
+
|
|
|
+
|
|
|
@mock.patch("sentry.tasks.statistical_detectors.emit_function_regression_issue")
|
|
|
@mock.patch("sentry.tasks.statistical_detectors.detect_breakpoints")
|
|
|
@mock.patch("sentry.tasks.statistical_detectors.raw_snql_query")
|