from __future__ import annotations

import responses
import sentry_kafka_schemas

from sentry.sentry_metrics.use_case_id_registry import REVERSE_METRIC_PATH_MAPPING, UseCaseID
from sentry.utils.dates import to_timestamp

__all__ = (
    "TestCase",
    "TransactionTestCase",
    "APITestCase",
    "TwoFactorAPITestCase",
    "AuthProviderTestCase",
    "RuleTestCase",
    "PermissionTestCase",
    "PluginTestCase",
    "CliTestCase",
    "AcceptanceTestCase",
    "IntegrationTestCase",
    "SnubaTestCase",
    "BaseMetricsTestCase",
    "BaseMetricsLayerTestCase",
    "BaseIncidentsTest",
    "IntegrationRepositoryTestCase",
    "ReleaseCommitPatchTest",
    "SetRefsTestCase",
    "OrganizationDashboardWidgetTestCase",
    "SCIMTestCase",
    "SCIMAzureTestCase",
    "MetricsEnhancedPerformanceTestCase",
    "MetricsAPIBaseTestCase",
    "OrganizationMetricMetaIntegrationTestCase",
    "ReplaysAcceptanceTestCase",
    "ReplaysSnubaTestCase",
    "MonitorTestCase",
    "MonitorIngestTestCase",
)

import hashlib
import inspect
import os.path
import time
from contextlib import contextmanager
from datetime import datetime, timedelta
from io import BytesIO
from typing import Dict, List, Literal, Optional, Sequence, Union
from unittest import mock
from urllib.parse import urlencode
from uuid import uuid4
from zlib import compress

import pytest
import pytz
import requests
from click.testing import CliRunner
from django.conf import settings
from django.contrib.auth import login
from django.contrib.auth.models import AnonymousUser
from django.core import signing
from django.core.cache import cache
from django.db import DEFAULT_DB_ALIAS, connection, connections
from django.db.migrations.executor import MigrationExecutor
from django.http import HttpRequest
from django.test import TestCase, TransactionTestCase, override_settings
from django.test.utils import CaptureQueriesContext
from django.urls import reverse
from django.utils import timezone
from django.utils.functional import cached_property
from pkg_resources import iter_entry_points
from rest_framework import status
from rest_framework.test import APITestCase as BaseAPITestCase
from sentry_relay.consts import SPAN_STATUS_NAME_TO_CODE
from snuba_sdk import Granularity, Limit, Offset
from snuba_sdk.conditions import BooleanCondition, Condition, ConditionGroup

from sentry import auth, eventstore
from sentry.auth.authenticators import TotpInterface
from sentry.auth.providers.dummy import DummyProvider
from sentry.auth.providers.saml2.activedirectory.apps import ACTIVE_DIRECTORY_PROVIDER_NAME
from sentry.auth.superuser import COOKIE_DOMAIN as SU_COOKIE_DOMAIN
from sentry.auth.superuser import COOKIE_NAME as SU_COOKIE_NAME
from sentry.auth.superuser import COOKIE_PATH as SU_COOKIE_PATH
from sentry.auth.superuser import COOKIE_SALT as SU_COOKIE_SALT
from sentry.auth.superuser import COOKIE_SECURE as SU_COOKIE_SECURE
from sentry.auth.superuser import ORG_ID as SU_ORG_ID
from sentry.auth.superuser import Superuser
from sentry.event_manager import EventManager
from sentry.eventstore.models import Event
from sentry.eventstream.snuba import SnubaEventStream
from sentry.issues.grouptype import NoiseConfig, PerformanceNPlusOneGroupType
from sentry.issues.ingest import send_issue_occurrence_to_eventstream
from sentry.mail import mail_adapter
from sentry.mediators.project_rules import Creator
from sentry.models import ApiToken
from sentry.models import AuthProvider as AuthProviderModel
from sentry.models import (
    Commit,
    CommitAuthor,
    Dashboard,
    DashboardWidget,
    DashboardWidgetDisplayTypes,
    DashboardWidgetQuery,
    DeletedOrganization,
    Deploy,
    Environment,
    File,
    GroupMeta,
    Identity,
    IdentityProvider,
    IdentityStatus,
    NotificationSetting,
    Organization,
    Project,
    ProjectOption,
    Release,
    ReleaseCommit,
    Repository,
    RuleSource,
    UserEmail,
    UserOption,
)
from sentry.monitors.models import Monitor, MonitorEnvironment, MonitorType, ScheduleType
from sentry.notifications.types import NotificationSettingOptionValues, NotificationSettingTypes
from sentry.plugins.base import plugins
from sentry.replays.models import ReplayRecordingSegment
from sentry.search.events.constants import (
    METRIC_FRUSTRATED_TAG_VALUE,
    METRIC_SATISFACTION_TAG_KEY,
    METRIC_SATISFIED_TAG_VALUE,
    METRIC_TOLERATED_TAG_VALUE,
    METRICS_MAP,
)
from sentry.sentry_metrics import indexer
from sentry.sentry_metrics.configuration import UseCaseKey
from sentry.snuba.metrics.datasource import get_series
from sentry.tagstore.snuba import SnubaTagStorage
from sentry.testutils.factories import get_fixture_path
from sentry.testutils.helpers.datetime import before_now, iso_format
from sentry.testutils.helpers.notifications import TEST_ISSUE_OCCURRENCE
from sentry.testutils.helpers.slack import install_slack
from sentry.types.integrations import ExternalProviders
from sentry.utils import json
from sentry.utils.auth import SsoSession
from sentry.utils.json import dumps_htmlsafe
from sentry.utils.performance_issues.performance_detection import detect_performance_problems
from sentry.utils.pytest.selenium import Browser
from sentry.utils.retries import TimedRetryPolicy
from sentry.utils.samples import load_data
from sentry.utils.snuba import _snuba_pool

from ..services.hybrid_cloud.actor import RpcActor
from ..snuba.metrics import (
    MetricConditionField,
    MetricField,
    MetricGroupByField,
    MetricOrderByField,
    MetricsQuery,
    get_date_range,
)
from ..snuba.metrics.naming_layer.mri import SessionMRI, TransactionMRI, parse_mri
from . import assert_status_code
from .factories import Factories
from .fixtures import Fixtures
from .helpers import (
    AuthProvider,
    Feature,
    TaskRunner,
    apply_feature_flag_on_cls,
    override_options,
    parse_queries,
)
from .silo import exempt_from_silo_limits
from .skips import requires_snuba

DEFAULT_USER_AGENT = "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36"

DETECT_TESTCASE_MISUSE = os.environ.get("SENTRY_DETECT_TESTCASE_MISUSE") == "1"
SILENCE_MIXED_TESTCASE_MISUSE = os.environ.get("SENTRY_SILENCE_MIXED_TESTCASE_MISUSE") == "1"

SessionOrTransactionMRI = Union[SessionMRI, TransactionMRI]

class BaseTestCase(Fixtures):
    def assertRequiresAuthentication(self, path, method="GET"):
        resp = getattr(self.client, method.lower())(path)
        assert resp.status_code == 302
        assert resp["Location"].startswith("http://testserver" + reverse("sentry-login"))

    @pytest.fixture(autouse=True)
    def setup_dummy_auth_provider(self):
        auth.register("dummy", DummyProvider)
        self.addCleanup(auth.unregister, "dummy", DummyProvider)

    def tasks(self):
        return TaskRunner()

    @pytest.fixture(autouse=True)
    def polyfill_capture_on_commit_callbacks(self, django_capture_on_commit_callbacks):
        """
        https://pytest-django.readthedocs.io/en/latest/helpers.html#django_capture_on_commit_callbacks

        pytest-django comes with its own polyfill of this Django helper for
        older Django versions, so we're using that.
        """
        self.capture_on_commit_callbacks = django_capture_on_commit_callbacks

    @pytest.fixture(autouse=True)
    def expose_stale_database_reads(self, stale_database_reads):
        self.stale_database_reads = stale_database_reads
    def feature(self, names):
        """
        >>> with self.feature({'feature:name': True}):
        >>>     # ...
        """
        return Feature(names)

    def auth_provider(self, name, cls):
        """
        >>> with self.auth_provider('name', Provider):
        >>>     # ...
        """
        return AuthProvider(name, cls)
    def save_session(self):
        self.session.save()
        self.save_cookie(
            name=settings.SESSION_COOKIE_NAME,
            value=self.session.session_key,
            max_age=None,
            path="/",
            domain=settings.SESSION_COOKIE_DOMAIN,
            secure=settings.SESSION_COOKIE_SECURE or None,
            expires=None,
        )

    def save_cookie(self, name, value, **params):
        self.client.cookies[name] = value
        self.client.cookies[name].update({k.replace("_", "-"): v for k, v in params.items()})

    def make_request(
        self, user=None, auth=None, method=None, is_superuser=False, path="/"
    ) -> HttpRequest:
        request = HttpRequest()
        if method:
            request.method = method
        request.path = path
        request.META["REMOTE_ADDR"] = "127.0.0.1"
        request.META["SERVER_NAME"] = "testserver"
        request.META["SERVER_PORT"] = 80
        # order matters here, session -> user -> other things
        request.session = self.session
        request.auth = auth
        request.user = user or AnonymousUser()
        # must happen after request.user/request.session is populated
        request.superuser = Superuser(request)
        if is_superuser:
            # XXX: this is gross, but it's a one-off and apis change only once in a great while
            request.superuser.set_logged_in(user)
        request.is_superuser = lambda: request.superuser.is_active
        request.successful_authenticator = None
        return request
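
    # Illustrative usage (a sketch; `self.user` comes from the Fixtures mixin):
    #
    #     request = self.make_request(user=self.user, method="GET", path="/")
    #
    # The returned HttpRequest already carries a session and a Superuser
    # wrapper, so it can be handed directly to endpoint or permission code.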

    # TODO(dcramer): ideally superuser_sso would be False by default, but that would require
    # a lot of tests changing
    @TimedRetryPolicy.wrap(timeout=5)
    def login_as(
        self, user, organization_id=None, organization_ids=None, superuser=False, superuser_sso=True
    ):
        user.backend = settings.AUTHENTICATION_BACKENDS[0]
        request = self.make_request()
        with exempt_from_silo_limits():
            login(request, user)
        request.user = user

        if organization_ids is None:
            organization_ids = set()
        else:
            organization_ids = set(organization_ids)
        if superuser and superuser_sso is not False:
            if SU_ORG_ID:
                organization_ids.add(SU_ORG_ID)
        if organization_id:
            organization_ids.add(organization_id)

        # TODO(dcramer): ideally this would get abstracted
        if organization_ids:
            for o in organization_ids:
                sso_session = SsoSession.create(o)
                self.session[sso_session.session_key] = sso_session.to_dict()

        # logging in implicitly binds superuser, but for test cases we
        # want that action to be explicit to avoid accidentally testing
        # superuser-only code
        if not superuser:
            # XXX(dcramer): we're calling the internal method to avoid logging
            request.superuser._set_logged_out()
        elif request.user.is_superuser and superuser:
            request.superuser.set_logged_in(request.user)
            # XXX(dcramer): awful hack to ensure future attempts to instantiate
            # the Superuser object are successful
            self.save_cookie(
                name=SU_COOKIE_NAME,
                value=signing.get_cookie_signer(salt=SU_COOKIE_NAME + SU_COOKIE_SALT).sign(
                    request.superuser.token
                ),
                max_age=None,
                path=SU_COOKIE_PATH,
                domain=SU_COOKIE_DOMAIN,
                secure=SU_COOKIE_SECURE or None,
                expires=None,
            )

        # Save the session values.
        self.save_session()
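
    # Illustrative usage in a test body (a sketch; user-creation helpers come
    # from the Fixtures/Factories mixins used elsewhere in this file):
    #
    #     user = self.create_user("admin@example.com", is_superuser=True)
    #     self.login_as(user, superuser=True)
    #     resp = self.client.get("/")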

    def load_fixture(self, filepath):
        with open(get_fixture_path(filepath), "rb") as fp:
            return fp.read()

    def _pre_setup(self):
        super()._pre_setup()
        cache.clear()
        ProjectOption.objects.clear_local_cache()
        GroupMeta.objects.clear_local_cache()

    def _post_teardown(self):
        super()._post_teardown()

    def options(self, options):
        """
        A context manager that temporarily sets a global option and reverts
        back to the original value when exiting the context.
        """
        return override_options(options)

    def assert_valid_deleted_log(self, deleted_log, original_object):
        assert deleted_log is not None

        assert deleted_log.name == original_object.name
        assert deleted_log.slug == original_object.slug

        if not isinstance(deleted_log, DeletedOrganization):
            assert deleted_log.organization_id == original_object.organization.id
            assert deleted_log.organization_name == original_object.organization.name
            assert deleted_log.organization_slug == original_object.organization.slug

        assert deleted_log.date_created == original_object.date_added
        assert deleted_log.date_deleted >= deleted_log.date_created

    def assertWriteQueries(self, queries, debug=False, *args, **kwargs):
        func = kwargs.pop("func", None)
        using = kwargs.pop("using", DEFAULT_DB_ALIAS)
        conn = connections[using]

        context = _AssertQueriesContext(self, queries, debug, conn)
        if func is None:
            return context

        with context:
            func(*args, **kwargs)
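
    # Illustrative usage: assert the expected number of write queries per
    # table (the table name below is an example, not prescriptive):
    #
    #     with self.assertWriteQueries({"sentry_project": 1}):
    #         self.project.save()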

    def get_mock_uuid(self):
        class uuid:
            hex = "abc123"
            bytes = b"\x00\x01\x02"

        return uuid


class _AssertQueriesContext(CaptureQueriesContext):
    def __init__(self, test_case, queries, debug, connection):
        self.test_case = test_case
        self.queries = queries
        self.debug = debug
        super().__init__(connection)

    def __exit__(self, exc_type, exc_value, traceback):
        super().__exit__(exc_type, exc_value, traceback)
        if exc_type is not None:
            return

        parsed_queries = parse_queries(self.captured_queries)

        if self.debug:
            import pprint

            pprint.pprint("====================== Raw Queries ======================")
            pprint.pprint(self.captured_queries)
            pprint.pprint("====================== Table writes ======================")
            pprint.pprint(parsed_queries)

        for table, num in parsed_queries.items():
            expected = self.queries.get(table, 0)
            if expected == 0:
                import pprint

                pprint.pprint(
                    "WARNING: no query against %s emitted, add debug=True to see all the queries"
                    % (table)
                )
            else:
                self.test_case.assertTrue(
                    num == expected,
                    "%d write queries expected on `%s`, got %d, add debug=True to see all the queries"
                    % (expected, table, num),
                )

        for table, num in self.queries.items():
            executed = parsed_queries.get(table, None)
            self.test_case.assertFalse(
                executed is None,
                "no query against %s emitted, add debug=True to see all the queries" % (table),
            )


@override_settings(ROOT_URLCONF="sentry.web.urls")
class TestCase(BaseTestCase, TestCase):
    # Ensure that testcases that ask for DB setup actually make use of the
    # DB. If they don't, they're wasting CI time.
    if DETECT_TESTCASE_MISUSE:

        @pytest.fixture(autouse=True, scope="class")
        def _require_db_usage(self, request):
            class State:
                used_db = {}
                base = request.cls

            state = State()

            yield state

            did_not_use = set()
            did_use = set()
            for name, used in state.used_db.items():
                if used:
                    did_use.add(name)
                else:
                    did_not_use.add(name)

            if did_not_use and not did_use:
                pytest.fail(
                    f"none of the test functions in {state.base} used the DB! Use `unittest.TestCase` "
                    f"instead of `sentry.testutils.TestCase` for those kinds of tests."
                )
            elif did_not_use and did_use and not SILENCE_MIXED_TESTCASE_MISUSE:
                pytest.fail(
                    f"Some of the test functions in {state.base} used the DB and some did not! "
                    f"test functions using the db: {did_use}\n"
                    f"Use `unittest.TestCase` instead of `sentry.testutils.TestCase` for the tests not using the db."
                )

        @pytest.fixture(autouse=True, scope="function")
        def _check_function_for_db(self, request, monkeypatch, _require_db_usage):
            from django.db.backends.base.base import BaseDatabaseWrapper

            real_ensure_connection = BaseDatabaseWrapper.ensure_connection

            state = _require_db_usage

            def ensure_connection(*args, **kwargs):
                for info in inspect.stack():
                    frame = info.frame
                    try:
                        first_arg_name = frame.f_code.co_varnames[0]
                        first_arg = frame.f_locals[first_arg_name]
                    except LookupError:
                        continue

                    # make an exact check here for two reasons. One is that this is
                    # good enough as we do not expect subclasses, secondly however because
                    # it turns out doing an isinstance check on untrusted input can cause
                    # bad things to happen because it's hookable. In particular this
                    # blows through max recursion limits here if it encounters certain
                    # types of broken lazy proxy objects.
                    if type(first_arg) is state.base and info.function in state.used_db:
                        state.used_db[info.function] = True
                        break

                return real_ensure_connection(*args, **kwargs)

            monkeypatch.setattr(BaseDatabaseWrapper, "ensure_connection", ensure_connection)
            state.used_db[request.function.__name__] = False
            yield


class TransactionTestCase(BaseTestCase, TransactionTestCase):
    pass


class PerformanceIssueTestCase(BaseTestCase):
    def create_performance_issue(
        self,
        tags=None,
        contexts=None,
        fingerprint=None,
        transaction=None,
        event_data=None,
        issue_type=None,
        noise_limit=0,
        project_id=None,
        detector_option="performance.issues.n_plus_one_db.problem-creation",
        user_data=None,
    ):
        if issue_type is None:
            issue_type = PerformanceNPlusOneGroupType
        if event_data is None:
            event_data = load_data(
                "transaction-n-plus-one",
                timestamp=before_now(minutes=10),
            )
        if tags is not None:
            event_data["tags"] = tags
        if contexts is not None:
            event_data["contexts"] = contexts
        if transaction:
            event_data["transaction"] = transaction
        if project_id is None:
            project_id = self.project.id
        if user_data:
            event_data["user"] = user_data

        perf_event_manager = EventManager(event_data)
        perf_event_manager.normalize()

        def detect_performance_problems_interceptor(data: Event, project: Project):
            perf_problems = detect_performance_problems(data, project)
            if fingerprint:
                for perf_problem in perf_problems:
                    perf_problem.fingerprint = fingerprint
            return perf_problems

        with mock.patch(
            "sentry.issues.ingest.send_issue_occurrence_to_eventstream",
            side_effect=send_issue_occurrence_to_eventstream,
        ) as mock_eventstream, mock.patch(
            "sentry.event_manager.detect_performance_problems",
            side_effect=detect_performance_problems_interceptor,
        ), mock.patch.object(
            issue_type, "noise_config", new=NoiseConfig(noise_limit, timedelta(minutes=1))
        ), override_options(
            {"performance.issues.all.problem-detection": 1.0, detector_option: 1.0}
        ), self.feature(
            [
                "projects:performance-suspect-spans-ingestion",
            ]
        ):
            event = perf_event_manager.save(project_id)
            if mock_eventstream.call_args:
                event = event.for_group(mock_eventstream.call_args[0][2].group)
                event.occurrence = mock_eventstream.call_args[0][1]
            return event


class APITestCase(BaseTestCase, BaseAPITestCase):
    """
    Extend APITestCase to inherit access to `client`, an object with methods
    that simulate API calls to Sentry, and the helper `get_response`, which
    combines and simplifies a lot of tedious parts of making API calls in tests.

    When creating API tests, use a new class per endpoint-method pair. The class
    must set the string `endpoint`.
    """

    method = "get"

    @property
    def endpoint(self):
        raise NotImplementedError(f"implement for {type(self).__module__}.{type(self).__name__}")

    def get_response(self, *args, **params):
        """
        Simulate an API call to the test case's URI and method.

        :param params:
            Note: These names are intentionally a little funny to prevent name
            collisions with real API arguments.
            * extra_headers: (Optional) Dict mapping keys to values that will be
              passed as request headers.
            * qs_params: (Optional) Dict mapping keys to values that will be
              url-encoded into an API call's query string.
            * raw_data: (Optional) Sometimes we want to precompute the JSON body.
        :returns Response object
        """
        url = reverse(self.endpoint, args=args)
        # In some cases we want to pass querystring params to put/post, handle
        # this here.
        if "qs_params" in params:
            query_string = urlencode(params.pop("qs_params"), doseq=True)
            url = f"{url}?{query_string}"

        headers = params.pop("extra_headers", {})
        raw_data = params.pop("raw_data", None)
        if raw_data and isinstance(raw_data, bytes):
            raw_data = raw_data.decode("utf-8")
        if raw_data and isinstance(raw_data, str):
            raw_data = json.loads(raw_data)
        data = raw_data or params
        method = params.pop("method", self.method).lower()

        return getattr(self.client, method)(url, format="json", data=data, **headers)

    def get_success_response(self, *args, **params):
        """
        Call `get_response` (see above) and assert the response's status code.

        :param params:
            * status_code: (Optional) Assert that the response's status code is
              a specific code. Omit to assert any successful status_code.
        :returns Response object
        """
        status_code = params.pop("status_code", None)

        if status_code and status_code >= 400:
            raise Exception("status_code must be < 400")

        method = params.pop("method", self.method).lower()

        response = self.get_response(*args, method=method, **params)

        if status_code:
            assert_status_code(response, status_code)
        elif method == "get":
            assert_status_code(response, status.HTTP_200_OK)
        # TODO(mgaeta): Add the other methods.
        # elif method == "post":
        #     assert_status_code(response, status.HTTP_201_CREATED)
        elif method == "put":
            assert_status_code(response, status.HTTP_200_OK)
        elif method == "delete":
            assert_status_code(response, status.HTTP_204_NO_CONTENT)
        else:
            # TODO(mgaeta): Add other methods.
            assert_status_code(response, 200, 300)

        return response
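
    # Sketch of a typical subclass (the test class name is illustrative;
    # `endpoint` must be a URL name resolvable by `reverse`):
    #
    #     class OrganizationDetailsGetTest(APITestCase):
    #         endpoint = "sentry-api-0-organization-details"
    #         method = "get"
    #
    #         def test_simple(self):
    #             self.login_as(self.user)
    #             self.get_success_response(self.organization.slug)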

    def get_error_response(self, *args, **params):
        """
        Call `get_response` (see above) and assert that the response's status
        code is an error code. Basically it's syntactic sugar.

        :param params:
            * status_code: (Optional) Assert that the response's status code is
              a specific error code. Omit to assert any error status_code.
        :returns Response object
        """
        status_code = params.pop("status_code", None)

        if status_code and status_code < 400:
            raise Exception("status_code must be >= 400 (an error status code)")

        response = self.get_response(*args, **params)

        if status_code:
            assert_status_code(response, status_code)
        else:
            assert_status_code(response, 400, 600)

        return response

    def get_cursor_headers(self, response):
        return [
            link["cursor"]
            for link in requests.utils.parse_header_links(
                response.get("link").rstrip(">").replace(">,<", ",<")
            )
        ]


class TwoFactorAPITestCase(APITestCase):
    @cached_property
    def path_2fa(self):
        return reverse("sentry-account-settings-security")

    def enable_org_2fa(self, organization):
        organization.flags.require_2fa = True
        organization.save()

    def api_enable_org_2fa(self, organization, user):
        self.login_as(user)
        url = reverse(
            "sentry-api-0-organization-details", kwargs={"organization_slug": organization.slug}
        )
        return self.client.put(url, data={"require2FA": True})

    def api_disable_org_2fa(self, organization, user):
        url = reverse(
            "sentry-api-0-organization-details", kwargs={"organization_slug": organization.slug}
        )
        return self.client.put(url, data={"require2FA": False})

    def assert_can_enable_org_2fa(self, organization, user, status_code=200):
        self.__helper_enable_organization_2fa(organization, user, status_code)

    def assert_cannot_enable_org_2fa(self, organization, user, status_code, err_msg=None):
        self.__helper_enable_organization_2fa(organization, user, status_code, err_msg)

    def __helper_enable_organization_2fa(self, organization, user, status_code, err_msg=None):
        response = self.api_enable_org_2fa(organization, user)
        assert response.status_code == status_code
        if err_msg:
            assert err_msg.encode("utf-8") in response.content
        organization = Organization.objects.get(id=organization.id)

        if 200 <= status_code < 300:
            assert organization.flags.require_2fa
        else:
            assert not organization.flags.require_2fa

    def add_2fa_users_to_org(self, organization, num_of_users=10, num_with_2fa=5):
        non_compliant_members = []
        for num in range(0, num_of_users):
            user = self.create_user("foo_%s@example.com" % num)
            self.create_member(organization=organization, user=user)

            if num_with_2fa:
                TotpInterface().enroll(user)
                num_with_2fa -= 1
            else:
                non_compliant_members.append(user.email)
        return non_compliant_members


class AuthProviderTestCase(TestCase):
    provider = DummyProvider
    provider_name = "dummy"

    def setUp(self):
        super().setUp()
        # TestCase automatically sets up dummy provider
        if self.provider_name != "dummy" or self.provider != DummyProvider:
            auth.register(self.provider_name, self.provider)
            self.addCleanup(auth.unregister, self.provider_name, self.provider)


class RuleTestCase(TestCase):
    @property
    def rule_cls(self):
        raise NotImplementedError(f"implement for {type(self).__module__}.{type(self).__name__}")

    def get_event(self):
        return self.event

    def get_rule(self, **kwargs):
        kwargs.setdefault("project", self.project)
        kwargs.setdefault("data", {})
        return self.rule_cls(**kwargs)

    def get_state(self, **kwargs):
        from sentry.rules import EventState

        kwargs.setdefault("is_new", True)
        kwargs.setdefault("is_regression", True)
        kwargs.setdefault("is_new_group_environment", True)
        kwargs.setdefault("has_reappeared", True)
        return EventState(**kwargs)

    def assertPasses(self, rule, event=None, **kwargs):
        if event is None:
            event = self.event
        state = self.get_state(**kwargs)
        assert rule.passes(event, state) is True

    def assertDoesNotPass(self, rule, event=None, **kwargs):
        if event is None:
            event = self.event
        state = self.get_state(**kwargs)
        assert rule.passes(event, state) is False
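
    # Sketch of a typical subclass (`MyRule` is a hypothetical rule class):
    #
    #     class MyRuleTest(RuleTestCase):
    #         rule_cls = MyRule
    #
    #         def test_new_issue_passes(self):
    #             rule = self.get_rule(data={})
    #             self.assertPasses(rule, is_new=True)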


class PermissionTestCase(TestCase):
    def setUp(self):
        super().setUp()
        self.owner = self.create_user(is_superuser=False)
        self.organization = self.create_organization(
            owner=self.owner, flags=0  # disable default allow_joinleave access
        )
        self.team = self.create_team(organization=self.organization)

    def assert_can_access(self, user, path, method="GET", **kwargs):
        self.login_as(user, superuser=user.is_superuser)
        resp = getattr(self.client, method.lower())(path, **kwargs)
        assert resp.status_code >= 200 and resp.status_code < 300
        return resp

    def assert_cannot_access(self, user, path, method="GET", **kwargs):
        self.login_as(user, superuser=user.is_superuser)
        resp = getattr(self.client, method.lower())(path, **kwargs)
        assert resp.status_code >= 300

    def assert_member_can_access(self, path, **kwargs):
        return self.assert_role_can_access(path, "member", **kwargs)

    def assert_teamless_member_can_access(self, path, **kwargs):
        user = self.create_user(is_superuser=False)
        self.create_member(user=user, organization=self.organization, role="member", teams=[])

        self.assert_can_access(user, path, **kwargs)

    def assert_member_cannot_access(self, path, **kwargs):
        return self.assert_role_cannot_access(path, "member", **kwargs)

    def assert_manager_cannot_access(self, path, **kwargs):
        return self.assert_role_cannot_access(path, "manager", **kwargs)

    def assert_teamless_member_cannot_access(self, path, **kwargs):
        user = self.create_user(is_superuser=False)
        self.create_member(user=user, organization=self.organization, role="member", teams=[])

        self.assert_cannot_access(user, path, **kwargs)

    def assert_team_admin_can_access(self, path, **kwargs):
        return self.assert_role_can_access(path, "admin", **kwargs)

    def assert_teamless_admin_can_access(self, path, **kwargs):
        user = self.create_user(is_superuser=False)
        self.create_member(user=user, organization=self.organization, role="admin", teams=[])

        self.assert_can_access(user, path, **kwargs)

    def assert_team_admin_cannot_access(self, path, **kwargs):
        return self.assert_role_cannot_access(path, "admin", **kwargs)

    def assert_teamless_admin_cannot_access(self, path, **kwargs):
        user = self.create_user(is_superuser=False)
        self.create_member(user=user, organization=self.organization, role="admin", teams=[])

        self.assert_cannot_access(user, path, **kwargs)

    def assert_team_owner_can_access(self, path, **kwargs):
        return self.assert_role_can_access(path, "owner", **kwargs)

    def assert_owner_can_access(self, path, **kwargs):
        return self.assert_role_can_access(path, "owner", **kwargs)

    def assert_owner_cannot_access(self, path, **kwargs):
        return self.assert_role_cannot_access(path, "owner", **kwargs)

    def assert_non_member_cannot_access(self, path, **kwargs):
        user = self.create_user(is_superuser=False)
        self.assert_cannot_access(user, path, **kwargs)

    def assert_role_can_access(self, path, role, **kwargs):
        user = self.create_user(is_superuser=False)
        self.create_member(user=user, organization=self.organization, role=role, teams=[self.team])

        return self.assert_can_access(user, path, **kwargs)

    def assert_role_cannot_access(self, path, role, **kwargs):
        user = self.create_user(is_superuser=False)
        self.create_member(user=user, organization=self.organization, role=role, teams=[self.team])

        self.assert_cannot_access(user, path, **kwargs)


class PluginTestCase(TestCase):
    @property
    def plugin(self):
        raise NotImplementedError(f"implement for {type(self).__module__}.{type(self).__name__}")

    def setUp(self):
        super().setUp()
        # Old-style plugins are registered as classes; new-style plugins are
        # instances and don't need to be registered.
        if inspect.isclass(self.plugin):
            plugins.register(self.plugin)
            self.addCleanup(plugins.unregister, self.plugin)

    def assertAppInstalled(self, name, path):
        for ep in iter_entry_points("sentry.apps"):
            if ep.name == name:
                ep_path = ep.module_name
                if ep_path == path:
                    return
                self.fail(
                    "Found app in entry_points, but wrong class. Got %r, expected %r"
                    % (ep_path, path)
                )
        self.fail(f"Missing app from entry_points: {name!r}")

    def assertPluginInstalled(self, name, plugin):
        path = type(plugin).__module__ + ":" + type(plugin).__name__
        for ep in iter_entry_points("sentry.plugins"):
            if ep.name == name:
                ep_path = ep.module_name + ":" + ".".join(ep.attrs)
                if ep_path == path:
                    return
                self.fail(
                    "Found plugin in entry_points, but wrong class. Got %r, expected %r"
                    % (ep_path, path)
                )
        self.fail(f"Missing plugin from entry_points: {name!r}")


class CliTestCase(TestCase):
    @cached_property
    def runner(self) -> CliRunner:
        return CliRunner()

    @property
    def command(self):
        raise NotImplementedError(f"implement for {type(self).__module__}.{type(self).__name__}")

    default_args = []

    def invoke(self, *args, **kwargs):
        args += tuple(self.default_args)
        return self.runner.invoke(self.command, args, obj={}, **kwargs)


@pytest.mark.usefixtures("browser")
class AcceptanceTestCase(TransactionTestCase):
    browser: Browser

    @pytest.fixture(autouse=True)
    def _setup_today(self):
        with mock.patch(
            "django.utils.timezone.now",
            return_value=(datetime(2013, 5, 18, 15, 13, 58, 132928, tzinfo=timezone.utc)),
        ):
            yield

    def wait_for_loading(self):
        # NOTE: [data-test-id="loading-placeholder"] is not used here as
        # some dashboards have placeholders that never complete.
        self.browser.wait_until_not('[data-test-id="events-request-loading"]')
        self.browser.wait_until_not('[data-test-id="loading-indicator"]')
        self.browser.wait_until_not(".loading")

    def tearDown(self):
        # Avoid tests finishing before their API calls have finished.
        # NOTE: This is not fool-proof, it requires loading indicators to be
        # used when API requests are made.
        self.wait_for_loading()
        super().tearDown()

    def save_cookie(self, name, value, **params):
        self.browser.save_cookie(name=name, value=value, **params)

    def save_session(self):
        self.session.save()
        self.save_cookie(name=settings.SESSION_COOKIE_NAME, value=self.session.session_key)
        # Forward session cookie to django client.
        self.client.cookies[settings.SESSION_COOKIE_NAME] = self.session.session_key

    def dismiss_assistant(self, which=None):
        if which is None:
            which = ("issue", "issue_stream")
        if isinstance(which, str):
            which = [which]

        for item in which:
            res = self.client.put(
                "/api/0/assistant/",
                content_type="application/json",
                data=json.dumps({"guide": item, "status": "viewed", "useful": True}),
            )
            assert res.status_code == 201, res.content


class IntegrationTestCase(TestCase):
    @property
    def provider(self):
        raise NotImplementedError(f"implement for {type(self).__module__}.{type(self).__name__}")

    def setUp(self):
        from sentry.integrations.pipeline import IntegrationPipeline

        super().setUp()

        self.organization = self.create_organization(name="foo", owner=self.user)
        self.login_as(self.user)
        self.request = self.make_request(self.user)
        # XXX(dcramer): this is a bit of a hack, but it helps contain this test
        self.pipeline = IntegrationPipeline(
            request=self.request, organization=self.organization, provider_key=self.provider.key
        )

        self.init_path = reverse(
            "sentry-organization-integrations-setup",
            kwargs={"organization_slug": self.organization.slug, "provider_id": self.provider.key},
        )

        self.setup_path = reverse(
            "sentry-extension-setup", kwargs={"provider_id": self.provider.key}
        )
        self.configure_path = f"/extensions/{self.provider.key}/configure/"

        self.pipeline.initialize()
        self.save_session()

    def assertDialogSuccess(self, resp):
        assert b'window.opener.postMessage({"success":true' in resp.content


@pytest.mark.snuba
@requires_snuba
class SnubaTestCase(BaseTestCase):
    """
    Mixin for enabling test case classes to talk to snuba
    Useful when you are working on acceptance tests or integration
    tests that require snuba.
    """

    def setUp(self):
        super().setUp()
        self.init_snuba()

    @pytest.fixture(autouse=True)
    def initialize(self, reset_snuba, call_snuba):
        self.call_snuba = call_snuba

    @contextmanager
    def disable_snuba_query_cache(self):
        self.snuba_update_config({"use_readthrough_query_cache": 0, "use_cache": 0})
        yield
        self.snuba_update_config({"use_readthrough_query_cache": None, "use_cache": None})

    @classmethod
    def snuba_get_config(cls):
        return _snuba_pool.request("GET", "/config.json").data

    @classmethod
    def snuba_update_config(cls, config_vals):
        return _snuba_pool.request("POST", "/config.json", body=json.dumps(config_vals))

    def init_snuba(self):
        self.snuba_eventstream = SnubaEventStream()
        self.snuba_tagstore = SnubaTagStorage()

    def store_event(self, *args, **kwargs):
        """
        Simulates storing an event for testing.

        To set event title:
        - use "message": "{title}" field for errors
        - use "transaction": "{title}" field for transactions
        More info on event payloads: https://develop.sentry.dev/sdk/event-payloads/
        """
        with mock.patch("sentry.eventstream.insert", self.snuba_eventstream.insert):
            stored_event = Factories.store_event(*args, **kwargs)

            # Error groups
            stored_group = stored_event.group
            if stored_group is not None:
                self.store_group(stored_group)

            # Performance groups
            stored_groups = stored_event.groups
            if stored_groups is not None:
                for group in stored_groups:
                    self.store_group(group)
            return stored_event
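
    # Illustrative call (a sketch; `iso_format` and `before_now` are imported
    # at the top of this file, and the payload fields are examples):
    #
    #     event = self.store_event(
    #         data={"message": "oh no", "timestamp": iso_format(before_now(minutes=1))},
    #         project_id=self.project.id,
    #     )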

    def wait_for_event_count(self, project_id, total, attempts=2):
        """
        Wait until the event count reaches the provided value or until attempts is reached.

        Useful when you're storing several events and need to ensure that snuba/clickhouse
        state has settled.
        """
        # Verify that events have settled in snuba's storage.
        # While snuba is synchronous, clickhouse isn't entirely synchronous.
        attempt = 0
        snuba_filter = eventstore.Filter(project_ids=[project_id])
        last_events_seen = 0

        while attempt < attempts:
            events = eventstore.get_events(snuba_filter, referrer="test.wait_for_event_count")
            last_events_seen = len(events)
            if len(events) >= total:
                break
            attempt += 1
            time.sleep(0.05)
        if attempt == attempts:
            assert (
                False
            ), f"Could not ensure that {total} event(s) were persisted within {attempt} attempt(s). Event count is instead currently {last_events_seen}."

    def bulk_store_sessions(self, sessions):
        assert (
            requests.post(
                settings.SENTRY_SNUBA + "/tests/entities/sessions/insert", data=json.dumps(sessions)
            ).status_code
            == 200
        )

    def build_session(self, **kwargs):
        session = {
            "session_id": str(uuid4()),
            "distinct_id": str(uuid4()),
            "status": "ok",
            "seq": 0,
            "retention_days": 90,
            "duration": 60.0,
            "errors": 0,
            "started": time.time() // 60 * 60,
            "received": time.time(),
        }
        # Support both passing the values for these fields directly, and the full objects
        translators = [
            ("release", "version", "release"),
            ("environment", "name", "environment"),
            ("project_id", "id", "project"),
            ("org_id", "id", "organization"),
        ]
        for key, attr, default_attr in translators:
            if key not in kwargs:
                kwargs[key] = getattr(self, default_attr)
            val = kwargs[key]
            kwargs[key] = getattr(val, attr, val)
        session.update(kwargs)
        return session

    def store_session(self, session):
        self.bulk_store_sessions([session])
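
    # Illustrative usage: build a session dict (release/environment/project/org
    # default to the test fixtures via the translators above) and insert it:
    #
    #     self.store_session(self.build_session(status="exited", errors=0))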

    def store_group(self, group):
        data = [self.__wrap_group(group)]
        assert (
            requests.post(
                settings.SENTRY_SNUBA + "/tests/entities/groupedmessage/insert",
                data=json.dumps(data),
            ).status_code
            == 200
        )

    def store_outcome(self, group):
        data = [self.__wrap_group(group)]
        assert (
            requests.post(
                settings.SENTRY_SNUBA + "/tests/entities/outcomes/insert", data=json.dumps(data)
            ).status_code
            == 200
        )

    def to_snuba_time_format(self, datetime_value):
        date_format = "%Y-%m-%d %H:%M:%S%z"
        return datetime_value.strftime(date_format)

    def __wrap_group(self, group):
        return {
            "event": "change",
            "kind": "insert",
            "table": "sentry_groupedmessage",
            "columnnames": [
                "id",
                "logger",
                "level",
                "message",
                "status",
                "times_seen",
                "last_seen",
                "first_seen",
                "data",
                "score",
                "project_id",
                "time_spent_total",
                "time_spent_count",
                "resolved_at",
                "active_at",
                "is_public",
                "platform",
                "num_comments",
                "first_release_id",
                "short_id",
            ],
            "columnvalues": [
                group.id,
                group.logger,
                group.level,
                group.message,
                group.status,
                group.times_seen,
                self.to_snuba_time_format(group.last_seen),
                self.to_snuba_time_format(group.first_seen),
                group.data,
                group.score,
                group.project.id,
                group.time_spent_total,
                group.time_spent_count,
                group.resolved_at,
                self.to_snuba_time_format(group.active_at),
                group.is_public,
                group.platform,
                group.num_comments,
                group.first_release.id if group.first_release else None,
                group.short_id,
            ],
        }

    def snuba_insert(self, events):
        "Write a (wrapped) event (or events) to Snuba."
        if not isinstance(events, list):
            events = [events]
        assert (
            requests.post(
                settings.SENTRY_SNUBA + "/tests/entities/events/insert", data=json.dumps(events)
            ).status_code
            == 200
        )


class BaseMetricsTestCase(SnubaTestCase):
    snuba_endpoint = "/tests/entities/{entity}/insert"

    def store_session(self, session):
        """Mimic relay's behavior of always emitting a metric for a started session,
        and emitting an additional one if the session is fatal
        https://github.com/getsentry/relay/blob/e3c064e213281c36bde5d2b6f3032c6d36e22520/relay-server/src/actors/envelopes.rs#L357
        """
        user = session.get("distinct_id")
        org_id = session["org_id"]
        project_id = session["project_id"]
        base_tags = {}
        if session.get("release") is not None:
            base_tags["release"] = session["release"]
        if session.get("environment") is not None:
            base_tags["environment"] = session["environment"]
        if session.get("abnormal_mechanism") is not None:
            base_tags["abnormal_mechanism"] = session["abnormal_mechanism"]

        # This check is not yet reflected in relay, see https://getsentry.atlassian.net/browse/INGEST-464
        user_is_nil = user is None or user == "00000000-0000-0000-0000-000000000000"

        def push(type, mri: str, tags, value):
            self.store_metric(
                org_id,
                project_id,
                type,
                mri,
                {**tags, **base_tags},
                int(
                    session["started"]
                    if isinstance(session["started"], (int, float))
                    else to_timestamp(session["started"])
                ),
                value,
                use_case_id=UseCaseKey.RELEASE_HEALTH,
            )

        # seq=0 is equivalent to relay's session.init, init=True is transformed
        # to seq=0 in Relay.
        if session["seq"] == 0:  # init
            push("counter", SessionMRI.SESSION.value, {"session.status": "init"}, +1)

        status = session["status"]

        # Mark the session as errored, which includes fatal sessions.
        if session.get("errors", 0) > 0 or status not in ("ok", "exited"):
            push("set", SessionMRI.ERROR.value, {}, session["session_id"])
            if not user_is_nil:
                push("set", SessionMRI.USER.value, {"session.status": "errored"}, user)
        elif not user_is_nil:
            push("set", SessionMRI.USER.value, {}, user)

        if status in ("abnormal", "crashed"):  # fatal
            push("counter", SessionMRI.SESSION.value, {"session.status": status}, +1)
            if not user_is_nil:
                push("set", SessionMRI.USER.value, {"session.status": status}, user)

        if status == "exited":
            if session["duration"] is not None:
                push(
                    "distribution",
                    SessionMRI.RAW_DURATION.value,
                    {"session.status": status},
                    session["duration"],
                )

    def bulk_store_sessions(self, sessions):
        for session in sessions:
            self.store_session(session)

    @classmethod
    def store_metric(
        cls,
        org_id: int,
        project_id: int,
        type: Literal["counter", "set", "distribution"],
        name: str,
        tags: Dict[str, str],
        timestamp: int,
        value,
        use_case_id: UseCaseKey,
    ):
        mapping_meta = {}

        def metric_id(key: str):
            assert isinstance(key, str)
            res = indexer.record(
                use_case_id=REVERSE_METRIC_PATH_MAPPING[use_case_id],
                org_id=org_id,
                string=key,
            )
            assert res is not None, key
            mapping_meta[str(res)] = key
            return res

        def tag_key(name):
            assert isinstance(name, str)
            res = indexer.record(
                use_case_id=REVERSE_METRIC_PATH_MAPPING[use_case_id],
                org_id=org_id,
                string=name,
            )
            assert res is not None, name
            mapping_meta[str(res)] = name
            return str(res)

        def tag_value(name):
            assert isinstance(name, str)
            if use_case_id == UseCaseKey.PERFORMANCE:
                return name
            res = indexer.record(
                use_case_id=REVERSE_METRIC_PATH_MAPPING[use_case_id],
                org_id=org_id,
                string=name,
            )
            assert res is not None, name
            mapping_meta[str(res)] = name
            return res

        assert not isinstance(value, list)

        if type == "set":
            # Relay uses a different hashing algorithm, but that's ok
            value = [int.from_bytes(hashlib.md5(str(value).encode()).digest()[:8], "big")]
        elif type == "distribution":
            value = [value]

        msg = {
            "org_id": org_id,
            "project_id": project_id,
            "metric_id": metric_id(name),
            "timestamp": timestamp,
            "tags": {tag_key(key): tag_value(value) for key, value in tags.items()},
            "type": {"counter": "c", "set": "s", "distribution": "d"}[type],
            "value": value,
            "retention_days": 90,
            "use_case_id": use_case_id.value,
            # making up a sentry_received_timestamp, but it should be sometime
            # after the timestamp of the event
            "sentry_received_timestamp": timestamp + 10,
            "version": 2 if use_case_id == UseCaseKey.PERFORMANCE else 1,
        }

        msg["mapping_meta"] = {}
        msg["mapping_meta"][msg["type"]] = mapping_meta

        if use_case_id == UseCaseKey.PERFORMANCE:
            entity = f"generic_metrics_{type}s"
        else:
            entity = f"metrics_{type}s"

        cls.__send_buckets([msg], entity)
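
    # Rough shape of the bucket produced above (a sketch; the ids and values are
    # hypothetical, the real ones come from the indexer):
    #   {
    #       "org_id": 1, "project_id": 2, "metric_id": 123, "timestamp": 1680000000,
    #       "tags": {"456": 789}, "type": "c", "value": 1, "retention_days": 90,
    #       "use_case_id": "release-health", "sentry_received_timestamp": 1680000010,
    #       "version": 1, "mapping_meta": {"c": {"123": "metric1"}},
    #   }
    # For UseCaseKey.PERFORMANCE, tag values stay as raw strings ("version": 2) and the
    # bucket is routed to the generic_metrics_* entities.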

    @classmethod
    def __send_buckets(cls, buckets, entity):
        # DO NOT USE THIS METHOD IN YOUR TESTS, use store_metric instead. We
        # need to be able to make changes to the indexer's output protocol
        # without having to update a million tests.
        if entity.startswith("generic_"):
            codec = sentry_kafka_schemas.get_codec("snuba-generic-metrics")
        else:
            codec = sentry_kafka_schemas.get_codec("snuba-metrics")

        for bucket in buckets:
            codec.validate(bucket)

        assert (
            requests.post(
                settings.SENTRY_SNUBA + cls.snuba_endpoint.format(entity=entity),
                data=json.dumps(buckets),
            ).status_code
            == 200
        )


class BaseMetricsLayerTestCase(BaseMetricsTestCase):
    ENTITY_SHORTHANDS = {
        "c": "counter",
        "s": "set",
        "d": "distribution",
        "g": "gauge",
    }
    # To avoid complexity and edge cases while working on tests, all children of this class
    # should use this mocked time, except when a specific time is required. This is suggested
    # because working with time ranges in metrics is very error-prone and requires in-depth
    # knowledge of the underlying implementation.
    #
    # This time has been specifically chosen to be 10:00:00 so that all tests will automatically
    # have the data inserted and queried with automatically inferred timestamps (e.g., usage of
    # - 1 second, get_date_range()...) without running into problems.
    MOCK_DATETIME = (timezone.now() - timedelta(days=1)).replace(
        hour=10, minute=0, second=0, microsecond=0
    )

    @property
    def now(self):
        """
        Returns the current time instance that will be used throughout the tests of the
        metrics layer. This method has to be implemented in all child classes because it
        serves as a way to standardize access to time.
        """
        raise NotImplementedError

    def _extract_entity_from_mri(self, mri_string: str) -> Optional[str]:
        """
        Extracts the entity name from the MRI given a map of shorthands used to represent
        that entity in the MRI.
        """
        if (parsed_mri := parse_mri(mri_string)) is not None:
            return self.ENTITY_SHORTHANDS[parsed_mri.entity]
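
    # For example, the (hypothetical) MRI "d:transactions/duration@millisecond" has the
    # entity shorthand "d", which the map above resolves to "distribution".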

    def _store_metric(
        self,
        name: str,
        tags: Dict[str, str],
        value: int,
        use_case_id: UseCaseKey,
        type: Optional[str] = None,
        org_id: Optional[int] = None,
        project_id: Optional[int] = None,
        days_before_now: int = 0,
        hours_before_now: int = 0,
        minutes_before_now: int = 0,
        seconds_before_now: int = 0,
    ):
        # We subtract one second to account for the right non-inclusivity of the query. If we
        # didn't do this, some data wouldn't be returned (this applies only if we use self.now()
        # as the "end" bound of the query).
        #
        # Use SENTRY_SNUBA_INFO=true while running queries in tests to learn more about how data
        # is actually queried at the ClickHouse level.
        #
        # This approach aims at solving the problem of flaky tests that occurred in CI at
        # specific times.
        self.store_metric(
            org_id=self.organization.id if org_id is None else org_id,
            project_id=self.project.id if project_id is None else project_id,
            type=self._extract_entity_from_mri(name) if type is None else type,
            name=name,
            tags=tags,
            timestamp=int(
                (
                    self.adjust_timestamp(
                        self.now
                        - timedelta(
                            days=days_before_now,
                            hours=hours_before_now,
                            minutes=minutes_before_now,
                            seconds=seconds_before_now,
                        )
                    )
                ).timestamp()
            ),
            value=value,
            use_case_id=use_case_id,
        )
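
    # Minimal usage sketch (assumes a concrete subclass that implements `now`; the MRI
    # and tags are illustrative):
    #   self._store_metric(
    #       name="d:transactions/duration@millisecond",
    #       tags={"transaction": "/hello"},
    #       value=100,
    #       use_case_id=UseCaseKey.PERFORMANCE,
    #       minutes_before_now=5,
    #   )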

    @staticmethod
    def adjust_timestamp(time: datetime) -> datetime:
        # We subtract 1 second to account for the right non-inclusivity of the queries.
        #
        # E.g.: if we save at 10:00:00 and the query's "end" bound is that same time, we must
        # store our value with a timestamp less than 10:00:00 so that, irrespective of the
        # bucket, the value ends up in the query result set. When we save 10:00:00 - 1 second
        # in the db, it will be saved under the different granularities as (09:59:59, 09:59:00,
        # 09:00:00), and these are the actual timestamps that will be compared to the "start"
        # and "end" bounds. Supposing we store 09:59:59 with "start"=09:00:00 and
        # "end"=10:00:00 and query by granularity (60 = minutes), we then look at entries with
        # timestamp = 09:59:00, which is >= "start" and < "end", thus all these records will be
        # returned.
        #
        # Of course this - 1 second "trick" just abstracts away this complexity; it could also
        # be avoided by being more mindful with the "end" bound, but because we would like our
        # tests to be deterministic we settle on this approach. The - 1 could likewise be
        # avoided by choosing specific frozen times depending on granularities and stored data,
        # but, as previously mentioned, we would like to standardize the time we choose unless
        # there are specific cases.
        #
        # This solution abstracts away that edge case, but one needs to be careful not to use
        # it with times between XX:00:00.000000 and XX:00:00.999999, because this will result
        # in a time like (XX-1):59:59.ffffff, which will mess up the get_date_range function.
        # E.g.: with time 10:00:00.567894, statsPeriod = 1h and interval = 1h, the interval
        # will run from 10:00:00.000000 to 11:00:00.000000, but the data will be saved with
        # date 09:59:59.567894, thus falling outside the query range.
        #
        # All of these considerations apply only when directly using the time managed by this
        # abstraction; an alternative would be to avoid it altogether, but for standardization
        # purposes we prefer to keep using it.
        return time - timedelta(seconds=1)
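
    # E.g. adjust_timestamp(datetime(2023, 1, 1, 10, 0, 0)) returns
    # datetime(2023, 1, 1, 9, 59, 59).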

    def store_performance_metric(
        self,
        name: str,
        tags: Dict[str, str],
        value: int | float,
        type: Optional[str] = None,
        org_id: Optional[int] = None,
        project_id: Optional[int] = None,
        days_before_now: int = 0,
        hours_before_now: int = 0,
        minutes_before_now: int = 0,
        seconds_before_now: int = 0,
    ):
        self._store_metric(
            type=type,
            name=name,
            tags=tags,
            value=value,
            org_id=org_id,
            project_id=project_id,
            use_case_id=UseCaseKey.PERFORMANCE,
            days_before_now=days_before_now,
            hours_before_now=hours_before_now,
            minutes_before_now=minutes_before_now,
            seconds_before_now=seconds_before_now,
        )

    def store_release_health_metric(
        self,
        name: str,
        tags: Dict[str, str],
        value: int,
        type: Optional[str] = None,
        org_id: Optional[int] = None,
        project_id: Optional[int] = None,
        days_before_now: int = 0,
        hours_before_now: int = 0,
        minutes_before_now: int = 0,
        seconds_before_now: int = 0,
    ):
        self._store_metric(
            type=type,
            name=name,
            tags=tags,
            value=value,
            org_id=org_id,
            project_id=project_id,
            use_case_id=UseCaseKey.RELEASE_HEALTH,
            days_before_now=days_before_now,
            hours_before_now=hours_before_now,
            minutes_before_now=minutes_before_now,
            seconds_before_now=seconds_before_now,
        )

    def build_metrics_query(
        self,
        select: Sequence[MetricField],
        project_ids: Optional[Sequence[int]] = None,
        where: Optional[Sequence[Union[BooleanCondition, Condition, MetricConditionField]]] = None,
        having: Optional[ConditionGroup] = None,
        groupby: Optional[Sequence[MetricGroupByField]] = None,
        orderby: Optional[Sequence[MetricOrderByField]] = None,
        limit: Optional[Limit] = None,
        offset: Optional[Offset] = None,
        include_totals: bool = True,
        include_series: bool = True,
        before_now: Optional[str] = None,
        granularity: Optional[str] = None,
    ):
        # TODO: fix this method, which gets the range after now instead of before now.
        (start, end, granularity_in_seconds) = get_date_range(
            {"statsPeriod": before_now, "interval": granularity}
        )

        return MetricsQuery(
            org_id=self.organization.id,
            project_ids=[self.project.id] + (project_ids if project_ids is not None else []),
            select=select,
            start=start,
            end=end,
            granularity=Granularity(granularity=granularity_in_seconds),
            where=where,
            having=having,
            groupby=groupby,
            orderby=orderby,
            limit=limit,
            offset=offset,
            include_totals=include_totals,
            include_series=include_series,
        )
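
    # Usage sketch (parameters are illustrative): count transaction durations over the
    # last day, bucketed hourly, totals only.
    #   query = self.build_metrics_query(
    #       select=[MetricField(op="count", metric_mri=TransactionMRI.DURATION.value)],
    #       before_now="1d",
    #       granularity="1h",
    #       include_series=False,
    #   )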


class MetricsEnhancedPerformanceTestCase(BaseMetricsLayerTestCase, TestCase):
    TYPE_MAP = {
        "metrics_distributions": "distribution",
        "metrics_sets": "set",
        "metrics_counters": "counter",
    }
    ENTITY_MAP = {
        "transaction.duration": "metrics_distributions",
        "measurements.lcp": "metrics_distributions",
        "measurements.fp": "metrics_distributions",
        "measurements.fcp": "metrics_distributions",
        "measurements.fid": "metrics_distributions",
        "measurements.cls": "metrics_distributions",
        "measurements.frames_frozen_rate": "metrics_distributions",
        "measurements.time_to_initial_display": "metrics_distributions",
        "spans.http": "metrics_distributions",
        "user": "metrics_sets",
    }
    METRIC_STRINGS = []
    DEFAULT_METRIC_TIMESTAMP = datetime(2015, 1, 1, 10, 15, 0, tzinfo=timezone.utc)

    def setUp(self):
        super().setUp()
        self._index_metric_strings()

    def _index_metric_strings(self):
        strings = [
            "transaction",
            "environment",
            "http.status",
            "transaction.status",
            METRIC_TOLERATED_TAG_VALUE,
            METRIC_SATISFIED_TAG_VALUE,
            METRIC_FRUSTRATED_TAG_VALUE,
            METRIC_SATISFACTION_TAG_KEY,
            *self.METRIC_STRINGS,
            *list(SPAN_STATUS_NAME_TO_CODE.keys()),
            *list(METRICS_MAP.values()),
        ]
        org_strings = {self.organization.id: set(strings)}
        indexer.bulk_record({UseCaseID.TRANSACTIONS: org_strings})

    def store_transaction_metric(
        self,
        value: List[int] | int,
        metric: str = "transaction.duration",
        internal_metric: Optional[str] = None,
        entity: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        timestamp: Optional[datetime] = None,
        project: Optional[int] = None,
        use_case_id: UseCaseKey = UseCaseKey.PERFORMANCE,
    ):
        internal_metric = METRICS_MAP[metric] if internal_metric is None else internal_metric
        entity = self.ENTITY_MAP[metric] if entity is None else entity
        org_id = self.organization.id

        if tags is None:
            tags = {}

        if timestamp is None:
            metric_timestamp = self.DEFAULT_METRIC_TIMESTAMP.timestamp()
        else:
            metric_timestamp = timestamp.timestamp()

        if project is None:
            project = self.project.id

        if not isinstance(value, list):
            value = [value]
        for subvalue in value:
            self.store_metric(
                org_id,
                project,
                self.TYPE_MAP[entity],
                internal_metric,
                tags,
                int(metric_timestamp),
                subvalue,
                use_case_id=use_case_id,
            )
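
    # Usage sketch (values and tags are illustrative): store three LCP measurements,
    # each persisted as its own distribution bucket by the loop above.
    #   self.store_transaction_metric(
    #       [100, 200, 300],
    #       metric="measurements.lcp",
    #       tags={"transaction": "/hello"},
    #   )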

    def wait_for_metric_count(
        self,
        project,
        total,
        metric="transaction.duration",
        mri=TransactionMRI.DURATION.value,
        attempts=2,
    ):
        attempt = 0
        metrics_query = self.build_metrics_query(
            before_now="1d",
            granularity="1d",
            select=[
                MetricField(
                    op="count",
                    metric_mri=mri,
                ),
            ],
            include_series=False,
        )
        while attempt < attempts:
            data = get_series(
                [project],
                metrics_query=metrics_query,
                use_case_id=UseCaseKey.PERFORMANCE,
            )
            count = data["groups"][0]["totals"][f"count({metric})"]
            if count >= total:
                break

            attempt += 1
            time.sleep(0.05)

        if attempt == attempts:
            assert (
                False
            ), f"Could not ensure that {total} metric(s) were persisted within {attempt} attempt(s)."
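
    # Usage sketch: poll until at least 3 "transaction.duration" metrics are queryable.
    #   self.wait_for_metric_count(self.project, 3)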


class BaseIncidentsTest(SnubaTestCase):
    def create_event(self, timestamp, fingerprint=None, user=None):
        event_id = uuid4().hex
        if fingerprint is None:
            fingerprint = event_id

        data = {
            "event_id": event_id,
            "fingerprint": [fingerprint],
            "timestamp": iso_format(timestamp),
            "type": "error",
            # This is necessary because event type error should not exist without
            # an exception being in the payload
            "exception": [{"type": "Foo"}],
        }
        if user:
            data["user"] = user
        return self.store_event(data=data, project_id=self.project.id)

    @cached_property
    def now(self):
        return timezone.now().replace(minute=0, second=0, microsecond=0)


@pytest.mark.snuba
@requires_snuba
class OutcomesSnubaTest(TestCase):
    def setUp(self):
        super().setUp()
        assert requests.post(settings.SENTRY_SNUBA + "/tests/outcomes/drop").status_code == 200

    def store_outcomes(self, outcome, num_times=1):
        outcomes = []
        for _ in range(num_times):
            outcome_copy = outcome.copy()
            outcome_copy["timestamp"] = outcome_copy["timestamp"].strftime("%Y-%m-%dT%H:%M:%S.%fZ")
            outcomes.append(outcome_copy)

        assert (
            requests.post(
                settings.SENTRY_SNUBA + "/tests/entities/outcomes/insert", data=json.dumps(outcomes)
            ).status_code
            == 200
        )
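
    # Usage sketch (the payload fields are assumptions based on typical outcome rows;
    # "timestamp" must be a datetime, since it is serialized above):
    #   self.store_outcomes(
    #       {
    #           "org_id": self.organization.id,
    #           "project_id": self.project.id,
    #           "outcome": 0,  # e.g. accepted
    #           "timestamp": timezone.now(),
    #       },
    #       num_times=3,
    #   )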


@pytest.mark.snuba
@requires_snuba
class ReplaysSnubaTestCase(TestCase):
    def setUp(self):
        super().setUp()
        assert requests.post(settings.SENTRY_SNUBA + "/tests/replays/drop").status_code == 200

    def store_replays(self, replay):
        response = requests.post(
            settings.SENTRY_SNUBA + "/tests/entities/replays/insert", json=[replay]
        )
        assert response.status_code == 200


# AcceptanceTestCase and TestCase are mutually exclusive base classes
class ReplaysAcceptanceTestCase(AcceptanceTestCase, SnubaTestCase):
    def setUp(self):
        self.now = datetime.utcnow().replace(tzinfo=pytz.utc)
        super().setUp()
        self.drop_replays()
        patcher = mock.patch("django.utils.timezone.now", return_value=self.now)
        patcher.start()
        self.addCleanup(patcher.stop)

    def drop_replays(self):
        assert requests.post(settings.SENTRY_SNUBA + "/tests/replays/drop").status_code == 200

    def store_replays(self, replays):
        assert (
            len(replays) >= 2
        ), "You need to store at least 2 replay events for the replay to be considered valid"
        response = requests.post(
            settings.SENTRY_SNUBA + "/tests/entities/replays/insert", json=replays
        )
        assert response.status_code == 200

    def store_replay_segments(
        self,
        replay_id: str,
        project_id: str,
        segment_id: int,
        segment,
    ):
        f = File.objects.create(name=f"rr:{segment_id}", type="replay.recording")
        f.putfile(BytesIO(compress(dumps_htmlsafe(segment).encode())))
        ReplayRecordingSegment.objects.create(
            replay_id=replay_id,
            project_id=project_id,
            segment_id=segment_id,
            file_id=f.id,
        )
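
    # Usage sketch (the segment payload is illustrative): each call persists one
    # compressed segment as a File row plus a ReplayRecordingSegment row.
    #   self.store_replay_segments(replay_id, self.project.id, 0, [{"type": 4}])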


class IntegrationRepositoryTestCase(APITestCase):
    def setUp(self):
        super().setUp()
        self.login_as(self.user)

    def add_create_repository_responses(self, repository_config):
        raise NotImplementedError(f"implement for {type(self).__module__}.{type(self).__name__}")

    @exempt_from_silo_limits()
    def create_repository(
        self, repository_config, integration_id, organization_slug=None, add_responses=True
    ):
        if add_responses:
            self.add_create_repository_responses(repository_config)
        if not integration_id:
            data = {"provider": self.provider_name, "identifier": repository_config["id"]}
        else:
            data = {
                "provider": self.provider_name,
                "installation": integration_id,
                "identifier": repository_config["id"],
            }
        response = self.client.post(
            path=reverse(
                "sentry-api-0-organization-repositories",
                args=[organization_slug or self.organization.slug],
            ),
            data=data,
        )
        return response

    def assert_error_message(self, response, error_type, error_message):
        assert response.data["error_type"] == error_type
        assert error_message in response.data["errors"]["__all__"]


class ReleaseCommitPatchTest(APITestCase):
    def setUp(self):
        user = self.create_user(is_staff=False, is_superuser=False)
        self.org = self.create_organization()
        self.org.save()

        team = self.create_team(organization=self.org)
        self.project = self.create_project(name="foo", organization=self.org, teams=[team])

        self.create_member(teams=[team], user=user, organization=self.org)
        self.login_as(user=user)

    @cached_property
    def url(self):
        raise NotImplementedError(f"implement for {type(self).__module__}.{type(self).__name__}")

    def assert_commit(self, commit, repo_id, key, author_id, message):
        assert commit.organization_id == self.org.id
        assert commit.repository_id == repo_id
        assert commit.key == key
        assert commit.author_id == author_id
        assert commit.message == message

    def assert_file_change(self, file_change, type, filename, commit_id):
        assert file_change.type == type
        assert file_change.filename == filename
        assert file_change.commit_id == commit_id


class SetRefsTestCase(APITestCase):
    def setUp(self):
        super().setUp()
        self.user = self.create_user(is_staff=False, is_superuser=False)
        self.org = self.create_organization()
        self.team = self.create_team(organization=self.org)
        self.project = self.create_project(name="foo", organization=self.org, teams=[self.team])
        self.create_member(teams=[self.team], user=self.user, organization=self.org)
        self.login_as(user=self.user)

        self.group = self.create_group(project=self.project)
        self.repo = Repository.objects.create(organization_id=self.org.id, name="test/repo")

    def assert_fetch_commits(self, mock_fetch_commit, prev_release_id, release_id, refs):
        assert len(mock_fetch_commit.method_calls) == 1
        kwargs = mock_fetch_commit.method_calls[0][2]["kwargs"]
        assert kwargs == {
            "prev_release_id": prev_release_id,
            "refs": refs,
            "release_id": release_id,
            "user_id": self.user.id,
        }

    def assert_head_commit(self, head_commit, commit_key, release_id=None):
        assert self.org.id == head_commit.organization_id
        assert self.repo.id == head_commit.repository_id
        if release_id:
            assert release_id == head_commit.release_id
        else:
            assert self.release.id == head_commit.release_id
        self.assert_commit(head_commit.commit, commit_key)

    def assert_commit(self, commit, key):
        assert self.org.id == commit.organization_id
        assert self.repo.id == commit.repository_id
        assert commit.key == key


class OrganizationDashboardWidgetTestCase(APITestCase):
    def setUp(self):
        super().setUp()
        self.login_as(self.user)
        self.dashboard = Dashboard.objects.create(
            title="Dashboard 1", created_by_id=self.user.id, organization=self.organization
        )
        self.anon_users_query = {
            "name": "Anonymous Users",
            "fields": ["count()"],
            "aggregates": ["count()"],
            "columns": [],
            "fieldAliases": ["Count Alias"],
            "conditions": "!has:user.email",
        }
        self.known_users_query = {
            "name": "Known Users",
            "fields": ["count_unique(user.email)"],
            "aggregates": ["count_unique(user.email)"],
            "columns": [],
            "fieldAliases": [],
            "conditions": "has:user.email",
        }
        self.geo_errors_query = {
            "name": "Errors by Geo",
            "fields": ["count()", "geo.country_code"],
            "aggregates": ["count()"],
            "columns": ["geo.country_code"],
            "fieldAliases": [],
            "conditions": "has:geo.country_code",
        }

    def do_request(self, method, url, data=None):
        func = getattr(self.client, method)
        return func(url, data=data)

    def assert_widget_queries(self, widget_id, data):
        result_queries = DashboardWidgetQuery.objects.filter(widget_id=widget_id).order_by("order")
        for ds, expected_ds in zip(result_queries, data):
            assert ds.name == expected_ds["name"]
            assert ds.fields == expected_ds["fields"]
            assert ds.conditions == expected_ds["conditions"]

    def assert_widget(self, widget, order, title, display_type, queries=None):
        assert widget.order == order
        assert widget.display_type == display_type
        assert widget.title == title

        if not queries:
            return

        self.assert_widget_queries(widget.id, queries)

    def assert_widget_data(self, data, title, display_type, queries=None):
        assert data["displayType"] == display_type
        assert data["title"] == title

        if not queries:
            return

        self.assert_widget_queries(data["id"], queries)

    def assert_serialized_widget_query(self, data, widget_data_source):
        if "id" in data:
            assert data["id"] == str(widget_data_source.id)
        if "name" in data:
            assert data["name"] == widget_data_source.name
        if "fields" in data:
            assert data["fields"] == widget_data_source.fields
        if "conditions" in data:
            assert data["conditions"] == widget_data_source.conditions
        if "orderby" in data:
            assert data["orderby"] == widget_data_source.orderby
        if "aggregates" in data:
            assert data["aggregates"] == widget_data_source.aggregates
        if "columns" in data:
            assert data["columns"] == widget_data_source.columns
        if "fieldAliases" in data:
            assert data["fieldAliases"] == widget_data_source.field_aliases

    def get_widgets(self, dashboard_id):
        return DashboardWidget.objects.filter(dashboard_id=dashboard_id).order_by("order")

    def assert_serialized_widget(self, data, expected_widget):
        if "id" in data:
            assert data["id"] == str(expected_widget.id)
        if "title" in data:
            assert data["title"] == expected_widget.title
        if "interval" in data:
            assert data["interval"] == expected_widget.interval
        if "limit" in data:
            assert data["limit"] == expected_widget.limit
        if "displayType" in data:
            assert data["displayType"] == DashboardWidgetDisplayTypes.get_type_name(
                expected_widget.display_type
            )
        if "layout" in data:
            assert data["layout"] == expected_widget.detail["layout"]

    def create_user_member_role(self):
        self.user = self.create_user(is_superuser=False)
        self.create_member(
            user=self.user, organization=self.organization, role="member", teams=[self.team]
        )
        self.login_as(self.user)


@pytest.mark.migrations
class TestMigrations(TransactionTestCase):
    """
    From https://www.caktusgroup.com/blog/2016/02/02/writing-unit-tests-django-migrations/

    Note that when running these tests locally you will need to set the
    `MIGRATIONS_TEST_MIGRATE=1` environment variable for them to pass.
    """

    @property
    def app(self):
        return "sentry"

    @property
    def migrate_from(self):
        raise NotImplementedError(f"implement for {type(self).__module__}.{type(self).__name__}")

    @property
    def migrate_to(self):
        raise NotImplementedError(f"implement for {type(self).__module__}.{type(self).__name__}")

    @property
    def connection(self):
        return "default"

    def setUp(self):
        super().setUp()
        self.migrate_from = [(self.app, self.migrate_from)]
        self.migrate_to = [(self.app, self.migrate_to)]

        connection = connections[self.connection]
        with connection.cursor() as cursor:
            cursor.execute("SET ROLE 'postgres'")

        self.setup_initial_state()

        executor = MigrationExecutor(connection)
        matching_migrations = [m for m in executor.loader.applied_migrations if m[0] == self.app]
        if not matching_migrations:
            raise AssertionError(
                "no migrations detected!\n\n"
                "try running this test with `MIGRATIONS_TEST_MIGRATE=1 pytest ...`"
            )
        self.current_migration = [max(matching_migrations)]
        old_apps = executor.loader.project_state(self.migrate_from).apps

        # Reverse to the original migration
        executor.migrate(self.migrate_from)

        self.setup_before_migration(old_apps)

        # Run the migration to test
        executor = MigrationExecutor(connection)
        executor.loader.build_graph()  # reload.
        executor.migrate(self.migrate_to)

        self.apps = executor.loader.project_state(self.migrate_to).apps

    def tearDown(self):
        super().tearDown()
        executor = MigrationExecutor(connections[self.connection])
        executor.loader.build_graph()  # reload.
        executor.migrate(self.current_migration)

    def setup_initial_state(self):
        # Add code here that will run before we roll back the database to the `migrate_from`
        # migration. This can be useful to allow us to use the various `self.create_*`
        # convenience methods.
        # Any objects created here will need to be converted over to migration models if any
        # further database operations are required.
        pass

    def setup_before_migration(self, apps):
        # Add code here to run after we have rolled the database back to the `migrate_from`
        # migration. This code must use `apps` to create any database models, and not directly
        # access Django models.
        # It's preferable to create models here, when not overly complex to do so.
        pass
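
    # Illustrative subclass sketch (the migration names and model are hypothetical):
    #   class TestMyBackfill(TestMigrations):
    #       migrate_from = "0001_initial"
    #       migrate_to = "0002_backfill"
    #
    #       def setup_before_migration(self, apps):
    #           Example = apps.get_model("sentry", "Example")
    #           Example.objects.create(name="pre-migration row")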


class SCIMTestCase(APITestCase):
    def setUp(self, provider="dummy"):
        super().setUp()
        self.auth_provider = AuthProviderModel(
            organization_id=self.organization.id, provider=provider
        )
        self.auth_provider.enable_scim(self.user)
        self.auth_provider.save()
        self.scim_user = ApiToken.objects.get(token=self.auth_provider.get_scim_token()).user
        self.login_as(user=self.scim_user)


class SCIMAzureTestCase(SCIMTestCase):
    def setUp(self):
        auth.register(ACTIVE_DIRECTORY_PROVIDER_NAME, DummyProvider)
        super().setUp(provider=ACTIVE_DIRECTORY_PROVIDER_NAME)
        self.addCleanup(auth.unregister, ACTIVE_DIRECTORY_PROVIDER_NAME, DummyProvider)


class ActivityTestCase(TestCase):
    def another_user(self, email_string, team=None, alt_email_string=None):
        user = self.create_user(email_string)
        if alt_email_string:
            UserEmail.objects.create(email=alt_email_string, user=user)

            assert UserEmail.objects.filter(user=user, email=alt_email_string).update(
                is_verified=True
            )

        assert UserEmail.objects.filter(user=user, email=user.email).update(is_verified=True)

        self.create_member(user=user, organization=self.org, teams=[team] if team else None)

        return user

    def another_commit(self, order, name, user, repository, alt_email_string=None):
        commit = Commit.objects.create(
            key=name * 40,
            repository_id=repository.id,
            organization_id=self.org.id,
            author=CommitAuthor.objects.create(
                organization_id=self.org.id,
                name=user.name,
                email=alt_email_string or user.email,
            ),
        )
        ReleaseCommit.objects.create(
            organization_id=self.org.id,
            release=self.release,
            commit=commit,
            order=order,
        )
        return commit

    def another_release(self, name):
        release = Release.objects.create(
            version=name * 40,
            organization_id=self.project.organization_id,
            date_released=timezone.now(),
        )
        release.add_project(self.project)
        release.add_project(self.project2)
        deploy = Deploy.objects.create(
            release=release, organization_id=self.org.id, environment_id=self.environment.id
        )

        return release, deploy


class SlackActivityNotificationTest(ActivityTestCase):
    @cached_property
    def adapter(self):
        return mail_adapter

    def setUp(self):
        NotificationSetting.objects.update_settings(
            ExternalProviders.SLACK,
            NotificationSettingTypes.WORKFLOW,
            NotificationSettingOptionValues.ALWAYS,
            actor=RpcActor.from_orm_user(self.user),
        )
        NotificationSetting.objects.update_settings(
            ExternalProviders.SLACK,
            NotificationSettingTypes.DEPLOY,
            NotificationSettingOptionValues.ALWAYS,
            actor=RpcActor.from_orm_user(self.user),
        )
        NotificationSetting.objects.update_settings(
            ExternalProviders.SLACK,
            NotificationSettingTypes.ISSUE_ALERTS,
            NotificationSettingOptionValues.ALWAYS,
            actor=RpcActor.from_orm_user(self.user),
        )
        UserOption.objects.create(user=self.user, key="self_notifications", value="1")
        self.integration = install_slack(self.organization)
        self.idp = IdentityProvider.objects.create(type="slack", external_id="TXXXXXXX1", config={})
        self.identity = Identity.objects.create(
            external_id="UXXXXXXX1",
            idp=self.idp,
            user=self.user,
            status=IdentityStatus.VALID,
            scopes=[],
        )
        responses.add(
            method=responses.POST,
            url="https://slack.com/api/chat.postMessage",
            body='{"ok": true}',
            status=200,
            content_type="application/json",
        )
        self.name = self.user.get_display_name()
        self.short_id = self.group.qualified_short_id

    def assert_performance_issue_attachments(
        self, attachment, project_slug, referrer, alert_type="workflow"
    ):
        assert attachment["title"] == "N+1 Query"
        assert (
            attachment["text"]
            == "db - SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21"
        )
        assert (
            attachment["footer"]
            == f"{project_slug} | production | <http://testserver/settings/account/notifications/{alert_type}/?referrer={referrer}|Notification Settings>"
        )

    def assert_generic_issue_attachments(
        self, attachment, project_slug, referrer, alert_type="workflow"
    ):
        assert attachment["title"] == TEST_ISSUE_OCCURRENCE.issue_title
        assert attachment["text"] == TEST_ISSUE_OCCURRENCE.evidence_display[0].value
        assert (
            attachment["footer"]
            == f"{project_slug} | <http://testserver/settings/account/notifications/{alert_type}/?referrer={referrer}|Notification Settings>"
        )


class MSTeamsActivityNotificationTest(ActivityTestCase):
    def setUp(self):
        NotificationSetting.objects.update_settings(
            ExternalProviders.MSTEAMS,
            NotificationSettingTypes.WORKFLOW,
            NotificationSettingOptionValues.ALWAYS,
            actor=RpcActor.from_orm_user(self.user),
        )
        NotificationSetting.objects.update_settings(
            ExternalProviders.MSTEAMS,
            NotificationSettingTypes.ISSUE_ALERTS,
            NotificationSettingOptionValues.ALWAYS,
            actor=RpcActor.from_orm_user(self.user),
        )
        NotificationSetting.objects.update_settings(
            ExternalProviders.MSTEAMS,
            NotificationSettingTypes.DEPLOY,
            NotificationSettingOptionValues.ALWAYS,
            actor=RpcActor.from_orm_user(self.user),
        )
        UserOption.objects.create(user=self.user, key="self_notifications", value="1")

        self.tenant_id = "50cccd00-7c9c-4b32-8cda-58a084f9334a"
        self.integration = self.create_integration(
            self.organization,
            self.tenant_id,
            metadata={
                "access_token": "xoxb-xxxxxxxxx-xxxxxxxxxx-xxxxxxxxxxxx",
                "service_url": "https://testserviceurl.com/testendpoint/",
                "installation_type": "tenant",
                "expires_at": 1234567890,
                "tenant_id": self.tenant_id,
            },
            name="Personal Installation",
            provider="msteams",
        )
        self.idp = self.create_identity_provider(
            integration=self.integration, type="msteams", external_id=self.tenant_id, config={}
        )
        self.user_id_1 = "29:1XJKJMvc5GBtc2JwZq0oj8tHZmzrQgFmB39ATiQWA85gQtHieVkKilBZ9XHoq9j7Zaqt7CZ-NJWi7me2kHTL3Bw"
        self.user_1 = self.user
        self.identity_1 = self.create_identity(
            user=self.user_1, identity_provider=self.idp, external_id=self.user_id_1
        )


@apply_feature_flag_on_cls("organizations:metrics")
@pytest.mark.usefixtures("reset_snuba")
class MetricsAPIBaseTestCase(BaseMetricsLayerTestCase, APITestCase):
    def build_and_store_session(
        self,
        days_before_now: int = 0,
        hours_before_now: int = 0,
        minutes_before_now: int = 0,
        seconds_before_now: int = 0,
        **kwargs,
    ):
        # We also apply the same - 1 second transformation here as in the _store_metric()
        # method.
        kwargs["started"] = self.adjust_timestamp(
            self.now
            - timedelta(
                days=days_before_now,
                hours=hours_before_now,
                minutes=minutes_before_now,
                seconds=seconds_before_now,
            )
        ).timestamp()

        self.store_session(self.build_session(**kwargs))
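
    # Usage sketch (assumes build_session() accepts a "status" override): store a session
    # that started 30 minutes before the mocked `now`.
    #   self.build_and_store_session(minutes_before_now=30, status="exited")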


class OrganizationMetricMetaIntegrationTestCase(MetricsAPIBaseTestCase):
    def __indexer_record(self, org_id: int, value: str) -> int:
        return indexer.record(use_case_id=UseCaseID.SESSIONS, org_id=org_id, string=value)

    def setUp(self):
        super().setUp()
        self.login_as(user=self.user)
        now = int(time.time())
        org_id = self.organization.id
        self.store_metric(
            org_id=org_id,
            project_id=self.project.id,
            name="metric1",
            timestamp=now,
            tags={
                "tag1": "value1",
                "tag2": "value2",
            },
            type="counter",
            value=1,
            use_case_id=UseCaseKey.RELEASE_HEALTH,
        )
        self.store_metric(
            org_id=org_id,
            project_id=self.project.id,
            name="metric1",
            timestamp=now,
            tags={"tag3": "value3"},
            type="counter",
            value=1,
            use_case_id=UseCaseKey.RELEASE_HEALTH,
        )
        self.store_metric(
            org_id=org_id,
            project_id=self.project.id,
            name="metric2",
            timestamp=now,
            tags={
                "tag4": "value3",
                "tag1": "value2",
                "tag2": "value1",
            },
            type="set",
            value=123,
            use_case_id=UseCaseKey.RELEASE_HEALTH,
        )
        self.store_metric(
            org_id=org_id,
            project_id=self.project.id,
            name="metric3",
            timestamp=now,
            tags={},
            type="set",
            value=123,
            use_case_id=UseCaseKey.RELEASE_HEALTH,
        )


class MonitorTestCase(APITestCase):
    def _create_monitor(self, **kwargs):
        return Monitor.objects.create(
            organization_id=self.organization.id,
            project_id=self.project.id,
            type=MonitorType.CRON_JOB,
            config={
                "schedule": "* * * * *",
                "schedule_type": ScheduleType.CRONTAB,
                "checkin_margin": None,
                "max_runtime": None,
            },
            **kwargs,
        )

    def _create_monitor_environment(self, monitor, name="production", **kwargs):
        environment = Environment.get_or_create(project=self.project, name=name)

        monitorenvironment_defaults = {
            "status": monitor.status,
            **kwargs,
        }

        return MonitorEnvironment.objects.create(
            monitor=monitor, environment=environment, **monitorenvironment_defaults
        )

    def _create_alert_rule(self, monitor):
        rule = Creator(
            name="New Cool Rule",
            owner=None,
            project=self.project,
            action_match="all",
            filter_match="any",
            conditions=[],
            actions=[],
            frequency=5,
        ).call()
        rule.update(source=RuleSource.CRON_MONITOR)

        config = monitor.config
        config["alert_rule_id"] = rule.id
        monitor.config = config
        monitor.save()

        return rule


class MonitorIngestTestCase(MonitorTestCase):
    """
    Base test case which provides support for both styles of legacy ingestion
    endpoints, as well as sets up token and DSN authentication helpers.
    """

    @property
    def endpoint_with_org(self):
        raise NotImplementedError(f"implement for {type(self).__module__}.{type(self).__name__}")

    @property
    def dsn_auth_headers(self):
        return {"HTTP_AUTHORIZATION": f"DSN {self.project_key.dsn_public}"}

    @property
    def token_auth_headers(self):
        return {"HTTP_AUTHORIZATION": f"Bearer {self.token.token}"}

    def setUp(self):
        super().setUp()
        # DSN based auth
        self.project_key = self.create_project_key()

        # Token based auth
        sentry_app = self.create_sentry_app(
            organization=self.organization,
            scopes=["project:write"],
        )
        app = self.create_sentry_app_installation(
            slug=sentry_app.slug, organization=self.organization
        )
        self.token = self.create_internal_integration_token(app, user=self.user)

    def _get_path_functions(self):
        # Monitor paths are supported both with and without an org slug; we test both for as
        # long as we support both. Removing the old URLs takes time and consideration of the
        # cost of breaking lingering references, so permanently removing either path schema
        # remains a TODO.
        return (
            lambda monitor_slug: reverse(self.endpoint, args=[monitor_slug]),
            lambda monitor_slug: reverse(
                self.endpoint_with_org, args=[self.organization.slug, monitor_slug]
            ),
        )
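
    # For a monitor slug like "my-monitor", the two path functions produce roughly
    # "/api/0/monitors/my-monitor/checkins/" and
    # "/api/0/organizations/<org-slug>/monitors/my-monitor/checkins/" (the exact URLs
    # depend on the endpoint names the subclass sets).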