# test_models.py
import pickle
from unittest import mock

import pytest

from sentry import eventstore, nodestore
from sentry.db.models.fields.node import NodeData, NodeIntegrityFailure
from sentry.eventstore.models import Event, GroupEvent
from sentry.grouping.enhancer import Enhancements
from sentry.issues.issue_occurrence import IssueOccurrence
from sentry.issues.occurrence_consumer import process_event_and_issue_occurrence
from sentry.models.environment import Environment
from sentry.snuba.dataset import Dataset
from sentry.testutils.cases import PerformanceIssueTestCase, TestCase
from sentry.testutils.helpers.datetime import before_now, iso_format
from sentry.testutils.pytest.fixtures import django_db_all
from sentry.testutils.silo import region_silo_test
from sentry.testutils.skips import requires_snuba
from sentry.utils import snuba
from tests.sentry.issues.test_utils import OccurrenceTestMixin

pytestmark = [requires_snuba]


@region_silo_test(stable=True)
class EventTest(TestCase, PerformanceIssueTestCase):
    def test_pickling_compat(self):
        event = self.store_event(
            data={
                "message": "Hello World!",
                "tags": {"logger": "foobar", "site": "foo", "server_name": "bar"},
            },
            project_id=self.project.id,
        )

        # Ensure we load and memoize the interfaces as well.
        assert len(event.interfaces) > 0

        # When we pickle an event we need to make sure our canonical code
        # does not appear here or it breaks old workers.
        data = pickle.dumps(event, protocol=2)
        assert b"canonical" not in data

        # For testing we remove the backwards compat support in the
        # `NodeData` as well. Hold on to the real method so that
        # `mock.patch.object` restores it when the block exits.
        nodedata_getstate = NodeData.__getstate__
        with mock.patch.object(NodeData, "__getstate__", nodedata_getstate):
            del NodeData.__getstate__

            # Old worker loading
            event2 = pickle.loads(data)
            assert event2.data == event.data

        assert hasattr(NodeData, "__getstate__")

        # New worker loading
        event2 = pickle.loads(data)
        assert event2.data == event.data

    def test_event_as_dict(self):
        event = self.store_event(data={"message": "Hello World!"}, project_id=self.project.id)

        d = event.as_dict()
        assert d["logentry"] == {"formatted": "Hello World!", "message": None, "params": None}

    def test_email_subject(self):
        event1 = self.store_event(
            data={
                "event_id": "a" * 32,
                "message": "Foo bar",
                "level": "info",
                "fingerprint": ["group-1"],
            },
            project_id=self.project.id,
        )
        event2 = self.store_event(
            data={
                "event_id": "b" * 32,
                "message": "Foo bar",
                "level": "error",
                "fingerprint": ["group-1"],
            },
            project_id=self.project.id,
        )

        assert event1.group is not None
        group = event1.group
        group.level = 30

        assert event1.get_email_subject() == "BAR-1 - Foo bar"
        assert event2.get_email_subject() == "BAR-1 - Foo bar"

    def test_email_subject_with_template(self):
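        # The subject template substitutes $shortID/$title and ${tag:...}
        # values; "$$" renders as a literal "$", and unknown placeholders
        # pass through to the output verbatim, as asserted below.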
        self.project.update_option(
            "mail:subject_template",
            "$shortID - ${tag:environment}@${tag:release} $$ $title ${tag:invalid} $invalid",
        )

        event1 = self.store_event(
            data={
                "event_id": "a" * 32,
                "environment": "production",
                "level": "info",
                "release": "0",
                "message": "baz",
            },
            project_id=self.project.id,
        )

        assert event1.get_email_subject() == "BAR-1 - production@0 $ baz ${tag:invalid} $invalid"

    def test_transaction_email_subject(self):
        self.project.update_option(
            "mail:subject_template",
            "$shortID - ${tag:environment}@${tag:release} $title",
        )

        event = self.create_performance_issue()
        assert event.get_email_subject() == "BAR-1 - production@0.1 N+1 Query"

    def test_as_dict_hides_client_ip(self):
        event = self.store_event(
            data={"sdk": {"name": "foo", "version": "1.0", "client_ip": "127.0.0.1"}},
            project_id=self.project.id,
        )
        result = event.as_dict()
        assert result["sdk"] == {
            "name": "foo",
            "version": "1.0",
            "integrations": None,
            "packages": None,
        }

    def test_get_environment(self):
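        # `get_environment()` caches the environment on the event, so the
        # second lookup below must not hit the database.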
        environment = Environment.get_or_create(self.project, "production")
        event = self.store_event(data={"environment": "production"}, project_id=self.project.id)

        assert event.get_environment() == environment

        with self.assertNumQueries(0):
            assert event.get_environment() == environment

    def test_ip_address(self):
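        # `ip_address` prefers an explicit `user.ip_address` and falls back
        # to the request's REMOTE_ADDR; with neither present it is None.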
        event = self.store_event(
            data={
                "user": {"ip_address": "127.0.0.1"},
                "request": {"url": "http://some.com", "env": {"REMOTE_ADDR": "::1"}},
            },
            project_id=self.project.id,
        )
        assert event.ip_address == "127.0.0.1"

        event = self.store_event(
            data={
                "user": {"ip_address": None},
                "request": {"url": "http://some.com", "env": {"REMOTE_ADDR": "::1"}},
            },
            project_id=self.project.id,
        )
        assert event.ip_address == "::1"

        event = self.store_event(
            data={
                "user": None,
                "request": {"url": "http://some.com", "env": {"REMOTE_ADDR": "::1"}},
            },
            project_id=self.project.id,
        )
        assert event.ip_address == "::1"

        event = self.store_event(
            data={"request": {"url": "http://some.com", "env": {"REMOTE_ADDR": "::1"}}},
            project_id=self.project.id,
        )
        assert event.ip_address == "::1"

        event = self.store_event(
            data={"request": {"url": "http://some.com", "env": {"REMOTE_ADDR": None}}},
            project_id=self.project.id,
        )
        assert event.ip_address is None

        event = self.store_event(data={}, project_id=self.project.id)
        assert event.ip_address is None

    def test_issueless_event(self):
        min_ago = iso_format(before_now(minutes=1))
        event = self.store_event(
            data={
                "event_id": "a" * 32,
                "level": "info",
                "message": "Foo bar",
                "culprit": "app/components/events/eventEntries in map",
                "type": "transaction",
                "timestamp": min_ago,
                "start_timestamp": min_ago,
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
            },
            project_id=self.project.id,
        )
        assert event.group is None
        assert event.culprit == "app/components/events/eventEntries in map"

    def test_snuba_data(self):
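        # An Event hydrated from nodestore and one built from a raw Snuba
        # row should agree on every derived property checked below.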
        self.store_event(
            data={
                "event_id": "a" * 32,
                "message": "Hello World!",
                "tags": {"logger": "foobar", "site": "foo", "server_name": "bar"},
                "user": {"id": "test", "email": "test@test.com"},
                "timestamp": iso_format(before_now(seconds=1)),
            },
            project_id=self.project.id,
        )

        event_from_nodestore = Event(project_id=self.project.id, event_id="a" * 32)
        event_from_snuba = Event(
            project_id=self.project.id,
            event_id="a" * 32,
            snuba_data=snuba.raw_query(
                selected_columns=[
                    "event_id",
                    "project_id",
                    "group_id",
                    "timestamp",
                    "culprit",
                    "location",
                    "message",
                    "title",
                    "type",
                    "transaction",
                    "tags.key",
                    "tags.value",
                    "email",
                    "ip_address",
                    "user_id",
                    "username",
                ],
                filter_keys={"project_id": [self.project.id], "event_id": ["a" * 32]},
                tenant_ids={"referrer": "r", "organization_id": 1234},
            )["data"][0],
        )

        assert event_from_nodestore.event_id == event_from_snuba.event_id
        assert event_from_nodestore.project_id == event_from_snuba.project_id
        assert event_from_nodestore.project == event_from_snuba.project
        assert event_from_nodestore.timestamp == event_from_snuba.timestamp
        assert event_from_nodestore.datetime == event_from_snuba.datetime
        assert event_from_nodestore.title == event_from_snuba.title
        assert event_from_nodestore.message == event_from_snuba.message
        assert event_from_nodestore.platform == event_from_snuba.platform
        assert event_from_nodestore.location == event_from_snuba.location
        assert event_from_nodestore.culprit == event_from_snuba.culprit

        assert event_from_nodestore.get_minimal_user() == event_from_snuba.get_minimal_user()
        assert event_from_nodestore.ip_address == event_from_snuba.ip_address
        assert event_from_nodestore.tags == event_from_snuba.tags

        # Group ID must be fetched from Snuba since it is not present in nodestore
        assert event_from_snuba.group_id
        assert event_from_snuba.group

        assert not event_from_nodestore.group_id
        assert not event_from_nodestore.group

    def test_snuba_data_transaction(self):
        self.store_event(
            data={
                "event_id": "a" * 32,
                "level": "info",
                "message": "Foo bar",
                "culprit": "app/components/events/eventEntries in map",
                "type": "transaction",
                "timestamp": iso_format(before_now(minutes=1)),
                "start_timestamp": iso_format(before_now(minutes=1, seconds=5)),
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
            },
            project_id=self.project.id,
        )

        event_from_nodestore = Event(project_id=self.project.id, event_id="a" * 32)
        event_from_snuba = Event(
            project_id=self.project.id,
            event_id="a" * 32,
            snuba_data=snuba.raw_query(
                dataset=Dataset.Transactions,
                selected_columns=[
                    "event_id",
                    "project_id",
                    "group_ids",
                    "timestamp",
                    "message",
                    "type",
                    "transaction",
                    "tags.key",
                    "tags.value",
                ],
                filter_keys={"project_id": [self.project.id], "event_id": ["a" * 32]},
                tenant_ids={"referrer": "r", "organization_id": 1234},
            )["data"][0],
        )
        # TODO: Remove this once snuba is writing group_ids, and we can create groups as part
        # of self.store_event
        event_from_snuba.groups = [self.group]

        assert event_from_nodestore.event_id == event_from_snuba.event_id
        assert event_from_nodestore.project_id == event_from_snuba.project_id
        assert event_from_nodestore.project == event_from_snuba.project
        assert event_from_nodestore.timestamp == event_from_snuba.timestamp
        assert event_from_nodestore.datetime == event_from_snuba.datetime
        assert event_from_nodestore.title == event_from_snuba.title
        assert event_from_nodestore.message == event_from_snuba.message
        assert event_from_nodestore.platform == event_from_snuba.platform
        assert event_from_nodestore.location == event_from_snuba.location
        assert event_from_nodestore.culprit == event_from_snuba.culprit

        assert event_from_nodestore.get_minimal_user() == event_from_snuba.get_minimal_user()
        assert event_from_nodestore.ip_address == event_from_snuba.ip_address
        assert event_from_nodestore.tags == event_from_snuba.tags

        # Group IDs must be fetched from Snuba since they are not present in nodestore
        assert not event_from_snuba.group_id
        assert event_from_snuba.groups == [self.group]
        assert not event_from_snuba.group

        assert not event_from_nodestore.group_id
        assert not event_from_nodestore.groups
        assert not event_from_nodestore.group

    def test_grouping_reset(self):
        """
        Regression test against a specific mutability bug involving grouping,
        stacktrace normalization and memoized interfaces
        """
        event_data = {
            "exception": {
                "values": [
                    {
                        "type": "Hello",
                        "stacktrace": {
                            "frames": [
                                {
                                    "function": "foo",
                                },
                                {
                                    "function": "bar",
                                },
                            ]
                        },
                    }
                ]
            },
        }

        enhancement = Enhancements.from_config_string(
            """
            function:foo category=foo_like
            category:foo_like -group
            """,
        )

        grouping_config = {
            "enhancements": enhancement.dumps(),
            "id": "mobile:2021-02-12",
        }

        event1 = Event(
            event_id="a" * 32,
            data=event_data,
            project_id=self.project.id,
        )
        variants1 = event1.get_grouping_variants(grouping_config, normalize_stacktraces=True)

        event2 = Event(
            event_id="b" * 32,
            data=event_data,
            project_id=self.project.id,
        )
        event2.interfaces  # Populate cache
        variants2 = event2.get_grouping_variants(grouping_config, normalize_stacktraces=True)

        assert sorted(v.as_dict()["hash"] for v in variants1.values()) == sorted(
            v.as_dict()["hash"] for v in variants2.values()
        )


@region_silo_test(stable=True)
class EventGroupsTest(TestCase):
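    # `Event.groups` can come from an explicit `groups=` argument, from
    # `group_ids`/`group_id` in snuba_data, or from a `group_id` kwarg.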
    def test_none(self):
        event = Event(
            event_id="a" * 32,
            data={
                "level": "info",
                "message": "Foo bar",
                "culprit": "app/components/events/eventEntries in map",
                "type": "transaction",
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
            },
            project_id=self.project.id,
        )
        assert event.groups == []

    def test_snuba(self):
        event = Event(
            event_id="a" * 32,
            data={
                "level": "info",
                "message": "Foo bar",
                "culprit": "app/components/events/eventEntries in map",
                "type": "transaction",
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
            },
            snuba_data={"group_ids": [self.group.id]},
            project_id=self.project.id,
        )
        assert event.groups == [self.group]

    def test_passed_explicitly(self):
        event = Event(
            event_id="a" * 32,
            data={
                "level": "info",
                "message": "Foo bar",
                "culprit": "app/components/events/eventEntries in map",
                "type": "transaction",
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
            },
            project_id=self.project.id,
            groups=[self.group],
        )
        assert event.groups == [self.group]

    def test_from_group(self):
        event = Event(
            event_id="a" * 32,
            data={
                "level": "info",
                "message": "Foo bar",
                "culprit": "app/components/events/eventEntries in map",
                "type": "transaction",
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
            },
            project_id=self.project.id,
            group_id=self.group.id,
        )
        assert event.groups == [self.group]

    def test_from_group_snuba(self):
        event = Event(
            event_id="a" * 32,
            data={
                "level": "info",
                "message": "Foo bar",
                "culprit": "app/components/events/eventEntries in map",
                "type": "transaction",
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
            },
            snuba_data={"group_id": self.group.id},
            project_id=self.project.id,
        )
        assert event.groups == [self.group]


@region_silo_test(stable=True)
class EventBuildGroupEventsTest(TestCase):
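    # `build_group_events()` yields one GroupEvent per group attached to
    # the event.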
    def test_none(self):
        event = Event(
            event_id="a" * 32,
            data={
                "level": "info",
                "message": "Foo bar",
                "culprit": "app/components/events/eventEntries in map",
                "type": "transaction",
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
            },
            project_id=self.project.id,
        )
        assert list(event.build_group_events()) == []

    def test(self):
        event = Event(
            event_id="a" * 32,
            data={
                "level": "info",
                "message": "Foo bar",
                "culprit": "app/components/events/eventEntries in map",
                "type": "transaction",
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
            },
            project_id=self.project.id,
            groups=[self.group],
        )
        assert list(event.build_group_events()) == [GroupEvent.from_event(event, self.group)]

    def test_multiple(self):
        self.group_2 = self.create_group()
        event = Event(
            event_id="a" * 32,
            data={
                "level": "info",
                "message": "Foo bar",
                "culprit": "app/components/events/eventEntries in map",
                "type": "transaction",
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
            },
            project_id=self.project.id,
            groups=[self.group, self.group_2],
        )
        sort_key = lambda group_event: (group_event.event_id, group_event.group_id)
        assert sorted(event.build_group_events(), key=sort_key) == sorted(
            [GroupEvent.from_event(event, self.group), GroupEvent.from_event(event, self.group_2)],
            key=sort_key,
        )


@region_silo_test(stable=True)
class EventForGroupTest(TestCase):
    def test(self):
        event = Event(
            event_id="a" * 32,
            data={
                "level": "info",
                "message": "Foo bar",
                "culprit": "app/components/events/eventEntries in map",
                "type": "transaction",
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
            },
            project_id=self.project.id,
        )
        assert GroupEvent.from_event(event, self.group) == GroupEvent(
            self.project.id, event.event_id, self.group, event.data, event._snuba_data
        )


@region_silo_test(stable=True)
class GroupEventFromEventTest(TestCase):
    def test(self):
        event = Event(
            event_id="a" * 32,
            data={
                "level": "info",
                "message": "Foo bar",
                "culprit": "app/components/events/eventEntries in map",
                "type": "transaction",
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
            },
            project_id=self.project.id,
        )
        group_event = GroupEvent.from_event(event, self.group)
        assert event.for_group(self.group) == group_event

        # Since event didn't have a cached project, we should query here to fetch it
        with self.assertNumQueries(1):
            group_event.project

    def test_project_cache(self):
        event = Event(
            event_id="a" * 32,
            data={
                "level": "info",
                "message": "Foo bar",
                "culprit": "app/components/events/eventEntries in map",
                "type": "transaction",
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
            },
            project_id=self.project.id,
        )
        # This causes the project to be cached
        event.project
        group_event = GroupEvent.from_event(event, self.group)

        # Make sure we don't make additional queries when accessing project here
        with self.assertNumQueries(0):
            group_event.project


@region_silo_test(stable=True)
class GroupEventOccurrenceTest(TestCase, OccurrenceTestMixin):
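    # A GroupEvent lazily fetches its IssueOccurrence via
    # `IssueOccurrence.fetch` and caches it, so repeated accesses should
    # not fetch again.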
    def test(self):
        occurrence_data = self.build_occurrence_data(project_id=self.project.id)
        occurrence, group_info = process_event_and_issue_occurrence(
            occurrence_data,
            event_data={
                "event_id": occurrence_data["event_id"],
                "project_id": occurrence_data["project_id"],
                "level": "info",
            },
        )
        assert group_info is not None

        event = Event(
            occurrence_data["project_id"],
            occurrence_data["event_id"],
            group_info.group.id,
            data={},
            snuba_data={"occurrence_id": occurrence.id},
        )
        assert event.group is not None

        with mock.patch.object(IssueOccurrence, "fetch", wraps=IssueOccurrence.fetch) as fetch_mock:
            group_event = event.for_group(event.group)
            assert group_event.occurrence == occurrence
            assert fetch_mock.call_count == 1

            # Access the property again; the call count shouldn't increase
            # since the occurrence is cached.
            group_event.occurrence
            assert fetch_mock.call_count == 1

            # Clearing the cached value and accessing it again should
            # trigger a second fetch.
            group_event.occurrence = None
            assert group_event.occurrence == occurrence
            assert fetch_mock.call_count == 2


@django_db_all
def test_renormalization(monkeypatch, factories, task_runner, default_project):
    from sentry_relay.processing import StoreNormalizer

    old_normalize = StoreNormalizer.normalize_event
    normalize_mock_calls = []

    def normalize(*args, **kwargs):
        normalize_mock_calls.append(1)
        return old_normalize(*args, **kwargs)

    monkeypatch.setattr("sentry_relay.processing.StoreNormalizer.normalize_event", normalize)

    with task_runner():
        factories.store_event(
            data={"event_id": "a" * 32, "environment": "production"}, project_id=default_project.id
        )

    # Assert we only renormalize this once. If this assertion fails it's likely
    # that you will encounter severe performance issues during event processing
    # or postprocessing.
    assert len(normalize_mock_calls) == 1


@region_silo_test(stable=True)
class EventNodeStoreTest(TestCase):
    def test_event_node_id(self):
        # Create an event without specifying node_id. A node_id should be generated
        e1 = Event(project_id=1, event_id="abc", data={"foo": "bar"})
        assert e1.data.id is not None, "We should have generated a node_id for this event"
        e1_node_id = e1.data.id

        e1.data.save()
        e1_body = nodestore.backend.get(e1_node_id)
        assert e1_body == {"foo": "bar"}, "The event body should be in nodestore"

        e1 = Event(project_id=1, event_id="abc")

        assert e1.data.data == {"foo": "bar"}, "The event body should be loaded from nodestore"
        assert e1.data.id == e1_node_id, "The event's node_id should be the same after load"

        # Event with no data should not be saved to nodestore
        e2 = Event(project_id=1, event_id="mno", data=None)
        e2_node_id = e2.data.id
        assert e2.data.data == {}  # NodeData returns {} by default

        eventstore.backend.bind_nodes([e2], "data")
        assert e2.data.data == {}

        e2_body = nodestore.backend.get(e2_node_id)
        assert e2_body is None

    def test_screams_bloody_murder_when_ref_fails(self):
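        # `bind_ref` stamps node data with its owner's ref (the project id);
        # saving a mismatched ref must raise NodeIntegrityFailure on reload.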
        project1 = self.create_project()
        project2 = self.create_project()
        invalid_event = self.store_event(
            data={
                "event_id": "a" * 32,
                "timestamp": iso_format(before_now(minutes=1)),
                "fingerprint": ["group-1"],
            },
            project_id=project1.id,
        )
        event = self.store_event(
            data={
                "event_id": "b" * 32,
                "timestamp": iso_format(before_now(minutes=1)),
                "fingerprint": ["group-2"],
            },
            project_id=project2.id,
        )
        event.data.bind_ref(invalid_event)
        event.data.save()

        assert event.data.get_ref(event) != event.data.get_ref(invalid_event)

        # Unload node data to force reloading from nodestore
        event.data._node_data = None

        with pytest.raises(NodeIntegrityFailure):
            eventstore.backend.bind_nodes([event])

    def test_accepts_valid_ref(self):
        self.store_event(data={"event_id": "a" * 32}, project_id=self.project.id)
        event = Event(project_id=self.project.id, event_id="a" * 32)
        event.data.bind_ref(event)
        assert event.data.ref == event.project.id

    def test_basic_ref_binding(self):
        event = self.store_event(data={}, project_id=self.project.id)
        assert event.data.get_ref(event) == event.project.id