tasks.py

import contextlib
import logging
import tempfile
from hashlib import sha1

from celery import shared_task
from symbolic import Archive

from difs.models import DebugInformationFile
from difs.stacktrace_processor import StacktraceProcessor
from events.models import Event
from files.models import File, FileBlob
from projects.models import Project

def getLogger():
    return logging.getLogger("glitchtip.difs")


class ChecksumMismatched(Exception):
    pass


class UnsupportedFile(Exception):
    pass


DIF_STATE_CREATED = "created"
DIF_STATE_OK = "ok"
DIF_STATE_NOT_FOUND = "not_found"

@shared_task
def difs_assemble(project_slug, name, checksum, chunks, debug_id):
    """Assemble uploaded chunks into a debug information file for a project."""
    try:
        project = Project.objects.filter(slug=project_slug).get()

        file = difs_get_file_from_chunks(checksum, chunks)
        if file is None:
            file = difs_create_file_from_chunks(name, checksum, chunks)

        difs_create_difs(project, name, file)
    except ChecksumMismatched:
        getLogger().error("difs_assemble: Checksum mismatched: %s", name)
    except Exception as err:
        getLogger().error("difs_assemble: %s", err)

def difs_run_resolve_stacktrace(event_id):
    # Queue stacktrace resolution on a Celery worker.
    difs_resolve_stacktrace.delay(event_id)


@shared_task
def difs_resolve_stacktrace(event_id):
    """Resolve a native stacktrace against the project's debug information files."""
    event = Event.objects.get(event_id=event_id)
    event_json = event.data
    exception = event_json.get("exception")

    if exception is None:
        # It is not a crash report event
        return

    project_id = event.issue.project_id
    difs = DebugInformationFile.objects.filter(project_id=project_id).order_by(
        "-created"
    )
    resolved_stacktraces = []

    for dif in difs:
        if StacktraceProcessor.is_supported(event_json, dif) is False:
            continue
        blobs = [dif.file.blob]
        with difs_concat_file_blobs_to_disk(blobs) as symbol_file:
            remapped_stacktrace = StacktraceProcessor.resolve_stacktrace(
                event_json, symbol_file.name
            )
            if remapped_stacktrace is not None and remapped_stacktrace.score > 0:
                resolved_stacktraces.append(remapped_stacktrace)

    if len(resolved_stacktraces) > 0:
        # Keep the remapping with the highest score.
        best_remapped_stacktrace = max(
            resolved_stacktraces, key=lambda item: item.score
        )
        StacktraceProcessor.update_frames(event, best_remapped_stacktrace.frames)
        event.save()

def difs_get_file_from_chunks(checksum, chunks):
    # Return an existing File whose blob checksums match the uploaded chunks.
    files = File.objects.filter(checksum=checksum)

    for file in files:
        blob = file.blob
        file_chunks = [blob.checksum]
        if file_chunks == chunks:
            return file

    return None

def difs_create_file_from_chunks(name, checksum, chunks):
    # Verify the combined checksum of the uploaded chunks and create a File record.
    blobs = FileBlob.objects.filter(checksum__in=chunks)

    total_checksum = sha1(b"")
    size = 0

    for blob in blobs:
        size = size + blob.blob.size
        with open(blob.blob.path, "rb") as binary_file:
            content = binary_file.read()
            total_checksum.update(content)

    total_checksum = total_checksum.hexdigest()
    if checksum != total_checksum:
        raise ChecksumMismatched()

    file = File(name=name, headers={}, size=size, checksum=checksum)
    file.blob = blobs[0]
    file.save()
    return file

@contextlib.contextmanager
def difs_concat_file_blobs_to_disk(blobs):
    # Concatenate the blobs into a temporary file that can be opened by path.
    output = tempfile.NamedTemporaryFile(delete=False)
    for blob in blobs:
        with open(blob.blob.path, "rb") as binary_file:
            content = binary_file.read()
            output.write(content)

    output.flush()
    output.seek(0)
    try:
        yield output
    finally:
        output.close()

def difs_extract_metadata_from_file(file):
    with difs_concat_file_blobs_to_disk([file.blob]) as _input:
        # Only one kind of file format is supported now
        try:
            archive = Archive.open(_input.name)
        except Exception as err:
            getLogger().error("Extract metadata error: %s", err)
            raise UnsupportedFile() from err
        else:
            return [
                {
                    "arch": obj.arch,
                    "file_format": obj.file_format,
                    "code_id": obj.code_id,
                    "debug_id": obj.debug_id,
                    "kind": obj.kind,
                    "features": list(obj.features),
                    "symbol_type": "native",
                }
                for obj in archive.iter_objects()
            ]

def difs_create_difs(project, name, file):
    # Create a DebugInformationFile for each object found in the uploaded file,
    # unless one already exists for this project and file.
    metadatalist = difs_extract_metadata_from_file(file)
    for metadata in metadatalist:
        dif = DebugInformationFile.objects.filter(
            project_id=project.id, file=file
        ).first()
        if dif is not None:
            continue

        code_id = metadata["code_id"]
        debug_id = metadata["debug_id"]
        arch = metadata["arch"]
        kind = metadata["kind"]
        features = metadata["features"]
        symbol_type = metadata["symbol_type"]

        dif = DebugInformationFile(
            project=project,
            name=name,
            file=file,
            data={
                "arch": arch,
                "debug_id": debug_id,
                "code_id": code_id,
                "kind": kind,
                "features": features,
                "symbol_type": symbol_type,
            },
        )
        dif.save()
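
A minimal usage sketch, not part of tasks.py: the caller names below (handle_difs_assemble_request, on_event_saved) are assumptions for illustration only, while difs_assemble and difs_run_resolve_stacktrace are the tasks defined above.

def handle_difs_assemble_request(project_slug, name, checksum, chunks, debug_id):
    # Hypothetical upload endpoint handler: queue assembly of the uploaded
    # debug information file on a Celery worker.
    difs_assemble.delay(project_slug, name, checksum, chunks, debug_id)


def on_event_saved(event):
    # Hypothetical ingest hook: queue stacktrace resolution for a new event.
    difs_run_resolve_stacktrace(event.event_id)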