
ref: Rename dSYMs to DIFs (#10019)

* ref: Rename ProjectDSymFile to ProjectDebugFile

* db: Create empty migration for model rename

* ref: Rename DSymCache to DIFCache

* ref: Rename all inappropriate uses of dsym to dif

* fix: Use proper file extensions for DIF download

* fix: Update some broken imports
Jan Michael Auer committed ffb8a0c717 · 6 years ago
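
The renames listed in the commit message are mechanical, but they touch many call sites. A minimal sketch of how a caller changes, using only names that appear in the hunks below; the wrapper function fetch_and_store is hypothetical, and project, debug_ids and fileobj stand in for whatever the caller already has:

    from sentry.models import ProjectDebugFile, create_files_from_dif_zip

    def fetch_and_store(project, debug_ids, fileobj):
        # Pre-commit spellings, for comparison:
        #   ProjectDSymFile.dsymcache.fetch_dsyms(project, debug_ids)
        #   create_files_from_dsym_zip(fileobj, project=project)
        paths = ProjectDebugFile.difcache.fetch_difs(project, debug_ids)
        files = create_files_from_dif_zip(fileobj, project=project)
        return paths, files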

+ 154 - 31
src/sentry/api/endpoints/dsym_files.py → src/sentry/api/endpoints/debug_files.py

@@ -1,5 +1,7 @@
 from __future__ import absolute_import
 
+import six
+import jsonschema
 import logging
 import posixpath
 
@@ -13,8 +15,11 @@ from sentry.api.bases.project import ProjectEndpoint, ProjectReleasePermission
 from sentry.api.content_negotiation import ConditionalContentNegotiation
 from sentry.api.serializers import serialize
 from sentry.api.serializers.rest_framework import ListField
-from sentry.models import ProjectDSymFile, create_files_from_dsym_zip, \
-    VersionDSymFile, DSymApp, DSYM_PLATFORMS
+from sentry.models import ChunkFileState, FileBlobOwner, ProjectDebugFile, \
+    VersionDSymFile, DSymApp, DIF_PLATFORMS, create_files_from_dif_zip, \
+    get_assemble_status, set_assemble_status
+from sentry.utils import json
+
 try:
     from django.http import (
         CompatibleStreamingHttpResponse as StreamingHttpResponse, HttpResponse, Http404)
@@ -29,8 +34,8 @@ ERR_FILE_EXISTS = 'A file matching this debug identifier already exists'
 class AssociateDsymSerializer(serializers.Serializer):
     checksums = ListField(child=serializers.CharField(max_length=40))
     platform = serializers.ChoiceField(choices=zip(
-        DSYM_PLATFORMS.keys(),
-        DSYM_PLATFORMS.keys(),
+        DIF_PLATFORMS.keys(),
+        DIF_PLATFORMS.keys(),
     ))
     name = serializers.CharField(max_length=250)
     appId = serializers.CharField(max_length=250)
@@ -42,44 +47,40 @@ def upload_from_request(request, project):
     if 'file' not in request.FILES:
         return Response({'detail': 'Missing uploaded file'}, status=400)
     fileobj = request.FILES['file']
-    files = create_files_from_dsym_zip(fileobj, project=project)
+    files = create_files_from_dif_zip(fileobj, project=project)
     return Response(serialize(files, request.user), status=201)
 
 
-class DSymFilesEndpoint(ProjectEndpoint):
+class DebugFilesEndpoint(ProjectEndpoint):
     doc_section = DocSection.PROJECTS
     permission_classes = (ProjectReleasePermission, )
 
     content_negotiation_class = ConditionalContentNegotiation
 
-    def download(self, project_dsym_id, project):
+    def download(self, debug_file_id, project):
         rate_limited = ratelimits.is_limited(
             project=project,
             key='rl:DSymFilesEndpoint:download:%s:%s' % (
-                project_dsym_id, project.id),
+                debug_file_id, project.id),
             limit=10,
         )
         if rate_limited:
             logger.info('notification.rate_limited',
                         extra={'project_id': project.id,
-                               'project_dsym_id': project_dsym_id})
+                               'project_debug_file_id': debug_file_id})
             return HttpResponse(
                 {
                     'Too many download requests',
                 }, status=403
             )
 
-        debug_file = ProjectDSymFile.objects.filter(
-            id=project_dsym_id
+        debug_file = ProjectDebugFile.objects.filter(
+            id=debug_file_id
         ).first()
 
         if debug_file is None:
             raise Http404
 
-        suffix = ".dSYM"
-        if debug_file.dsym_type == 'proguard' and debug_file.object_name == 'proguard-mapping':
-            suffix = ".txt"
-
         try:
             fp = debug_file.file.getfile()
             response = StreamingHttpResponse(
@@ -89,31 +90,31 @@ class DSymFilesEndpoint(ProjectEndpoint):
             response['Content-Length'] = debug_file.file.size
             response['Content-Disposition'] = 'attachment; filename="%s%s"' % (posixpath.basename(
                 debug_file.debug_id
-            ), suffix)
+            ), debug_file.file_extension)
             return response
         except IOError:
             raise Http404
 
     def get(self, request, project):
         """
-        List a Project's DSym Files
-        ```````````````````````````
+        List a Project's Debug Information Files
+        ````````````````````````````````````````
 
-        Retrieve a list of dsym files for a given project.
+        Retrieve a list of debug information files for a given project.
 
         :pparam string organization_slug: the slug of the organization the
                                           release belongs to.
         :pparam string project_slug: the slug of the project to list the
-                                     dsym files of.
+                                     DIFs of.
         :auth: required
         """
 
         apps = DSymApp.objects.filter(project=project)
-        dsym_files = VersionDSymFile.objects.filter(
+        debug_files = VersionDSymFile.objects.filter(
             dsym_app=apps
         ).select_related('dsym_file').order_by('-build', 'version')
 
-        file_list = ProjectDSymFile.objects.filter(
+        file_list = ProjectDebugFile.objects.filter(
             project=project,
             versiondsymfile__isnull=True,
         ).select_related('file')[:100]
@@ -125,7 +126,7 @@ class DSymFilesEndpoint(ProjectEndpoint):
         return Response(
             {
                 'apps': serialize(list(apps)),
-                'debugSymbols': serialize(list(dsym_files)),
+                'debugSymbols': serialize(list(debug_files)),
                 'unreferencedDebugSymbols': serialize(list(file_list)),
             }
         )
@@ -135,7 +136,7 @@ class DSymFilesEndpoint(ProjectEndpoint):
         Upload a New File
         `````````````````
 
-        Upload a new dsym file for the given release.
+        Upload a new debug information file for the given release.
 
         Unlike other API requests, files must be uploaded using the
         traditional multipart/form-data content-type.
@@ -154,13 +155,13 @@ class DSymFilesEndpoint(ProjectEndpoint):
         return upload_from_request(request, project=project)
 
 
-class UnknownDSymFilesEndpoint(ProjectEndpoint):
+class UnknownDebugFilesEndpoint(ProjectEndpoint):
     doc_section = DocSection.PROJECTS
     permission_classes = (ProjectReleasePermission, )
 
     def get(self, request, project):
         checksums = request.GET.getlist('checksums')
-        missing = ProjectDSymFile.objects.find_missing(
+        missing = ProjectDebugFile.objects.find_missing(
             checksums, project=project)
         return Response({'missing': missing})
 
@@ -182,26 +183,148 @@ class AssociateDSymFilesEndpoint(ProjectEndpoint):
             app_id=data['appId'],
             project=project,
             data={'name': data['name']},
-            platform=DSYM_PLATFORMS[data['platform']],
+            platform=DIF_PLATFORMS[data['platform']],
         )
 
         # There can be concurrent deletes on the underlying file object
         # that the project dsym file references.  This means that we can
         # get errors if we don't prefetch this when serializing.  Additionally
         # performance wise it's a better idea to fetch this in one go.
-        dsym_files = ProjectDSymFile.objects.find_by_checksums(
+        difs = ProjectDebugFile.objects.find_by_checksums(
             data['checksums'], project).select_related('file')
 
-        for dsym_file in dsym_files:
+        for dif in difs:
             version_dsym_file, created = VersionDSymFile.objects.get_or_create(
-                dsym_file=dsym_file,
+                dsym_file=dif,
                 version=data['version'],
                 build=data.get('build'),
                 defaults=dict(dsym_app=dsym_app),
             )
             if created:
-                associated.append(dsym_file)
+                associated.append(dif)
 
         return Response({
             'associatedDsymFiles': serialize(associated, request.user),
         })
+
+
+def find_missing_chunks(organization, chunks):
+    """Returns a list of chunks which are missing for an org."""
+    owned = set(FileBlobOwner.objects.filter(
+        blob__checksum__in=chunks,
+        organization=organization,
+    ).values_list('blob__checksum', flat=True))
+    return list(set(chunks) - owned)
+
+
+class DifAssembleEndpoint(ProjectEndpoint):
+    permission_classes = (ProjectReleasePermission, )
+
+    def post(self, request, project):
+        """
+        Assemble one or multiple chunks (FileBlob) into debug files
+        ````````````````````````````````````````````````````````````
+
+        :auth: required
+        """
+        schema = {
+            "type": "object",
+            "patternProperties": {
+                "^[0-9a-f]{40}$": {
+                    "type": "object",
+                    "required": ["name", "chunks"],
+                    "properties": {
+                        "name": {"type": "string"},
+                        "chunks": {
+                            "type": "array",
+                            "items": {"type": "string"}
+                        }
+                    },
+                    "additionalProperties": False
+                }
+            },
+            "additionalProperties": False
+        }
+
+        try:
+            files = json.loads(request.body)
+            jsonschema.validate(files, schema)
+        except jsonschema.ValidationError as e:
+            return Response({'error': str(e).splitlines()[0]},
+                            status=400)
+        except BaseException as e:
+            return Response({'error': 'Invalid json body'},
+                            status=400)
+
+        file_response = {}
+
+        from sentry.tasks.assemble import assemble_dif
+        for checksum, file_to_assemble in six.iteritems(files):
+            name = file_to_assemble.get('name', None)
+            chunks = file_to_assemble.get('chunks', [])
+
+            # First, check the cached assemble status. During assembling, a
+            # ProjectDebugFile will be created and we need to prevent a race
+            # condition.
+            state, detail = get_assemble_status(project, checksum)
+            if state is not None:
+                file_response[checksum] = {
+                    'state': state,
+                    'detail': detail,
+                    'missingChunks': [],
+                }
+                continue
+
+            # Next, check if this project already owns the ProjectDebugFile.
+            # This can under rare circumstances yield more than one file
+            # which is why we use first() here instead of get().
+            dif = ProjectDebugFile.objects.filter(
+                project=project,
+                file__checksum=checksum
+            ).select_related('file').first()
+            if dif is not None:
+                file_response[checksum] = {
+                    'state': ChunkFileState.OK,
+                    'detail': None,
+                    'missingChunks': [],
+                    'dif': serialize(dif),
+                }
+                continue
+
+            # There is neither a known file nor a cached state, so we will
+            # have to create a new file.  Assure that there are checksums.
+            # If not, we assume this is a poll and report NOT_FOUND
+            if not chunks:
+                file_response[checksum] = {
+                    'state': ChunkFileState.NOT_FOUND,
+                    'missingChunks': [],
+                }
+                continue
+
+            # Check if all requested chunks have been uploaded.
+            missing_chunks = find_missing_chunks(project.organization, chunks)
+            if missing_chunks:
+                file_response[checksum] = {
+                    'state': ChunkFileState.NOT_FOUND,
+                    'missingChunks': missing_chunks,
+                }
+                continue
+
+            # We don't have a state yet, this means we can now start
+            # an assemble job in the background.
+            set_assemble_status(project, checksum, state)
+            assemble_dif.apply_async(
+                kwargs={
+                    'project_id': project.id,
+                    'name': name,
+                    'checksum': checksum,
+                    'chunks': chunks,
+                }
+            )
+
+            file_response[checksum] = {
+                'state': ChunkFileState.CREATED,
+                'missingChunks': [],
+            }
+
+        return Response(file_response, status=200)
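
For reference, a request that satisfies the schema validated by DifAssembleEndpoint.post above might look like the sketch below. The checksums, chunk digests, slugs and URL path are invented placeholders (the exact route for the assemble endpoint is not shown in this diff), and the Bearer token header is an assumption; only the body shape comes from the schema.

    import requests

    # Keys are SHA1 checksums of the complete debug files to assemble;
    # "chunks" lists the SHA1 checksums of the already-uploaded FileBlobs.
    body = {
        "9206bc176d6ff23e7c5f20d987e0a8a150b7dbbd": {
            "name": "MyApp.app.dSYM",
            "chunks": [
                "f3bdb2ed6d2b431e01fe317d20dfcba3c9b57e84",
                "e5419a5b5e9b2467fc3b0ecccf0d0a75bf4d1d6e",
            ],
        },
    }

    # Hypothetical route; substitute the real assemble URL for the project.
    resp = requests.post(
        "https://sentry.example.com/api/0/projects/acme/myapp/files/difs/assemble/",
        json=body,
        headers={"Authorization": "Bearer <api-token>"},
    )

Per the handler above, the response maps each checksum to a state (ChunkFileState.CREATED, OK or NOT_FOUND, or a cached error state) together with any missingChunks that still have to be uploaded before assembly can start.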

+ 0 - 134
src/sentry/api/endpoints/dif_files.py

@@ -1,134 +0,0 @@
-from __future__ import absolute_import
-
-import six
-import jsonschema
-
-from rest_framework.response import Response
-
-from sentry.utils import json
-from sentry.api.serializers import serialize
-from sentry.api.bases.project import ProjectEndpoint, ProjectReleasePermission
-from sentry.models import ChunkFileState, ProjectDSymFile, FileBlobOwner, \
-    get_assemble_status, set_assemble_status
-
-
-def find_missing_chunks(organization, chunks):
-    """Returns a list of chunks which are missing for an org."""
-    owned = set(FileBlobOwner.objects.filter(
-        blob__checksum__in=chunks,
-        organization=organization,
-    ).values_list('blob__checksum', flat=True))
-    return list(set(chunks) - owned)
-
-
-class DifAssembleEndpoint(ProjectEndpoint):
-    permission_classes = (ProjectReleasePermission, )
-
-    def post(self, request, project):
-        """
-        Assmble one or multiple chunks (FileBlob) into dsym files
-        `````````````````````````````````````````````````````````
-
-        :auth: required
-        """
-        schema = {
-            "type": "object",
-            "patternProperties": {
-                "^[0-9a-f]{40}$": {
-                    "type": "object",
-                    "required": ["name", "chunks"],
-                    "properties": {
-                        "name": {"type": "string"},
-                        "chunks": {
-                            "type": "array",
-                            "items": {"type": "string"}
-                        }
-                    },
-                    "additionalProperties": False
-                }
-            },
-            "additionalProperties": False
-        }
-
-        try:
-            files = json.loads(request.body)
-            jsonschema.validate(files, schema)
-        except jsonschema.ValidationError as e:
-            return Response({'error': str(e).splitlines()[0]},
-                            status=400)
-        except BaseException as e:
-            return Response({'error': 'Invalid json body'},
-                            status=400)
-
-        file_response = {}
-
-        from sentry.tasks.assemble import assemble_dif
-        for checksum, file_to_assemble in six.iteritems(files):
-            name = file_to_assemble.get('name', None)
-            chunks = file_to_assemble.get('chunks', [])
-
-            # First, check the cached assemble status. During assembling, a
-            # ProjectDSymFile will be created and we need to prevent a race
-            # condition.
-            state, detail = get_assemble_status(project, checksum)
-            if state is not None:
-                file_response[checksum] = {
-                    'state': state,
-                    'detail': detail,
-                    'missingChunks': [],
-                }
-                continue
-
-            # Next, check if this project already owns the DSymFile.
-            # This can under rare circumstances yield more than one file
-            # which is why we use first() here instead of get().
-            dif = ProjectDSymFile.objects.filter(
-                project=project,
-                file__checksum=checksum
-            ).select_related('file').first()
-            if dif is not None:
-                file_response[checksum] = {
-                    'state': ChunkFileState.OK,
-                    'detail': None,
-                    'missingChunks': [],
-                    'dif': serialize(dif),
-                }
-                continue
-
-            # There is neither a known file nor a cached state, so we will
-            # have to create a new file.  Assure that there are checksums.
-            # If not, we assume this is a poll and report NOT_FOUND
-            if not chunks:
-                file_response[checksum] = {
-                    'state': ChunkFileState.NOT_FOUND,
-                    'missingChunks': [],
-                }
-                continue
-
-            # Check if all requested chunks have been uploaded.
-            missing_chunks = find_missing_chunks(project.organization, chunks)
-            if missing_chunks:
-                file_response[checksum] = {
-                    'state': ChunkFileState.NOT_FOUND,
-                    'missingChunks': missing_chunks,
-                }
-                continue
-
-            # We don't have a state yet, this means we can now start
-            # an assemble job in the background.
-            set_assemble_status(project, checksum, state)
-            assemble_dif.apply_async(
-                kwargs={
-                    'project_id': project.id,
-                    'name': name,
-                    'checksum': checksum,
-                    'chunks': chunks,
-                }
-            )
-
-            file_response[checksum] = {
-                'state': ChunkFileState.CREATED,
-                'missingChunks': [],
-            }
-
-        return Response(file_response, status=200)

+ 5 - 5
src/sentry/api/serializers/models/dsym_file.py → src/sentry/api/serializers/models/debug_file.py

@@ -3,11 +3,11 @@ from __future__ import absolute_import
 import six
 
 from sentry.api.serializers import Serializer, register, serialize
-from sentry.models import (ProjectDSymFile, VersionDSymFile, DSymApp, DSYM_PLATFORMS_REVERSE)
+from sentry.models import (ProjectDebugFile, VersionDSymFile, DSymApp, DIF_PLATFORMS_REVERSE)
 
 
-@register(ProjectDSymFile)
-class DSymFileSerializer(Serializer):
+@register(ProjectDebugFile)
+class DebugFileSerializer(Serializer):
     def serialize(self, obj, attrs, user):
         d = {
             'id': six.text_type(obj.id),
@@ -15,7 +15,7 @@ class DSymFileSerializer(Serializer):
             'debugId': obj.debug_id,
             'cpuName': obj.cpu_name,
             'objectName': obj.object_name,
-            'symbolType': obj.dsym_type,
+            'symbolType': obj.dif_type,
             'headers': obj.file.headers,
             'size': obj.file.size,
             'sha1': obj.file.checksum,
@@ -46,7 +46,7 @@ class DSymAppSerializer(Serializer):
             'iconUrl': obj.data.get('icon_url', None),
             'appId': six.text_type(obj.app_id),
             'name': obj.data.get('name', None),
-            'platform': DSYM_PLATFORMS_REVERSE.get(obj.platform) or 'unknown',
+            'platform': DIF_PLATFORMS_REVERSE.get(obj.platform) or 'unknown',
             # XXX: this should be renamed.  It's currently only used in
             # the not yet merged itunes connect plugin (ios, tvos etc.)
             'platforms': ', '.join(obj.data.get('platforms', [])),
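
Assuming the renamed serializer above, a single ProjectDebugFile entry in an API response would look roughly like this; only the fields visible in the hunk are shown and every value is made up for illustration:

    # Illustrative DebugFileSerializer output; values are invented.
    example_debug_file = {
        "id": "42",
        "debugId": "df8a23c8-8d4b-3a8d-a635-24f1394005d2",
        "cpuName": "arm64",
        "objectName": "CameraKit",
        "symbolType": "macho",  # obj.dif_type after the rename
        "headers": {"Content-Type": "application/x-mach-binary"},
        "size": 1024,
        "sha1": "9206bc176d6ff23e7c5f20d987e0a8a150b7dbbd",
    }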

+ 4 - 5
src/sentry/api/urls.py

@@ -149,9 +149,8 @@ from .endpoints.project_users import ProjectUsersEndpoint
 from .endpoints.filechange import CommitFileChangeEndpoint
 from .endpoints.issues_resolved_in_release import IssuesResolvedInReleaseEndpoint
 from .endpoints.release_deploys import ReleaseDeploysEndpoint
-from .endpoints.dsym_files import DSymFilesEndpoint, \
-    UnknownDSymFilesEndpoint, AssociateDSymFilesEndpoint
-from .endpoints.dif_files import DifAssembleEndpoint
+from .endpoints.debug_files import DebugFilesEndpoint, DifAssembleEndpoint, \
+    UnknownDebugFilesEndpoint, AssociateDSymFilesEndpoint
 from .endpoints.shared_group_details import SharedGroupDetailsEndpoint
 from .endpoints.system_health import SystemHealthEndpoint
 from .endpoints.system_options import SystemOptionsEndpoint
@@ -732,7 +731,7 @@ urlpatterns = patterns(
     ),
     url(
         r'^projects/(?P<organization_slug>[^\/]+)/(?P<project_slug>[^\/]+)/files/dsyms/$',
-        DSymFilesEndpoint.as_view(),
+        DebugFilesEndpoint.as_view(),
         name='sentry-api-0-dsym-files'
     ),
     url(
@@ -742,7 +741,7 @@ urlpatterns = patterns(
     ),
     url(
         r'^projects/(?P<organization_slug>[^\/]+)/(?P<project_slug>[^\/]+)/files/dsyms/unknown/$',
-        UnknownDSymFilesEndpoint.as_view(),
+        UnknownDebugFilesEndpoint.as_view(),
         name='sentry-api-0-unknown-dsym-files'
     ),
     url(

+ 2 - 2
src/sentry/bgtasks/clean_dsymcache.py

@@ -1,9 +1,9 @@
 from __future__ import absolute_import
 
 from sentry.bgtasks.api import bgtask
-from sentry.models import ProjectDSymFile
+from sentry.models import ProjectDebugFile
 
 
 @bgtask()
 def clean_dsymcache():
-    ProjectDSymFile.dsymcache.clear_old_entries()
+    ProjectDebugFile.difcache.clear_old_entries()

+ 2 - 2
src/sentry/constants.py

@@ -220,8 +220,8 @@ FILTER_MASK = '[Filtered]'
 # Maximum length of a symbol
 MAX_SYM = 256
 
-# Known dsym mimetypes
-KNOWN_DSYM_TYPES = {
+# Known debug information file mimetypes
+KNOWN_DIF_TYPES = {
     'text/x-breakpad': 'breakpad',
     'application/x-mach-binary': 'macho',
     'application/x-elf-binary': 'elf',
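
The renamed mapping is keyed by mimetype, so a lookup like the following sketch (not part of this commit; the "unknown" fallback is an assumption) resolves an upload's content type to the short type name that the serializer exposes as symbolType:

    from sentry.constants import KNOWN_DIF_TYPES

    dif_type = KNOWN_DIF_TYPES.get("application/x-mach-binary", "unknown")
    assert dif_type == "macho"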

+ 1 - 1
src/sentry/deletions/defaults/project.py

@@ -43,7 +43,7 @@ class ProjectDeletionTask(ModelDeletionTask):
         # in bulk
         # Release needs to handle deletes after Group is cleaned up as the foreign
         # key is protected
-        model_list = (models.Group, models.ReleaseProject, models.ReleaseProjectEnvironment, models.ProjectDSymFile,
+        model_list = (models.Group, models.ReleaseProject, models.ReleaseProjectEnvironment, models.ProjectDebugFile,
                       models.ProjectSymCacheFile)
         relations.extend(
             [ModelRelation(m, {'project_id': instance.id}, ModelDeletionTask) for m in model_list]

+ 5 - 5
src/sentry/lang/java/plugin.py

@@ -5,7 +5,7 @@ import six
 from symbolic import ProguardMappingView
 from sentry.plugins import Plugin2
 from sentry.stacktraces import StacktraceProcessor
-from sentry.models import ProjectDSymFile, EventError
+from sentry.models import ProjectDebugFile, EventError
 from sentry.reprocessing import report_processing_issue
 
 FRAME_CACHE_VERSION = 2
@@ -41,17 +41,17 @@ class JavaStacktraceProcessor(StacktraceProcessor):
         if not self.available:
             return False
 
-        dsym_paths = ProjectDSymFile.dsymcache.fetch_dsyms(self.project, self.images)
+        dif_paths = ProjectDebugFile.difcache.fetch_difs(self.project, self.images)
         self.mapping_views = []
 
         for debug_id in self.images:
             error_type = None
 
-            dsym_path = dsym_paths.get(debug_id)
-            if dsym_path is None:
+            dif_path = dif_paths.get(debug_id)
+            if dif_path is None:
                 error_type = EventError.PROGUARD_MISSING_MAPPING
             else:
-                view = ProguardMappingView.from_path(dsym_path)
+                view = ProguardMappingView.from_path(dif_path)
                 if not view.has_line_info:
                     error_type = EventError.PROGUARD_MISSING_LINENO
                 else:

+ 16 - 16
src/sentry/lang/native/plugin.py

@@ -9,7 +9,7 @@ from symbolic import parse_addr, find_best_instruction, arch_get_ip_reg_name, \
 
 from sentry import options
 from django.db import transaction, IntegrityError
-from sentry.models import VersionDSymFile, DSymPlatform, DSymApp
+from sentry.models import VersionDSymFile, DifPlatform, DSymApp
 from sentry.plugins import Plugin2
 from sentry.lang.native.symbolizer import Symbolizer, SymbolicationFailed
 from sentry.lang.native.utils import \
@@ -34,7 +34,7 @@ class NativeStacktraceProcessor(StacktraceProcessor):
         debug_meta = self.data.get('debug_meta')
         self.arch = cpu_name_from_data(self.data)
         self.sym = None
-        self.dsyms_referenced = set()
+        self.difs_referenced = set()
         if debug_meta:
             self.available = True
             self.debug_meta = debug_meta
@@ -47,10 +47,10 @@ class NativeStacktraceProcessor(StacktraceProcessor):
 
     def close(self):
         StacktraceProcessor.close(self)
-        if self.dsyms_referenced:
+        if self.difs_referenced:
             metrics.incr(
                 'dsyms.processed',
-                amount=len(self.dsyms_referenced),
+                amount=len(self.difs_referenced),
                 skip_internal=True,
                 tags={
                     'project_id': self.project.id,
@@ -109,7 +109,7 @@ class NativeStacktraceProcessor(StacktraceProcessor):
         processable_frame.data = {
             'instruction_addr': instr_addr,
             'obj': obj,
-            'obj_uuid': obj.id if obj is not None else None,
+            'debug_id': obj.id if obj is not None else None,
             'symbolserver_match': None,
         }
 
@@ -132,32 +132,32 @@ class NativeStacktraceProcessor(StacktraceProcessor):
             return False
 
         referenced_images = set(
-            pf.data['obj_uuid'] for pf in processing_task.iter_processable_frames(self)
-            if pf.cache_value is None and pf.data['obj_uuid'] is not None
+            pf.data['debug_id'] for pf in processing_task.iter_processable_frames(self)
+            if pf.cache_value is None and pf.data['debug_id'] is not None
         )
 
         app_info = version_build_from_data(self.data)
         if app_info is not None:
-            def on_referenced(dsym_file):
+            def on_referenced(dif):
                 dsym_app = DSymApp.objects.create_or_update_app(
                     sync_id=None,
                     app_id=app_info.id,
                     project=self.project,
                     data={'name': app_info.name},
-                    platform=DSymPlatform.APPLE,
+                    platform=DifPlatform.APPLE,
                     no_fetch=True
                 )
                 try:
                     with transaction.atomic():
                         version_dsym_file, created = VersionDSymFile.objects.get_or_create(
-                            dsym_file=dsym_file,
+                            dsym_file=dif,
                             version=app_info.version,
                             build=app_info.build,
                             defaults=dict(dsym_app=dsym_app),
                         )
                 except IntegrityError:
                     # XXX: this can currently happen because we only
-                    # support one app per dsym file.  Since this can
+                    # support one app per debug file.  Since this can
                     # happen in some cases anyways we ignore it.
                     pass
         else:
@@ -167,7 +167,7 @@ class NativeStacktraceProcessor(StacktraceProcessor):
             self.project,
             self.object_lookup,
             referenced_images=referenced_images,
-            on_dsym_file_referenced=on_referenced
+            on_dif_referenced=on_referenced
         )
 
         if options.get('symbolserver.enabled'):
@@ -230,9 +230,9 @@ class NativeStacktraceProcessor(StacktraceProcessor):
             if raw_frame.get('in_app') is None:
                 raw_frame['in_app'] = in_app
 
-            obj_uuid = processable_frame.data['obj_uuid']
-            if obj_uuid is not None:
-                self.dsyms_referenced.add(obj_uuid)
+            debug_id = processable_frame.data['debug_id']
+            if debug_id is not None:
+                self.difs_referenced.add(debug_id)
 
             try:
                 symbolicated_frames = self.sym.symbolize_frame(
@@ -261,7 +261,7 @@ class NativeStacktraceProcessor(StacktraceProcessor):
                 # we will not have any user facing event or error showing
                 # up at all.  We want to keep this here though in case we
                 # do not want to report some processing issues (eg:
-                # optional dsyms)
+                # optional difs)
                 errors = []
                 if e.is_user_fixable or e.is_sdk_failure:
                     errors.append(e.get_data())

+ 8 - 8
src/sentry/lang/native/symbolizer.py

@@ -7,7 +7,7 @@ from symbolic import SymbolicError, ObjectLookup, LineInfo, parse_addr
 
 from sentry.utils.safe import trim
 from sentry.utils.compat import implements_to_string
-from sentry.models import EventError, ProjectDSymFile
+from sentry.models import EventError, ProjectDebugFile
 from sentry.lang.native.utils import image_name, rebase_addr
 from sentry.constants import MAX_SYM, NATIVE_UNKNOWN_STRING
 
@@ -116,15 +116,15 @@ class Symbolizer(object):
     """
 
     def __init__(self, project, object_lookup, referenced_images,
-                 on_dsym_file_referenced=None):
+                 on_dif_referenced=None):
         if not isinstance(object_lookup, ObjectLookup):
             object_lookup = ObjectLookup(object_lookup)
         self.object_lookup = object_lookup
 
         self.symcaches, self.symcaches_conversion_errors = \
-            ProjectDSymFile.dsymcache.get_symcaches(
+            ProjectDebugFile.difcache.get_symcaches(
                 project, referenced_images,
-                on_dsym_file_referenced=on_dsym_file_referenced,
+                on_dif_referenced=on_dif_referenced,
                 with_conversion_errors=True)
 
     def _process_frame(self, sym, obj, package=None, addr_off=0):
@@ -198,8 +198,8 @@ class Symbolizer(object):
         # Otherwise, yeah, let's just say it's in_app
         return True
 
-    def _is_optional_dsym(self, obj, sdk_info=None):
-        """Checks if this is a dsym that is optional."""
+    def _is_optional_dif(self, obj, sdk_info=None):
+        """Checks if this is an optional debug information file."""
         # Frames that are not in the app are not considered optional.  In
         # theory we should never reach this anyways.
         if not self.is_image_from_app_bundle(obj, sdk_info=sdk_info):
@@ -232,7 +232,7 @@ class Symbolizer(object):
                     type=EventError.NATIVE_BAD_DSYM,
                     obj=obj
                 )
-            if self._is_optional_dsym(obj, sdk_info=sdk_info):
+            if self._is_optional_dif(obj, sdk_info=sdk_info):
                 type = EventError.NATIVE_MISSING_OPTIONALLY_BUNDLED_DSYM
             else:
                 type = EventError.NATIVE_MISSING_DSYM
@@ -248,7 +248,7 @@ class Symbolizer(object):
         if not rv:
             # For some frameworks we are willing to ignore missing symbol
             # errors.
-            if self._is_optional_dsym(obj, sdk_info=sdk_info):
+            if self._is_optional_dif(obj, sdk_info=sdk_info):
                 return []
             raise SymbolicationFailed(
                 type=EventError.NATIVE_MISSING_SYMBOL, obj=obj)

Some files were not shown because too many files changed in this diff