@@ -1,5 +1,7 @@
 from __future__ import absolute_import
 
+import six
+import jsonschema
 import logging
 import posixpath
 
@@ -13,8 +15,11 @@ from sentry.api.bases.project import ProjectEndpoint, ProjectReleasePermission
 from sentry.api.content_negotiation import ConditionalContentNegotiation
 from sentry.api.serializers import serialize
 from sentry.api.serializers.rest_framework import ListField
-from sentry.models import ProjectDSymFile, create_files_from_dsym_zip, \
-    VersionDSymFile, DSymApp, DSYM_PLATFORMS
+from sentry.models import ChunkFileState, FileBlobOwner, ProjectDebugFile, \
+    VersionDSymFile, DSymApp, DIF_PLATFORMS, create_files_from_dif_zip, \
+    get_assemble_status, set_assemble_status
+from sentry.utils import json
+
 try:
     from django.http import (
         CompatibleStreamingHttpResponse as StreamingHttpResponse, HttpResponse, Http404)
@@ -29,8 +34,8 @@ ERR_FILE_EXISTS = 'A file matching this debug identifier already exists'
 class AssociateDsymSerializer(serializers.Serializer):
     checksums = ListField(child=serializers.CharField(max_length=40))
     platform = serializers.ChoiceField(choices=zip(
-        DSYM_PLATFORMS.keys(),
-        DSYM_PLATFORMS.keys(),
+        DIF_PLATFORMS.keys(),
+        DIF_PLATFORMS.keys(),
     ))
     name = serializers.CharField(max_length=250)
     appId = serializers.CharField(max_length=250)
@@ -42,44 +47,36 @@ def upload_from_request(request, project):
     if 'file' not in request.FILES:
         return Response({'detail': 'Missing uploaded file'}, status=400)
     fileobj = request.FILES['file']
-    files = create_files_from_dsym_zip(fileobj, project=project)
+    files = create_files_from_dif_zip(fileobj, project=project)
     return Response(serialize(files, request.user), status=201)
 
 
-class DSymFilesEndpoint(ProjectEndpoint):
+class DebugFilesEndpoint(ProjectEndpoint):
     doc_section = DocSection.PROJECTS
     permission_classes = (ProjectReleasePermission, )
 
     content_negotiation_class = ConditionalContentNegotiation
 
-    def download(self, project_dsym_id, project):
+    def download(self, debug_file_id, project):
         rate_limited = ratelimits.is_limited(
             project=project,
             key='rl:DSymFilesEndpoint:download:%s:%s' % (
-                project_dsym_id, project.id),
+                debug_file_id, project.id),
             limit=10,
         )
         if rate_limited:
             logger.info('notification.rate_limited',
                         extra={'project_id': project.id,
-                               'project_dsym_id': project_dsym_id})
-            return HttpResponse(
-                {
-                    'Too many download requests',
-                }, status=403
-            )
+                               'project_debug_file_id': debug_file_id})
+            return HttpResponse('Too many download requests', status=403)
 
-        debug_file = ProjectDSymFile.objects.filter(
-            id=project_dsym_id
+        debug_file = ProjectDebugFile.objects.filter(
+            id=debug_file_id
         ).first()
 
         if debug_file is None:
             raise Http404
 
-        suffix = ".dSYM"
-        if debug_file.dsym_type == 'proguard' and debug_file.object_name == 'proguard-mapping':
-            suffix = ".txt"
-
         try:
             fp = debug_file.file.getfile()
             response = StreamingHttpResponse(
@@ -89,31 +86,31 @@ class DSymFilesEndpoint(ProjectEndpoint):
             response['Content-Length'] = debug_file.file.size
             response['Content-Disposition'] = 'attachment; filename="%s%s"' % (posixpath.basename(
                 debug_file.debug_id
-            ), suffix)
+            ), debug_file.file_extension)
             return response
         except IOError:
             raise Http404
 
     def get(self, request, project):
         """
-        List a Project's DSym Files
-        ```````````````````````````
+        List a Project's Debug Information Files
+        ````````````````````````````````````````
 
-        Retrieve a list of dsym files for a given project.
+        Retrieve a list of debug information files for a given project.
 
         :pparam string organization_slug: the slug of the organization the
                                           release belongs to.
         :pparam string project_slug: the slug of the project to list the
-                                     dsym files of.
+                                     DIFs of.
         :auth: required
         """
         apps = DSymApp.objects.filter(project=project)
-        dsym_files = VersionDSymFile.objects.filter(
+        debug_files = VersionDSymFile.objects.filter(
             dsym_app=apps
         ).select_related('dsym_file').order_by('-build', 'version')
 
-        file_list = ProjectDSymFile.objects.filter(
+        file_list = ProjectDebugFile.objects.filter(
             project=project,
             versiondsymfile__isnull=True,
         ).select_related('file')[:100]
@@ -125,7 +122,7 @@ class DSymFilesEndpoint(ProjectEndpoint):
         return Response(
             {
                 'apps': serialize(list(apps)),
-                'debugSymbols': serialize(list(dsym_files)),
+                'debugSymbols': serialize(list(debug_files)),
                 'unreferencedDebugSymbols': serialize(list(file_list)),
             }
         )
@@ -135,7 +132,7 @@ class DSymFilesEndpoint(ProjectEndpoint):
         Upload a New File
         `````````````````
 
-        Upload a new dsym file for the given release.
+        Upload a new debug information file for the given release.
 
         Unlike other API requests, files must be uploaded using the
         traditional multipart/form-data content-type.
@@ -154,13 +151,13 @@ class DSymFilesEndpoint(ProjectEndpoint):
         return upload_from_request(request, project=project)
 
 
-class UnknownDSymFilesEndpoint(ProjectEndpoint):
+class UnknownDebugFilesEndpoint(ProjectEndpoint):
     doc_section = DocSection.PROJECTS
     permission_classes = (ProjectReleasePermission, )
 
     def get(self, request, project):
         checksums = request.GET.getlist('checksums')
-        missing = ProjectDSymFile.objects.find_missing(
+        missing = ProjectDebugFile.objects.find_missing(
            checksums, project=project)
         return Response({'missing': missing})
 
@@ -182,26 +179,166 @@ class AssociateDSymFilesEndpoint(ProjectEndpoint):
             app_id=data['appId'],
             project=project,
             data={'name': data['name']},
-            platform=DSYM_PLATFORMS[data['platform']],
+            platform=DIF_PLATFORMS[data['platform']],
         )
 
         # There can be concurrent deletes on the underlying file object
         # that the project dsym file references. This means that we can
         # get errors if we don't prefetch this when serializing. Additionally
         # performance wise it's a better idea to fetch this in one go.
-        dsym_files = ProjectDSymFile.objects.find_by_checksums(
+        difs = ProjectDebugFile.objects.find_by_checksums(
             data['checksums'], project).select_related('file')
 
-        for dsym_file in dsym_files:
+        for dif in difs:
             version_dsym_file, created = VersionDSymFile.objects.get_or_create(
-                dsym_file=dsym_file,
+                dsym_file=dif,
                 version=data['version'],
                 build=data.get('build'),
                 defaults=dict(dsym_app=dsym_app),
             )
             if created:
-                associated.append(dsym_file)
+                associated.append(dif)
 
         return Response({
             'associatedDsymFiles': serialize(associated, request.user),
         })
+
+
+def find_missing_chunks(organization, chunks):
+    """Returns a list of chunks which are missing for an org."""
+    owned = set(FileBlobOwner.objects.filter(
+        blob__checksum__in=chunks,
+        organization=organization,
+    ).values_list('blob__checksum', flat=True))
+    return list(set(chunks) - owned)
+
+
+class DifAssembleEndpoint(ProjectEndpoint):
+    permission_classes = (ProjectReleasePermission, )
+
+    def post(self, request, project):
+        """
+        Assemble one or multiple chunks (FileBlob) into debug files
+        ```````````````````````````````````````````````````````````
+
+        :auth: required
+        """
+        schema = {
+            "type": "object",
+            "patternProperties": {
+                "^[0-9a-f]{40}$": {
+                    "type": "object",
+                    "required": ["name", "chunks"],
+                    "properties": {
+                        "name": {"type": "string"},
+                        "chunks": {
+                            "type": "array",
+                            "items": {"type": "string"}
+                        }
+                    },
+                    "additionalProperties": False
+                }
+            },
+            "additionalProperties": False
+        }
+
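+        # For illustration only: a request body of the following shape
+        # validates against this schema (the checksum is a made-up example):
+        #
+        #     {
+        #         "df1bc5201ea663fbf1c2c5e201e0f7218f0f09b5": {
+        #             "name": "MyApp.dSYM",
+        #             "chunks": ["df1bc5201ea663fbf1c2c5e201e0f7218f0f09b5"]
+        #         }
+        #     }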
+        try:
+            files = json.loads(request.body)
+            jsonschema.validate(files, schema)
+        except jsonschema.ValidationError as e:
+            return Response({'error': str(e).splitlines()[0]},
+                            status=400)
+        except Exception:
+            return Response({'error': 'Invalid JSON body'},
+                            status=400)
+
+        file_response = {}
+
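+        # Local import, presumably to avoid a circular import between this
+        # endpoint and the task module.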
+        from sentry.tasks.assemble import assemble_dif
+        for checksum, file_to_assemble in six.iteritems(files):
+            name = file_to_assemble.get('name')
+            chunks = file_to_assemble.get('chunks', [])
+
+            # First, check the cached assemble status. During assembling, a
+            # ProjectDebugFile will be created and we need to prevent a race
+            # condition.
+            state, detail = get_assemble_status(project, checksum)
+            if state is not None:
+                file_response[checksum] = {
+                    'state': state,
+                    'detail': detail,
+                    'missingChunks': [],
+                }
+                continue
+
+            # Next, check if this project already owns the ProjectDebugFile.
+            # This can under rare circumstances yield more than one file
+            # which is why we use first() here instead of get().
+            dif = ProjectDebugFile.objects.filter(
+                project=project,
+                file__checksum=checksum
+            ).select_related('file').first()
+            if dif is not None:
+                file_response[checksum] = {
+                    'state': ChunkFileState.OK,
+                    'detail': None,
+                    'missingChunks': [],
+                    'dif': serialize(dif),
+                }
+                continue
+
+            # There is neither a known file nor a cached state, so we will
+            # have to create a new file. Ensure that chunks were given;
+            # if not, we assume this is a poll and report NOT_FOUND.
+            if not chunks:
+                file_response[checksum] = {
+                    'state': ChunkFileState.NOT_FOUND,
+                    'missingChunks': [],
+                }
+                continue
+
+            # Check if all requested chunks have been uploaded.
+            missing_chunks = find_missing_chunks(project.organization, chunks)
+            if missing_chunks:
+                file_response[checksum] = {
+                    'state': ChunkFileState.NOT_FOUND,
+                    'missingChunks': missing_chunks,
+                }
+                continue
+
+            # We don't have a state yet, which means we can now start an
+            # assemble job in the background.
+            set_assemble_status(project, checksum, ChunkFileState.CREATED)
+            assemble_dif.apply_async(
+                kwargs={
+                    'project_id': project.id,
+                    'name': name,
+                    'checksum': checksum,
+                    'chunks': chunks,
+                }
+            )
+
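+        # Report CREATED so the client knows to poll this endpoint with the
+        # same checksum until assembling has finished.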
+            file_response[checksum] = {
+                'state': ChunkFileState.CREATED,
+                'missingChunks': [],
+            }
+
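+        # By now every requested checksum maps to OK (file already exists),
+        # NOT_FOUND (no chunks given or some chunks missing), or CREATED
+        # (an assemble job is running in the background).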
+        return Response(file_response, status=200)