api.py

  1. """Port of sentry.api.endpoints.chunk.ChunkUploadEndpoint"""
  2. import logging
  3. from gzip import GzipFile
  4. from io import BytesIO
  5. from django.conf import settings
  6. from django.shortcuts import aget_object_or_404
  7. from django.urls import reverse
  8. from ninja import File, Router
  9. from ninja.errors import HttpError
  10. from ninja.files import UploadedFile
  11. from apps.organizations_ext.models import Organization
  12. from glitchtip.api.authentication import AuthHttpRequest
  13. from glitchtip.api.permissions import has_permission
  14. from .models import FileBlob

# Force just one blob
CHUNK_UPLOAD_BLOB_SIZE = 32 * 1024 * 1024  # 32MB
MAX_CHUNKS_PER_REQUEST = 1
MAX_REQUEST_SIZE = CHUNK_UPLOAD_BLOB_SIZE
MAX_CONCURRENCY = 1
HASH_ALGORITHM = "sha1"

CHUNK_UPLOAD_ACCEPT = (
    "debug_files",  # DIF assemble
    "release_files",  # Release files assemble
    "pdbs",  # PDB upload and debug id override
    "sources",  # Source artifact bundle upload
    "artifact_bundles",  # Artifact bundles contain debug ids to link source to sourcemaps
)


class GzipChunk(BytesIO):
    """An uploaded chunk, decompressed into memory, keeping its size and name."""

    def __init__(self, file):
        data = GzipFile(fileobj=file, mode="rb").read()
        self.size = len(data)
        self.name = file.name
        super().__init__(data)
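

# Illustrative client-side sketch (not part of this module; variable names are
# hypothetical): a client gzips each chunk and names the multipart part by the
# chunk's checksum, matching HASH_ALGORITHM above. GzipChunk then recovers the
# raw bytes and carries the checksum in `name`.
#
#     import gzip
#     import hashlib
#
#     raw_chunk = b"..."  # at most CHUNK_UPLOAD_BLOB_SIZE bytes
#     checksum = hashlib.sha1(raw_chunk).hexdigest()
#     compressed = gzip.compress(raw_chunk)
#     # POST `compressed` as a "file_gzip" part with filename `checksum`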

router = Router()


@router.get("organizations/{slug:organization_slug}/chunk-upload/")
async def get_chunk_upload_info(request: AuthHttpRequest, organization_slug: str):
    """Get server settings for chunk file upload"""
    url = settings.GLITCHTIP_URL.geturl() + reverse(
        "api:get_chunk_upload_info", args=[organization_slug]
    )
    return {
        "url": url,
        "chunkSize": CHUNK_UPLOAD_BLOB_SIZE,
        "chunksPerRequest": MAX_CHUNKS_PER_REQUEST,
        "maxFileSize": 2147483648,
        "maxRequestSize": MAX_REQUEST_SIZE,
        "concurrency": MAX_CONCURRENCY,
        "hashAlgorithm": HASH_ALGORITHM,
        "compression": ["gzip"],
        "accept": CHUNK_UPLOAD_ACCEPT,
    }
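

# Example response body with the constants above filled in (the host and
# organization slug are hypothetical):
#
#     {
#         "url": "https://glitchtip.example.com/api/0/organizations/acme/chunk-upload/",
#         "chunkSize": 33554432,
#         "chunksPerRequest": 1,
#         "maxFileSize": 2147483648,
#         "maxRequestSize": 33554432,
#         "concurrency": 1,
#         "hashAlgorithm": "sha1",
#         "compression": ["gzip"],
#         "accept": ["debug_files", "release_files", "pdbs", "sources", "artifact_bundles"]
#     }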


@router.post("organizations/{slug:organization_slug}/chunk-upload/")
@has_permission(["project:write", "project:admin", "project:releases"])
async def chunk_upload(
    request: AuthHttpRequest,
    organization_slug: str,
    file_gzip: list[UploadedFile] = File(...),
):
    """Upload one or more gzipped files to save"""
    logger = logging.getLogger("glitchtip.files")
    logger.info("chunkupload.start")
    organization = await aget_object_or_404(
        Organization, slug=organization_slug.lower(), users=request.auth.user_id
    )
    files = [GzipChunk(chunk) for chunk in file_gzip]

    if len(files) == 0:
        # No files uploaded is ok
        logger.info("chunkupload.end", extra={"status": 200})
        return

    logger.info("chunkupload.post.files", extra={"len": len(files)})

    # Validate per-chunk and total size; chunk filenames carry the checksums
    checksums = []
    size = 0
    for chunk in files:
        size += chunk.size
        if chunk.size > CHUNK_UPLOAD_BLOB_SIZE:
            logger.info("chunkupload.end", extra={"status": 400})
            raise HttpError(400, "Chunk size too large")
        checksums.append(chunk.name)

    if size > MAX_REQUEST_SIZE:
        logger.info("chunkupload.end", extra={"status": 400})
        raise HttpError(400, "Request too large")

    if len(files) > MAX_CHUNKS_PER_REQUEST:
        logger.info("chunkupload.end", extra={"status": 400})
        raise HttpError(400, "Too many chunks")

    try:
        await FileBlob.from_files(
            zip(files, checksums), organization=organization, logger=logger
        )
    except IOError as err:
        logger.info("chunkupload.end", extra={"status": 400})
        raise HttpError(400, str(err)) from err

    logger.info("chunkupload.end", extra={"status": 200})
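

# Minimal end-to-end usage sketch (assumptions: the `requests` library, and a
# hypothetical base URL, token, and file name; this flow is normally driven by
# a tool such as sentry-cli). One chunk per request, per MAX_CHUNKS_PER_REQUEST.
#
#     import gzip
#     import hashlib
#     import requests
#
#     session = requests.Session()
#     session.headers["Authorization"] = "Bearer <token>"
#     info = session.get(
#         "https://glitchtip.example.com/api/0/organizations/acme/chunk-upload/"
#     ).json()
#     data = open("my_artifact.bin", "rb").read()
#     for offset in range(0, len(data), info["chunkSize"]):
#         chunk = data[offset : offset + info["chunkSize"]]
#         name = hashlib.sha1(chunk).hexdigest()
#         session.post(
#             info["url"],
#             files={"file_gzip": (name, gzip.compress(chunk))},
#         )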