# api.py
  1. """Port of sentry.api.endpoints.chunk.ChunkUploadEndpoint"""
  2. import logging
  3. from gzip import GzipFile
  4. from io import BytesIO
  5. from django.conf import settings
  6. from django.shortcuts import aget_object_or_404
  7. from django.urls import reverse
  8. from ninja import File, Router
  9. from ninja.errors import HttpError
  10. from ninja.files import UploadedFile
  11. from apps.organizations_ext.models import Organization
  12. from glitchtip.api.authentication import AuthHttpRequest
  13. from glitchtip.api.permissions import has_permission
  14. from .models import FileBlob
  15. # Force just one blob
  16. CHUNK_UPLOAD_BLOB_SIZE = 32 * 1024 * 1024 # 32MB
  17. MAX_CHUNKS_PER_REQUEST = 1
  18. MAX_REQUEST_SIZE = CHUNK_UPLOAD_BLOB_SIZE
  19. MAX_CONCURRENCY = 1
  20. HASH_ALGORITHM = "sha1"
  21. CHUNK_UPLOAD_ACCEPT = (
  22. "debug_files", # DIF assemble
  23. "release_files", # Release files assemble
  24. "pdbs", # PDB upload and debug id override
  25. "sources", # Source artifact bundle upload
  26. )
  27. class GzipChunk(BytesIO):
  28. def __init__(self, file):
  29. data = GzipFile(fileobj=file, mode="rb").read()
  30. self.size = len(data)
  31. self.name = file.name
  32. super().__init__(data)
  33. router = Router()
  34. @router.get("organizations/{slug:organization_slug}/chunk-upload/")
  35. async def get_chunk_upload_info(request: AuthHttpRequest, organization_slug: str):
  36. """Get server settings for chunk file upload"""
  37. url = settings.GLITCHTIP_URL.geturl() + reverse(
  38. "api:get_chunk_upload_info", args=[organization_slug]
  39. )
  40. return {
  41. "url": url,
  42. "chunkSize": CHUNK_UPLOAD_BLOB_SIZE,
  43. "chunksPerRequest": MAX_CHUNKS_PER_REQUEST,
  44. "maxFileSize": 2147483648,
  45. "maxRequestSize": MAX_REQUEST_SIZE,
  46. "concurrency": MAX_CONCURRENCY,
  47. "hashAlgorithm": HASH_ALGORITHM,
  48. "compression": ["gzip"],
  49. "accept": CHUNK_UPLOAD_ACCEPT,
  50. }
  51. @router.post("organizations/{slug:organization_slug}/chunk-upload/")
  52. @has_permission(["project:write", "project:admin", "project:releases"])
  53. async def chunk_upload(
  54. request: AuthHttpRequest,
  55. organization_slug: str,
  56. file_gzip: list[UploadedFile] = File(...),
  57. ):
  58. """Upload one more more gzipped files to save"""
  59. logger = logging.getLogger("glitchtip.files")
  60. logger.info("chunkupload.start")
  61. organization = await aget_object_or_404(
  62. Organization, slug=organization_slug.lower(), users=request.auth.user_id
  63. )
  64. files = [GzipChunk(chunk) for chunk in file_gzip]
  65. if len(files) == 0:
  66. # No files uploaded is ok
  67. logger.info("chunkupload.end", extra={"status": 200})
  68. return
  69. logger.info("chunkupload.post.files", extra={"len": len(files)})
  70. # Validate file size
  71. checksums = []
  72. size = 0
  73. for chunk in files:
  74. size += chunk.size
  75. if chunk.size > CHUNK_UPLOAD_BLOB_SIZE:
  76. logger.info("chunkupload.end", extra={"status": 400})
  77. raise HttpError(400, "Chunk size too large")
  78. checksums.append(chunk.name)
  79. if size > MAX_REQUEST_SIZE:
  80. logger.info("chunkupload.end", extra={"status": 400})
  81. raise HttpError(400, "Request too large")
  82. if len(files) > MAX_CHUNKS_PER_REQUEST:
  83. logger.info("chunkupload.end", extra={"status": 400})
  84. raise HttpError(400, "Too many chunks")
  85. try:
  86. await FileBlob.from_files(
  87. zip(files, checksums), organization=organization, logger=logger
  88. )
  89. except IOError as err:
  90. logger.info("chunkupload.end", extra={"status": 400})
  91. raise HttpError(400, str(err)) from err
  92. logger.info("chunkupload.end", extra={"status": 200})