fix(sourcemaps): Add bundle_id injection and version support (#45417)

Riccardo Busetti, 2 years ago · commit 6c7a3a455e

+ 1 - 2
fixtures/artifact_bundle_debug_ids/manifest.json

@@ -1,6 +1,5 @@
 {
-  "org": "__org__",
-  "release": "__release__",
+  "debug_id": "67429b2f-1d9e-43bb-a626-771a1e37555c",
   "files": {
     "files/_/_/index.js": {
       "url": "~/index.js",

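For reference, a minimal sketch of the manifest shape this fixture now exercises: the top-level debug_id becomes the bundle's bundle_id, while per-file debug-id headers drive the DebugIdArtifactBundle rows. Field names are taken from the fixture and from the extraction code in src/sentry/tasks/assemble.py below; the "type" value is an assumption about the lowercase SourceFileType keys.

# A minimal debug-ids manifest, assuming the fixture layout above. The concrete
# ids are the ones used by the fixtures/tests of this change.
manifest = {
    "debug_id": "67429b2f-1d9e-43bb-a626-771a1e37555c",  # becomes ArtifactBundle.bundle_id
    "files": {
        "files/_/_/index.js": {
            "url": "~/index.js",
            "type": "minified_source",  # assumed lowercase key mapped to SourceFileType
            "headers": {"debug-id": "eb6e60f1-65ff-4f6f-adff-f1bbeded627b"},
        },
    },
}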
+ 16 - 1
src/sentry/api/endpoints/organization_artifactbundle_assemble.py

@@ -24,6 +24,9 @@ class OrganizationArtifactBundleAssembleEndpoint(OrganizationReleasesBaseEndpoint):
         schema = {
             "type": "object",
             "properties": {
+                # The version pattern was extracted from the URL definition of OrganizationReleaseAssembleEndpoint.
+                "version": {"type": "string", "pattern": "^[^/]+$"},
+                "dist": {"type": "string"},
                 "projects": {"type": "array", "items": {"type": "string"}},
                 "checksum": {"type": "string", "pattern": "^[0-9a-f]{40}$"},
                 "chunks": {
@@ -77,13 +80,25 @@ class OrganizationArtifactBundleAssembleEndpoint(OrganizationReleasesBaseEndpoint):
 
         from sentry.tasks.assemble import assemble_artifacts
 
+        version = data.get("version")
+        dist = data.get("dist")
+
+        if not version and dist:
+            return Response(
+                {"error": "You need to specify a release together with a dist"}, status=400
+            )
+
         assemble_artifacts.apply_async(
             kwargs={
                 "org_id": organization.id,
                 "project_ids": list(project_ids),
-                "version": None,
+                # We don't perform any validation of the version, since the user might bind a bundle to a specific
+                # release version without actually having created the release object itself.
+                "version": version,
+                "dist": dist,
                 "checksum": checksum,
                 "chunks": chunks,
+                "upload_as_artifact_bundle": True,
             }
         )
 
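Put differently, version and dist are both optional, but a dist on its own is rejected. A small sketch of the rule the handler above enforces (an illustrative helper, not part of the change):

def validate_version_dist(version, dist):
    # Mirrors the endpoint check above: a dist only makes sense when bound to a
    # release version, so a dist without a (non-empty) version is a 400.
    if not version and dist:
        return "You need to specify a release together with a dist"
    return None

assert validate_version_dist(None, "android") is not None  # rejected
assert validate_version_dist("", "android") is not None    # empty version is also rejected
assert validate_version_dist("1.0", "android") is None     # accepted
assert validate_version_dist(None, None) is None           # accepted: bundle without a release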

+ 1 - 1
src/sentry/api/endpoints/organization_release_assemble.py

@@ -78,10 +78,10 @@ class OrganizationReleaseAssembleEndpoint(OrganizationReleasesBaseEndpoint):
         assemble_artifacts.apply_async(
             kwargs={
                 "org_id": organization.id,
-                "project_ids": [],
                 "version": version,
                 "checksum": checksum,
                 "chunks": chunks,
+                "upload_as_artifact_bundle": False,
             }
         )
 

+ 1 - 1
src/sentry/models/releasefile.py

@@ -186,8 +186,8 @@ class ReleaseArchive:
         self._fileobj = fileobj
         self._zip_file = zipfile.ZipFile(self._fileobj)
         self.manifest = self._read_manifest()
+        self.artifact_count = len(self.manifest.get("files", {}))
         files = self.manifest.get("files", {})
-
         self._entries_by_url = {entry["url"]: (path, entry) for path, entry in files.items()}
 
     def __enter__(self):

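Since artifact_count is computed from the manifest at open time, it can be read without extracting any entries. A usage sketch, assuming the archive stores its manifest as manifest.json at the zip root:

import io
import json
import zipfile

from sentry.models.releasefile import ReleaseArchive

# Build an in-memory bundle whose manifest lists two files; artifact_count only
# depends on the manifest, so the file bodies themselves are not needed here.
buf = io.BytesIO()
with zipfile.ZipFile(buf, "w") as zf:
    zf.writestr(
        "manifest.json",
        json.dumps({"files": {"files/_/_/a.js": {"url": "~/a.js"}, "files/_/_/b.js": {"url": "~/b.js"}}}),
    )
buf.seek(0)

with ReleaseArchive(buf) as archive:
    assert archive.artifact_count == 2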
+ 104 - 73
src/sentry/tasks/assemble.py

@@ -10,7 +10,7 @@ from sentry import options
 from sentry.api.serializers import serialize
 from sentry.cache import default_cache
 from sentry.db.models.fields import uuid
-from sentry.models import Distribution, File, Organization, Release, ReleaseFile
+from sentry.models import File, Organization, Release, ReleaseFile
 from sentry.models.artifactbundle import (
     ArtifactBundle,
     DebugIdArtifactBundle,
@@ -251,15 +251,26 @@ def _normalize_debug_id(debug_id: Optional[str]) -> Optional[str]:
         return None
 
 
-def _extract_debug_ids_from_manifest(manifest: dict) -> List[Tuple[SourceFileType, str]]:
+def _extract_debug_ids_from_manifest(
+    manifest: dict,
+) -> Tuple[Optional[str], List[Tuple[SourceFileType, str]]]:
     debug_ids_with_types = []
 
+    # We also want to extract the bundle_id, also known as the bundle's debug_id. This id uniquely
+    # identifies a specific ArtifactBundle, for example in case of a future deletion.
+    #
+    # If no id is found, the ArtifactBundle must instead be associated with a release through the
+    # ReleaseArtifactBundle table.
+    bundle_id = manifest.get("debug_id")
+    if bundle_id is not None:
+        bundle_id = _normalize_debug_id(bundle_id)
+
     files = manifest.get("files", {})
     for file_path, info in files.items():
         headers = _normalize_headers(info.get("headers", {}))
-        if (debug_id := headers.get("debug-id", None)) is not None:
+        if (debug_id := headers.get("debug-id")) is not None:
             debug_id = _normalize_debug_id(debug_id)
-            file_type = info.get("type", None)
+            file_type = info.get("type")
             if (
                 debug_id is not None
                 and file_type is not None
@@ -267,37 +278,42 @@ def _extract_debug_ids_from_manifest(manifest: dict) -> List[Tuple[SourceFileType, str]]:
             ):
                 debug_ids_with_types.append((source_file_type, debug_id))
 
-    return debug_ids_with_types
+    return bundle_id, debug_ids_with_types
 
 
 def _create_artifact_bundle(
-    release: Optional[Release],
-    dist: Optional[Distribution],
+    version: Optional[str],
+    dist: Optional[str],
     org_id: int,
-    project_ids: List[int],
+    project_ids: Optional[List[int]],
     archive_file: File,
     artifact_count: int,
-) -> bool:
+):
     with ReleaseArchive(archive_file.getfile()) as archive:
-        debug_ids_with_types = _extract_debug_ids_from_manifest(archive.manifest)
+        bundle_id, debug_ids_with_types = _extract_debug_ids_from_manifest(archive.manifest)
 
-        if len(debug_ids_with_types) > 0:
+        # We want to save an artifact bundle only if we have found debug ids in the manifest or if the user specified
+        # a release for the upload.
+        if len(debug_ids_with_types) > 0 or version:
             artifact_bundle = ArtifactBundle.objects.create(
                 organization_id=org_id,
-                bundle_id=uuid.uuid4().hex,
+                # If we didn't find a bundle_id in the manifest, we generate our own.
+                bundle_id=bundle_id or uuid.uuid4().hex,
                 file=archive_file,
                 artifact_count=artifact_count,
             )
 
-            if release:
+            # If a release version is passed, we want to create the weak association between a bundle and a release.
+            if version:
                 ReleaseArtifactBundle.objects.create(
                     organization_id=org_id,
-                    release_name=release.version if release else None,
-                    dist_name=dist.name if dist else None,
+                    release_name=version,
+                    # dist_name is nullable, so no check is required.
+                    dist_name=dist,
                     artifact_bundle=artifact_bundle,
                 )
 
-            for project_id in project_ids:
+            for project_id in project_ids or ():
                 ProjectArtifactBundle.objects.create(
                     organization_id=org_id,
                     project_id=project_id,
@@ -311,14 +327,70 @@ def _create_artifact_bundle(
                     artifact_bundle=artifact_bundle,
                     source_file_type=source_file_type.value,
                 )
+        else:
+            raise AssembleArtifactsError(
+                "uploading a bundle without debug ids or release is prohibited"
+            )
 
-            return True
 
-    return False
+def handle_assemble_for_release_file(bundle, archive, organization, version):
+    manifest = archive.manifest
+
+    if manifest.get("org") != organization.slug:
+        raise AssembleArtifactsError("organization does not match uploaded bundle")
+
+    if manifest.get("release") != version:
+        raise AssembleArtifactsError("release does not match uploaded bundle")
+
+    try:
+        release = Release.objects.get(organization_id=organization.id, version=version)
+    except Release.DoesNotExist:
+        raise AssembleArtifactsError("release does not exist")
+
+    dist_name = manifest.get("dist")
+    dist = release.add_dist(dist_name) if dist_name else None
+
+    min_artifact_count = options.get("processing.release-archive-min-files")
+    saved_as_archive = False
+
+    if archive.artifact_count >= min_artifact_count:
+        try:
+            update_artifact_index(release, dist, bundle)
+            saved_as_archive = True
+        except Exception as exc:
+            logger.error("Unable to update artifact index", exc_info=exc)
+
+    if not saved_as_archive:
+        meta = {
+            "organization_id": organization.id,
+            "release_id": release.id,
+            "dist_id": dist.id if dist else dist,
+        }
+        _store_single_files(archive, meta, True)
+
+
+def handle_assemble_for_artifact_bundle(bundle, archive, organization, version, dist, project_ids):
+    # We give precedence to the request fields and fall back to the manifest's contents only if
+    # they are unset.
+    version = version or archive.manifest.get("release")
+    dist = dist or archive.manifest.get("dist")
+    _create_artifact_bundle(
+        version, dist, organization.id, project_ids, bundle, archive.artifact_count
+    )
 
 
 @instrumented_task(name="sentry.tasks.assemble.assemble_artifacts", queue="assemble")
-def assemble_artifacts(org_id, version, checksum, chunks, project_ids=None, **kwargs):
+def assemble_artifacts(
+    org_id,
+    version,
+    checksum,
+    chunks,
+    # These params have been added for supporting artifact bundles assembling.
+    project_ids=None,
+    dist=None,
+    upload_as_artifact_bundle=False,
+    **kwargs,
+):
     """
     Creates a release file or artifact bundle from an uploaded bundle given the checksums of its chunks.
     """
@@ -331,7 +403,9 @@ def assemble_artifacts(org_id, version, checksum, chunks, project_ids=None, **kwargs):
 
         set_assemble_status(AssembleTask.ARTIFACTS, org_id, checksum, ChunkFileState.ASSEMBLING)
 
-        archive_filename = f"release-artifacts-{uuid.uuid4().hex}.zip"
+        archive_name = "bundle-artifacts" if upload_as_artifact_bundle else "release-artifacts"
+        archive_filename = f"{archive_name}-{uuid.uuid4().hex}.zip"
+        file_type = "artifact.bundle" if upload_as_artifact_bundle else "release.bundle"
 
         # Assemble the chunks into a temporary file
         rv = assemble_file(
@@ -340,8 +414,7 @@ def assemble_artifacts(org_id, version, checksum, chunks, project_ids=None, **kwargs):
             archive_filename,
             checksum,
             chunks,
-            # If we have a version we are going to create a release bundle, otherwise an artifact bundle.
-            file_type="release.bundle" if version else "artifact.bundle",
+            file_type,
         )
 
         # If no file has been created, this means that the file failed to
@@ -353,64 +426,22 @@ def assemble_artifacts(org_id, version, checksum, chunks, project_ids=None, **kwargs):
         bundle, temp_file = rv
 
         try:
+            # TODO(iambriccardo): Once the new lookup PR is merged, it would be better to generalize the
+            #  archive handling class.
             archive = ReleaseArchive(temp_file)
         except Exception:
             raise AssembleArtifactsError("failed to open release manifest")
 
         with archive:
-            manifest = archive.manifest
-
-            org_slug = manifest.get("org")
-            if organization.slug != org_slug:
-                raise AssembleArtifactsError("organization does not match uploaded bundle")
-
-            release_name = manifest.get("release")
-            if version and release_name != version:
-                raise AssembleArtifactsError("release does not match uploaded bundle")
-
-            release = None
-            if version:
-                try:
-                    release = Release.objects.get(
-                        organization_id=organization.id, version=release_name
-                    )
-                except Release.DoesNotExist:
-                    raise AssembleArtifactsError("release does not exist")
-
-            dist_name = manifest.get("dist")
-            dist = None
-            if release and dist_name:
-                dist = release.add_dist(dist_name)
-
-            artifact_count = len(manifest.get("files", {}))
-            min_artifact_count = options.get("processing.release-archive-min-files")
-            saved_as_archive = False
-
-            # If we receive a version, it means that we want to create a ReleaseFile, otherwise we will create
-            # an ArtifactBundle.
-            if version and artifact_count >= min_artifact_count:
-                if release is None:
-                    raise AssembleArtifactsError("release does not exist")
-
-                try:
-                    update_artifact_index(release, dist, bundle)
-                    saved_as_archive = True
-                except Exception as exc:
-                    logger.error("Unable to update artifact index", exc_info=exc)
-            elif version is None:
-                _create_artifact_bundle(release, dist, org_id, project_ids, bundle, artifact_count)
-                saved_as_archive = True
-
-            if not saved_as_archive:
-                meta = {
-                    "organization_id": organization.id,
-                    "release_id": release.id,
-                    "dist_id": dist.id if dist else dist,
-                }
-                _store_single_files(archive, meta, True)
+            if upload_as_artifact_bundle:
+                handle_assemble_for_artifact_bundle(
+                    bundle, archive, organization, version, dist, project_ids
+                )
+            else:
+                handle_assemble_for_release_file(bundle, archive, organization, version)
 
             # Count files extracted, to compare them to release files endpoint
-            metrics.incr("tasks.assemble.extracted_files", amount=artifact_count)
+            metrics.incr("tasks.assemble.extracted_files", amount=archive.artifact_count)
     except AssembleArtifactsError as e:
         set_assemble_status(
             AssembleTask.ARTIFACTS, org_id, checksum, ChunkFileState.ERROR, detail=str(e)

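The net effect of this refactor: the code path is now chosen by the explicit upload_as_artifact_bundle flag instead of being inferred from version. A compact, illustrative summary of the dispatch above (not the actual implementation):

def assemble_path(upload_as_artifact_bundle):
    if upload_as_artifact_bundle:
        # Artifact bundle path: version/dist are optional; when present they only
        # create a weak ReleaseArtifactBundle association, and the Release row
        # does not have to exist.
        return "handle_assemble_for_artifact_bundle"
    # Release file path: the manifest's org/release must match the request and the
    # Release must already exist, otherwise AssembleArtifactsError is raised.
    return "handle_assemble_for_release_file"

assert assemble_path(True) == "handle_assemble_for_artifact_bundle"
assert assemble_path(False) == "handle_assemble_for_release_file"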
+ 6 - 4
src/sentry/testutils/factories.py

@@ -234,11 +234,13 @@ DEFAULT_EVENT_DATA = {
 }
 
 
-def _patch_artifact_manifest(path, org, release, project=None, extra_files=None):
+def _patch_artifact_manifest(path, org=None, release=None, project=None, extra_files=None):
     with open(path, "rb") as fp:
         manifest = json.load(fp)
-    manifest["org"] = org
-    manifest["release"] = release
+    if org:
+        manifest["org"] = org
+    if release:
+        manifest["release"] = release
     if project:
         manifest["project"] = project
     for path in extra_files or {}:
@@ -501,7 +503,7 @@ class Factories:
     @staticmethod
     @exempt_from_silo_limits()
     def create_artifact_bundle(
-        org, release, project=None, extra_files=None, fixture_path="artifact_bundle"
+        org=None, release=None, project=None, extra_files=None, fixture_path="artifact_bundle"
     ):
         import zipfile
 
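With org and release now optional, a test can build a fixture bundle that is not bound to any organization or release, which the debug-ids tests rely on. A hedged usage sketch (argument values are illustrative):

# Omitting org/release leaves the fixture manifest untouched, producing a bundle
# identified only by its debug ids.
debug_ids_only = Factories.create_artifact_bundle()

# Passing them still patches the manifest, as the release-file tests require.
release_bound = Factories.create_artifact_bundle(org="my-org", release="1.0")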

+ 0 - 4
src/sentry/testutils/fixtures.py

@@ -193,10 +193,6 @@ class Fixtures:
         return Factories.create_release_file(release_id, file, name, dist_id)
 
     def create_artifact_bundle(self, org=None, release=None, *args, **kwargs):
-        if org is None:
-            org = self.organization.slug
-        if release is None:
-            release = self.release.version
         return Factories.create_artifact_bundle(org, release, *args, **kwargs)
 
     def create_release_archive(self, org=None, release=None, *args, **kwargs):

+ 129 - 7
tests/sentry/api/endpoints/test_organization_artifactbundle_assemble.py

@@ -64,6 +64,44 @@ class OrganizationArtifactBundleAssembleTest(APITestCase):
         )
         assert response.status_code == 400, response.content
 
+        response = self.client.post(
+            self.url,
+            data={
+                "checksum": checksum,
+                "chunks": [],
+                "projects": [self.project.id],
+                "version": "release/1",
+                "dist": "android",
+            },
+            HTTP_AUTHORIZATION=f"Bearer {self.token.token}",
+        )
+        assert response.status_code == 400, response.content
+
+        response = self.client.post(
+            self.url,
+            data={
+                "checksum": checksum,
+                "chunks": [],
+                "projects": [self.project.id],
+                "version": "",
+                "dist": "android",
+            },
+            HTTP_AUTHORIZATION=f"Bearer {self.token.token}",
+        )
+        assert response.status_code == 400, response.content
+
+        response = self.client.post(
+            self.url,
+            data={
+                "checksum": checksum,
+                "chunks": [],
+                "projects": [self.project.id],
+                "dist": "android",
+            },
+            HTTP_AUTHORIZATION=f"Bearer {self.token.token}",
+        )
+        assert response.status_code == 400, response.content
+
         response = self.client.post(
             self.url,
             data={"checksum": checksum, "chunks": [], "projects": [self.project.slug]},
@@ -73,7 +111,9 @@ class OrganizationArtifactBundleAssembleTest(APITestCase):
         assert response.data["state"] == ChunkFileState.NOT_FOUND
 
     def test_assemble_with_invalid_projects(self):
-        bundle_file = self.create_artifact_bundle()
+        bundle_file = self.create_artifact_bundle(
+            org=self.organization.slug, release=self.release.version
+        )
         total_checksum = sha1(bundle_file).hexdigest()
 
         blob1 = FileBlob.from_file(ContentFile(bundle_file))
@@ -96,13 +136,16 @@ class OrganizationArtifactBundleAssembleTest(APITestCase):
         assert response.data["error"] == "One or more projects are invalid"
 
     @patch("sentry.tasks.assemble.assemble_artifacts")
-    def test_assemble(self, mock_assemble_artifacts):
-        bundle_file = self.create_artifact_bundle()
+    def test_assemble_without_version_and_dist(self, mock_assemble_artifacts):
+        bundle_file = self.create_artifact_bundle(
+            org=self.organization.slug, release=self.release.version
+        )
         total_checksum = sha1(bundle_file).hexdigest()
 
         blob1 = FileBlob.from_file(ContentFile(bundle_file))
         FileBlobOwner.objects.get_or_create(organization_id=self.organization.id, blob=blob1)
 
+        # We test the endpoint without the release version.
         response = self.client.post(
             self.url,
             data={
@@ -112,7 +155,6 @@ class OrganizationArtifactBundleAssembleTest(APITestCase):
             },
             HTTP_AUTHORIZATION=f"Bearer {self.token.token}",
         )
-
         assert response.status_code == 200, response.content
         assert response.data["state"] == ChunkFileState.CREATED
         assert set(response.data["missingChunks"]) == set()
@@ -122,22 +164,102 @@ class OrganizationArtifactBundleAssembleTest(APITestCase):
                 "org_id": self.organization.id,
                 "project_ids": [self.project.id],
                 "version": None,
+                "dist": None,
+                "chunks": [blob1.checksum],
+                "checksum": total_checksum,
+                "upload_as_artifact_bundle": True,
+            }
+        )
+
+    @patch("sentry.tasks.assemble.assemble_artifacts")
+    def test_assemble_with_version_and_no_dist(self, mock_assemble_artifacts):
+        bundle_file = self.create_artifact_bundle(
+            org=self.organization.slug, release=self.release.version
+        )
+        total_checksum = sha1(bundle_file).hexdigest()
+
+        blob1 = FileBlob.from_file(ContentFile(bundle_file))
+        FileBlobOwner.objects.get_or_create(organization_id=self.organization.id, blob=blob1)
+
+        # We test the endpoint with a release version but without a dist.
+        response = self.client.post(
+            self.url,
+            data={
+                "checksum": total_checksum,
+                "chunks": [blob1.checksum],
+                "projects": [self.project.slug],
+                "version": self.release.version,
+            },
+            HTTP_AUTHORIZATION=f"Bearer {self.token.token}",
+        )
+        assert response.status_code == 200, response.content
+        assert response.data["state"] == ChunkFileState.CREATED
+        assert set(response.data["missingChunks"]) == set()
+
+        mock_assemble_artifacts.apply_async.assert_called_once_with(
+            kwargs={
+                "org_id": self.organization.id,
+                "project_ids": [self.project.id],
+                "version": self.release.version,
+                "dist": None,
                 "chunks": [blob1.checksum],
                 "checksum": total_checksum,
+                "upload_as_artifact_bundle": True,
+            }
+        )
+
+    @patch("sentry.tasks.assemble.assemble_artifacts")
+    def test_assemble_with_version_and_dist(self, mock_assemble_artifacts):
+        dist = "android"
+        bundle_file = self.create_artifact_bundle(
+            org=self.organization.slug, release=self.release.version
+        )
+        total_checksum = sha1(bundle_file).hexdigest()
+
+        blob1 = FileBlob.from_file(ContentFile(bundle_file))
+        FileBlobOwner.objects.get_or_create(organization_id=self.organization.id, blob=blob1)
+
+        # We test the endpoint with both a release version and a dist.
+        response = self.client.post(
+            self.url,
+            data={
+                "checksum": total_checksum,
+                "chunks": [blob1.checksum],
+                "projects": [self.project.slug],
+                "version": self.release.version,
+                "dist": dist,
+            },
+            HTTP_AUTHORIZATION=f"Bearer {self.token.token}",
+        )
+        assert response.status_code == 200, response.content
+        assert response.data["state"] == ChunkFileState.CREATED
+        assert set(response.data["missingChunks"]) == set()
+
+        mock_assemble_artifacts.apply_async.assert_called_once_with(
+            kwargs={
+                "org_id": self.organization.id,
+                "project_ids": [self.project.id],
+                "version": self.release.version,
+                "dist": dist,
+                "chunks": [blob1.checksum],
+                "checksum": total_checksum,
+                "upload_as_artifact_bundle": True,
             }
         )
 
     def test_assemble_response(self):
-        bundle_file = self.create_artifact_bundle()
+        bundle_file = self.create_artifact_bundle(
+            org=self.organization.slug, release=self.release.version
+        )
         total_checksum = sha1(bundle_file).hexdigest()
         blob1 = FileBlob.from_file(ContentFile(bundle_file))
 
         assemble_artifacts(
             org_id=self.organization.id,
-            project_ids=[],
             version=self.release.version,
             checksum=total_checksum,
             chunks=[blob1.checksum],
+            upload_as_artifact_bundle=False,
         )
 
         response = self.client.post(
@@ -160,10 +282,10 @@ class OrganizationArtifactBundleAssembleTest(APITestCase):
 
         assemble_artifacts(
             org_id=self.organization.id,
-            project_ids=[],
             version=self.release.version,
             checksum=total_checksum,
             chunks=[blob1.checksum],
+            upload_as_artifact_bundle=False,
         )
 
         response = self.client.post(

+ 9 - 5
tests/sentry/api/endpoints/test_organization_release_assemble.py

@@ -53,7 +53,9 @@ class OrganizationReleaseAssembleTest(APITestCase):
 
     @patch("sentry.tasks.assemble.assemble_artifacts")
     def test_assemble(self, mock_assemble_artifacts):
-        bundle_file = self.create_artifact_bundle()
+        bundle_file = self.create_artifact_bundle(
+            org=self.organization.slug, release=self.release.version
+        )
         total_checksum = sha1(bundle_file).hexdigest()
 
         blob1 = FileBlob.from_file(ContentFile(bundle_file))
@@ -72,24 +74,26 @@ class OrganizationReleaseAssembleTest(APITestCase):
         mock_assemble_artifacts.apply_async.assert_called_once_with(
             kwargs={
                 "org_id": self.organization.id,
-                "project_ids": [],
                 "version": self.release.version,
                 "chunks": [blob1.checksum],
                 "checksum": total_checksum,
+                "upload_as_artifact_bundle": False,
             }
         )
 
     def test_assemble_response(self):
-        bundle_file = self.create_artifact_bundle()
+        bundle_file = self.create_artifact_bundle(
+            org=self.organization.slug, release=self.release.version
+        )
         total_checksum = sha1(bundle_file).hexdigest()
         blob1 = FileBlob.from_file(ContentFile(bundle_file))
 
         assemble_artifacts(
             org_id=self.organization.id,
-            project_ids=[],
             version=self.release.version,
             checksum=total_checksum,
             chunks=[blob1.checksum],
+            upload_as_artifact_bundle=False,
         )
 
         response = self.client.post(
@@ -108,10 +112,10 @@ class OrganizationReleaseAssembleTest(APITestCase):
 
         assemble_artifacts(
             org_id=self.organization.id,
-            project_ids=[],
             version=self.release.version,
             checksum=total_checksum,
             chunks=[blob1.checksum],
+            upload_as_artifact_bundle=False,
         )
 
         response = self.client.post(

+ 68 - 41
tests/sentry/tasks/test_assemble.py

@@ -7,6 +7,7 @@ from django.core.files.base import ContentFile
 
 from sentry.models import FileBlob, FileBlobOwner, ReleaseFile
 from sentry.models.artifactbundle import (
+    ArtifactBundle,
     DebugIdArtifactBundle,
     ProjectArtifactBundle,
     ReleaseArtifactBundle,
@@ -207,44 +208,69 @@ class AssembleArtifactsTest(BaseAssembleTest):
         expected_source_file_types = [SourceFileType.MINIFIED_SOURCE, SourceFileType.SOURCE_MAP]
         expected_debug_ids = ["eb6e60f1-65ff-4f6f-adff-f1bbeded627b"]
 
-        assemble_artifacts(
-            org_id=self.organization.id,
-            project_ids=[self.project.id],
-            version=None,
-            checksum=total_checksum,
-            chunks=[blob1.checksum],
-        )
-
-        assert self.release.count_artifacts() == 0
+        for version, dist, count in [
+            (None, None, 0),
+            ("1.0", None, 1),
+            (None, "android", 0),
+            ("1.0", "android", 1),
+        ]:
+            assemble_artifacts(
+                org_id=self.organization.id,
+                project_ids=[self.project.id],
+                version=version,
+                dist=dist,
+                checksum=total_checksum,
+                chunks=[blob1.checksum],
+                upload_as_artifact_bundle=True,
+            )
 
-        status, details = get_assemble_status(
-            AssembleTask.ARTIFACTS, self.organization.id, total_checksum
-        )
-        assert status == ChunkFileState.OK
-        assert details is None
+            assert self.release.count_artifacts() == 0
 
-        for debug_id in expected_debug_ids:
-            debug_id_artifact_bundles = DebugIdArtifactBundle.objects.filter(
-                organization_id=self.organization.id, debug_id=debug_id
-            )
-            assert len(debug_id_artifact_bundles) == 2
-            assert debug_id_artifact_bundles[0].artifact_bundle.file.size == len(bundle_file)
-            # We check also if the source file types are equal.
-            for index, entry in enumerate(debug_id_artifact_bundles):
-                assert entry.source_file_type == expected_source_file_types[index].value
-
-            release_artifact_bundle = ReleaseArtifactBundle.objects.filter(
-                organization_id=self.organization.id
+            status, details = get_assemble_status(
+                AssembleTask.ARTIFACTS, self.organization.id, total_checksum
             )
-            assert len(release_artifact_bundle) == 0
+            assert status == ChunkFileState.OK
+            assert details is None
 
-            project_artifact_bundles = ProjectArtifactBundle.objects.filter(
-                project_id=self.project.id
-            )
-            assert len(project_artifact_bundles) == 1
+            for debug_id in expected_debug_ids:
+                debug_id_artifact_bundles = DebugIdArtifactBundle.objects.filter(
+                    organization_id=self.organization.id, debug_id=debug_id
+                )
+                assert len(debug_id_artifact_bundles) == 2
+                assert debug_id_artifact_bundles[0].artifact_bundle.file.size == len(bundle_file)
+                # We check if the bundle to which each debug id entry is connected has the correct bundle_id.
+                for entry in debug_id_artifact_bundles:
+                    assert (
+                        str(entry.artifact_bundle.bundle_id)
+                        == "67429b2f-1d9e-43bb-a626-771a1e37555c"
+                    )
+                # We also check that the source file types match.
+                for index, entry in enumerate(debug_id_artifact_bundles):
+                    assert entry.source_file_type == expected_source_file_types[index].value
+
+                release_artifact_bundle = ReleaseArtifactBundle.objects.filter(
+                    organization_id=self.organization.id
+                )
+                assert len(release_artifact_bundle) == count
+                if count == 1:
+                    assert release_artifact_bundle[0].release_name == version
+                    assert release_artifact_bundle[0].dist_name == dist
+
+                project_artifact_bundles = ProjectArtifactBundle.objects.filter(
+                    project_id=self.project.id
+                )
+                assert len(project_artifact_bundles) == 1
+
+            # We delete the newly created data from all the tables.
+            ArtifactBundle.objects.all().delete()
+            DebugIdArtifactBundle.objects.all().delete()
+            ReleaseArtifactBundle.objects.all().delete()
+            ProjectArtifactBundle.objects.all().delete()
 
     def test_artifacts_without_debug_ids(self):
-        bundle_file = self.create_artifact_bundle()
+        bundle_file = self.create_artifact_bundle(
+            org=self.organization.slug, release=self.release.version
+        )
         blob1 = FileBlob.from_file(ContentFile(bundle_file))
         total_checksum = sha1(bundle_file).hexdigest()
 
@@ -254,17 +280,16 @@ class AssembleArtifactsTest(BaseAssembleTest):
                     "processing.release-archive-min-files": min_files,
                 }
             ):
-
                 ReleaseFile.objects.filter(release_id=self.release.id).delete()
 
                 assert self.release.count_artifacts() == 0
 
                 assemble_artifacts(
                     org_id=self.organization.id,
-                    project_ids=[],
                     version=self.release.version,
                     checksum=total_checksum,
                     chunks=[blob1.checksum],
+                    upload_as_artifact_bundle=False,
                 )
 
                 assert self.release.count_artifacts() == 2
@@ -295,16 +320,16 @@ class AssembleArtifactsTest(BaseAssembleTest):
                     assert release_file.file.headers == {"Sourcemap": "index.js.map"}
 
     def test_artifacts_invalid_org(self):
-        bundle_file = self.create_artifact_bundle(org="invalid")
+        bundle_file = self.create_artifact_bundle(org="invalid", release=self.release.version)
         blob1 = FileBlob.from_file(ContentFile(bundle_file))
         total_checksum = sha1(bundle_file).hexdigest()
 
         assemble_artifacts(
             org_id=self.organization.id,
-            project_ids=[],
             version=self.release.version,
             checksum=total_checksum,
             chunks=[blob1.checksum],
+            upload_as_artifact_bundle=False,
         )
 
         status, details = get_assemble_status(
@@ -313,16 +338,16 @@ class AssembleArtifactsTest(BaseAssembleTest):
         assert status == ChunkFileState.ERROR
 
     def test_artifacts_invalid_release(self):
-        bundle_file = self.create_artifact_bundle(release="invalid")
+        bundle_file = self.create_artifact_bundle(org=self.organization.slug, release="invalid")
         blob1 = FileBlob.from_file(ContentFile(bundle_file))
         total_checksum = sha1(bundle_file).hexdigest()
 
         assemble_artifacts(
             org_id=self.organization.id,
-            project_ids=[],
             version=self.release.version,
             checksum=total_checksum,
             chunks=[blob1.checksum],
+            upload_as_artifact_bundle=False,
         )
 
         status, details = get_assemble_status(
@@ -337,10 +362,10 @@ class AssembleArtifactsTest(BaseAssembleTest):
 
         assemble_artifacts(
             org_id=self.organization.id,
-            project_ids=[],
             version=self.release.version,
             checksum=total_checksum,
             chunks=[blob1.checksum],
+            upload_as_artifact_bundle=False,
         )
 
         status, details = get_assemble_status(
@@ -350,7 +375,9 @@ class AssembleArtifactsTest(BaseAssembleTest):
 
     @patch("sentry.tasks.assemble.update_artifact_index", side_effect=RuntimeError("foo"))
     def test_failing_update(self, _):
-        bundle_file = self.create_artifact_bundle()
+        bundle_file = self.create_artifact_bundle(
+            org=self.organization.slug, release=self.release.version
+        )
         blob1 = FileBlob.from_file(ContentFile(bundle_file))
         total_checksum = sha1(bundle_file).hexdigest()
 
@@ -362,10 +389,10 @@ class AssembleArtifactsTest(BaseAssembleTest):
         ):
             assemble_artifacts(
                 org_id=self.organization.id,
-                project_ids=[],
                 version=self.release.version,
                 checksum=total_checksum,
                 chunks=[blob1.checksum],
+                upload_as_artifact_bundle=False,
             )
 
             # Status is still OK: