How to use the pulpcore.plugin.models.Artifact class in pulpcore

To help you get started, we’ve selected a few pulpcore examples based on popular ways Artifact is used in public projects.

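Artifact is the Django model pulpcore uses to represent a file in storage together with its size and checksums. As a minimal sketch of the most common pattern in the examples below (the helper name save_uploaded_file and its argument are hypothetical, added only for illustration), a plugin validates a file, saves it as an Artifact, and reuses the existing row if identical content has already been stored:

from django.db import IntegrityError

from pulpcore.plugin.models import Artifact


def save_uploaded_file(uploaded_file):
    # Hypothetical helper: compute digests and size for the file-like object
    # and build an unsaved Artifact instance.
    artifact = Artifact.init_and_validate(uploaded_file)
    try:
        artifact.save()
    except IntegrityError:
        # Identical content is already stored; reuse the existing Artifact.
        artifact = Artifact.objects.get(sha256=artifact.sha256)
    return artifact

The saved artifact can then be attached to plugin content types, as the one_shot_upload and init_content_data examples below show.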

github pulp / pulp / plugin / pulpcore / plugin / stages.py (view on GitHub)
                    if outstanding_downloads > max_downloads:
                        saturated = True
                        incoming_content = incoming_content[i + 1:]  # remove handled content
                        break
                else:
                    incoming_content = []

                if pending:
                    done, pending = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
                    for gathered_downloaders in done:
                        results = gathered_downloaders.result()
                        for download_result in results[:-1]:
                            content = results[-1]
                            for declarative_artifact in content.d_artifacts:
                                if declarative_artifact.artifact._state.adding:
                                    new_artifact = Artifact(
                                        **download_result.artifact_attributes,
                                        file=download_result.path
                                    )
                                    declarative_artifact.artifact = new_artifact
                            pb.done = pb.done + len(content.d_artifacts)
                            outstanding_downloads = outstanding_downloads - len(content.d_artifacts)
                            await out_q.put(content)
                else:
                    if shutdown:
                        break

                if outstanding_downloads < max_downloads:
                    saturated = False
        await out_q.put(None)
    return artifact_downloader

github pulp / pulpcore / plugin / pulpcore / plugin / changeset / model.py (view on GitHub)
def downloaded(self, downloader):
        """
        The artifact (file) has been downloaded.
        A new _stored_model is created (and assigned) for the downloaded file.

        Args:
            downloader (BaseDownloader): The downloader that successfully completed.
        """
        self._stored_model = Artifact(file=downloader.path, **downloader.artifact_attributes)

github pulp / pulp_deb / pulp_deb / app / tasks / upload.py (view on GitHub)
def one_shot_upload(artifact_pk, repository_pk=None):
    """
    Create a Package from an uploaded file and attach it to a Repository if provided.

    Args:
        artifact_pk (str): Create a Package from this artifact.

    Optional:
        repository_pk (str): Insert the Package into this Repository.

    """
    artifact = Artifact.objects.get(pk=artifact_pk)
    repository = Repository.objects.get(pk=repository_pk) if repository_pk else None
    log.debug(
        _("Uploading deb package: artifact={artifact}, repository={repo}").format(
            artifact=artifact, repo=repository
        )
    )

    package_paragraph = debfile.DebFile(fileobj=artifact.file).debcontrol()
    package_dict = Package.from822(package_paragraph)
    package_dict["sha256"] = artifact.sha256
    package = Package(**package_dict)
    package.relative_path = package.filename()
    try:
        package = Package.objects.get(sha256=artifact.sha256, relative_path=package.relative_path)
    except ObjectDoesNotExist:
        with transaction.atomic():

github pulp / pulpcore / pulpcore / plugin / viewsets / content.py (view on GitHub)
def init_content_data(self, serializer, request):
        """Initialize a temporary Artifact."""
        shared_resources = []

        task_payload = {k: v for k, v in request.data.items()}
        file_content = task_payload.pop("file", None)
        if file_content:
            # in the upload code path, make sure the artifact exists and the 'file'
            # parameter is replaced by an Artifact; this Artifact will be deleted
            # afterwards because it only serves as temporary storage for the file contents
            artifact = Artifact.init_and_validate(file_content)
            try:
                artifact.save()
            except IntegrityError:
                # if artifact already exists, let's use it
                artifact = Artifact.objects.get(sha256=artifact.sha256)

            task_payload["artifact"] = ArtifactSerializer(
                artifact, context={"request": request}
            ).data["pulp_href"]
            shared_resources.append(artifact)

        return ContentUploadData(shared_resources, task_payload)

github pulp / pulp_ansible / pulp_ansible / app / tasks / collections.py (view on GitHub)
        expected_namespace (str): Optional. The namespace is validated against the namespace
            specified in the Collection's metadata. If it does not match, an ImporterError is
            raised.
        expected_name (str): Optional. The name is validated against the name specified in the
            Collection's metadata. If it does not match, an ImporterError is raised.
        expected_version (str): Optional. The version is validated against the version specified in
            the Collection's metadata. If it does not match, an ImporterError is raised.

    Raises:
        ImporterError: If the `expected_namespace`, `expected_name`, or `expected_version` do not
            match the metadata in the tarball.

    """
    CollectionImport.objects.get_or_create(task_id=get_current_job().id)

    artifact = Artifact.objects.get(pk=artifact_pk)
    filename = CollectionFilename(expected_namespace, expected_name, expected_version)
    log.info(f"Processing collection from {artifact.file.name}")
    user_facing_logger = logging.getLogger("pulp_ansible.app.tasks.collection.import_collection")

    with _artifact_guard(artifact):
        try:
            with artifact.file.open() as artifact_file:
                importer_result = process_collection(
                    artifact_file, filename=filename, logger=user_facing_logger
                )

                importer_result["artifact_url"] = reverse("artifacts-detail", args=[artifact_pk])
                collection_version = create_collection_from_importer(importer_result)

        except ImporterError as exc:
            log.info(f"Collection processing was not successfull: {exc}")

github pulp / pulp_docker / pulp_docker / app / tasks / sync_stages.py (view on GitHub)
def create_blob(self, man_dc, blob_data):
        """
        Create blob.

        Args:
            man_dc (pulpcore.plugin.stages.DeclarativeContent): dc for a ImageManifest
            blob_data (dict): Data about a blob

        """
        digest = blob_data.get('digest') or blob_data.get('blobSum')
        blob_artifact = Artifact(sha256=digest[len("sha256:"):])
        blob = Blob(
            digest=digest,
            media_type=blob_data.get('mediaType', MEDIA_TYPE.REGULAR_BLOB),
        )
        relative_url = '/v2/{name}/blobs/{digest}'.format(
            name=self.remote.namespaced_upstream_name,
            digest=digest,
        )
        blob_url = urljoin(self.remote.url, relative_url)
        da = DeclarativeArtifact(
            artifact=blob_artifact,
            url=blob_url,
            relative_path=digest,
            remote=self.remote,
            extra_data={'headers': V2_ACCEPT_HEADERS},
            deferred_download=self.deferred_download

github pulp / pulp_docker / pulp_docker / app / tasks / synchronizing.py (view on GitHub)
                            digest_value = getattr(declarative_artifact.artifact, digest_name)
                            if digest_value and digest_value == getattr(artifact, digest_name):
                                declarative_artifact.artifact = artifact
                                break

            artifacts_to_save = []

            for declarative_content in batch:
                for declarative_artifact in declarative_content.d_artifacts:
                    if declarative_artifact.artifact.pk is None:
                        declarative_artifact.artifact.file = str(
                            declarative_artifact.artifact.file)
                        artifacts_to_save.append(declarative_artifact.artifact)

            if artifacts_to_save:
                Artifact.objects.bulk_create(artifacts_to_save)

            for declarative_content in batch:
                await out_q.put(declarative_content)
        await out_q.put(None)

github pulp / pulp_deb / pulp_deb / app / tasks / synchronizing.py (view on GitHub)
def _to_d_artifact(self, relative_path, data=None):
        artifact = Artifact(**_get_checksums(data or {}))
        url_path = os.path.join(self.parsed_url.path, relative_path)
        return DeclarativeFailsafeArtifact(
            artifact,
            urlunparse(self.parsed_url._replace(path=url_path)),
            relative_path,
            self.remote,
            deferred_download=False,
        )
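
The synchronization examples follow a second pattern: an unsaved Artifact is built from whatever checksums are already known, wrapped in a DeclarativeArtifact together with the remote URL and relative path, and handed to the stages API, which downloads and saves the file later (or skips the download entirely when deferred_download is set).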