Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# --- fragment: tail of ArtifactSerializer.validate(); the enclosing def is not in view ---
# If the client supplied a checksum for this algorithm, it must match the
# digest computed from the uploaded file.
if algorithm in data and digest != data[algorithm]:
raise serializers.ValidationError(_("The %s checksum did not match.")
% algorithm)
else:
# No client-supplied value: record the computed digest instead.
data[algorithm] = digest
# For the configured "unique" algorithms, reject digests that already
# belong to an existing Artifact row.
if algorithm in UNIQUE_ALGORITHMS:
validator = UniqueValidator(models.Artifact.objects.all(),
message=_("{0} checksum must be "
"unique.").format(algorithm))
validator.field_name = algorithm
validator.instance = None
# Raises ValidationError if an Artifact with this digest exists.
validator(digest)
return data
class Meta:
# Serializer configuration: the base ModelSerializer fields plus the
# artifact file, its size, and every supported digest field.
model = models.Artifact
fields = base.ModelSerializer.Meta.fields + ('file', 'size', 'md5', 'sha1', 'sha224',
'sha256', 'sha384', 'sha512')
def _iter(self):
    """
    Yield the flattened pending artifacts, batch by batch.

    At least one artifact is yielded for each content; content with no
    un-downloaded artifacts is represented by a NopPendingArtifact.
    Matching Artifact rows are fetched from the DB one batch at a time to
    limit the memory footprint and attached via ``stored_model``.

    Yields:
        pulpcore.plugin.changeset.PendingArtifact: The flattened pending
            artifacts.
    """
    for artifact_batch in self._batch_artifacts():
        query = self._batch_q(artifact_batch)
        # Index every fetched row under each of its reliable digests:
        # (field, digest) -> Artifact.
        cache = {}
        for row in Artifact.objects.filter(query):
            for algorithm in Artifact.RELIABLE_DIGEST_FIELDS:
                cache[(algorithm, getattr(row, algorithm))] = row
        for pending in artifact_batch:
            self._set_stored_model(cache, pending)
            yield pending
def _set_stored_model(self, fetched, artifact):
    """
    Set the stored_model on the artifact with the model matched in the cache.

    The artifact is matched by digest, by order of algorithm strength.
    The cache key is (tuple): (field, digest).

    Args:
        fetched (dict): Artifacts fetched from the DB.
            Keyed with (field, digest).
        artifact (pulpcore.plugin.changeset.PendingArtifact): A pending artifact.
    """
    # Fix: the method is invoked as self._set_stored_model(fetched, artifact)
    # (see _iter), so it must accept `self`; without it the arguments shift
    # (self -> fetched, fetched -> artifact).
    if isinstance(artifact, NopPendingArtifact):
        # Placeholder artifact with no real model to match.
        return
    for field in Artifact.RELIABLE_DIGEST_FIELDS:
        digest = getattr(artifact.model, field)
        if not digest:
            # This artifact does not carry a digest for this algorithm.
            continue
        model = fetched.get((field, digest))
        if model:
            artifact.stored_model = model
            # The strongest matching digest wins; stop at the first hit.
            break
def __init__(self, download=None):
    """
    Args:
        download (pulpcore.download.Download): An (optional) download object
            for which metrics are collected.
    """
    # One incremental hasher per supported digest algorithm.
    self.algorithms = dict(
        (algorithm, hashlib.new(algorithm)) for algorithm in Artifact.DIGEST_FIELDS
    )
    # Total number of bytes observed so far.
    self.size = 0
    if download:
        self.attach(download)
def _iter(self):
    """
    Iterate the content and flatten the artifacts.

    Ensure that at least one artifact is yielded for each content; a
    NopPendingArtifact stands in when the content has no un-downloaded
    artifacts.  Matching Artifact rows are fetched from the DB in batches
    to limit the memory footprint and attached as ``stored_model``.

    Yields:
        pulpcore.plugin.changeset.PendingArtifact: The flattened pending
            artifacts.
    """
    for batch in self._batch_artifacts():
        # Index each fetched row under every reliable digest it carries;
        # later rows overwrite earlier ones exactly as the loop form would.
        fetched = {
            (field, getattr(row, field)): row
            for row in Artifact.objects.filter(self._batch_q(batch))
            for field in Artifact.RELIABLE_DIGEST_FIELDS
        }
        for artifact in batch:
            self._set_stored_model(fetched, artifact)
            yield artifact
# --- fragment: tail of ArtifactSerializer.validate() (black-formatted copy);
# the raise statement's opening line is outside this view ---
_("The %s checksum did not match.") % algorithm
)
else:
# No client-supplied value: record the computed digest instead.
data[algorithm] = digest
# Enforce digest uniqueness for the configured "unique" algorithms.
if algorithm in UNIQUE_ALGORITHMS:
validator = UniqueValidator(
models.Artifact.objects.all(),
message=_("{0} checksum must be " "unique.").format(algorithm),
)
validator.field_name = algorithm
validator.instance = None
# Raises ValidationError if an Artifact with this digest exists.
validator(digest)
return data
class Meta:
# Serializer configuration: the base ModelSerializer fields plus the
# artifact file, its size, and every supported digest field.
model = models.Artifact
fields = base.ModelSerializer.Meta.fields + (
"file",
"size",
"md5",
"sha1",
"sha224",
"sha256",
"sha384",
"sha512",
)
class SigningServiceSerializer(base.ModelSerializer):
"""
A serializer for the model declaring a signing service.

NOTE(review): only the class header and docstring are visible in this
view; any field declarations are defined elsewhere.
"""
def general_create_from_temp_file(app_label, serializer_name, *args, **kwargs):
"""
Create a model instance from contents stored in a temporary Artifact.

A caller should always pass the dictionary "data", as a keyword argument,
containing the href to the temporary Artifact. Otherwise, the function does
nothing.

This function calls the function general_create() to create a model
instance. Data passed to that function already contains a serialized
artifact converted to PulpTemporaryUploadedFile that will be deleted
afterwards.
"""
data = kwargs.pop("data", None)
if data and "artifact" in data:
# Resolve the temporary Artifact from the href supplied in "data".
named_model_view_set = get_plugin_config(app_label).viewsets_module.NamedModelViewSet
artifact = named_model_view_set.get_resource(data.pop("artifact"), Artifact)
# Hand the artifact's file to the serializer as an uploaded temp file.
data["file"] = PulpTemporaryUploadedFile.from_file(artifact.file)
general_create(app_label, serializer_name, data=data, *args, **kwargs)
# The temporary Artifact is no longer needed once the instance exists.
artifact.delete()
# --- fragment: orphan-cleanup task body; the enclosing def is not in view ---
# PublishedMetadata content is excluded from the orphan set and is never
# deleted here.
content = content.exclude(pulp_type="core.{}".format(PublishedMetadata.TYPE))
progress_bar = ProgressReport(
message="Clean up orphan Content",
total=content.count(),
code="clean-up.content",
done=0,
state="running",
)
progress_bar.save()
# Content rows are deleted in bulk; no per-row cleanup is performed.
content.delete()
progress_bar.done = progress_bar.total
progress_bar.state = "completed"
progress_bar.save()
# Artifact cleanup
# An Artifact is orphaned when no ContentArtifact references it.
artifacts = Artifact.objects.exclude(
pk__in=ContentArtifact.objects.values_list("artifact_id", flat=True)
)
progress_bar = ProgressReport(
message="Clean up orphan Artifacts",
total=artifacts.count(),
code="clean-up.content",  # NOTE(review): same code as the Content report above — looks like copy-paste; confirm whether a distinct code (e.g. "clean-up.artifacts") was intended
done=0,
state="running",
)
progress_bar.save()
for artifact in artifacts:
# we need to manually call delete() because it cleans up the file on the filesystem
artifact.delete()
progress_bar.increment()
progress_bar.state = "completed"
# NOTE(review): no save() follows this state change within this view —
# confirm the final state is persisted downstream.
the created artifact and the updated content_artifact are saved to the DB.
Plugin-writers may override this method if their content module requires
additional/different steps for saving.

Args:
download_result (:class:`~pulpcore.plugin.download.DownloadResult`): The
DownloadResult for the downloaded artifact.
content_artifact (:class:`~pulpcore.plugin.models.ContentArtifact`): The
ContentArtifact to associate the Artifact with.

Returns:
The associated :class:`~pulpcore.plugin.models.Artifact`.
"""
artifact = Artifact(
**download_result.artifact_attributes,
file=download_result.path
)
with transaction.atomic():
try:
# Inner atomic block acts as a savepoint: a failed INSERT rolls back
# only this savepoint, so the outer transaction stays usable and we
# can fall back to the Artifact that already exists.
with transaction.atomic():
artifact.save()
except IntegrityError:
# An Artifact with these attributes already exists (e.g. saved
# concurrently); fetch and reuse it instead.
artifact = Artifact.objects.get(artifact.q())
content_artifact.artifact = artifact
content_artifact.save()
return artifact
def artifact_attributes(self):
    """
    A property that returns a dictionary with size and digest information.

    The keys of this dictionary correspond with
    :class:`~pulpcore.plugin.models.Artifact` fields.
    """
    # "size" first, then one hex digest per supported algorithm, in the
    # same order as Artifact.DIGEST_FIELDS.
    digests = {
        algorithm: self._digests[algorithm].hexdigest()
        for algorithm in Artifact.DIGEST_FIELDS
    }
    return {"size": self._size, **digests}