Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def create_content_artifacts(self, dc):
    """
    Create ContentArtifacts to associate saved Content to saved Artifacts.

    Args:
        dc (class:`~pulpcore.plugin.stages.DeclarativeContent`): Object containing Content and
            Artifacts to relate.
    """
    # One ContentArtifact row per declarative artifact attached to the unit.
    for da in dc.d_artifacts:
        content_artifact = ContentArtifact(
            content=dc.content,
            artifact=da.artifact,
            relative_path=da.relative_path
        )
        try:
            content_artifact.save()
        except IntegrityError:
            # The row already exists (saved earlier or by a concurrent worker);
            # fetch the existing one instead of failing the whole batch.
            content_artifact = ContentArtifact.objects.get(
                content=dc.content,
                artifact=da.artifact,
                relative_path=da.relative_path
            )
        # NOTE(review): this chunk is truncated here -- the dict literal below is
        # never closed in the visible source; presumably it feeds a RemoteArtifact
        # constructor. Confirm against the full file.
        remote_artifact_data = {
            'url': da.url,
            'size': da.artifact.size,
def get(self, request, path, tag_name):
    """
    Return a response to the "GET" action.

    Resolves ``tag_name`` within the distribution's published repository
    version and serves the tagged manifest (or manifest list) artifact.
    """
    distribution = get_object_or_404(DockerDistribution, base_path=path)
    try:
        ca = ContentArtifact.objects.get(
            content__in=distribution.publication.repository_version.content,
            relative_path=tag_name)
        content = ca.content.cast()
        # A tag points at either a manifest or a manifest list; advertise the
        # matching media type so docker clients negotiate the right schema.
        if content.manifest:
            headers = {'Content-Type': content.manifest.media_type}
        else:
            headers = {'Content-Type': content.manifest_list.media_type}
    except ObjectDoesNotExist:
        # NOTE(review): the miss is swallowed, so as far as this chunk shows the
        # view falls through and implicitly returns None -- confirm the full
        # source raises a 404/PathNotResolved after this block.
        pass
    else:
        artifact = ca.artifact
        if artifact:
            return self._dispatch(artifact.file.name, headers)
        else:
            # The content unit is known but its binary has not been downloaded.
            raise ArtifactNotFound(path)
    repository_pk: repository to extend with new pkg
"""
# NOTE(review): the enclosing `def` and the head of its docstring are above this
# chunk; only the tail of the docstring and the task body are visible here.
artifact = Artifact.objects.get(pk=artifact_pk)
# export META from rpm and prepare dict as saveable format
try:
    new_pkg = _prepare_package(artifact, filename)
except OSError:
    # NOTE(review): re-raising a fresh OSError drops the original traceback
    # context; consider `raise OSError(...) from exc` in the full source.
    raise OSError('RPM file cannot be parsed for metadata.')
pkg, created = Package.objects.get_or_create(**new_pkg)
if not created:
    # Reject re-uploads of a package with identical metadata.
    raise OSError('RPM package {} already exists.'.format(pkg.filename))
# Relate the uploaded artifact to the new Package content unit.
ContentArtifact.objects.create(
    artifact=artifact,
    content=pkg,
    relative_path=filename
)
# Record the new content unit as a resource created by this task.
resource = CreatedResource(content_object=pkg)
resource.save()
if repository_pk:
    repository = RpmRepository.objects.get(pk=repository_pk)
    content_to_add = Package.objects.filter(pkgId=pkg.pkgId)
    # create new repo version with uploaded package
    with repository.new_version() as new_version:
        new_version.add_content(content_to_add)
def artifact(self):
    """
    Look up the Artifact backing this node's path in the distribution tree.

    Returns:
        The matching Artifact, or None when no ContentArtifact is associated.
    """
    match = ContentArtifact.objects.filter(
        content=self.distribution_tree,
        relative_path=self.path,
    ).first()
    if match is None:
        return None
    return match.artifact
    batch (list): List of :class:`~pulpcore.plugin.stages.DeclarativeContent`.

Returns:
    List: Of :class:`~pulpcore.plugin.models.RemoteArtifact`.
"""
# NOTE(review): the enclosing `def` is above this chunk; only the docstring
# tail and body are visible.
# Collect the remotes present in this batch so the prefetch below can restrict
# RemoteArtifact rows to just those remotes.
remotes_present = set()
for d_content in batch:
    for d_artifact in d_content.d_artifacts:
        if d_artifact.remote:
            remotes_present.add(d_artifact.remote)
# Bulk-prefetch each unit's ContentArtifacts (and their RemoteArtifacts for the
# remotes present) onto private attrs to avoid per-item queries in the loop below.
prefetch_related_objects(
    [d_c.content for d_c in batch],
    Prefetch(
        "contentartifact_set",
        queryset=ContentArtifact.objects.prefetch_related(
            Prefetch(
                "remoteartifact_set",
                queryset=RemoteArtifact.objects.filter(remote__in=remotes_present),
                to_attr="_remote_artifact_saver_ras",
            )
        ),
        to_attr="_remote_artifact_saver_cas",
    ),
)
needed_ras = []  # NOTE(review): never appended to in this chunk -- likely truncated.
for d_content in batch:
    for content_artifact in d_content.content._remote_artifact_saver_cas:
        # Find the declarative artifact matching this ContentArtifact's path;
        # the for/else fires only when no match was found.
        for d_artifact in d_content.d_artifacts:
            if d_artifact.relative_path == content_artifact.relative_path:
                break
        else:
            # NOTE(review): `declarative_artifact`, `remote_artifact_map`,
            # `content_artifact_bulk` and `remote_artifact_bulk` are not defined
            # anywhere in this chunk -- these lines appear spliced in from a
            # different version of this stage. Verify against the full source.
            remote_artifact_data = {
                'url': declarative_artifact.url,
                'size': declarative_artifact.artifact.size,
                'md5': declarative_artifact.artifact.md5,
                'sha1': declarative_artifact.artifact.sha1,
                'sha224': declarative_artifact.artifact.sha224,
                'sha256': declarative_artifact.artifact.sha256,
                'sha384': declarative_artifact.artifact.sha384,
                'sha512': declarative_artifact.artifact.sha512,
                'remote': declarative_artifact.remote,
            }
            rel_path = declarative_artifact.relative_path
            content_key = str(content_artifact.content.pk) + rel_path
            remote_artifact_map[content_key] = remote_artifact_data
# Pair each bulk-created ContentArtifact back with its remote-artifact data via
# the (content pk + relative path) key, then bulk-create the RemoteArtifacts.
for content_artifact in ContentArtifact.objects.bulk_create(content_artifact_bulk):
    rel_path = content_artifact.relative_path
    content_key = str(content_artifact.content.pk) + rel_path
    remote_artifact_data = remote_artifact_map.pop(content_key)
    new_remote_artifact = RemoteArtifact(
        content_artifact=content_artifact, **remote_artifact_data
    )
    remote_artifact_bulk.append(new_remote_artifact)
RemoteArtifact.objects.bulk_create(remote_artifact_bulk)
await self._post_save(batch)
# Forward the whole batch downstream; None signals end-of-stream.
for declarative_content in batch:
    await out_q.put(declarative_content)
await out_q.put(None)
    # Parse and import the collection tarball; process_collection validates the
    # archive and extracts its metadata.
    with artifact.file.open() as artifact_file:
        importer_result = process_collection(
            artifact_file, filename=filename, logger=user_facing_logger
        )
    importer_result["artifact_url"] = reverse("artifacts-detail", args=[artifact_pk])
    collection_version = create_collection_from_importer(importer_result)
except ImporterError as exc:
    # NOTE(review): the matching `try:` is above this chunk. Also "successfull"
    # is a typo in the log message -- it is a runtime string, so it is left
    # untouched here; fix it in a code change.
    log.info(f"Collection processing was not successfull: {exc}")
    raise
except Exception as exc:
    user_facing_logger.error(f"Collection processing was not successfull: {exc}")
    raise
# Relate the uploaded artifact to the new CollectionVersion at its canonical path.
ContentArtifact.objects.create(
    artifact=artifact,
    content=collection_version,
    relative_path=collection_version.relative_path,
)
CreatedResource.objects.create(content_object=collection_version)
if repository_pk:
    repository = Repository.objects.get(pk=repository_pk)
    content_q = CollectionVersion.objects.filter(pk=collection_version.pk)
    # Publish the new version by adding it in a fresh repository version.
    with repository.new_version() as new_version:
        new_version.add_content(content_q)
    CreatedResource.objects.create(content_object=repository)
def get(self, request, path, namespace, name, version):
    """
    Return a response to the "GET" action.

    Serializes the requested collection version and attaches a download URL
    built from the distribution's base path.
    """
    distro = get_object_or_404(AnsibleDistribution, base_path=self.kwargs["path"])
    # Prefer the pinned repository version; otherwise fall back to the latest.
    distro_content = (
        distro.repository_version.content
        if distro.repository_version
        else distro.repository.latest_version().content
    )
    collection_version = CollectionVersion.objects.get(
        collection__namespace=namespace,
        collection__name=name,
        version=version,
    )
    # 404 unless the version's artifact is actually part of the distributed content.
    get_object_or_404(
        ContentArtifact,
        content__in=distro_content,
        relative_path=collection_version.relative_path,
    )
    download_url = "{content_hostname}/{base_path}/{relative_path}".format(
        content_hostname=settings.ANSIBLE_CONTENT_HOSTNAME,
        base_path=distro.base_path,
        relative_path=collection_version.relative_path,
    )
    collection_version.path = path
    data = GalaxyCollectionVersionSerializer(collection_version).data
    data["download_url"] = download_url
    return response.Response(data)
def artifact(self):
    """
    Look up the Artifact backing this node's path in the distribution tree.

    Returns:
        The matching Artifact, or None when no ContentArtifact is associated.
    """
    match = ContentArtifact.objects.filter(
        content=self.distribution_tree,
        relative_path=self.path,
    ).first()
    if match is None:
        return None
    return match.artifact
async def get_by_digest(self, request):
    """
    Return a response to the "GET" action.

    Resolves a blob/manifest by its sha256 digest within the distribution's
    repository version and either serves the local artifact file or streams
    the content artifact to the client.

    Raises:
        PathNotResolved: if no content with that digest is in the repository.
    """
    Registry.verify_token(request, 'pull')
    path = request.match_info['path']
    digest = "sha256:{digest}".format(digest=request.match_info['digest'])
    distribution = self._match_distribution(path)
    repository_version = distribution.get_repository_version()
    log.info(digest)
    try:
        ca = ContentArtifact.objects.get(content__in=repository_version.content,
                                         relative_path=digest)
        # Fix: cast() resolves the detail model (extra queryset work), so call
        # it once and reuse the result instead of casting twice per request.
        content = ca.content.cast()
        headers = {'Content-Type': content.media_type,
                   'Docker-Content-Digest': content.digest}
    except ObjectDoesNotExist:
        raise PathNotResolved(path)
    else:
        artifact = ca.artifact
        if artifact:
            # Artifact is downloaded locally -- serve the file directly.
            return await Registry._dispatch(os.path.join(settings.MEDIA_ROOT,
                                                         artifact.file.name),
                                            headers)
        else:
            # No local file yet -- stream the content artifact on demand.
            return await self._stream_content_artifact(request, web.StreamResponse(), ca)