def get_url(self, obj, view_name, request, *args, **kwargs):
    # drop the passed-in request so the superclass renders the href as a relative URL
    # (no scheme or host)
    request = None
    return super().get_url(obj, view_name, request, *args, **kwargs)
class AsyncOperationResponseSerializer(serializers.Serializer):
    """
    Serializer for asynchronous operations.
    """

    task = RelatedField(
        required=True,
        help_text=_("The href of the task."),
        queryset=Task.objects,
        view_name="tasks-detail",
        allow_null=False,
    )
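# A usage sketch (hypothetical view code; the function name and 202 convention are assumptions,
# not taken from pulpcore): serialize the dispatched task's href and return it so clients can
# poll the tasks-detail endpoint for completion.
from rest_framework import status
from rest_framework.response import Response

def async_operation_response(task, request):
    serializer = AsyncOperationResponseSerializer(
        instance={"task": task}, context={"request": request}
    )
    return Response(serializer.data, status=status.HTTP_202_ACCEPTED)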
"""
Create a PulpExport to export pulp_exporter.repositories.

1) Spit out all Artifacts, ArtifactResource.json, and RepositoryResource.json
2) Spit out all *resource JSONs in per-repo-version directories
3) Compute and store the sha256 and filename of the resulting tar.gz/chunks

Args:
    the_export (models.PulpExport): PulpExport instance

Raises:
    ValidationError: When path is not in the ALLOWED_EXPORT_PATHS setting,
        OR path exists and is not a directory
"""
pulp_exporter = the_export.exporter
the_export.task = Task.current()
tarfile_fp = the_export.export_tarfile_path()

os.makedirs(pulp_exporter.path, exist_ok=True)
rslts = {}

if the_export.validated_chunk_size:
    # write it into chunks
    with subprocess.Popen(
        [
            "split",
            "-a",
            "4",
            "-b",
            str(the_export.validated_chunk_size),
            "-d",
            "-",
"skipped",
"running",
"completed",
"canceled",
"failed",
"group_progress_reports",
)
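# A minimal sketch of the chunking approach shown in the `split` invocation above. This is not
# the pulpcore implementation; the function name, arguments, and paths are hypothetical. It
# streams a gzipped tar archive into GNU `split` over stdin so the export is written as
# fixed-size, numerically suffixed chunks.
import subprocess
import tarfile

def write_chunked_tarfile(src_dir, chunk_prefix, chunk_size="1G"):
    """Stream src_dir as a gzipped tar into `split`, producing chunk_prefix.0000, .0001, ..."""
    with subprocess.Popen(
        ["split", "-a", "4", "-b", chunk_size, "-d", "-", chunk_prefix + "."],
        stdin=subprocess.PIPE,
    ) as split_process:
        # `split` reads the tar stream from stdin ("-") and writes numeric-suffixed chunks.
        with tarfile.open(fileobj=split_process.stdin, mode="w|gz") as tar:
            tar.add(src_dir, arcname=".")
        split_process.stdin.close()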
class TaskCancelSerializer(ModelSerializer):
    state = serializers.CharField(
        help_text=_("The desired state of the task. Only 'canceled' is accepted."),
    )

    class Meta:
        model = models.Task
        fields = ("state",)
class ContentAppStatusSerializer(ModelSerializer):
    name = serializers.CharField(help_text=_("The name of the worker."), read_only=True)
    last_heartbeat = serializers.DateTimeField(
        help_text=_("Timestamp of the last time the worker talked to the service."), read_only=True
    )

    class Meta:
        model = models.ContentAppStatus
        fields = ("name", "last_heartbeat")
class WorkerSerializer(ModelSerializer):
    pulp_href = IdentityField(view_name="workers-detail")
Relations:
    task: The task associated with this progress report. If left unset when save() is called
        it will be set to the current task_id.
"""

message = models.TextField()
code = models.CharField(max_length=36)
state = models.TextField(choices=TASK_CHOICES, default=TASK_STATES.WAITING)
total = models.IntegerField(null=True)
done = models.IntegerField(default=0)
task = models.ForeignKey(
    "Task", related_name="progress_reports", default=Task.current, on_delete=models.CASCADE
)
suffix = models.TextField(null=True)

_using_context_manager = False
_last_save_time = None
def save(self, *args, **kwargs):
    """
    Auto-set the task_id if running inside a task.

    If the task_id is already set it will not be updated. If it is unset and this is running
    inside of a task it will be auto-set prior to saving.

    Args:
        args (list): positional arguments to be passed on to the real save
        kwargs (dict): keyword arguments to be passed on to the real save
    """
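# A usage sketch (message/code values are hypothetical; assumes this runs inside a pulpcore
# task, so Task.current() resolves): the task foreign key is filled in by the field default,
# so it is not passed explicitly.
report = ProgressReport(message="Importing packages", code="import.packages", total=100)
report.save()
for _ in range(100):
    report.done += 1
report.save()  # persist the updated counter; state and suffix can be updated the same way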
def cancel(task_id):
    """
    Cancel the task that is represented by the given task_id.

    This method cancels only the task with the given task_id, not the spawned tasks. It also
    updates the task's state to 'canceled'.

    :param task_id: The ID of the task you wish to cancel
    :type task_id: basestring
    :raises MissingResource: if a task with the given task_id does not exist
    """
    try:
        task_status = Task.objects.get(pk=task_id)
    except Task.DoesNotExist:
        raise MissingResource(task=task_id)

    if task_status.state in TASK_FINAL_STATES:
        # If the task is already done, just stop
        msg = _('Task [{task_id}] already in a completed state: {state}')
        _logger.info(msg.format(task_id=task_id, state=task_status.state))
        return

    redis_conn = connection.get_redis_connection()
    job = Job(id=str(task_status.job_id), connection=redis_conn)
    if job.is_started:
        redis_conn.sadd(TASKING_CONSTANTS.KILL_KEY, job.get_id())
    job.delete()
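# A usage sketch (hypothetical caller code) for the RQ-based cancel() above: a missing task
# surfaces as MissingResource, and a task already in a final state is logged and left alone.
import uuid

placeholder_task_id = str(uuid.uuid4())  # hypothetical id; a real caller would pass Task.pk
try:
    cancel(placeholder_task_id)
except MissingResource:
    _logger.warning("Nothing to cancel: task %s does not exist.", placeholder_task_id)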
When using this field in a serializer, it serializes the related resource as a relative URL.
"""

def get_url(self, obj, view_name, request, *args, **kwargs):
    # drop the passed-in request so the superclass renders the href as a relative URL
    # (no scheme or host)
    request = None
    return super().get_url(obj, view_name, request, *args, **kwargs)
from django.contrib.postgres import fields as psql_fields
from django.db import models

from pulpcore.app import models as pulp_models


class Task(models.Model):
    """
    Generic table for handling tasks.

    :var params: Task parameters json dictionary
    :var result: Task result json dictionary
    """

    params = psql_fields.JSONField(null=True)
    result = psql_fields.JSONField(null=True)

    pulp_task = models.OneToOneField(
        pulp_models.Task, on_delete=models.CASCADE, related_name='galaxy_task'
    )

    @property
    def job_id(self):
        return self.pulp_task.job_id

    @property
    def state(self):
        return self.pulp_task.state

    @property
    def started_at(self):
        return self.pulp_task.started_at

    @property
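# A usage sketch (hypothetical query code): read tasking state through the wrapper's
# delegating properties instead of reaching into the related pulp Task directly.
task = Task.objects.select_related("pulp_task").first()
if task is not None:
    print(task.state, task.started_at)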
"state": ["exact", "in"],
"worker": ["exact", "in"],
"started_at": DATETIME_FILTER_OPTIONS,
"finished_at": DATETIME_FILTER_OPTIONS,
"parent_task": ["exact"],
"child_tasks": ["exact"],
"task_group": ["exact"],
"reserved_resources_record": ["exact"],
"created_resources": ["exact"],
}
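# A usage sketch of the filter lookups above (query values are hypothetical, and the exact
# lookups behind DATETIME_FILTER_OPTIONS are an assumption): the task list endpoint can be
# narrowed with query parameters such as:
#   GET /pulp/api/v3/tasks/?state__in=running,waiting
#   GET /pulp/api/v3/tasks/?started_at__gte=2021-01-01T00:00:00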
class TaskViewSet(
    NamedModelViewSet, mixins.RetrieveModelMixin, mixins.ListModelMixin, mixins.DestroyModelMixin
):
    queryset = Task.objects.all()
    endpoint_name = "tasks"
    filterset_class = TaskFilter
    serializer_class = TaskSerializer
    minimal_serializer_class = MinimalTaskSerializer
    filter_backends = (OrderingFilter, DjangoFilterBackend)
    ordering = "-pulp_created"

    @extend_schema(
        description="This operation cancels a task.",
        summary="Cancel a task",
        operation_id="tasks_cancel",
        responses={200: TaskSerializer, 409: TaskSerializer},
    )
    def partial_update(self, request, pk=None, partial=True):
        task = self.get_object()
        if "state" not in request.data: