if not isinstance(task_result.evidence, list):
log.warning(
'Task {0:s} from {1:s} did not return evidence list'.format(
task_result.task_name, task_result.worker_name))
task_result.evidence = []
job = self.get_job(task_result.job_id)
if not job:
log.warning(
'Received task results for unknown Job from Task ID {0:s}'.format(
task_result.task_id))
# Reprocess new evidence and save instance for later consumption by finalize
# tasks.
for evidence_ in task_result.evidence:
if isinstance(evidence_, evidence.Evidence):
log.info(
'Task {0:s} from {1:s} returned Evidence {2:s}'.format(
task_result.task_name, task_result.worker_name, evidence_.name))
self.add_evidence(evidence_)
if job:
job.evidence.add_evidence(evidence_)
else:
        log.error(
            'Task {0:s} from {1:s} returned non-Evidence output type '
            '{2!s}'.format(
                task_result.task_name, task_result.worker_name,
                type(evidence_)))
return job
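# Illustrative sketch (not part of the original source): the block above
# expects task_result.evidence to be a list of Evidence subclass instances;
# anything else is logged and dropped. A worker task would typically satisfy
# that contract along these lines (the result object and version string are
# hypothetical; PlasoFile is defined below):
#
#   result.evidence = []
#   result.evidence.append(PlasoFile(plaso_version='20200430'))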
class PlasoFile(Evidence):
"""Plaso output file evidence.
Attributes:
plaso_version: The version of plaso that processed this file.
"""
def __init__(self, plaso_version=None, *args, **kwargs):
"""Initialization for Plaso File evidence."""
self.plaso_version = plaso_version
super(PlasoFile, self).__init__(copyable=True, *args, **kwargs)
self.save_metadata = True
class PlasoCsvFile(Evidence):
"""Psort output file evidence. """
def __init__(self, plaso_version=None, *args, **kwargs):
"""Initialization for Plaso File evidence."""
self.plaso_version = plaso_version
super(PlasoCsvFile, self).__init__(copyable=True, *args, **kwargs)
self.save_metadata = False
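# Example usage (a minimal sketch, not from the original source; it assumes
# these classes are importable from turbinia.evidence as in the Turbinia
# project, and the version string is illustrative):
from turbinia.evidence import PlasoFile, PlasoCsvFile

plaso_storage = PlasoFile(plaso_version='20200430')
plaso_csv = PlasoCsvFile(plaso_version='20200430')
# Plaso storage files keep their metadata; the psort CSV output does not.
print(plaso_storage.save_metadata, plaso_csv.save_metadata)  # True False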
# TODO(aarontp): Find a way to integrate this into TurbiniaTaskResult instead.
class ReportText(Evidence):
"""Text data for general reporting."""
def __init__(self, text_data=None, *args, **kwargs):
self.text_data = text_data
super(ReportText, self).__init__(copyable=True, *args, **kwargs)
class RawDisk(Evidence):
"""Evidence object for Disk based evidence.
Attributes:
device_path (str): Path to a relevant 'raw' data source (ie: a block
device or a raw disk image).
mount_partition: The mount partition for this disk (if any).
size: The size of the disk in bytes.
"""
def __init__(self, mount_partition=1, size=None, *args, **kwargs):
"""Initialization for raw disk evidence object."""
if mount_partition < 1:
raise TurbiniaException(
'Partition numbers start at 1, but was given {0:d}'.format(
mount_partition))
    self.mount_partition = mount_partition
    self.size = size
    super(RawDisk, self).__init__(*args, **kwargs)
    self.save_metadata = True
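# Example of the partition guard above (a minimal sketch, not from the original
# source; it assumes RawDisk and TurbiniaException are importable from the
# turbinia package as in the Turbinia project):
from turbinia import TurbiniaException
from turbinia.evidence import RawDisk

try:
  RawDisk(mount_partition=0)
except TurbiniaException as exception:
  print('Rejected: {0!s}'.format(exception))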
class TextFile(Evidence):
"""Text data."""
def __init__(self, *args, **kwargs):
super(TextFile, self).__init__(copyable=True, *args, **kwargs)
class FilteredTextFile(TextFile):
"""Filtered text data."""
pass
class ExportedFileArtifact(Evidence):
"""Exported file artifact."""
REQUIRED_ATTRIBUTES = ['artifact_name']
def __init__(self, artifact_name=None, *args, **kwargs):
"""Initializes an exported file artifact."""
super(ExportedFileArtifact, self).__init__(copyable=True, *args, **kwargs)
self.artifact_name = artifact_name
class VolatilityReport(TextFile):
"""Volatility output file data."""
pass
raise TurbiniaException(
'Unable to find raw disk image {0:s} in GoogleCloudDisk'.format(
rawdisk_path))
self.device_path, partition_paths = mount_local.PreprocessLosetup(
rawdisk_path)
self.mount_path = mount_local.PreprocessMountDisk(
partition_paths, self.mount_partition)
self.local_path = self.device_path
def _postprocess(self):
mount_local.PostprocessUnmountPath(self.mount_path)
mount_local.PostprocessDeleteLosetup(self.device_path)
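# Usage sketch (commented out, not from the original source, because it needs a
# real raw image and loop devices): the pre/post-processors above are meant to
# be symmetric - losetup plus mount on the way in, unmount plus losetup
# teardown on the way out.
#
#   evidence_._preprocess()    # sets device_path, mount_path and local_path
#   # ...run tasks against evidence_.local_path...
#   evidence_._postprocess()   # unmounts and detaches the loop device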
class FinalReport(ReportText):
"""Report format for the final complete Turbinia request report."""
def __init__(self, *args, **kwargs):
super(FinalReport, self).__init__(*args, **kwargs)
self.save_metadata = True
class Directory(Evidence):
"""Filesystem directory evidence."""
pass
class CompressedDirectory(Evidence):
"""CompressedDirectory based evidence.
Attributes:
compressed_directory: The path to the compressed directory.
uncompressed_directory: The path to the uncompressed directory.
"""
def __init__(
self, compressed_directory=None, uncompressed_directory=None, *args,
**kwargs):
"""Initialization for CompressedDirectory evidence object."""
super(CompressedDirectory, self).__init__(*args, **kwargs)
self.compressed_directory = compressed_directory
self.uncompressed_directory = uncompressed_directory
self.copyable = True
  def validate(self):
    """Validates that required evidence attributes are set.

    This is called by the worker, prior to the pre/post-processors running.

    Raises:
      TurbiniaException: If validation fails
    """
for attribute in self.REQUIRED_ATTRIBUTES:
attribute_value = getattr(self, attribute, None)
if not attribute_value:
message = (
'Evidence validation failed: Required attribute {0:s} for class '
'{1:s} is not set. Please check original request.'.format(
attribute, self.name))
raise TurbiniaException(message)
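# Example of a failed validation (a minimal sketch, not from the original
# source; it assumes ChromiumProfile and TurbiniaException are importable from
# the turbinia package as in the Turbinia project; output_format is
# deliberately left unset):
from turbinia import TurbiniaException
from turbinia.evidence import ChromiumProfile

profile = ChromiumProfile(browser_type='Chrome')
try:
  profile.validate()
except TurbiniaException as exception:
  print('Validation failed: {0!s}'.format(exception))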
class EvidenceCollection(Evidence):
"""A Collection of Evidence objects.
Attributes:
collection(list): The underlying Evidence objects
"""
def __init__(self, collection=None, *args, **kwargs):
"""Initialization for Evidence Collection object."""
super(EvidenceCollection, self).__init__(*args, **kwargs)
self.collection = collection if collection else []
  def add_evidence(self, evidence):
    """Adds evidence to the collection.

    Args:
      evidence (Evidence): The evidence to add.
    """
    self.collection.append(evidence)
def serialize(self):
"""Return JSON serializable object."""
serialized_evidence = super(EvidenceCollection, self).serialize()
serialized_evidence['collection'] = [e.serialize() for e in self.collection]
return serialized_evidence
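# Example usage (a minimal sketch, not from the original source; it assumes the
# classes are importable from turbinia.evidence as in the Turbinia project):
from turbinia.evidence import EvidenceCollection, ReportText

collection = EvidenceCollection(
    collection=[ReportText(text_data='analysis summary')])
serialized = collection.serialize()
print(serialized['collection'])  # one serialized entry per Evidence object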
"""Exported file artifact."""
REQUIRED_ATTRIBUTES = ['artifact_name']
def __init__(self, artifact_name=None, *args, **kwargs):
"""Initializes an exported file artifact."""
super(ExportedFileArtifact, self).__init__(copyable=True, *args, **kwargs)
self.artifact_name = artifact_name
class VolatilityReport(TextFile):
"""Volatility output file data."""
pass
class RawMemory(Evidence):
"""Evidence object for Memory based evidence.
Attributes:
profile (string): Volatility profile used for the analysis
    module_list (list): Modules used for the analysis
"""
REQUIRED_ATTRIBUTES = ['module_list', 'profile']
def __init__(self, module_list=None, profile=None, *args, **kwargs):
"""Initialization for raw memory evidence object."""
super(RawMemory, self).__init__(*args, **kwargs)
self.profile = profile
self.module_list = module_list
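# Example usage (a minimal sketch, not from the original source; it assumes
# RawMemory is importable from turbinia.evidence, and the profile and module
# names are only illustrative):
from turbinia.evidence import RawMemory

memory = RawMemory(profile='Win10x64_19041', module_list=['pslist', 'netscan'])
memory.validate()  # passes: both required attributes are set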
    # Uncompress the tar file and point local_path at the resulting directory.
    self.uncompressed_directory = archive.UncompressTarFile(
        self.local_path, tmp_dir)
    self.local_path = self.uncompressed_directory
def compress(self):
""" Compresses a file or directory."""
# Compress a given directory and return the compressed path.
self.compressed_directory = archive.CompressDirectory(self.local_path)
self.local_path = self.compressed_directory
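# Usage sketch (commented out, not from the original source, because it needs a
# real directory on disk; it assumes CompressedDirectory is importable from
# turbinia.evidence and the path is hypothetical):
#
#   evidence_ = CompressedDirectory()
#   evidence_.local_path = '/cases/exported_files'
#   evidence_.compress()                  # sets compressed_directory
#   print(evidence_.compressed_directory)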
class BulkExtractorOutput(CompressedDirectory):
"""Bulk Extractor based evidence."""
pass
class ChromiumProfile(Evidence):
"""Chromium based browser profile evidence.
Attributes:
browser_type: The type of browser.
Supported options are Chrome (default) and Brave.
    output_format: Output format (default is sqlite, other options are xlsx and jsonl)
"""
REQUIRED_ATTRIBUTES = ['browser_type', 'output_format']
def __init__(self, browser_type=None, output_format=None, *args, **kwargs):
"""Initialization for chromium profile evidence object."""
super(ChromiumProfile, self).__init__(copyable=True, *args, **kwargs)
self.browser_type = browser_type
self.output_format = output_format
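# Example usage (a minimal sketch, not from the original source; it assumes
# ChromiumProfile is importable from turbinia.evidence as in the Turbinia
# project; both required attributes are set, so validation would pass):
from turbinia.evidence import ChromiumProfile

profile = ChromiumProfile(browser_type='Chrome', output_format='sqlite')
print(profile.browser_type, profile.output_format)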