How to use the turbinia.evidence.GoogleCloudDisk class in turbinia

To help you get started, we’ve selected a few turbinia examples based on popular ways GoogleCloudDisk is used in public projects.

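GoogleCloudDisk is the Evidence class that describes a Google Compute Engine persistent disk by disk name, project, and zone. Before the project examples, here is a minimal sketch of constructing one in the same way turbiniactl does further down; every value below is a placeholder.

from turbinia import evidence

# All values below are placeholders; substitute your own disk, project, and zone.
evidence_ = evidence.GoogleCloudDisk(
    name='example-disk', disk_name='example-disk', project='my-gcp-project',
    zone='us-central1-f', mount_partition=1, source='manual')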

From google/turbinia: turbinia/jobs/jenkins.py (view on GitHub)
from turbinia.evidence import Directory
from turbinia.evidence import DockerContainer
from turbinia.evidence import RawDisk
from turbinia.evidence import GoogleCloudDisk
from turbinia.evidence import GoogleCloudDiskRawEmbedded
from turbinia.evidence import ReportText
from turbinia.jobs import interface
from turbinia.jobs import manager
from turbinia.workers.analysis.jenkins import JenkinsAnalysisTask


class JenkinsAnalysisJob(interface.TurbiniaJob):
  """Jenkins analysis job."""

  evidence_input = [
      Directory, DockerContainer, RawDisk, GoogleCloudDisk,
      GoogleCloudDiskRawEmbedded
  ]
  evidence_output = [ReportText]

  NAME = 'JenkinsAnalysisJob'

  def create_tasks(self, evidence):
    """Create task for Jenkins analysis job.

    Args:
      evidence: List of evidence objects to process

    Returns:
        A list of tasks to schedule.
    """
    tasks = [JenkinsAnalysisTask() for _ in evidence]
    return tasks

From google/turbinia: turbinia/jobs/strings.py (view on GitHub)
from turbinia.evidence import GoogleCloudDisk
from turbinia.evidence import GoogleCloudDiskRawEmbedded
from turbinia.evidence import RawDisk
from turbinia.evidence import TextFile
from turbinia.jobs import interface
from turbinia.jobs import manager
from turbinia.workers.strings import StringsAsciiTask
from turbinia.workers.strings import StringsUnicodeTask


class StringsJob(interface.TurbiniaJob):
  """Strings collection Job.

  This will generate a Unicode and ASCII string collection task for each piece
  of evidence.
  """

  # The types of evidence that this Job will process
  evidence_input = [RawDisk, GoogleCloudDisk, GoogleCloudDiskRawEmbedded]
  evidence_output = [TextFile]

  NAME = 'StringsJob'

  def create_tasks(self, evidence):
    """Create task for Strings.

    Args:
      evidence: List of evidence objects to process

    Returns:
        A list of tasks to schedule.
    """
    # Generate tasks for both types of Strings jobs
    tasks = [StringsAsciiTask() for _ in evidence]
    tasks.extend([StringsUnicodeTask() for _ in evidence])
    return tasks
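
To illustrate how a job fans evidence out into tasks, here is a small usage sketch; it assumes the StringsJob class above and a GoogleCloudDisk object named disk built as in the earlier sketch. In a deployed Turbinia instance the task manager normally calls create_tasks for you; invoking it directly like this just shows the fan-out.

# Hypothetical usage; 'disk' is a GoogleCloudDisk evidence object.
job = StringsJob()
tasks = job.create_tasks([disk])
# tasks now holds one StringsAsciiTask and one StringsUnicodeTask per evidence item.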

From google/turbinia: turbinia/jobs/sshd.py (view on GitHub)
from turbinia.evidence import Directory
from turbinia.evidence import DockerContainer
from turbinia.evidence import GoogleCloudDisk
from turbinia.evidence import GoogleCloudDiskRawEmbedded
from turbinia.evidence import ExportedFileArtifact
from turbinia.evidence import RawDisk
from turbinia.evidence import ReportText
from turbinia.jobs import interface
from turbinia.jobs import manager


class SSHDExtractionJob(interface.TurbiniaJob):
  """Filter input based on regular expression patterns."""

  # The types of evidence that this Job will process
  evidence_input = [
      Directory, DockerContainer, RawDisk, GoogleCloudDisk,
      GoogleCloudDiskRawEmbedded
  ]

  evidence_output = [ExportedFileArtifact]

  NAME = 'SSHDExtractionJob'

  def create_tasks(self, evidence):
    """Create task.

    Args:
      evidence: List of evidence objects to process

    Returns:
        A list of tasks to schedule.
    """

From google/turbinia: turbinia/jobs/tomcat.py (view on GitHub)
from turbinia.evidence import Directory
from turbinia.evidence import DockerContainer
from turbinia.evidence import GoogleCloudDisk
from turbinia.evidence import GoogleCloudDiskRawEmbedded
from turbinia.evidence import ExportedFileArtifact
from turbinia.evidence import RawDisk
from turbinia.evidence import ReportText
from turbinia.jobs import interface
from turbinia.jobs import manager


class TomcatExtractionJob(interface.TurbiniaJob):
  """Extract Apache Tomcat files for analysis."""

  # The types of evidence that this Job will process
  evidence_input = [
      Directory, DockerContainer, RawDisk, GoogleCloudDisk,
      GoogleCloudDiskRawEmbedded
  ]

  evidence_output = [ExportedFileArtifact]

  NAME = 'TomcatExtractionJob'

  def create_tasks(self, evidence):
    """Create task.

    Args:
      evidence: List of evidence objects to process

    Returns:
        A list of tasks to schedule.
    """

From google/turbinia: turbinia/turbiniactl.py (view on GitHub)
        name=args.name, source_path=source_path, recovery_key=args.recovery_key,
        password=args.password, source=args.source)
  elif args.command == 'directory':
    args.name = args.name if args.name else args.source_path
    source_path = os.path.abspath(args.source_path)
    evidence_ = evidence.Directory(
        name=args.name, source_path=source_path, source=args.source)
  elif args.command == 'compressedirectory':
    archive.ValidateTarFile(args.source_path)
    args.name = args.name if args.name else args.source_path
    source_path = os.path.abspath(args.source_path)
    evidence_ = evidence.CompressedDirectory(
        name=args.name, source_path=source_path, source=args.source)
  elif args.command == 'googleclouddisk':
    args.name = args.name if args.name else args.disk_name
    evidence_ = evidence.GoogleCloudDisk(
        name=args.name, disk_name=args.disk_name, project=args.project,
        mount_partition=args.mount_partition, zone=args.zone,
        source=args.source)
  elif args.command == 'googleclouddiskembedded':
    args.name = args.name if args.name else args.disk_name
    parent_evidence_ = evidence.GoogleCloudDisk(
        name=args.name, disk_name=args.disk_name, project=args.project,
        mount_partition=args.mount_partition, zone=args.zone,
        source=args.source)
    evidence_ = evidence.GoogleCloudDiskRawEmbedded(
        name=args.name, disk_name=args.disk_name, project=args.project,
        mount_partition=args.mount_partition, zone=args.zone,
        embedded_path=args.embedded_path,
        embedded_partition=args.embedded_mount_partition)
    evidence_.parent_evidence = parent_evidence_
  elif args.command == 'hindsight':

From log2timeline/dftimewolf: dftimewolf/lib/processors/turbinia.py (view on GitHub)
  def Process(self):
    """Process files with Turbinia."""
    log_file_path = os.path.join(self._output_path, 'turbinia.log')
    print('Turbinia log file: {0:s}'.format(log_file_path))

    if self.state.input and not self.disk_name:
      _, disk = self.state.input[0]
      self.disk_name = disk.name
      print('Using disk {0:s} from previous collector'.format(self.disk_name))

    evidence_ = evidence.GoogleCloudDisk(
        disk_name=self.disk_name, project=self.project, zone=self.turbinia_zone)
    try:
      evidence_.validate()
    except TurbiniaException as exception:
      self.state.AddError(exception, critical=True)
      return

    request = TurbiniaRequest(requester=getpass.getuser())
    request.evidence.append(evidence_)
    if self.sketch_id:
      request.recipe['sketch_id'] = self.sketch_id
    if not self.run_all_jobs:
      request.recipe['jobs_blacklist'] = ['StringsJob']

    # Get threat intelligence data from any modules that have stored some.
    # In this case, observables is a list of containers.ThreatIntelligence
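
The dftimewolf processor then hands the request to a Turbinia client for execution. Below is a hedged sketch of that submission step, assuming turbinia.client exposes a TurbiniaClient factory with a send_request() method; the client API has changed across Turbinia releases, so verify it against your installed version.

from turbinia import client as turbinia_client

# Assumed client API; check your Turbinia release before relying on it.
client = turbinia_client.TurbiniaClient()
client.send_request(request)
print('Submitted Turbinia request {0:s}'.format(request.request_id))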

From google/turbinia: turbinia/jobs/hadoop.py (view on GitHub)
from turbinia.evidence import DockerContainer
from turbinia.evidence import GoogleCloudDisk
from turbinia.evidence import GoogleCloudDiskRawEmbedded
from turbinia.evidence import RawDisk
from turbinia.evidence import ReportText
from turbinia.jobs import interface
from turbinia.jobs import manager
from turbinia.workers.hadoop import HadoopAnalysisTask


class HadoopAnalysisJob(interface.TurbiniaJob):
  """Analyzes Hadoop AppRoot files."""

  evidence_input = [
      DockerContainer, GoogleCloudDisk, GoogleCloudDiskRawEmbedded, RawDisk
  ]
  evidence_output = [ReportText]

  NAME = 'HadoopAnalysisJob'

  def create_tasks(self, evidence):
    """Create task.

    Args:
      evidence: List of evidence objects to process

    Returns:
        A list of tasks to schedule.
    """
    tasks = [HadoopAnalysisTask() for _ in evidence]
    return tasks

From google/turbinia: turbinia/evidence.py (view on GitHub)
  def __init__(self, project=None, zone=None, disk_name=None, *args, **kwargs):
    """Initialization for Google Cloud Disk."""
    self.project = project
    self.zone = zone
    self.disk_name = disk_name
    super(GoogleCloudDisk, self).__init__(*args, **kwargs)
    self.cloud_only = True
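
The constructor above records the project, zone, and disk name and flags the evidence as cloud-only. As the dftimewolf example shows, callers typically run validate() before submitting the evidence; here is a short sketch with placeholder identifiers.

from turbinia import TurbiniaException
from turbinia import evidence

# Placeholder identifiers; validate() raises TurbiniaException when a
# required attribute such as disk_name is missing or empty.
disk = evidence.GoogleCloudDisk(
    disk_name='example-disk', project='my-gcp-project', zone='us-central1-f')
try:
  disk.validate()
except TurbiniaException as exception:
  print('Invalid evidence: {0!s}'.format(exception))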