def testUnsetKeyConfig(self):
"""Test that config errors out when not all variables are set."""
self.WriteConfig('UNSETKEY = None\nSETKEY = "bar"\n')
config.CONFIGVARS = ['UNSETKEY', 'SETKEY']
self.assertRaises(config.TurbiniaConfigException, config.LoadConfig)
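# A companion sketch reusing the WriteConfig helper from the test above:
# the happy path, where every variable in CONFIGVARS is set and LoadConfig
# succeeds without raising.
def testAllKeysSetConfig(self):
  """Test that config loads when all variables are set."""
  self.WriteConfig('UNSETKEY = "foo"\nSETKEY = "bar"\n')
  config.CONFIGVARS = ['UNSETKEY', 'SETKEY']
  config.LoadConfig()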
parser_status.add_argument(
'-t', '--task_id', help='Show task for given Task ID', required=False)
parser_status.add_argument(
'-u', '--user', help='Show task for given user', required=False)
# Server
subparsers.add_parser('server', help='Run Turbinia Server')
args = parser.parse_args()
# Load the config before final logger setup so we can find the path to the
# log file.
try:
if args.config_file:
config.LoadConfig(config_file=args.config_file)
else:
config.LoadConfig()
except TurbiniaException as exception:
print(
'Could not load config file ({0!s}).\n{1:s}'.format(
exception, config.CONFIG_MSG))
sys.exit(1)
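# For reference, the file LoadConfig parses is a plain Python module of
# NAME = value assignments, as exercised by the unit test above; the values
# below are illustrative only:
#
#   LOG_FILE = '/var/log/turbinia/turbinia.log'
#   OUTPUT_DIR = '/var/tmp/turbinia/output'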
if args.log_file:
config.LOG_FILE = args.log_file
if args.output_dir:
config.OUTPUT_DIR = args.output_dir
# Run logger setup again to get the file handler, now that we have the
# logfile path from the config.
logger.setup()
if args.quiet:
log.setLevel(logging.ERROR)
# Strict type check (not isinstance) because logging.FileHandler subclasses
# logging.StreamHandler, and we need to tell the two apart.
# pylint: disable=unidiomatic-typecheck
if type(handler) == logging.FileHandler:
  need_file_handler = False
# pylint: disable=unidiomatic-typecheck
if type(handler) == logging.StreamHandler:
need_stream_handler = False
if need_file_handler:
try:
config.LoadConfig()
except TurbiniaException as exception:
print(
'Could not load config file ({0!s}).\n{1:s}'.format(
exception, config.CONFIG_MSG))
sys.exit(1)
file_handler = logging.FileHandler(config.LOG_FILE)
formatter = logging.Formatter('%(asctime)s:%(levelname)s:%(message)s')
file_handler.setFormatter(formatter)
file_handler.setLevel(logging.DEBUG)
logger.addHandler(file_handler)
console_handler = logging.StreamHandler()
formatter = logging.Formatter('[%(levelname)s] %(message)s')
console_handler.setFormatter(formatter)
if need_stream_handler:
logger.addHandler(console_handler)
# Configure the root logger to use exactly our handlers, because other
# modules like PSQ log through it and we want to see their messages when
# executing from the CLI.
root_log = logging.getLogger()
for handler in root_log.handlers:
  root_log.removeHandler(handler)
root_log.addHandler(console_handler)
if need_file_handler:
  root_log.addHandler(file_handler)
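# Quick illustration (not part of the setup code): after the rewiring
# above, records logged by third-party modules such as psq propagate to
# the root logger and come out through our console/file handlers.
logging.getLogger('psq').info('visible through the configured handlers')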
Args:
evidence (Evidence object): The evidence we will process.
result (TurbiniaTaskResult): The object to place task results into.
Returns:
TurbiniaTaskResult object.
"""
config.LoadConfig()
psort_file = os.path.join(self.output_dir, '{0:s}.csv'.format(self.id))
psort_evidence = PlasoCsvFile(source_path=psort_file)
psort_log = os.path.join(self.output_dir, '{0:s}.log'.format(self.id))
cmd = ['psort.py', '--status_view', 'none', '--logfile', psort_log]
if config.DEBUG_TASKS:
cmd.append('-d')
cmd.extend(['-w', psort_file, evidence.source_path])
result.log('Running psort as [{0:s}]'.format(' '.join(cmd)))
self.execute(
cmd, result, log_files=[psort_log], new_evidence=[psort_evidence],
close=True)
return result
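# For illustration, with DEBUG_TASKS enabled the code above builds a
# command line like the following (task id and evidence path are
# hypothetical):
cmd = ['psort.py', '--status_view', 'none', '--logfile', '4f1a.log', '-d',
       '-w', '4f1a.csv', '/evidence/registrar.plaso']
print(' '.join(cmd))
# psort.py --status_view none --logfile 4f1a.log -d -w 4f1a.csv /evidence/registrar.plaso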
def _DetachDisk(self, client):
"""Detaches the disk from the machine.
Args:
client: Google Cloud service client object.
"""
WriteToStdOut(u'Detaching disk')
operation = client.instances().detachDisk(
instance=config.INSTANCE,
project=config.PROJECT,
zone=config.ZONE,
deviceName=config.DEVICE_NAME).execute()
self._WaitForOperation(client, operation[u'name'])
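# A minimal sketch of the _WaitForOperation helper used above, polling the
# compute API's zoneOperations resource until the operation finishes. The
# body is an assumption about the implementation, not Turbinia's own code.
import time

def _WaitForOperation(self, client, operation_name):
  """Blocks until the named zone operation reports status DONE."""
  while True:
    result = client.zoneOperations().get(
        project=config.PROJECT, zone=config.ZONE,
        operation=operation_name).execute()
    if result[u'status'] == u'DONE':
      return result
    time.sleep(1)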
evidence (EvidenceCollection): All Evidence that has been generated as
part of this request.
result (TurbiniaTaskResult): The result to place task output into.
Returns:
TurbiniaTaskResult: Task execution results.
"""
# Doing a delayed import to avoid circular dependencies.
from turbinia.client import TurbiniaClient
client = TurbiniaClient()
report_file = os.path.join(
self.tmp_dir, 'final_turbinia_report_{0:s}.md'.format(self.id))
report = FinalReport(source_path=report_file)
report_data = client.format_task_status(
config.INSTANCE_ID, config.TURBINIA_PROJECT, config.TURBINIA_REGION,
request_id=evidence.request_id, full_report=True)
result.log('Writing report data to [{0:s}]'.format(report.local_path))
with open(report.local_path, 'wb') as file_handle:
file_handle.write(report_data.encode('utf-8'))
result.add_evidence(report, evidence.config)
result.close(self, True)
return result
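# Illustrative sanity check (not part of the task): the report was written
# as UTF-8 bytes, so reading it back should round-trip to the original
# report text.
with open(report.local_path, 'rb') as file_handle:
  assert file_handle.read().decode('utf-8') == report_data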
Returns:
A TurbiniaTaskResult object.
Raises:
TurbiniaException: If the evidence cannot be found.
"""
self.output_manager.setup(self)
self.tmp_dir, self.output_dir = self.output_manager.get_local_output_dirs()
if not self.result:
self.result = TurbiniaTaskResult(
input_evidence=evidence, base_output_dir=self.base_output_dir,
request_id=self.request_id, job_id=self.job_id)
self.result.setup(self)
if not self.run_local:
if evidence.copyable and not config.SHARED_FILESYSTEM:
self.output_manager.retrieve_evidence(evidence)
if evidence.source_path and not os.path.exists(evidence.source_path):
raise TurbiniaException(
'Evidence source path {0:s} does not exist'.format(
evidence.source_path))
evidence.preprocess(self.tmp_dir)
return self.result
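# A rough sketch of how a setup method like the one above is typically
# driven; the wrapper name and exact flow here are assumptions:
def run_wrapper(self, evidence):
  """Prepares output dirs and evidence via setup(), then hands off to run()."""
  result = self.setup(evidence)
  return self.run(evidence, result)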
from __future__ import unicode_literals
import json
import os
import sys
from turbinia import config
from turbinia import TurbiniaException
from turbinia.processors import docker
from turbinia.processors import mount_local
from turbinia.processors import archive
# pylint: disable=keyword-arg-before-vararg
config.LoadConfig()
if config.TASK_MANAGER.lower() == 'psq':
from turbinia.processors import google_cloud
def evidence_decode(evidence_dict):
"""Decode JSON into appropriate Evidence object.
Args:
evidence_dict: JSON serializable evidence object (i.e. a dict post JSON
decoding).
Returns:
An instantiated Evidence object (or a sub-class of it).
Raises:
TurbiniaException: If input is not a dict, does not have a type attribute,
or does not deserialize to an evidence object.
"""
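# The body below is a minimal sketch matching the docstring; resolving the
# class by attribute lookup on this module is an assumption about how
# evidence types are registered.
if not isinstance(evidence_dict, dict):
  raise TurbiniaException(
      'Unable to decode evidence, not a dict: {0!s}'.format(evidence_dict))
type_ = evidence_dict.pop('type', None)
if not type_:
  raise TurbiniaException(
      'No type attribute in evidence dict: {0!s}'.format(evidence_dict))
evidence_class = getattr(sys.modules[__name__], type_, None)
if evidence_class is None:
  raise TurbiniaException(
      'Could not deserialize to an Evidence object of type {0:s}'.format(type_))
evidence = evidence_class()
evidence.__dict__.update(evidence_dict)
return evidence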
def _AttachDisk(self, client, disk):
"""Attaches a persistent disk to the machine.
Args:
client: Google Cloud service client object.
disk: String with the cloud path to the disk.
"""
WriteToStdOut(u'Attaching disk')
operation = client.instances().attachDisk(
instance=config.INSTANCE,
project=config.PROJECT,
zone=config.ZONE,
body={u'deviceName': config.DEVICE_NAME,
u'source': disk}).execute()
self._WaitForOperation(client, operation[u'name'])
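# A minimal sketch of constructing the compute `client` these helpers
# expect, using the Google API discovery client (assumes application
# default credentials are available in the environment):
import googleapiclient.discovery

client = googleapiclient.discovery.build('compute', 'v1')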
"""
config.LoadConfig()
psq_publisher = pubsub.PublisherClient()
psq_subscriber = pubsub.SubscriberClient()
datastore_client = datastore.Client(project=config.TURBINIA_PROJECT)
try:
self.psq = psq.Queue(
psq_publisher, psq_subscriber, config.TURBINIA_PROJECT,
name=config.PSQ_TOPIC, storage=psq.DatastoreStorage(datastore_client))
except exceptions.GoogleCloudError as e:
msg = 'Error creating PSQ Queue: {0:s}'.format(str(e))
log.error(msg)
raise TurbiniaException(msg)
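# Illustrative use of the queue created above: psq.Queue.enqueue submits a
# picklable callable and returns a result handle. The callable below is a
# stand-in; a real deployment would enqueue a Turbinia task runner.
def run_task(task_data):
  """Stand-in worker function that just echoes its input."""
  return task_data

task_result = self.psq.enqueue(run_task, {'id': 'example'})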
# Deregister jobs from blacklist/whitelist.
disabled_jobs = list(config.DISABLED_JOBS) if config.DISABLED_JOBS else []
job_manager.JobsManager.DeregisterJobs(jobs_blacklist, jobs_whitelist)
if disabled_jobs:
log.info(
'Disabling jobs that were configured to be disabled in the '
'config file: {0:s}'.format(', '.join(disabled_jobs)))
job_manager.JobsManager.DeregisterJobs(jobs_blacklist=disabled_jobs)
# Check for valid dependencies/directories.
check_dependencies(config.DEPENDENCIES)
check_directory(config.MOUNT_DIR_PREFIX)
check_directory(config.OUTPUT_DIR)
check_directory(config.TMP_DIR)
log.info('Starting PSQ listener on queue {0:s}'.format(self.psq.name))
self.worker = psq.Worker(queue=self.psq)
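# The worker created above is then started with a blocking listen() call,
# which pulls and executes tasks from the PSQ queue until interrupted:
self.worker.listen()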