if logger.handlers:
  for handler in logger.handlers:
    # Want to do strict type-checking here because isinstance will include
    # subclasses and so won't distinguish between StreamHandlers and
    # FileHandlers.
    # pylint: disable=unidiomatic-typecheck
    if type(handler) == logging.FileHandler:
      need_file_handler = False

    # pylint: disable=unidiomatic-typecheck
    if type(handler) == logging.StreamHandler:
      need_stream_handler = False

if need_file_handler:
  try:
    config.LoadConfig()
  except TurbiniaException as exception:
    print(
        'Could not load config file ({0!s}).\n{1:s}'.format(
            exception, config.CONFIG_MSG))
    sys.exit(1)

  file_handler = logging.FileHandler(config.LOG_FILE)
  formatter = logging.Formatter('%(asctime)s:%(levelname)s:%(message)s')
  file_handler.setFormatter(formatter)
  file_handler.setLevel(logging.DEBUG)
  logger.addHandler(file_handler)

console_handler = logging.StreamHandler()
formatter = logging.Formatter('[%(levelname)s] %(message)s')
console_handler.setFormatter(formatter)
if need_stream_handler:
  logger.addHandler(console_handler)
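A minimal, self-contained sketch of the deduplication idea above, assuming the logic lives in a setup() helper; the function name and log path are illustrative stand-ins, with log_file in place of config.LOG_FILE:

import logging

def setup(log_file='/tmp/turbinia.log'):
  # Illustrative wrapper around the snippet above.
  logger = logging.getLogger('turbinia')
  need_file_handler = True
  need_stream_handler = True
  for handler in logger.handlers:
    # Strict type check: FileHandler subclasses StreamHandler, so
    # isinstance() could not tell the two apart.
    # pylint: disable=unidiomatic-typecheck
    if type(handler) == logging.FileHandler:
      need_file_handler = False
    if type(handler) == logging.StreamHandler:
      need_stream_handler = False
  if need_file_handler:
    file_handler = logging.FileHandler(log_file)
    file_handler.setFormatter(
        logging.Formatter('%(asctime)s:%(levelname)s:%(message)s'))
    logger.addHandler(file_handler)
  if need_stream_handler:
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(
        logging.Formatter('[%(levelname)s] %(message)s'))
    logger.addHandler(console_handler)

# Calling setup() repeatedly does not stack duplicate handlers:
setup()
setup()
assert len(logging.getLogger('turbinia').handlers) == 2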
def PreprocessAttachDisk(disk_name):
  """Attaches a Google Cloud Disk to this instance.

  Args:
    disk_name(str): The name of the Cloud Disk to attach.

  Returns:
    (str, list(str)): a tuple consisting of the path to the 'disk' block device
        and a list of paths to partition block devices. For example:
        (
         '/dev/disk/by-id/google-disk0',
         ['/dev/disk/by-id/google-disk0-part1',
          '/dev/disk/by-id/google-disk0-part2']
        )
  """
  path = '/dev/disk/by-id/google-{0:s}'.format(disk_name)
  if IsBlockDevice(path):
    log.info('Disk {0:s} already attached!'.format(disk_name))
    return (path, glob.glob('{0:s}-part*'.format(path)))

  config.LoadConfig()
  instance_name = GetLocalInstanceName()
  project = GoogleCloudProject(
      project_id=config.TURBINIA_PROJECT, default_zone=config.TURBINIA_ZONE)
  instance = project.GetInstance(instance_name, zone=config.TURBINIA_ZONE)
  disk = instance.GetDisk(disk_name)
  log.info(
      'Attaching disk {0:s} to instance {1:s}'.format(disk_name, instance_name))
  instance.AttachDisk(disk)

  # Make sure we have a proper block device; the device node can take a few
  # seconds to appear after AttachDisk() returns.
  for _ in range(RETRY_MAX):
    if IsBlockDevice(path):
      log.info('Block device {0:s} successfully attached'.format(path))
      break
    if os.path.exists(path):
      log.info('Path {0:s} exists but is not yet a block device'.format(path))
    time.sleep(1)

  return (path, glob.glob('{0:s}-part*'.format(path)))
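A short usage sketch for the attach routine above (the disk name is hypothetical; this assumes the code runs on a GCE instance with the Turbinia config pointing at the right project and zone):

device_path, partition_paths = PreprocessAttachDisk('evidence-disk-1')
log.info(
    'Attached {0:s} with {1:d} partition(s)'.format(
        device_path, len(partition_paths)))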
import codecs
import json
import logging
from datetime import datetime
from datetime import timedelta
import six
from turbinia import config
from turbinia.config import DATETIME_FORMAT
from turbinia import TurbiniaException
from turbinia.workers import TurbiniaTask
from turbinia.workers import TurbiniaTaskResult
config.LoadConfig()
if config.STATE_MANAGER.lower() == 'datastore':
  from google.cloud import datastore
  from google.cloud import exceptions
elif config.STATE_MANAGER.lower() == 'redis':
  import redis
else:
  msg = 'State Manager type "{0:s}" not implemented'.format(
      config.STATE_MANAGER)
  raise TurbiniaException(msg)
MAX_DATASTORE_STRLEN = 1500
log = logging.getLogger('turbinia')
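MAX_DATASTORE_STRLEN reflects Cloud Datastore's 1500-byte limit on indexed string properties; a sketch of the truncation this implies when persisting task attributes (the entity and attribute names are illustrative, not from this snippet):

status = task_status or ''  # task_status: illustrative string from a task
entity['status'] = status[:MAX_DATASTORE_STRLEN]  # stay under the indexed-property limit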
def get_state_manager():
  """Return state manager object based on config.

  Returns:
    Initialized state manager object for the configured backend.
  """
def __init__(self):
  config.LoadConfig()
  try:
    self.client = datastore.Client(project=config.TURBINIA_PROJECT)
  except EnvironmentError as e:
    message = (
        'Could not create Datastore client: {0!s}\n'
        'Have you run $ gcloud auth application-default login?'.format(e))
    raise TurbiniaException(message)
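A hedged usage sketch for the factory and whichever manager it returns (write_new_task is assumed from the state manager interface, not shown in this snippet):

state_manager = get_state_manager()
# `task` would be a TurbiniaTask created elsewhere; the concrete backend
# (Datastore or Redis) stays hidden behind the factory.
state_manager.write_new_task(task)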
def __init__(self):
  self.celery = None
  self.kombu = None
  self.celery_runner = None
  config.LoadConfig()
  super(CeleryTaskManager, self).__init__()
def get_output_writers(task):
  """Get a list of output writers.

  Args:
    task: A TurbiniaTask object

  Returns:
    A list of OutputWriter objects.
  """
  epoch = str(int(time.time()))
  unique_dir = '{0:s}-{1:s}-{2:s}'.format(epoch, str(task.id), task.name)
  writers = [
      LocalOutputWriter(
          base_output_dir=task.base_output_dir, unique_dir=unique_dir)
  ]
  local_output_dir = writers[0].local_output_dir
  config.LoadConfig()
  if config.GCS_OUTPUT_PATH:
    writer = GCSOutputWriter(
        unique_dir=unique_dir, gcs_path=config.GCS_OUTPUT_PATH,
        local_output_dir=local_output_dir)
    writers.append(writer)
  return writers
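A usage sketch for the writer factory above (copy_to is assumed from the writer interface rather than shown in this snippet):

writers = get_output_writers(task)
for writer in writers:
  # Mirror a result file to each destination (local disk, and GCS when
  # GCS_OUTPUT_PATH is configured).
  writer.copy_to(result_file_path)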
# TODO: Consider the case when multiple disks are provided by the previous
# module or by the CLI.
if project is None or turbinia_zone is None:
  self.state.AddError(
      'project or turbinia_zone are not all specified, bailing out',
      critical=True)
  return

self.disk_name = disk_name
self.project = project
self.turbinia_zone = turbinia_zone
self.sketch_id = sketch_id
self.run_all_jobs = run_all_jobs

try:
  turbinia_config.LoadConfig()
  self.turbinia_region = turbinia_config.TURBINIA_REGION
  self.instance = turbinia_config.PUBSUB_TOPIC
  if turbinia_config.TURBINIA_PROJECT != self.project:
    self.state.AddError(
        'Specified project {0!s} does not match Turbinia configured '
        'project {1!s}. Use gcp_turbinia_import recipe to copy the disk '
        'into the same project.'.format(
            self.project, turbinia_config.TURBINIA_PROJECT), critical=True)
    return
  self._output_path = tempfile.mkdtemp()
  self.client = turbinia_client.TurbiniaClient()
except TurbiniaException as exception:
  # TODO: determine if exception should be converted into a string as
  # elsewhere in the codebase.
  self.state.AddError(exception, critical=True)
  return
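A sketch of how a dfTimewolf recipe might call this SetUp (the class name TurbiniaProcessor and all argument values are illustrative):

processor = TurbiniaProcessor(state)
processor.SetUp(
    disk_name='disk-1', project='my-forensics-project',
    turbinia_zone='us-central1-f', sketch_id=1234, run_all_jobs=False)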
def __init__(self, base_output_dir=None, local_output_dir=None,
             unique_dir=None):
  """Initialization for OutputWriter.

  Args:
    base_output_dir (string): The base path for output. Set to the configured
        OUTPUT_DIR by default.
    local_output_dir (string): The full path for the local output dir. This
        will be generated automatically if not set.
    unique_dir (string): A pseudo-unique string to be used in paths. This
        will be generated automatically if not set.
  """
  self.unique_dir = unique_dir
  self.name = self.NAME

  if base_output_dir:
    self.base_output_dir = base_output_dir
  else:
    config.LoadConfig()
    self.base_output_dir = config.OUTPUT_DIR

  if local_output_dir:
    self.local_output_dir = local_output_dir
  else:
    self.local_output_dir = self.create_output_dir()
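A usage sketch for this initializer, assuming a concrete subclass such as LocalOutputWriter that defines NAME and create_output_dir():

writer = LocalOutputWriter(
    base_output_dir='/var/tmp/turbinia-out',
    unique_dir='1600000000-taskid-PlasoTask')
# With no local_output_dir given, the writer creates one under
# base_output_dir/unique_dir.
print(writer.local_output_dir)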