# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): fragment of a CLI entry point — the enclosing function and the
# body of the trailing 'rawdisk' branch are outside this view.
# --run_local only makes sense for a single named task.
if args.run_local and not args.task:
  log.error('--run_local flag requires --task flag')
  sys.exit(1)

# Set zone/project to defaults if flags are not set, and also copy remote
# disk if needed.
if args.command in ('googleclouddisk', 'googleclouddiskrawembedded'):
  # Zone: CLI flag wins, then config; neither set is a fatal error.
  if not args.zone and config.TURBINIA_ZONE:
    args.zone = config.TURBINIA_ZONE
  elif not args.zone and not config.TURBINIA_ZONE:
    log.error('Turbinia zone must be set by --zone or in config')
    sys.exit(1)

  # Project: same precedence rules as zone.
  if not args.project and config.TURBINIA_PROJECT:
    args.project = config.TURBINIA_PROJECT
  elif not args.project and not config.TURBINIA_PROJECT:
    log.error('Turbinia project must be set by --project or in config')
    sys.exit(1)

  # Disk lives in a different project than Turbinia runs in: copy it over
  # so the workers can attach it, and process the copy instead.
  if args.project and args.project != config.TURBINIA_PROJECT:
    new_disk = libcloudforensics.create_disk_copy(
        args.project, config.TURBINIA_PROJECT, None, config.TURBINIA_ZONE,
        args.disk_name)
    args.disk_name = new_disk.name
    if args.copy_only:
      log.info('--copy_only specified, so not processing with Turbinia')
      sys.exit(0)

# Start Evidence configuration
evidence_ = None
if args.command == 'rawdisk':
def __init__(self):
  """Create a Datastore client for the configured Turbinia project.

  Raises:
    TurbiniaException: If the Datastore client could not be created
        (typically missing application-default credentials).
  """
  config.LoadConfig()
  try:
    self.client = datastore.Client(project=config.TURBINIA_PROJECT)
  except EnvironmentError as exception:
    raise TurbiniaException(
        'Could not create Datastore client: {0!s}\n'
        'Have you run $ gcloud auth application-default login?'.format(
            exception))
# Deploy the Turbinia GCP Cloud Functions and create the Datastore index.
index_file = './index.yaml'

# An explicit function name on the command line deploys only that function;
# otherwise deploy the default set.
if len(sys.argv) > 1:
  function_names = [sys.argv[1]]
else:
  function_names = ['gettasks', 'closetasks']

config.LoadConfig()

for cloud_function in function_names:
  print('Deploying function {0:s}'.format(cloud_function))
  # NOTE(review): shell=True with string-built commands; values come from
  # local trusted config, but a list argv with shell=False would be safer.
  cmd = (
      'gcloud --project {0:s} functions deploy {1:s} --stage-bucket {2:s} '
      '--region {3:s} --runtime nodejs6 --trigger-http'.format(
          config.TURBINIA_PROJECT, cloud_function, config.BUCKET_NAME,
          config.TURBINIA_REGION))
  print(subprocess.check_call(cmd, shell=True))

# Bug fix: original used '/n' (literal slash-n) instead of the '\n' escape.
print('\nCreating Datastore index from {0:s}'.format(index_file))
cmd = 'gcloud --quiet --project {0:s} datastore indexes create {1:s}'.format(
    config.TURBINIA_PROJECT, index_file)
subprocess.check_call(cmd, shell=True)
def __init__(self, jobs_blacklist=None, jobs_whitelist=None):
  """Initialization for PSQ Worker.

  Args:
    jobs_blacklist (Optional[list[str]]): Jobs we will exclude from running
    jobs_whitelist (Optional[list[str]]): The only Jobs we will include to run

  Raises:
    TurbiniaException: When the PSQ Queue cannot be created.
  """
  config.LoadConfig()
  psq_publisher = pubsub.PublisherClient()
  psq_subscriber = pubsub.SubscriberClient()
  datastore_client = datastore.Client(project=config.TURBINIA_PROJECT)
  # PSQ uses PubSub for task distribution and Datastore for task state.
  try:
    self.psq = psq.Queue(
        psq_publisher, psq_subscriber, config.TURBINIA_PROJECT,
        name=config.PSQ_TOPIC, storage=psq.DatastoreStorage(datastore_client))
  except exceptions.GoogleCloudError as e:
    msg = 'Error creating PSQ Queue: {0:s}'.format(str(e))
    log.error(msg)
    raise TurbiniaException(msg)

  # Deregister jobs from blacklist/whitelist.
  disabled_jobs = list(config.DISABLED_JOBS) if config.DISABLED_JOBS else []
  job_manager.JobsManager.DeregisterJobs(jobs_blacklist, jobs_whitelist)
  if disabled_jobs:
    log.info(
        'Disabling jobs that were configured to be disabled in the '
        'config file: {0:s}'.format(', '.join(disabled_jobs)))
    job_manager.JobsManager.DeregisterJobs(jobs_blacklist=disabled_jobs)

  # Check for valid dependencies/directories.
    local_path(str): The local path to the block device to detach.
  """
  # TODO: can local_path be something different than the /dev/disk/by-id/google*
  if local_path:
    path = local_path
  else:
    path = '/dev/disk/by-id/google-{0:s}'.format(disk_name)
  # Nothing attached at that device path means there is nothing to detach.
  if not IsBlockDevice(path):
    log.info('Disk {0:s} already detached!'.format(disk_name))
    return

  config.LoadConfig()
  instance_name = GetLocalInstanceName()
  project = GoogleCloudProject(
      project_id=config.TURBINIA_PROJECT, default_zone=config.TURBINIA_ZONE)
  instance = project.GetInstance(instance_name, zone=config.TURBINIA_ZONE)
  disk = instance.GetDisk(disk_name)
  log.info(
      'Detaching disk {0:s} from instance {1:s}'.format(
          disk_name, instance_name))
  instance.DetachDisk(disk)

  # Make sure device is Detached — poll until the device node disappears.
  # NOTE(review): xrange is Python 2 only; under Python 3 this raises
  # NameError and should be range(RETRY_MAX). Flagged, not changed here.
  for _ in xrange(RETRY_MAX):
    if not os.path.exists(path):
      log.info('Block device {0:s} is no longer attached'.format(path))
      break
    time.sleep(5)
def _backend_setup(self, server=True, *args, **kwargs):
  """Set up the PubSub channel and PSQ queue for this task manager.

  Args:
    server (bool): Whether this is the client or a server

  Raises:
    TurbiniaException: When there are errors creating PSQ Queue
  """
  log.debug(
      'Setting up PSQ Task Manager requirements on project {0:s}'.format(
          config.TURBINIA_PROJECT))
  self.server_pubsub = turbinia_pubsub.TurbiniaPubSub(config.PUBSUB_TOPIC)
  # The server consumes task requests; a client only publishes them.
  setup = (
      self.server_pubsub.setup_subscriber
      if server else self.server_pubsub.setup_publisher)
  setup()

  publisher_client = pubsub.PublisherClient()
  subscriber_client = pubsub.SubscriberClient()
  ds_client = datastore.Client(project=config.TURBINIA_PROJECT)
  try:
    self.psq = psq.Queue(
        publisher_client, subscriber_client, config.TURBINIA_PROJECT,
        name=config.PSQ_TOPIC, storage=psq.DatastoreStorage(ds_client))
  except exceptions.GoogleCloudError as error:
    msg = 'Error creating PSQ Queue: {0:s}'.format(str(error))
    log.error(msg)
    raise turbinia.TurbiniaException(msg)
def setup_subscriber(self):
  """Set up the pubsub subscriber."""
  config.LoadConfig()
  self.subscriber = pubsub.SubscriberClient()
  sub_path = self.subscriber.subscription_path(
      config.TURBINIA_PROJECT, self.topic_name)
  # Derive the topic path here only if a publisher has not already set it.
  if not self.topic_path:
    self.topic_path = self.subscriber.topic_path(
        config.TURBINIA_PROJECT, self.topic_name)
  try:
    log.debug(
        'Trying to create subscription {0:s} on topic {1:s}'.format(
            sub_path, self.topic_path))
    self.subscriber.create_subscription(sub_path, self.topic_path)
  except exceptions.Conflict:
    # The subscription surviving a previous run is the expected case.
    log.debug('Subscription {0:s} already exists.'.format(sub_path))
  log.debug('Setup PubSub Subscription {0:s}'.format(sub_path))
  self.subscription = self.subscriber.subscribe(sub_path, self._callback)
# NOTE(review): fragment of a module SetUp method — the enclosing def is
# outside this view.
self.disk_name = disk_name
self.project = project
self.turbinia_zone = turbinia_zone
self.sketch_id = sketch_id
self.run_all_jobs = run_all_jobs

try:
  turbinia_config.LoadConfig()
  self.turbinia_region = turbinia_config.TURBINIA_REGION
  self.instance = turbinia_config.PUBSUB_TOPIC
  # This recipe cannot copy disks across projects, so the requested project
  # must match the one Turbinia is configured for.
  if turbinia_config.TURBINIA_PROJECT != self.project:
    self.state.AddError(
        'Specified project {0!s} does not match Turbinia configured '
        'project {1!s}. Use gcp_turbinia_import recipe to copy the disk '
        'into the same project.'.format(
            self.project, turbinia_config.TURBINIA_PROJECT), critical=True)
    return
  self._output_path = tempfile.mkdtemp()
  self.client = turbinia_client.TurbiniaClient()
except TurbiniaException as exception:
  # TODO: determine if exception should be converted into a string as
  # elsewhere in the codebase.
  self.state.AddError(exception, critical=True)
  return
def setup_subscriber(self):
  """Set up the pubsub subscriber."""
  config.LoadConfig()
  subscriber = pubsub.SubscriberClient()
  self.subscriber = subscriber
  subscription_path = subscriber.subscription_path(
      config.TURBINIA_PROJECT, self.topic_name)
  # A publisher may already have filled in the topic path; reuse it if so.
  if not self.topic_path:
    self.topic_path = subscriber.topic_path(
        config.TURBINIA_PROJECT, self.topic_name)
  try:
    log.debug(
        'Trying to create subscription {0:s} on topic {1:s}'.format(
            subscription_path, self.topic_path))
    subscriber.create_subscription(subscription_path, self.topic_path)
  except exceptions.Conflict:
    log.debug('Subscription {0:s} already exists.'.format(subscription_path))
  log.debug('Setup PubSub Subscription {0:s}'.format(subscription_path))
  self.subscription = subscriber.subscribe(subscription_path, self._callback)
def setup_publisher(self):
  """Set up the pubsub publisher."""
  config.LoadConfig()
  publisher = pubsub.PublisherClient()
  self.publisher = publisher
  self.topic_path = publisher.topic_path(
      config.TURBINIA_PROJECT, self.topic_name)
  try:
    log.debug('Trying to create pubsub topic {0:s}'.format(self.topic_path))
    publisher.create_topic(self.topic_path)
  except exceptions.Conflict:
    # Topic was created by a previous run; nothing more to do.
    log.debug('PubSub topic {0:s} already exists.'.format(self.topic_path))
  log.debug('Setup PubSub publisher at {0:s}'.format(self.topic_path))