above_inventory_dir, _ = path.split(inventory_dir)
potential_deploy_dirs.extend((
inventory_dir, above_inventory_dir,
))
for potential_deploy_dir in potential_deploy_dirs:
logger.debug('Checking potential directory: {0}'.format(
potential_deploy_dir,
))
if any((
path.isdir(path.join(potential_deploy_dir, 'group_data')),
path.isfile(path.join(potential_deploy_dir, 'config.py')),
)):
logger.debug('Setting directory to: {0}'.format(potential_deploy_dir))
deploy_dir = potential_deploy_dir
break
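# Hedged sketch (not part of the original snippet): the directory detection
# above, reduced to a standalone helper. 'find_deploy_dir' and
# 'candidate_dirs' are hypothetical names; only os.path is used.
from os import path

def find_deploy_dir(candidate_dirs):
    for candidate in candidate_dirs:
        if any((
            path.isdir(path.join(candidate, 'group_data')),
            path.isfile(path.join(candidate, 'config.py')),
        )):
            return candidate
    return None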
# List facts
if operations[0] == 'fact':
command = 'fact'
fact_names = operations[1:]
facts = []
for name in fact_names:
args = None
if ':' in name:
name, args = name.split(':', 1)
args = args.split(',')
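# Hedged sketch: the 'name:arg1,arg2' splitting above as a standalone helper.
# 'parse_fact_spec' is a hypothetical name, not from the original code.
def parse_fact_spec(spec):
    args = None
    if ':' in spec:
        spec, args = spec.split(':', 1)
        args = args.split(',')
    return spec, args

# parse_fact_spec('command:uptime') -> ('command', ['uptime'])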
hostname (string): hostname of the target
command (string): actual command to execute
sudo (boolean): whether to wrap the command with sudo
sudo_user (string): user to sudo to
get_pty (boolean): whether to get a PTY before executing the command
env (dict): environment variables to set
timeout (int): timeout for this command to complete before erroring
Returns:
tuple: (exit_code, stdout, stderr)
stdout and stderr are both lists of strings from each buffer.
'''
command = make_command(command, **command_kwargs)
logger.debug('--> Running command on localhost: {0}'.format(command))
if print_output:
print('{0}>>> {1}'.format(host.print_prefix, command))
process = Popen(command, shell=True, stdout=PIPE, stderr=PIPE)
combined_output = read_buffers_into_queue(
host,
process.stdout,
process.stderr,
timeout=timeout,
print_output=print_output,
)
logger.debug('--> Waiting for exit status...')
process.wait()
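# Hedged sketch: a minimal standalone equivalent of the local command run
# above, returning (exit_code, stdout_lines, stderr_lines) as the docstring
# describes. 'run_local_command' is a hypothetical name; stdlib only.
import subprocess

def run_local_command(command, timeout=None, env=None):
    result = subprocess.run(
        command, shell=True, capture_output=True, text=True,
        timeout=timeout, env=env,
    )
    return result.returncode, result.stdout.splitlines(), result.stderr.splitlines()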
data = fact.process(stdout)
hostname_facts[host] = data
log_name = click.style(name, bold=True)
filtered_args = list(filter(None, args))
if filtered_args:
log = 'Loaded fact {0}: {1}'.format(log_name, tuple(filtered_args))
else:
log = 'Loaded fact {0}'.format(log_name)
if state.print_fact_info:
logger.info(log)
else:
logger.debug(log)
# Check we've not failed
if not ignore_errors:
state.fail_hosts(failed_hosts)
# Assign the facts
state.facts.setdefault(fact_hash, {}).update(hostname_facts)
return state.facts[fact_hash]
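# Hedged sketch: the setdefault-and-update caching pattern above on a plain
# dict; 'facts_cache' and the keys here are illustrative only.
facts_cache = {}
facts_cache.setdefault('some_fact_hash', {}).update({'host-1': {'uptime': 123}})
assert facts_cache['some_fact_hash']['host-1'] == {'uptime': 123}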
def _run_server_op(state, host, op_hash):
# Noop for this host?
if op_hash not in state.ops[host]:
logger.info('{0}{1}'.format(
host.print_prefix,
click.style(
'Skipped',
'blue',
),
))
return True
op_data = state.ops[host][op_hash]
op_meta = state.op_meta[op_hash]
logger.debug('Starting operation {0} on {1}'.format(
', '.join(op_meta['names']), host,
))
state.ops_run.add(op_hash)
# ...loop through each command
for i, command in enumerate(op_data['commands']):
status = False
shell_executable = op_meta['shell_executable']
sudo = op_meta['sudo']
sudo_user = op_meta['sudo_user']
su_user = op_meta['su_user']
preserve_sudo_env = op_meta['preserve_sudo_env']
# As dicts, individual commands can override meta settings (ie on a per-host basis).
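# Hedged sketch of the override the comment above describes: a command given
# as a dict can shadow the operation-level settings. All names and the exact
# override mechanics here are illustrative assumptions, not the original code.
op_settings = {'sudo': True, 'sudo_user': None}
command = {'command': 'whoami', 'sudo': False}

if isinstance(command, dict):
    sudo = command.get('sudo', op_settings['sudo'])  # per-command override
else:
    sudo = op_settings['sudo']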
def _get_vagrant_ssh_config(queue, progress, target):
logger.debug('Loading SSH config for {0}'.format(target))
queue.put(local.shell(
'vagrant ssh-config {0}'.format(target),
splitlines=True,
))
progress(target)
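# Hedged sketch: turning the 'vagrant ssh-config <target>' output lines the
# function above collects into a dict. 'parse_ssh_config_lines' is a
# hypothetical helper, not part of the original code.
def parse_ssh_config_lines(lines):
    config = {}
    for line in lines:
        line = line.strip()
        if line and ' ' in line:
            key, value = line.split(' ', 1)
            config[key] = value
    return config

# parse_ssh_config_lines(['HostName 127.0.0.1', 'Port 2222'])
# -> {'HostName': '127.0.0.1', 'Port': '2222'}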
def connect(state, host, for_fact=None):
'''
Connect to a single host. Returns the SSH client if successful. Stateless by
design, so it can be run in parallel.
'''
kwargs = _make_paramiko_kwargs(state, host)
logger.debug('Connecting to: {0} ({1})'.format(host.name, kwargs))
# Hostname can be provided via SSH config (alias), data, or the host's name
hostname = kwargs.pop(
'hostname',
host.data.ssh_hostname or host.name,
)
try:
# Create new client & connect to the host
client = SSHClient()
client.set_missing_host_key_policy(MissingHostKeyPolicy())
client.connect(hostname, **kwargs)
# Enable SSH forwarding
session = client.get_transport().open_session()
AgentRequestHandler(session)
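# Hedged sketch: the paramiko calls above in a minimal standalone form, with
# SSH agent forwarding enabled on a fresh session. The hostname and username
# here are placeholder examples.
from paramiko import MissingHostKeyPolicy, SSHClient
from paramiko.agent import AgentRequestHandler

client = SSHClient()
client.set_missing_host_key_policy(MissingHostKeyPolicy())
client.connect('example.com', username='deploy', timeout=10)

session = client.get_transport().open_session()
AgentRequestHandler(session)  # forward the local SSH agent over this session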
def get_facts(state, name, args=None, ensure_hosts=None):
'''
Get a single fact for all hosts in the state.
'''
# Create an instance of the fact
fact = FACTS[name]()
if isinstance(fact, ShortFactBase):
return get_short_facts(state, fact, args=args, ensure_hosts=ensure_hosts)
logger.debug('Getting fact: {0} (ensure_hosts: {1})'.format(
name, ensure_hosts,
))
args = args or []
# Apply args or defaults
sudo = state.config.SUDO
sudo_user = state.config.SUDO_USER
su_user = state.config.SU_USER
ignore_errors = state.config.IGNORE_ERRORS
# Timeout for operations != timeout for connect (config.CONNECT_TIMEOUT)
timeout = None
# Get the current op meta
current_op_hash = state.current_op_hash
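# Hedged sketch: the registry-lookup-then-instantiate pattern above
# (FACTS[name]()), shown with a toy registry; the class and registry names
# here are illustrative stand-ins, not the original code.
class ExampleFact(object):
    def process(self, output_lines):
        return output_lines

EXAMPLE_FACTS = {'example': ExampleFact}

fact_instance = EXAMPLE_FACTS['example']()  # mirrors fact = FACTS[name]()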
def __exit__(self, type_, value, traceback):
self.state.pipelining = False
# Get pipelined facts!
# for name, args in six.iteritems(self.state.facts_to_pipeline):
# get_facts(self.state, name, pipeline_args=args)
# Actually build our ops
for (host_name, func, args, kwargs) in self.state.ops_to_pipeline:
logger.debug(
'Replaying op: {0}, args={1}, kwargs={2}'.format(func, args, kwargs),
)
func(self.state, self.state.inventory[host_name], *args, **kwargs)
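# Hedged sketch: the collect-then-replay context manager pattern used above,
# reduced to a minimal standalone form with illustrative names.
class ReplayQueue(object):
    def __init__(self):
        self.calls = []

    def __enter__(self):
        return self

    def __exit__(self, type_, value, traceback):
        for func, args, kwargs in self.calls:
            func(*args, **kwargs)

# usage:
# with ReplayQueue() as queue:
#     queue.calls.append((print, ('replayed on exit',), {}))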