"""Form NETCONF message and send to testbed."""
log.debug('NETCONF MESSAGE')
try:
device.raise_mode = RaiseMode.NONE
except NameError:
log.error('Make sure you have ncclient installed in your virtual env')
return False
try:
et.iselement('')
except NameError:
log.error('The "lxml" library is required for NETCONF testing')
return False
if operation == 'capabilities':
if not returns:
log.error(banner('No NETCONF capabilities data to compare.'))
return False
return in_capabilities(
list(device.server_capabilities),
returns
)
rpc_verify = RpcVerify(
log=log,
capabilities=list(device.server_capabilities)
)
if not rpc_data:
log.error('NETCONF message data not present')
return False
if not datastore:
def setup(self, a):
logger.info(banner('Value A: %s' % a))
sleep_time (`int`) : sleep after all commands (unit: seconds)
max_retry (`int`) : Retry issuing a command in case of any error (max_retry is 1 by default)
save_to_file (`str`) : Set to one of the modes below when show output needs to be saved to file. A folder is generated for the processor and the files are stored in it. (Disabled by default)
per_device : file generated per device
per_command : file generated per command
zipped_folder (`bool`) : Set if the archive folder needs to be zipped.
If True, a zip file is generated and the folder with files is removed.
Returns:
AETEST results
Raises:
None
'''
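# A hypothetical datafile fragment for this processor (the parameter names come
# from the docstring above; the values shown are illustrative assumptions only):
#
#     parameters:
#         sleep_time: 10
#         max_retry: 2
#         save_to_file: per_device
#         zipped_folder: True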
# Init
log.info(banner("processor: 'execute_command'"))
# sanitize arguments
if save_to_file and save_to_file not in ['per_device', 'per_command']:
section.errored(
"`save_to_file` in datafile must be `per_device` or `per_command`")
if not save_to_file and isinstance(zipped_folder, bool):
log.warning(
"`zipped_folder` was ignored because `save_to_file` was not set in datafile"
)
elif save_to_file and not isinstance(zipped_folder, bool):
section.errored("`zipped_folder` must be True or False in datafile")
# prepare save location
if save_to_file:
file_list = {}
now = datetime.datetime.now()
def learn_ops(feature, device, attributes=None, **kwargs):
'''Get the Ops class for the given feature, create the object, and learn the
feature's operational state. Can also run as a child process from pcall.'''
# print the messages
log.info(banner('Sending the corresponding clis to learn {} \n'
'Operational status on device {}'
.format(feature, device.name), align='left'))
# get the feature's abstracted Ops class for the device OS via the Genie-provided function
try:
ops = get_ops(feature, device)
except Exception as e:
raise Exception('Cannot get the {f} corresponding '
'abstracted class on {d}\n{m}'
.format(m=str(e), f=feature, d=device.name))
ops_obj = ops(device, attributes=attributes)
# learn the ops
ops_obj.learn_poll(**kwargs)
return ops_obj
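# Example usage (illustrative only; the feature name and attributes value are
# assumptions, not taken from this file):
#   bgp_ops = learn_ops('bgp', device, attributes=['info'])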
if tgn_device.name not in section.parent.mapping_data['devices']:
log.info("TGN '{}' information not found in mapping datafile".
format(tgn_device.name))
return
# Check if TGN is connected
if not tgn_device.is_connected():
log.info("TGN '{}' not connected.".format(tgn_device.name))
return
# Set connection alias
tgn_alias = getattr(
tgn_device, section.parent.mapping_data['devices'][tgn_device.name]['context'])
# Check for traffic loss
log.info(banner("Check for traffic loss"))
if tgn_max_outage_ms:
try:
log.info("Verify traffic outage")
# Traffic loss is not expected beyond max_outage seconds
tgn_alias.\
calculate_absolute_outage(max_outage_ms=tgn_max_outage_ms)
log.info("PASS: Traffic stats OK")
except GenieTgnError:
traffic_loss = True
else:
try:
# Verify traffic is restored within timeout if there is a loss
tgn_alias.poll_traffic_until_traffic_resumes(
timeout=tgn_max_outage, delay_check_traffic=delay)
log.info("PASS: Traffic stats OK")
log.info('Starting Global recovery processor')
device = section.parameters['device']
# Step 1 - Do we have connectivity to the device - Try to reconnect
if device.api.verify_connectivity() or _disconnect_reconnect(device):
# All good!
log.info("Success - Device '{}' is still connected - "
"No need to recover the device".format(device.name))
return True
else:
# Not good! Lets attempt recovery
log.warning("Device '{}' is unreachable - attempting recovery".\
format(device.name))
# Start Recovery Processor
log.info(banner('Recovery Processor'))
log.info('''\
Recovery Steps:
1. Attempt to connect to the device - Failed
2. Disconnect and reconnect from the device - Failed
3. Clear line if provided
4. Power-cycle the device if provided
5. From rommon, boot the device with the golden image (TFTP boot or 'boot' command)''')
try:
_connectivity(device, console_activity_pattern, break_count, timeout,
golden_image, tftp_boot, recovery_password, clear_line, powercycler,
powercycler_delay, section, reconnect_delay)
except Exception as e:
# Could not recover the device!
log.error(banner("*** Terminating Genie Clean ***"))
section.parent.parameters['block_section'] = True
def connect_traffic_device(section, wait_time=30):
'''Trigger Processor:
* Connects to traffic generator device
'''
# Init
log.info(banner("processor: 'connect_traffic_device'"))
# Find TGN devices
tgn_devices = section.parameters['testbed'].find_devices(type='tgn')
if not tgn_devices:
log.info("SKIP: Traffic generator devices not found in testbed YAML")
return
for dev in tgn_devices:
if dev.name not in section.parent.mapping_data['devices']:
log.info("Traffic generator devices not specified in --devices")
return
# Connect to TGN
try:
dev.connect(via='tgn')
except GenieTgnError as e:
format(destination_stby)) as step:
try:
device.execute("copy {} {}".format(dest_file_path,destination_stby),\
reply=Dialog([proceed]))
except Exception as e:
log.warning("Unable to copy {} to {} on device {} due to:\n{}".\
format(dest_file_path, destination_stby, device.name, e))
with steps.start("Show dir on {} to see if image copied to standby".\
format(destination_stby)) as step:
try:
device.execute("dir {}".format(destination_stby))
except Exception as e:
log.warning("Unable to show dir on {} on device {} due to:\n{}".\
format(destination_stby, device.name, e))
else:
log.warning(banner("Failed to copy to active device"))
log.warning("Unable to copy file to active dir on {} on device {} due to:\n".\
format(destination_act, device.name))
for name, image_data in section.history['copy_to_device'].\
parameters['files_copied'].items():
with steps.start("Verify image '{}' copied to device {}".\
format(image_data['dest_path'], device.name)) as step:
# if size is -1 it means it failed to get the size
if image_data['size'] != -1:
if not device.api.verify_file_exists(
file=image_data['dest_path'],
size=image_data['size'],
dir_output=dir_after):
log.error(banner("*** Terminating Genie Clean ***"))
def check_regexp_uptime(log_output, expect_uptime, pre_time, tolerance=0.5):
'''Get the uptime matching each given regexp from the router's "show logging"
output and compare it with the given expected uptime.'''
# explain how the tolerance check is calculated
log.info(banner('Calculate Method for "tolerance check" is below:\n'
'|a - b| <= 0.5 * (a + b) * tolerance'))
# create table headers
table = ptable(['log pattern', 'expected time', 'actual time', 'tolerance check', 'result'])
# initial
flag = True
# check feature uptime
# setup the regexp pattern
p = r'.+ +(?P<time>\d+\:\d+\:\d+).\d+.+{}.+'
for item in (expect_uptime or []):
for regexp, expect_up in item.items():
# *Dec 6 11:51:37.043: %OSPF-5-ADJCHG: Process 1, Nbr 10.2.2.2 on GigabitEthernet3 from LOADING to FULL, Loading Done
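# For example, p.format('OSPF-5-ADJCHG') matched against the sample line above
# captures '11:51:37' in the 'time' group.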
if not updates and deletes:
log.info('All configs were deleted')
return True
return result
elif operation == 'subscribe':
format = kwargs.get('format', {})
rpc_data['format'] = format
if format.get('request_mode', 'STREAM') == 'ONCE':
response = device.subscribe(rpc_data)
else:
rpc_data['returns'] = returns
rpc_data['verifier'] = rpc_verify.process_operational_state
return device.subscribe(rpc_data)
elif operation == 'capabilities':
if not returns:
log.error(banner('No gNMI capabilities data to compare'))
return False
resp = device.capabilities()
result = in_capabilities(resp, returns)
else:
log.warning(banner('OPERATION: {0} not allowed'.format(operation)))
return result