def test_poutput_ansi_never(outsim_app):
    msg = 'Hello World'
    ansi.allow_style = ansi.STYLE_NEVER
    colored_msg = ansi.style(msg, fg='cyan')
    outsim_app.poutput(colored_msg)
    out = outsim_app.stdout.getvalue()
    expected = msg + '\n'
    assert colored_msg != msg
    assert out == expected
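
# For contrast, a hedged sketch of the complementary case, reusing the same
# fixture: with ansi.STYLE_ALWAYS the escape sequences should pass through
# to stdout verbatim (constant names follow the test above).
def test_poutput_ansi_always(outsim_app):
    msg = 'Hello World'
    ansi.allow_style = ansi.STYLE_ALWAYS
    colored_msg = ansi.style(msg, fg='cyan')
    outsim_app.poutput(colored_msg)
    out = outsim_app.stdout.getvalue()
    assert colored_msg != msg
    assert out == colored_msg + '\n'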
# These are invalid names for aliases and macros (see the validation sketch below)
invalid_command_name = [
    '""',  # Blank name
    constants.COMMENT_CHAR,
    '!no_shortcut',
    '">"',
    '"no>pe"',
    '"no spaces"',
    '"nopipe|"',
    '"noterm;"',
    'noembedded"quotes',
]
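
# A hedged sketch of how such names get rejected, assuming cmd2's
# StatementParser.is_valid_command helper, which in recent releases returns
# a (valid, errmsg) tuple; names containing shortcuts, quotes, spaces,
# redirectors, or terminators are not allowed.
def assert_names_invalid(parser, names):
    for name in names:
        valid, errmsg = parser.is_valid_command(name)
        assert not valid, 'expected {!r} to be rejected'.format(name)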
def test_get_alias_completion_items(base_app):
    run_cmd(base_app, 'alias create fake run_pyscript')
    run_cmd(base_app, 'alias create ls !ls -hal')

    results = base_app._get_alias_completion_items()
    assert len(results) == len(base_app.aliases)
def error(self, message: str) -> None:
    """Custom override that applies custom formatting to the error message"""
    if self._custom_error_message:
        message = self._custom_error_message
        self._custom_error_message = ''

    # Prefix the first line with 'Error: ' and indent any continuation lines
    lines = message.split('\n')
    formatted_message = ''
    for linum, line in enumerate(lines):
        if linum == 0:
            formatted_message = 'Error: ' + line
        else:
            formatted_message += '\n  ' + line

    self.print_usage(sys.stderr)
    self.exit(2, constants.ERROR_COLOR + '{}\n\n'.format(formatted_message) + constants.RESET_COLOR)
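
# Standalone illustration of the same formatting logic on a two-line message
# (hypothetical input; independent of the parser class above):
message = 'invalid choice\nvalid choices: foo, bar'
formatted = 'Error: ' + message.split('\n')[0]
for extra_line in message.split('\n')[1:]:
    formatted += '\n  ' + extra_line
print(formatted)
# Error: invalid choice
#   valid choices: foo, bar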
def tokenize(self, line: str) -> List[str]:
    """
    Lex a string into a list of tokens. Shortcuts and aliases are expanded and comments are removed.

    :param line: the command line being lexed
    :return: a list of tokens
    :raises ValueError: if there are unclosed quotation marks
    """
    # expand shortcuts and aliases
    line = self._expand(line)

    # check if this line is a comment
    if line.lstrip().startswith(constants.COMMENT_CHAR):
        return []

    # split on whitespace
    tokens = shlex_split(line)

    # custom lexing
    tokens = self.split_on_punctuation(tokens)
    return tokens
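
# Hypothetical usage sketch, assuming the surrounding class is cmd2's
# StatementParser with the default ';' terminator and '#' comment character:
parser = StatementParser()
print(parser.tokenize('alias list;'))       # ['alias', 'list', ';']
print(parser.tokenize('# just a comment'))  # []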
def split_on_punctuation(self, tokens: List[str]) -> List[str]:
    """Further splits tokens from a command line using punctuation characters.

    Punctuation characters are treated as word breaks when they are in
    unquoted strings. Each run of punctuation characters is treated as a
    single token.

    :param tokens: the tokens as parsed by shlex
    :return: the punctuated tokens
    """
    punctuation = []
    punctuation.extend(self.terminators)
    punctuation.extend(constants.REDIRECTION_CHARS)

    punctuated_tokens = []

    for cur_initial_token in tokens:
        # Save tokens up to 1 character in length or quoted tokens. No need to parse these.
        if len(cur_initial_token) <= 1 or cur_initial_token[0] in constants.QUOTES:
            punctuated_tokens.append(cur_initial_token)
            continue

        # Iterate over each character in this token
        cur_index = 0
        cur_char = cur_initial_token[cur_index]

        # Keep track of the token we are building
        new_token = ''

        while True:
            if cur_char not in punctuation:
                # Keep appending to new_token until we hit a punctuation char
                while cur_char not in punctuation:
                    new_token += cur_char
                    cur_index += 1
                    if cur_index < len(cur_initial_token):
                        cur_char = cur_initial_token[cur_index]
                    else:
                        break
            else:
                # Each run of identical punctuation chars becomes its own token
                cur_punc = cur_char
                while cur_char == cur_punc:
                    new_token += cur_char
                    cur_index += 1
                    if cur_index < len(cur_initial_token):
                        cur_char = cur_initial_token[cur_index]
                    else:
                        break

            # Save the token we have built and start a new one
            punctuated_tokens.append(new_token)
            new_token = ''

            # Stop once every character in the initial token has been consumed
            if cur_index >= len(cur_initial_token):
                break

    return punctuated_tokens
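
# Hypothetical illustration, assuming default terminators and redirection chars:
parser = StatementParser()
print(parser.split_on_punctuation(['history', '-s;;', '"quoted;"']))
# ['history', '-s', ';;', '"quoted;"']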
"""
Tokenize the input and parse it into a Statement object, stripping
comments, expanding aliases and shortcuts, and extracting output
redirection directives.
:param line: the command line being parsed
:return: the created Statement
:raises ValueError if there are unclosed quotation marks
"""
# handle the special case/hardcoded terminator of a blank line
# we have to do this before we tokenize because tokenizing
# destroys all unquoted whitespace in the input
terminator = ''
if line[-1:] == constants.LINE_FEED:
terminator = constants.LINE_FEED
command = ''
args = ''
arg_list = []
# lex the input into a list of tokens
tokens = self.tokenize(line)
# of the valid terminators, find the first one to occur in the input
terminator_pos = len(tokens) + 1
for pos, cur_token in enumerate(tokens):
for test_terminator in self.terminators:
if cur_token.startswith(test_terminator):
terminator_pos = pos
terminator = test_terminator
# break the inner loop, and we want to break the
def parse(self, line: str) -> Statement:
    """
    Tokenize the input and parse it into a Statement object, stripping
    comments, expanding aliases and shortcuts, and extracting output
    redirection directives.

    :param line: the command line being parsed
    :return: the created Statement
    :raises ValueError: if there are unclosed quotation marks
    """
    # handle the special case/hardcoded terminator of a blank line
    # we have to do this before we tokenize because tokenizing
    # destroys all unquoted whitespace in the input
    terminator = ''
    if line[-1:] == constants.LINE_FEED:
        terminator = constants.LINE_FEED

    command = ''
    args = ''
    arg_list = []

    # lex the input into a list of tokens
    tokens = self.tokenize(line)

    # of the valid terminators, find the first one to occur in the input
    terminator_pos = len(tokens) + 1
    for pos, cur_token in enumerate(tokens):
        for test_terminator in self.terminators:
            if cur_token.startswith(test_terminator):
                terminator_pos = pos
                terminator = test_terminator
                # break the inner loop, and we want to break the
                # outer loop too
                break
        else:
            # this else clause only runs if the inner loop completed
            # without a break, meaning no terminator was found in this
            # token, so move on to the next one
            continue
        # inner loop was broken, so break the outer loop too
        break

    if terminator:
        if terminator == constants.LINE_FEED:
            terminator_pos = len(tokens)

        # everything before the first terminator is the command and the args
        (command, args) = self._command_and_args(tokens[:terminator_pos])
        arg_list = tokens[1:terminator_pos]

        # remove all the tokens before and including the terminator
        tokens = tokens[terminator_pos + 1:]
    else:
        (testcommand, testargs) = self._command_and_args(tokens)
        if testcommand in self.multiline_commands:
            # no terminator on this line, but this is a multiline command;
            # everything else on the line is part of the args because
            # redirectors can only appear after a terminator
            command = testcommand
            args = testargs
            arg_list = tokens[1:]
            tokens = []

    pipe_to = ''
    output = ''
    output_to = ''

    # Find which redirector character appears first in the command
    try:
        pipe_index = tokens.index(constants.REDIRECTION_PIPE)
    except ValueError:
        pipe_index = len(tokens)

    try:
        redir_index = tokens.index(constants.REDIRECTION_OUTPUT)
    except ValueError:
        redir_index = len(tokens)

    try:
        append_index = tokens.index(constants.REDIRECTION_APPEND)
    except ValueError:
        append_index = len(tokens)

    # Check if output should be piped to a shell command
    if pipe_index < redir_index and pipe_index < append_index:
        # Get the tokens for the pipe command and expand ~ where needed
        pipe_to_tokens = tokens[pipe_index + 1:]
        utils.expand_user_in_tokens(pipe_to_tokens)

        # Build the pipe command line string
        pipe_to = ' '.join(pipe_to_tokens)

        # remove all the tokens after the pipe
        tokens = tokens[:pipe_index]

    # Check for output redirect/append
    elif redir_index != append_index:
        if redir_index < append_index:
            output = constants.REDIRECTION_OUTPUT
            output_index = redir_index
        else:
            output = constants.REDIRECTION_APPEND
            output_index = append_index

        # Check if we are redirecting to a file
        if len(tokens) > output_index + 1:
            unquoted_path = utils.strip_quotes(tokens[output_index + 1])
            if unquoted_path:
                output_to = utils.expand_user(tokens[output_index + 1])

        # remove all the tokens after the output redirect
        tokens = tokens[:output_index]

    if terminator:
        # whatever is left is the suffix
        suffix = ' '.join(tokens)
    else:
        # no terminator, so whatever is left is the command and the args
        suffix = ''
        if not command:
            # command could already have been set; if so, don't set it again
            (command, args) = self._command_and_args(tokens)
            arg_list = tokens[1:]

    # set multiline
    if command in self.multiline_commands:
        multiline_command = command
    else:
        multiline_command = ''

    # build the statement
    return Statement(args,
                     raw=line,
                     command=command,
                     arg_list=arg_list,
                     multiline_command=multiline_command,
                     terminator=terminator,
                     suffix=suffix,
                     pipe_to=pipe_to,
                     output=output,
                     output_to=output_to)
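
# Hypothetical usage sketch, assuming the class is cmd2's StatementParser and
# default redirection characters; attribute names follow the code above:
parser = StatementParser()
statement = parser.parse('help history > history.txt')
print(statement.command)    # 'help'
print(statement.args)       # 'history'
print(statement.output)     # '>'
print(statement.output_to)  # 'history.txt'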