- reads self.cpos to see which parens should be highlighted
- reads self.buffer to see what came before the passed-in line
- sets self.highlighted_paren to (buffer_lineno, tokens_for_that_line)
  for the buffer line that should replace that line to unhighlight it,
  or None if no paren is currently highlighted
- calls reprint_line with a buffer line's tokens and the buffer
  lineno that has changed, if a line other than the current line changes
"""
highlighted_paren = None
source = "\n".join(self.buffer + [s])
cursor = len(source) - self.cpos
if self.cpos:
    cursor += 1
stack = list()
all_tokens = list(PythonLexer().get_tokens(source))
# Unfortunately, Pygments adds a trailing newline and strings with
# no size, so strip them
while not all_tokens[-1][1]:
    all_tokens.pop()
all_tokens[-1] = (all_tokens[-1][0], all_tokens[-1][1].rstrip("\n"))
line = pos = 0
parens = dict(zip("{([", "})]"))
line_tokens = list()
saved_tokens = list()
search_for_paren = True
for (token, value) in split_lines(all_tokens):
    pos += len(value)
    if token is Token.Text and value == "\n":
        line += 1
        # Remove trailing newline
        line_tokens = list()
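
# --- Standalone illustration (not part of the bpython method above): why the
# trailing-newline cleanup is needed. Pygments' get_tokens() always ends the
# stream with a newline, even when the input has none, so the last non-empty
# value carries an extra "\n".
from pygments.lexers import PythonLexer

tokens = list(PythonLexer().get_tokens("x = (1 + 2"))
while not tokens[-1][1]:          # drop any zero-length trailing tokens
    tokens.pop()
tokens[-1] = (tokens[-1][0], tokens[-1][1].rstrip("\n"))
print(tokens[-1])                 # last token no longer ends in "\n"
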
def show_source_in_new_window(source, color_scheme=None, highlight=True):
    win = gtk.Window()
    sw = gtk.ScrolledWindow()
    view = gtk.TextView()
    buffer = view.get_buffer()
    if highlight:
        add_tags_to_buffer(color_scheme, buffer)
        for (token, value) in PythonLexer().get_tokens(source):
            while token not in theme_map:
                token = token.parent
            iter_ = buffer.get_end_iter()
            buffer.insert_with_tags_by_name(iter_, value, theme_map[token])
    else:
        buffer.insert(buffer.get_end_iter(), source)
    sw.add(view)
    win.add(sw)
    win.show_all()
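
# --- Standalone illustration (toy theme_map: a plain dict instead of GTK text
# tags): the same "walk token.parent" fallback used above, resolving a specific
# token type such as Token.Name.Function to the nearest themed ancestor.
from pygments.lexers import PythonLexer
from pygments.token import Token

theme_map = {Token.Keyword: "keyword", Token.Name: "name", Token: "default"}

for token, value in PythonLexer().get_tokens("def f(): pass"):
    while token not in theme_map:   # e.g. Token.Name.Function -> Token.Name
        token = token.parent
    print(theme_map[token], repr(value))
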
def _funcname_and_argnum(cls, line):
"""Parse out the current function name and arg from a line of code."""
# each list in stack:
# [full_expr, function_expr, arg_number, opening]
# arg_number may be a string if we've encountered a keyword
# argument so we're done counting
stack = [["", "", 0, ""]]
try:
for (token, value) in PythonLexer().get_tokens(line):
if token is Token.Punctuation:
if value in "([{":
stack.append(["", "", 0, value])
elif value in ")]}":
full, _, _, start = stack.pop()
expr = start + full + value
stack[-1][1] += expr
stack[-1][0] += expr
elif value == ",":
try:
stack[-1][2] += 1
except TypeError:
stack[-1][2] = ""
stack[-1][1] = ""
stack[-1][0] += value
elif value == ":" and stack[-1][3] == "lambda":
def string_to_fmtstr(x):
    from pygments import format
    from bpython.formatter import BPythonFormatter
    from bpython._py3compat import PythonLexer
    from bpython.config import Struct, loadini, default_config_path

    config = Struct()
    loadini(config, default_config_path())
    return parse(
        format(PythonLexer().get_tokens(x), BPythonFormatter(config.color_scheme))
    )
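
# --- Usage sketch for string_to_fmtstr above. It assumes bpython is importable
# and a readable config exists at default_config_path(); note the excerpt does
# not show where `parse` comes from -- it appears to live alongside this helper
# in bpython's curtsies frontend and turns the escape-coded text into a FmtStr.
if __name__ == "__main__":
    fs = string_to_fmtstr("x = [1, 2, 3]")
    print(repr(fs))   # a curtsies FmtStr carrying the configured colours
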
def parsekeywordpairs(signature):
    tokens = PythonLexer().get_tokens(signature)
    preamble = True
    stack = []
    substack = []
    parendepth = 0

    for token, value in tokens:
        if preamble:
            if token is Token.Punctuation and value == u"(":
                preamble = False
            continue

        if token is Token.Punctuation:
            if value in [u"(", u"{", u"["]:
                parendepth += 1
            elif value in [u")", u"}", u"]"]:
                parendepth -= 1
            elif value == ":" and parendepth == -1:
def show_source(self):
    try:
        source = self.get_source_of_current_name()
    except SourceNotFound as e:
        self.status_bar.message("%s" % (e,))
    else:
        if self.config.highlight_show_source:
            source = format(
                PythonLexer().get_tokens(source), TerminalFormatter()
            )
        self.pager(source)
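
# --- Standalone illustration of the highlighting step above, without the repl
# plumbing: pygments.format() renders a token stream with a formatter, here
# TerminalFormatter, which produces ANSI-coloured text.
from pygments import format
from pygments.formatters import TerminalFormatter
from pygments.lexers import PythonLexer

ansi_source = format(PythonLexer().get_tokens("def greet():\n    return 'hi'\n"),
                     TerminalFormatter())
print(ansi_source)
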
def next_token_inside_string(code_string, inside_string):
"""Given a code string s and an initial state inside_string, return
whether the next token will be inside a string or not."""
for token, value in PythonLexer().get_tokens(code_string):
if token is Token.String:
value = value.lstrip("bBrRuU")
if value in ['"""', "'''", '"', "'"]:
if not inside_string:
inside_string = value
elif value == inside_string:
inside_string = False
return inside_string
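
# --- Usage sketch: carry the string state from one line of input to the next.
# With a Pygments version whose Python lexer reports quote delimiters as
# Token.String, the state becomes "'''" after the first line and drops back to
# False once the closing quotes appear.
state = False
for line in ["s = '''first line of a", "middle of the string", "the end'''"]:
    state = next_token_inside_string(line, state)
    print(repr(state))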