def test_children(self):
    """test Heading.__children__()"""
    node = Heading(wrap([Text("foo"), Text("bar")]), 3)
    gen = node.__children__()
    self.assertEqual(node.title, next(gen))
    self.assertRaises(StopIteration, next, gen)

def test_title(self):
    """test getter/setter for the title attribute"""
    title = wraptext("foobar")
    node = Heading(title, 3)
    self.assertIs(title, node.title)
    node.title = "héhehé"
    self.assertWikicodeEqual(wraptext("héhehé"), node.title)
def test_heading(self):
    """tests for building Heading nodes"""
    tests = [
        ([tokens.HeadingStart(level=2), tokens.Text(text="foobar"),
          tokens.HeadingEnd()],
         wrap([Heading(wraptext("foobar"), 2)])),

        ([tokens.HeadingStart(level=4), tokens.Text(text="spam"),
          tokens.Text(text="eggs"), tokens.HeadingEnd()],
         wrap([Heading(wraptext("spam", "eggs"), 4)])),
    ]
    for test, valid in tests:
        self.assertWikicodeEqual(valid, self.builder.build(test))
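The same token-to-node path can be exercised outside the test harness; a sketch assuming the pure-Python tokenizer and builder (import locations may differ between versions):

from mwparserfromhell.parser.builder import Builder
from mwparserfromhell.parser.tokenizer import Tokenizer

tokens_list = Tokenizer().tokenize("==foobar==")   # HeadingStart, Text, HeadingEnd
wikicode = Builder().build(tokens_list)
heading = wikicode.filter_headings()[0]
print(heading.level, heading.title)                # 2 foobar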
def test_filter_family(self):
    """test the Wikicode.i?filter() family of functions"""
    def genlist(gen):
        self.assertIsInstance(gen, GeneratorType)
        return list(gen)
    ifilter = lambda code: (lambda *a, **k: genlist(code.ifilter(*a, **k)))

    code = parse("a{{b}}c[[d]]{{{e}}}{{f}}[[g]]")
    for func in (code.filter, ifilter(code)):
        self.assertEqual(["a", "{{b}}", "b", "c", "[[d]]", "d", "{{{e}}}",
                          "e", "{{f}}", "f", "[[g]]", "g"], func())
        self.assertEqual(["{{{e}}}"], func(forcetype=Argument))
        self.assertIs(code.get(4), func(forcetype=Argument)[0])
        self.assertEqual(list("abcdefg"), func(forcetype=Text))
        self.assertEqual([], func(forcetype=Heading))
        self.assertRaises(TypeError, func, forcetype=True)

    funcs = [
        lambda name, **kw: getattr(code, "filter_" + name)(**kw),
        lambda name, **kw: genlist(getattr(code, "ifilter_" + name)(**kw))
    ]
    for get_filter in funcs:
        self.assertEqual(["{{{e}}}"], get_filter("arguments"))
        self.assertIs(code.get(4), get_filter("arguments")[0])
        self.assertEqual([], get_filter("comments"))
        self.assertEqual([], get_filter("external_links"))
        self.assertEqual([], get_filter("headings"))
        self.assertEqual([], get_filter("html_entities"))
        self.assertEqual([], get_filter("tags"))
        self.assertEqual(["{{b}}", "{{f}}"], get_filter("templates"))
        self.assertEqual(list("abcdefg"), get_filter("text"))
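A brief usage sketch of the same filter family on a parsed document; the filter_*/ifilter_* shortcuts are equivalent to passing forcetype to filter()/ifilter():

import mwparserfromhell
from mwparserfromhell.nodes import Template

code = mwparserfromhell.parse("a{{b}}c[[d]]{{{e}}}{{f}}[[g]]")
for tmpl in code.filter_templates():          # shortcut for forcetype=Template
    print(tmpl.name)                          # b, then f
templates = code.filter(forcetype=Template)   # the generic equivalent
for link in code.ifilter_wikilinks():         # lazy, generator-based variant
    print(link.title)                         # d, then g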
def test_unicode(self):
    """test Heading.__unicode__()"""
    node = Heading(wraptext("foobar"), 2)
    self.assertEqual("==foobar==", str(node))

    node2 = Heading(wraptext(" zzz "), 5)
    self.assertEqual("===== zzz =====", str(node2))
    | 1
    = bar
    | 2
    = {{
            baz
      }}
    | spam
    = eggs
}}
"""
marker = object()  # Random object we can find with certainty in a list
return "\n".join(self._get_tree(self, [], marker, 0))
Wikicode._build_filter_methods(
    arguments=Argument, comments=Comment, external_links=ExternalLink,
    headings=Heading, html_entities=HTMLEntity, tags=Tag, templates=Template,
    text=Text, wikilinks=Wikilink)
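This call attaches the per-type shortcuts (filter_headings(), ifilter_templates(), and so on) to Wikicode. A simplified, hypothetical sketch of how such shortcuts can be generated; the library's actual implementation may differ:

def build_filter_methods(cls, **node_types):
    """Attach filter_<name>/ifilter_<name> shortcuts that fix forcetype."""
    for name, node_type in node_types.items():
        # Bind node_type via a default argument so each closure keeps its own type.
        def _filter(self, *args, _ft=node_type, **kwargs):
            return self.filter(*args, forcetype=_ft, **kwargs)
        def _ifilter(self, *args, _ft=node_type, **kwargs):
            return self.ifilter(*args, forcetype=_ft, **kwargs)
        setattr(cls, "filter_" + name, _filter)
        setattr(cls, "ifilter_" + name, _ifilter)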
)
"""
A map of mwparserfromhell node classes to lists of nodes of
that type.
"""
self.content = execute_method(
    "strip_code", self.wikicode,
    name=self._name + ".content"
)
"""
The viewable content (no markup or templates) of the revision.
"""
self.headings = get_key(
    mwparserfromhell.nodes.Heading, self.node_class_map,
    default=[],
    name=self._name + ".headings"
)
"""
A list of :class:`mwparserfromhell.nodes.heading.Heading` nodes
"""
self.heading_titles = mappers.map(
    _extract_heading_title, self.headings,
    name=self._name + ".heading_titles"
)
"""
A list of heading titles
"""
self.external_links = get_key(
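The _extract_heading_title mapper referenced above is not shown in this fragment; a plausible minimal implementation (an assumption, not the project's actual code) just stringifies and strips each Heading's title:

def _extract_heading_title(heading):
    """Return the plain-text title of a mwparserfromhell Heading node."""
    # Assumption: strip_code() is used to drop templates/markup inside the title.
    return heading.title.strip_code().strip()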
def _handle_heading(self, token):
    """Handle a case where a heading is at the head of the tokens."""
    level = token.level
    self._push()
    while self._tokens:
        token = self._tokens.pop()
        if isinstance(token, tokens.HeadingEnd):
            title = self._pop()
            return Heading(title, level)
        else:
            self._write(self._handle_token(token))
    raise ParserError("_handle_heading() missed a close token")
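Because the handler builds the title by recursively handling the inner tokens, a heading's title is itself a Wikicode tree; for example:

import mwparserfromhell

code = mwparserfromhell.parse("== {{tmpl|arg}} in a title ==")
heading = code.filter_headings()[0]
print(heading.title.filter_templates())   # contains the {{tmpl|arg}} node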
heading) will be included in the list; ``False`` will not include it;
the default will include it only if no specific *levels* were given. If
*include_headings* is ``True``, the section's beginning
:class:`.Heading` object will be included; otherwise, this is skipped.
"""
title_matcher = self._build_matcher(matches, flags)
matcher = lambda heading: (title_matcher(heading.title) and
                           (not levels or heading.level in levels))
iheadings = self._indexed_ifilter(recursive=False, forcetype=Heading)
sections = []  # Tuples of (index_of_first_node, section)
open_headings = []  # Tuples of (index, heading), where index and
                    # heading.level are both monotonically increasing

# Add the lead section if appropriate:
if include_lead or not (include_lead is not None or matches or levels):
    itr = self._indexed_ifilter(recursive=False, forcetype=Heading)
    try:
        first = next(itr)[0]
        sections.append((0, Wikicode(self.nodes[:first])))
    except StopIteration:  # No headings in page
        sections.append((0, Wikicode(self.nodes[:])))

# Iterate over headings, adding sections to the list as they end:
for i, heading in iheadings:
    if flat:  # With flat, all sections close at the next heading
        newly_closed, open_headings = open_headings, []
    else:  # Otherwise, figure out which sections have closed, if any
        closed_start_index = len(open_headings)
        for j, (start, last_heading) in enumerate(open_headings):
            if heading.level <= last_heading.level:
                closed_start_index = j
                break
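A usage sketch of the section logic above via the public get_sections() API:

import mwparserfromhell

text = "intro\n== A ==\naaa\n=== A.1 ===\nsub\n== B ==\nbbb"
code = mwparserfromhell.parse(text)
for section in code.get_sections(levels=[2], include_headings=True):
    # Each section is a Wikicode slice that starts at its == heading ==.
    print(str(section).splitlines()[0])   # "== A ==", then "== B =="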