from ftfy import fix_text, fix_text_segment
from ftfy.fixes import unescape_html


def test_entities():
    example = '&amp;\n<html>\n&lt;html&gt;'

    # With fix_entities='auto' (the default), entities are decoded until
    # text that looks like HTML markup (both '<' and '>') is encountered.
    assert fix_text(example) == '&\n<html>\n&lt;html&gt;'
    assert fix_text_segment(example) == '&amp;\n<html>\n&lt;html&gt;'
    assert fix_text(example, fix_entities=True) == '&\n<html>\n<html>'
    assert fix_text_segment(example, fix_entities=True) == '&\n<html>\n<html>'
    assert fix_text(example, fix_entities=False) == '&amp;\n<html>\n&lt;html&gt;'
    assert fix_text_segment(example, fix_entities=False) == '&amp;\n<html>\n&lt;html&gt;'

    assert fix_text_segment('&lt;&gt;', fix_entities=False) == '&lt;&gt;'
    assert fix_text_segment('&lt;&gt;', fix_entities=True) == '<>'
    assert fix_text_segment('&lt;&gt;') == '<>'

    # Named entities and "sloppy" numeric references (Windows-1252 code points).
    assert fix_text_segment('jednocze&sacute;nie') == 'jednocześnie'
    assert fix_text_segment('JEDNOCZE&Sacute;NIE') == 'JEDNOCZEŚNIE'
    assert fix_text_segment('ellipsis&#133;', normalization='NFKC') == 'ellipsis...'
    assert fix_text_segment('ellipsis&#x85;', normalization='NFKC') == 'ellipsis...'
    assert fix_text_segment('broken&#x81;') == 'broken\x81'

    # unescape_html decodes unconditionally but leaves malformed entities alone.
    assert unescape_html('euro &#x80;') == 'euro €'
    assert unescape_html('not an entity &#20x6;') == 'not an entity &#20x6;'
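A quick usage sketch of the same helpers outside the test. The input strings here are illustrative, and the sketch assumes an ftfy release in which fix_text still accepts the fix_entities keyword, as the test above does.

# Usage sketch (hypothetical example strings; assumes an ftfy version whose
# fix_text still takes the fix_entities keyword, matching the test above).
from ftfy import fix_text
from ftfy.fixes import unescape_html

print(fix_text('Hello &amp; goodbye'))             # Hello & goodbye
print(fix_text('<p>Hello &amp; goodbye</p>'))      # left alone: looks like HTML
print(fix_text('<p>Hello &amp; goodbye</p>',
               fix_entities=True))                 # <p>Hello & goodbye</p>
print(unescape_html('&lt;&gt;'))                   # <>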