Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# 'typetg': ['Web Novel'],
# 'genretg': ['Action', 'Adventure', 'Comedy', 'Ecchi', 'Fantasy', 'Romance', 'Seinen'],
# 'licensedtg': ['No'],
# 'altnames': ['Sendai Yuusha wa Inkyoshitai', 'The Previous Hero wants to Retire', '先代勇者は隠居したい'],
# 'authortg': ['Iida K'],
# 'artisttg': ['Shimotsuki Eito'],
# 'title': 'Sendai Yuusha wa Inkyou Shitai',
# 'description': '<p>\n Three years ago, in the land of Reinbulk, a Legendary Hero was summoned in the Kindom of Leezalion and he succeeded in repelling the Demon King. Now, five students are summoned back into Reinbulk by the Kingdom of Luxeria to fight against the Demon King and the demon army. Unlike the other heroes, Yashiro Yuu has no magical affinity and the Luxeria Kingdom has no intention on acknowledging his existence or returning him to his world.\n </p>\n <p>\n However, Yuu is actually the previous Hero that had fought the Demon King. Moreover, he is perplexed at the situation since he knows the Demon King has not returned since he sealed him. If the seal was ever broken then he would be automatically summoned instead of normal summoned. Since he already saved the world once and the Demon King hasn’t been unsealed, Yuu decides to leave the demons to the new heroes and retire from the Hero business. So he decides to become an adventurer.\n </p>',
# 'tagstg': ['Elves', 'Heroes', 'Magic', 'Monsters', 'Multiple Narrators', 'Protagonist Strong from the Start', 'Strong Male Lead', 'Sword and Sorcery', 'Transported to Another World'],
# 'langtg': ['Japanese'],
# 'yeartg': ['2013']
# 'transcompletetg': ['No'],
# }
# Sanitize the scraped description HTML: keep only basic formatting tags,
# strip everything else, then trim surrounding whitespace.
data_sets['description'] = bleach.clean(descrtg.prettify(), tags=['a', 'abbr', 'acronym', 'b', 'blockquote', 'code', 'em', 'i', 'li', 'ol', 'strong', 'ul', 'p'], strip=True).strip()
# Message describing the scraped series for downstream consumers.
# NOTE(review): this dict literal is truncated in this view -- the closing
# brace and any remaining keys are outside this chunk.
series_message = {
'update_only' : False,
# Identifies where this record was scraped from.
'sourcesite' : "NovelUpdates",
'title' : data_sets['title'],
# Alternate titles plus the canonical title itself.
'alt_titles' : data_sets['altnames'] + [data_sets['title'], ],
'desc' : data_sets['description'],
# 'homepage' : data_sets[''],
'author' : data_sets['authortg'],
'illust' : data_sets['artisttg'],
'pubdate' : data_sets['yeartg'],
# Original-language and English publisher names, combined into one list.
'pubnames' : data_sets['orig_pub_tg'] + data_sets['eng_pub_tg'],
# 'sourcesite' : data_sets[''],
'tags' : data_sets['tagstg'],
# Column formatter producing HTML links for a task instance row.
# Signature looks like a Flask-Admin formatter (view, context, model,
# name) -- only the model `m` is read here; TODO confirm against the
# admin view that registers it.
def task_instance_link(v, c, m, p):
# Sanitize identifiers before they are embedded in generated HTML/URLs.
dag_id = bleach.clean(m.dag_id)
task_id = bleach.clean(m.task_id)
# Link to the task-detail page for this task instance.
url = url_for(
'airflow.task',
dag_id=dag_id,
task_id=task_id,
execution_date=m.execution_date.isoformat())
# Link to the graph view rooted at this task (task plus upstream).
url_root = url_for(
'airflow.graph',
dag_id=dag_id,
root=task_id,
execution_date=m.execution_date.isoformat())
# NOTE(review): the template string below is truncated in this view --
# its closing quotes and the .format(...) call are not visible.
return Markup(
"""
<span style="white-space: nowrap;">
<a href="{url}">{task_id}</a>
<a title="Filter on this task and upstream" href="{url_root}">
</a></span>
# Fragment of a revision-notification builder: the function header and
# opening branches are outside this view.  Each branch selects which
# previous revision to diff the incoming `revision` against.
old_rev = l10n.order_by('-created')[1]
diff = get_diff_for(revision.document, old_rev, revision)
# More than one approved revision exists: diff against the prior one.
elif revision_approved and approved.count() > 1:
old_rev = approved.order_by('-created')[1]
diff = get_diff_for(revision.document, old_rev, revision)
# Otherwise fall back to the document's current revision, if any.
elif revision.document.current_revision is not None:
old_rev = revision.document.current_revision
diff = get_diff_for(revision.document, old_rev, revision)
# Template context for the notification email; the user-supplied summary
# and content are sanitized with bleach's clean() before rendering.
return {
'document_title': revision.document.title,
'creator': revision.creator,
'host': Site.objects.get_current().domain,
'diff': diff,
'summary': clean(revision.summary, ALLOWED_TAGS, ALLOWED_ATTRIBUTES),
'fulltext': clean(revision.content, ALLOWED_TAGS, ALLOWED_ATTRIBUTES),
}
# Fragment of a markup-rendering filter: `tags`, `attrs`, `styles`,
# `markup_kwargs`, `text` and `markup_filter_name` are defined outside
# this view.  Callers may extend the bleach whitelist through the
# optional markup_kwargs['clean'] mapping.
tags_provided = ('clean' in markup_kwargs
and 'extra_tags' in markup_kwargs['clean'])
if tags_provided:
tags += markup_kwargs['clean']['extra_tags']
attrs_provided = ('clean' in markup_kwargs
and 'extra_attrs' in markup_kwargs['clean'])
if attrs_provided:
attrs.update(markup_kwargs['clean']['extra_attrs'])
styles_provided = ('clean' in markup_kwargs
and 'extra_styles' in markup_kwargs['clean'])
if styles_provided:
styles += markup_kwargs['clean']['extra_styles']
# Render markdown first, then sanitize the resulting HTML with bleach.
html = bleach.clean(markdown.markdown(text, **markup_kwargs),
tags=tags, attributes=attrs, styles=styles)
elif markup_filter_name == 'restructuredtext':
# Imported lazily so docutils is only required when reST is used.
from docutils import core
if 'settings_overrides' not in markup_kwargs:
markup_kwargs.update(
settings_overrides=getattr(
settings,
"RESTRUCTUREDTEXT_FILTER_SETTINGS",
{},
)
)
if 'writer_name' not in markup_kwargs:
# Default to the docutils HTML4/CSS1 writer.
markup_kwargs.update(writer_name='html4css1')
parts = core.publish_parts(source=text, **markup_kwargs)
def sanitize_html(htmlSource, allow_data_urls=False):
    """Strip disallowed markup from *htmlSource* with bleach.

    Only the module-level acceptable elements and attributes survive;
    everything else is stripped rather than escaped.  When
    *allow_data_urls* is true, ``data:`` URLs are permitted in addition
    to bleach's default protocol whitelist.
    """
    options = {
        "tags": _acceptable_elements,
        "attributes": _acceptable_attributes,
        "strip": True,
    }
    if allow_data_urls:
        options["protocols"] = bleach.ALLOWED_PROTOCOLS + ["data"]
    return bleach.clean(htmlSource, **options)
def handle_add_comment(request, task):
    """Create a comment on *task* from POST data and notify participants.

    No-op unless the ``add_comment`` flag is present in the POST.  The
    stored comment body is sanitized with bleach; note the notification
    email is sent the raw submitted text, not the sanitized copy.
    """
    if not request.POST.get("add_comment"):
        return
    raw_body = request.POST["comment-body"]
    Comment.objects.create(
        author=request.user,
        task=task,
        body=bleach.clean(raw_body, strip=True),
    )
    send_email_to_thread_participants(
        task,
        raw_body,
        request.user,
        subject='New comment posted on task "{}"'.format(task.title),
    )
    messages.success(request, "Comment posted. Notification email sent to thread participants.")
# Fragment of a filter-parsing view: each value read from filters_map is
# passed through bleach.clean() before being logged, so nothing unescaped
# reaches the logs or later processing.  (`filters_map`, `is_open`, `app`
# and `get_filter_value` are defined outside this view.)
# NOTE(review): bleach.clean() requires text -- this assumes
# get_filter_value() returns strings even for is_boolean=True filters;
# confirm upstream.  Trailing semicolons and commented-out alternatives
# removed; logic is otherwise unchanged.
is_open = bleach.clean(is_open)
app.logger.info(is_open)
is_closed = get_filter_value(filters_map=filters_map, filter_name='is_closed', is_boolean=True)
is_closed = bleach.clean(is_closed)
app.logger.info(is_closed)
due_soon = get_filter_value(filters_map=filters_map, filter_name='due_soon', is_boolean=True)
due_soon = bleach.clean(due_soon)
app.logger.info(due_soon)
overdue = get_filter_value(filters_map=filters_map, filter_name='overdue', is_boolean=True)
overdue = bleach.clean(overdue)
app.logger.info(overdue)
mine_as_poc = get_filter_value(filters_map=filters_map, filter_name='mine_as_poc', is_boolean=True)
mine_as_poc = bleach.clean(mine_as_poc)
app.logger.info(mine_as_poc)
mine_as_helper = get_filter_value(filters_map=filters_map, filter_name='mine_as_helper', is_boolean=True)
mine_as_helper = bleach.clean(mine_as_helper)
app.logger.info(mine_as_helper)
# Sorting defaults: id ascending when the client supplies nothing.
sort_column = get_filter_value(filters_map, 'sort_column') or 'id'
sort_column = bleach.clean(sort_column)
app.logger.info(sort_column)
sort_direction = get_filter_value(filters_map, 'sort_direction') or 'asc'
sort_direction = bleach.clean(sort_direction)
app.logger.info(sort_direction)
search_term = get_filter_value(filters_map, 'search_term')
search_term = bleach.clean(search_term)
app.logger.info(search_term)
min_due_date = get_filter_value(filters_map, 'min_due_date')
min_due_date = bleach.clean(min_due_date)
app.logger.info(min_due_date)
max_due_date = get_filter_value(filters_map, 'max_due_date')
def clean(stream):
    """Sanitize *stream*: strip disallowed tags but keep HTML comments."""
    return bleach.clean(
        stream,
        tags=ALLOWED_TAGS,
        attributes=ALLOWED_ATTRS,
        strip=True,
        strip_comments=False,
    )
def task_instance_link(attr):
dag_id = bleach.clean(attr.get('dag_id')) if attr.get('dag_id') else None
task_id = bleach.clean(attr.get('task_id')) if attr.get('task_id') else None
execution_date = attr.get('execution_date')
url = url_for(
'Airflow.task',
dag_id=dag_id,
task_id=task_id,
execution_date=execution_date.isoformat())
url_root = url_for(
'Airflow.graph',
dag_id=dag_id,
root=task_id,
execution_date=execution_date.isoformat())
return Markup(
"""
<span style="white-space: nowrap;">
<a href="{url}">{task_id}</a></span>