import sys
from typing import Text

import questionary
from questionary import Choice

# `EndpointConfig`, `_write_data_to_file` and the control-flow exceptions
# (`UndoLastStep`, `ForkTracker`, `RestartConversation`) are defined
# elsewhere in the same module.
def _ask_if_quit(sender_id: Text, endpoint: EndpointConfig) -> bool:
    """Display the exit menu.

    Return `True` if the previous question should be retried."""

    answer = questionary.select(
        message="Do you want to stop?",
        choices=[Choice("Continue", "continue"),
                 Choice("Undo Last", "undo"),
                 Choice("Fork", "fork"),
                 Choice("Start Fresh", "restart"),
                 Choice("Export & Quit", "quit")]).ask()

    if not answer or answer == "quit":
        # this is also the default answer if the user presses Ctrl-C,
        # since `.ask()` then returns `None`
        _write_data_to_file(sender_id, endpoint)
        sys.exit()
    elif answer == "continue":
        # in this case we just return, and the original
        # question will get asked again
        return True
    elif answer == "undo":
        raise UndoLastStep()
    elif answer == "fork":
        raise ForkTracker()
    elif answer == "restart":
        raise RestartConversation()
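For reference, here is a minimal, self-contained sketch of the same `select` + `Choice(title, value)` pattern on its own; the trimmed-down menu and the `ask_menu` name are illustrative stand-ins, not Rasa's:

import sys

import questionary
from questionary import Choice


def ask_menu() -> bool:
    answer = questionary.select(
        message="Do you want to stop?",
        choices=[Choice("Continue", "continue"),
                 Choice("Export & Quit", "quit")]).ask()

    # .ask() returns the selected Choice's value, or None if the user
    # cancels with Ctrl-C, so None falls through to the quit path
    if not answer or answer == "quit":
        sys.exit()
    return True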
An earlier variant of `_ask_if_quit` inlines the export step in the "quit" branch instead of delegating to `_write_data_to_file`:

    if not answer or answer == "quit":
        # this is also the default answer if the user presses Ctrl-C
        story_path, nlu_path, domain_path = _request_export_info()

        tracker = retrieve_tracker(endpoint, sender_id)
        evts = tracker.get("events", [])

        _write_stories_to_file(story_path, evts)
        _write_nlu_to_file(nlu_path, evts)
        _write_domain_to_file(domain_path, evts, endpoint)

        logger.info("Successfully wrote stories and NLU data")
        sys.exit()
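The `_request_export_info` helper itself is not shown in the snippet; judging by its name and return values, it prompts for the three output paths. A hypothetical sketch of such a prompt, using only `questionary.text` with a `default` value (the messages and paths are illustrative):

import questionary

def _request_export_info():
    # each prompt returns the entered string, or the default if the
    # user just presses Enter
    story_path = questionary.text(
        "Export stories to", default="stories.md").ask()
    nlu_path = questionary.text(
        "Export NLU data to", default="nlu.md").ask()
    domain_path = questionary.text(
        "Export domain file to", default="domain.yml").ask()
    return story_path, nlu_path, domain_path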
# The snippet begins mid-docstring; the method signature below is
# reconstructed from the surrounding code (`self.infer_obj` implies a method).
def interact(self):
    """Provide various options for exploring the trained model.

    - ``See-Confusion-Matrix`` shows the confusion matrix on the test dataset.
    - ``See-examples-of-Classifications`` explores correct classifications and
      misclassifications. You can provide two class numbers, as in ``2 3``, and
      it shows examples in the test dataset where text belonging to class ``2``
      is classified as class ``3``.
    - ``See-prf-table`` shows the precision, recall and F-measure per class.
    - ``Enter text`` lets you manually enter text and look at the
      classification results.
    """
    self.infer_obj.run_test()

    while True:
        choices = [
            Choice("See-Confusion-Matrix"),
            Choice("See-examples-of-Classifications"),
            Choice("See-prf-table"),
            Choice(title="Enter text ", value="enter_text"),
            Choice(
                title="If this is ScienceIE choose this to generate results",
                value="science-ie-official-results",
            ),
            Choice("exit"),
        ]
        interaction_choice = questionary.rawselect(
            "What would you like to do now", qmark="❓", choices=choices
        ).ask()

        if interaction_choice == "See-Confusion-Matrix":
            self.infer_obj.print_confusion_matrix()
        elif interaction_choice == "See-examples-of-Classifications":
            misclassification_choice = questionary.text(
                "Enter Two Classes separated by a space. [Hint: 1 2]"
            ).ask()
            # the source snippet is truncated at this prompt; parsing the
            # answer and handing it to the inference object is reconstructed
            first_class, second_class = (
                int(c) for c in misclassification_choice.split()
            )
            # assumed helper on the inference object, per the docstring above
            self.infer_obj.get_misclassified_sentences(first_class, second_class)
        elif interaction_choice == "exit":
            # the remaining menu branches are omitted in the source snippet
            break
def ask_deletion() -> str:
    """Ask for confirmation before a deletion, to decide whether the
    deleted file should also be removed locally or kept.

    Returns
    -------
    str
        A "yes" or "no" answer to the question
    """
    deletion_question = questionary.rawselect(
        "Do you also want to delete the file locally? Caution! The file will be removed locally",
        qmark="❓",
        choices=[Choice("yes"), Choice("no")],
    )
    deletion_answer = deletion_question.ask()
    return deletion_answer
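Since this is a plain yes/no question, `questionary.confirm` is a more direct fit: it returns a bool instead of a string, and `default=False` keeps the destructive action opt-in. A sketch (the deletion logic is an illustrative stand-in):

import questionary

delete_locally = questionary.confirm(
    "Do you also want to delete the file locally?", default=False
).ask()
if delete_locally:
    print("removing local copy...")  # stand-in for the actual deletion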
def ask_pystyle(**kwargs):
    # create the question object
    question = questionary.select(
        "What do you want to do?",
        qmark="😃",
        choices=[
            "Order a pizza",
            "Make a reservation",
            Separator(),
            "Ask for opening hours",
            Choice("Contact support", disabled="Unavailable at this time"),
            "Talk to the receptionist",
        ],
        style=custom_style_dope,
        **kwargs,
    )

    # prompt the user for an answer
    return question.ask()
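`custom_style_dope` is defined in questionary's bundled examples; a style of that shape can be built with `questionary.Style`, using the token names the questionary docs list for its prompts. A sketch (the colors are illustrative):

from questionary import Style

custom_style_dope = Style([
    ("qmark", "fg:#673ab7 bold"),        # the "😃" question mark
    ("question", "bold"),                # question text
    ("answer", "fg:#f44336 bold"),       # submitted answer
    ("pointer", "fg:#673ab7 bold"),      # selection pointer in select prompts
    ("highlighted", "fg:#673ab7 bold"),  # choice under the cursor
    ("disabled", "fg:#858585 italic"),   # disabled choices, e.g. "Contact support"
])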
def ask_generate_report_or_interact():
    """Ask the user to choose between interacting with the model and
    generating the report for all the experiments.

    Returns
    -------
    str
        The choice of the user as a string
    """
    choices = [
        Choice("Interact with model", "interact"),
        Choice("Generate report (for all experiments)", "gen-report"),
    ]
    generate_report_or_interact = questionary.rawselect(
        "What would you like to do ", qmark="❓", choices=choices
    )
    return generate_report_or_interact.ask()
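The returned string is the `value` of the selected `Choice`, so callers can dispatch on it directly. Hypothetical usage (the handlers are stand-ins):

choice = ask_generate_report_or_interact()
if choice == "interact":
    print("starting interactive session...")
elif choice == "gen-report":
    print("generating report for all experiments...")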
def return_model_type_choices(self) -> List[Choice]:
    """Return a list of the different model types available to the user.

    Returns
    -------
    List[Choice]
        A list of different model types available to the user
    """
    choices = []
    for model_type in self.trained_model_types:
        choices.append(Choice(model_type))
    return choices
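The resulting list can be passed straight to a prompt. A hypothetical usage sketch (the model type names are illustrative):

import questionary
from questionary import Choice

trained_model_types = ["model_a", "model_b"]  # illustrative values
choices = [Choice(model_type) for model_type in trained_model_types]
answer = questionary.select("Choose a model type", choices=choices).ask()
print(answer)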