# NOTE(review): the line below is a Snyk web-page banner that leaked into the
# source during extraction; kept as a comment so the file stays valid Python.
# "Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately."
help="output grepable report", default=False)
parser.add_option(
    "--update-db", "--udb", action="store_true",
    help="force update of remote db files", default=False)
parser.add_option(
    "--timeout", type="float",
    help="maximum timeout for scrape requests", default=10)

# FIX: optparse expects the argument list WITHOUT the program name.
# Passing `sys.argv` made argv[0] show up in the leftover positional args;
# `sys.argv[1:]` is the documented calling convention.
(options, _args) = parser.parse_args(sys.argv[1:])
options = vars(options)  # Namespace -> plain dict, as WebTech expects.

# At least one source of targets (or a db update request) is required.
if options.get('urls') is None and options.get('urls_file') is None \
        and not options.get('update_db'):
    print("No URL(s) given!")
    parser.print_help()
    # FIX: use sys.exit() — the builtin exit() is injected by the `site`
    # module and is not guaranteed to exist when run with `python -S`.
    sys.exit()

wt = WebTech(options)
wt.start()
parser.add_option(
    "--json", "--oj", action="store_true",
    help="output json-encoded report", default=False)
parser.add_option(
    "--grep", "--og", action="store_true",
    help="output grepable report", default=False)
parser.add_option(
    "--update-db", "--udb", action="store_true",
    help="force update of remote db files", default=False)
parser.add_option(
    "--timeout", type="float",
    help="maximum timeout for scrape requests", default=10)

# FIX: optparse expects the argument list WITHOUT the program name.
# Passing `sys.argv` made argv[0] show up in the leftover positional args;
# `sys.argv[1:]` is the documented calling convention.
(options, _args) = parser.parse_args(sys.argv[1:])
options = vars(options)  # Namespace -> plain dict, as WebTech expects.

wt = WebTech(options)
if options.get('scrape'):
    # FIX: the original used a bare triple-quoted string here, which is an
    # executed expression statement, not a comment.
    # NOTE(review): gating --scrape manually like this is a weak way to make
    # arguments mutually exclusive; optparse has no native support for it —
    # migrating to argparse's add_mutually_exclusive_group() would be the
    # proper fix (out of scope for this change).
    wt.scraping()
else:
    # At least one source of targets (or a db update request) is required.
    if options.get('urls') is None and options.get('urls_file') is None \
            and not options.get('update_db'):
        print("No URL(s) given!")
        parser.print_help()
        # FIX: use sys.exit() — the builtin exit() is injected by the `site`
        # module and is not guaranteed to exist when run with `python -S`.
        sys.exit()
    wt.start()
if __name__ == "__main__":