# Create the output file
filename = '{0}-{1}.json.gz'.format(port, protocol)
counter = 0
with helpers.open_file(filename, 'w') as fout:
    click.echo('Saving results to file: {0}'.format(filename))

    # Start listening for results
    done = False

    # Keep listening for results until the scan is done
    click.echo('Waiting for data, please stand by...')
    while not done:
        try:
            for banner in api.stream.ports([port], timeout=90):
                counter += 1
                helpers.write_banner(fout, banner)

                if not quiet:
                    click.echo('{0:<40} {1:<20} {2}'.format(
                        click.style(helpers.get_ip(banner), fg=COLORIZE_FIELDS['ip_str']),
                        click.style(str(banner['port']), fg=COLORIZE_FIELDS['port']),
                        ';'.join(banner['hostnames']))
                    )
        except shodan.APIError:
            # We stop waiting for results if the scan has been processed by the
            # crawlers and there haven't been new results in a while
            if done:
                break

            scan = api.scan_status(scan['id'])
            if scan['status'] == 'DONE':
                done = True
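
This and the later snippets funnel banners through helpers.open_file and helpers.write_banner. A minimal sketch of stand-ins with the same call shape, assuming the helpers simply write gzip-compressed JSON lines (the real shodan.helpers may differ):

import gzip
import json


def open_file(filename, mode='a'):
    # Assumed behaviour: banners are stored as a gzip-compressed stream of
    # JSON lines; this is a stand-in, not the shodan CLI's own helper.
    return gzip.open(filename, mode)


def write_banner(fout, banner):
    # One JSON document per line, encoded for the binary gzip handle.
    fout.write((json.dumps(banner) + '\n').encode('utf-8'))


# Usage mirroring the snippet above (file name and banner are made up):
with open_file('443-tcp.json.gz', 'w') as fout:
    write_banner(fout, {'ip_str': '203.0.113.10', 'port': 443, 'hostnames': []})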
try:
    info = api.dns.domain_info(domain, history=history, type=type)
except shodan.APIError as e:
    raise click.ClickException(e.value)

# Grab the host information for any IP records that were returned
hosts = {}
if details:
    ips = [record['value'] for record in info['data'] if record['type'] in ['A', 'AAAA']]
    ips = set(ips)

    fout = None
    if save:
        filename = u'{}-hosts.json.gz'.format(domain)
        fout = helpers.open_file(filename)

    for ip in ips:
        try:
            hosts[ip] = api.host(ip)

            # Store the banners if requested
            if fout:
                for banner in hosts[ip]['data']:
                    if 'placeholder' not in banner:
                        helpers.write_banner(fout, banner)
        except shodan.APIError:
            pass  # Ignore any API lookup errors as this isn't critical information

# Save the DNS data
if save:
    filename = u'{}.json.gz'.format(domain)
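
Assuming the helpers write gzip-compressed JSON lines (as sketched above, an assumption rather than a guarantee), a saved host file like this one can be read back with only the standard library:

import gzip
import json

# 'example.com-hosts.json.gz' is a placeholder for the
# u'{}-hosts.json.gz'.format(domain) file produced above.
with gzip.open('example.com-hosts.json.gz', 'rt', encoding='utf-8') as fin:
    for line in fin:
        banner = json.loads(line)
        print(banner.get('ip_str'), banner.get('port'))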
# Submit the request to Shodan
click.echo('Submitting Internet scan to Shodan...', nl=False)
scan = api.scan_internet(port, protocol)
click.echo('Done')

# If the requested port is part of the regular Shodan crawling, then we don't
# know when the scan is done so let's return immediately and let the user
# decide when to stop waiting for further results.
official_ports = api.ports()
if port in official_ports:
    click.echo('The requested port is already indexed by Shodan. A new scan for the port has been launched, please subscribe to the real-time stream for results.')
else:
    # Create the output file and stream the scan results into it, using the
    # same result-streaming loop shown in the first snippet above.
    ...
if len(fields) == 0:
    raise click.ClickException('Please define at least one property to show')
has_filters = len(filters) > 0

# Setup the output file handle
fout = None
if filename:
    # If no filters were provided raise an error since it doesn't make much sense w/out them
    if not has_filters:
        raise click.ClickException('Output file specified without any filters. Need to use filters with this option.')

    # Add the appropriate extension if it's not there atm
    if not filename.endswith('.json.gz'):
        filename += '.json.gz'
    fout = helpers.open_file(filename)

for banner in helpers.iterate_files(filenames):
    row = u''

    # Validate the banner against any provided filters
    if has_filters and not match_filters(banner, filters):
        continue

    # Append the data
    if fout:
        helpers.write_banner(fout, banner)

    # Loop over all the fields and print the banner as a row
    for i, field in enumerate(fields):
        tmp = u''
        value = get_banner_field(banner, field)
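
This snippet relies on match_filters and get_banner_field to filter banners and pull individual properties out of them; field names such as ip_str or location.country_name address values inside the banner document. A rough, illustrative stand-in for that kind of lookup, assuming dotted paths over nested dicts (not the CLI's own implementation):

def get_field(banner, flat_field, default=''):
    # Walk a dotted path such as 'location.country_name' through nested dicts.
    value = banner
    for key in flat_field.split('.'):
        if not isinstance(value, dict) or key not in value:
            return default
        value = value[key]
    return value


# Example with a made-up banner:
banner = {'ip_str': '203.0.113.10', 'location': {'country_name': 'Example'}}
print(get_field(banner, 'location.country_name'))  # -> Example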
def data_list(dataset):
    """List available datasets or the files within those datasets."""
    # Setup the API connection
    key = get_api_key()
    api = shodan.Shodan(key)

    if dataset:
        # Show the files within this dataset
        files = api.data.list_files(dataset)

        for file in files:
            click.echo(click.style(u'{:20s}'.format(file['name']), fg='cyan'), nl=False)
            click.echo(click.style('{:10s}'.format(helpers.humanize_bytes(file['size'])), fg='yellow'), nl=False)

            # Show the SHA1 checksum if available
            if file.get('sha1'):
                click.echo(click.style('{:42s}'.format(file['sha1']), fg='green'), nl=False)

            click.echo('{}'.format(file['url']))
    else:
        # If no dataset was provided then show a list of all datasets
        datasets = api.data.list_datasets()
        for ds in datasets:
            click.echo(click.style('{:15s}'.format(ds['name']), fg='cyan'), nl=False)
            click.echo('{}'.format(ds['description']))
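
Each file entry returned by api.data.list_files includes a direct url, so a listed file could be fetched without extra dependencies; a minimal sketch, where the URL and local file name are placeholders:

import shutil
import urllib.request

# Placeholder values; in practice the URL comes from api.data.list_files(dataset).
url = 'https://example.com/path/to/dataset-file.json.gz'
local_name = 'dataset-file.json.gz'

with urllib.request.urlopen(url) as response, open(local_name, 'wb') as out:
    shutil.copyfileobj(response, out)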