# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def is_email_blacklisted(address):
    """
    Determine whether an email address's domain is blocked.

    The address's domain is reduced to its registrable part (private
    suffix / eTLD+1) and checked first against the in-memory disposable
    provider set, then against the 'emailblacklist' database table.

    Parameters:
        address: The email address to split out the domain from.

    Returns:
        Boolean True if present on the blacklist, or False otherwise.
    """
    _local_part, mail_domain = address.rsplit("@", 1)
    registrable = PublicSuffixList().privatesuffix(domain=mail_domain)

    # The disposable-provider set is checked before touching the database.
    if registrable in DISPOSABLE_DOMAINS:
        return True

    # Fall through to the explicitly defined/blacklisted domains.
    return d.engine.scalar(
        "SELECT EXISTS (SELECT FROM emailblacklist WHERE domain_name = %(domain)s)",
        domain=registrable,
    )
def check_for_public_suffixes(filename):
    """
    Flag lines of a blocklist file that consist solely of a public suffix.

    Downloads the current public suffix list, then scans every line of the
    named file; any line that is exactly a public suffix is printed, and if
    one or more were found the process exits with status 1.

    Parameters:
        filename: Key into the module-level `files` mapping to scan.
    """
    lines = files[filename]
    suffix_detected = False
    download_suffixes()
    with open("public_suffix_list.dat", "r") as latest:
        psl = PublicSuffixList(latest)
    for line_no, raw in enumerate(lines, start=1):
        current_line = raw.strip()
        # A line equal to its own public suffix is a bare suffix entry.
        if psl.publicsuffix(current_line) == current_line:
            print(
                f"The line number {line_no} contains just a public suffix: {current_line}"
            )
            suffix_detected = True
    if suffix_detected:
        print(
            "At least one valid public suffix found in {!r}, please "
            "remove it. See https://publicsuffix.org for details on why this "
            "shouldn't be blocklisted.".format(filename)
        )
        sys.exit(1)
def reduce_domain(domain_in):
    """
    Reduce a hostname to its registrable domain (eTLD+1), lowercased.

    Parameters:
        domain_in: The hostname to reduce.

    Returns:
        The lowercased private suffix (eTLD+1) of `domain_in`, or None when
        the input's TLD is not on the public suffix list or no registrable
        part exists.
    """
    # Build the suffix list once instead of twice per call.
    psl = PublicSuffixList()
    # Reject hostnames whose TLD is unknown to the public suffix list.
    if not psl.publicsuffix(domain_in, accept_unknown=False):
        return None
    domain = psl.privatesuffix(domain_in)
    if domain:
        domain = domain.lower()
    else:
        # Bug fix: log the original input — `domain` is None/empty here,
        # so the old message always printed "No eTLD for None".
        log.debug("No eTLD for {}".format(domain_in))
    log.debug("Trimmed domain from {0} to {1}".format(domain_in, domain))
    return domain
def reduce_domain(domain_in):
    """
    Reduce a hostname to its registrable domain (eTLD+1), lowercased.

    Parameters:
        domain_in: The hostname to reduce.

    Returns:
        The lowercased private suffix (eTLD+1) of `domain_in`, or None when
        the input's TLD is not on the public suffix list or no registrable
        part exists.
    """
    # Build the suffix list once instead of twice per call.
    psl = PublicSuffixList()
    # Reject hostnames whose TLD is unknown to the public suffix list.
    if not psl.publicsuffix(domain_in, accept_unknown=False):
        return None
    domain = psl.privatesuffix(domain_in)
    if domain:
        domain = domain.lower()
    else:
        # Bug fix: log the original input — `domain` is None/empty here,
        # so the old message always printed "No eTLD for None".
        log.debug("No eTLD for {}".format(domain_in))
    log.debug("Trimmed domain from {0} to {1}".format(domain_in, domain))
    return domain
# Track to see if any scripts were on foreign TLDs.
# NOTE(review): this flag is initialized here but not updated within the
# visible span — presumably set further down in the loop; confirm.
scripts_on_foreign_origin = False
# Get all the scripts from the already-parsed document (`soup` and
# `response` are defined earlier, outside this span).
scripts = soup.find_all('script')
for script in scripts:
    if script.has_attr('src'):
        # Script tag parameters: parsed src URL plus SRI-related attributes
        # (`.get()` yields None when the attribute is absent).
        src = urlparse(script['src'])
        integrity = script.get('integrity')
        crossorigin = script.get('crossorigin')
        # Check to see if they're on the same second-level domain
        # (compares the registrable domain of the response URL's host
        # against the script src's host).
        # TODO: update the PSL list on startup
        # NOTE(review): PublicSuffixList() is rebuilt on every iteration;
        # it is loop-invariant and could be hoisted.
        psl = PublicSuffixList()
        samesld = True if (psl.privatesuffix(urlparse(response.url).netloc) ==
                           psl.privatesuffix(src.netloc)) else False
        # Classify how the src URL is specified; these flags are presumably
        # consumed later in the loop body (beyond this span).
        if src.scheme == '':
            if src.netloc == '':
                # Relative URL (src="/path")
                relativeorigin = True
                relativeprotocol = False
            else:
                # Relative protocol (src="//host/path")
                relativeorigin = False
                relativeprotocol = True
        else:
            # Absolute URL with an explicit scheme (src="https://host/path")
            relativeorigin = False
            relativeprotocol = False