# Stray non-code text from the scraping source (kept verbatim, commented out so the file can parse):
# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def city_to_zip(location):
    """Return the first California ZIP code (as an int) for *location*.

    A trailing "County" token (any case) is stripped first, so
    "Los Angeles County" resolves the same as "Los Angeles".
    Relies on a module-level ``search()`` factory (uszipcode-style
    SearchEngine) defined elsewhere in this file — TODO confirm.

    Raises:
        ValueError: if no CA ZIP code is found for *location*.
    """
    tokens = location.split()
    # remove ending 'county' in location; guard against empty input,
    # which previously raised IndexError on tokens[-1]
    if tokens and tokens[-1].lower() == 'county':
        location = ' '.join(tokens[:-1])
    # by_city_and_state can return an empty list for unknown cities;
    # fail with a clear message instead of a bare IndexError
    results = search().by_city_and_state(location, 'CA')
    if not results:
        raise ValueError("no CA zipcode found for %r" % (location,))
    return int(results[0].zipcode)
def get_courts_from_massgov_url(url, shim_ehc_middlesex=True, shim_nhc_woburn=True):
    """Load specified court directory page on Mass.gov and returns an MACourtList
    Properties include name, phone, fax, address, description (usually includes cities or county served), latitude, longitude
    """
    # NOTE(review): SearchEngine presumably comes from uszipcode; the instance
    # is created here but never used in the visible lines — confirm against
    # the rest of the function (this block appears truncated in this chunk).
    searcher = SearchEngine(simple_zipcode=True)
    # Fetch the page and parse it with BeautifulSoup's built-in HTML parser.
    page = requests.get(url)
    soup = bs4.BeautifulSoup(page.text, 'html.parser')
    # this is the element that has the JSON data as of 6/19/2018
    jstring = soup.find_all( attrs={"data-drupal-selector":"drupal-settings-json"} )[0].text
    jdata = json.loads(jstring)
    # Court map markers (name, lat/lng live in the googleMap section).
    markers = jdata['locations']['googleMap']['markers']
    courts = []
    # The address and description are in a different part of the JSON
    for marker in markers:
        html_name = marker['infoWindow']['name']
        for item in jdata['locations']['imagePromos']['items']:
            description = ''
            # NOTE(review): loop body is cut off at this point in the pasted
            # chunk — the marker/promo matching logic continues elsewhere.
def validate_geographic_areas(self, values, db):
    """Validate geographic area values (only lookup setup is visible here).

    NOTE(review): the method body is truncated in this chunk — the code
    that actually consumes ``values``/``db`` is not visible.
    """
    # Note: the validator does not recognize data from US territories other than Puerto Rico.
    # US lookups via uszipcode; simple_zipcode=True selects the lighter DB.
    us_search = uszipcode.SearchEngine(simple_zipcode=True)
    # Canadian lookups via PostalCodeDatabase — presumably a project-local
    # helper; its source is not visible here, verify against the project.
    ca_search = PostalCodeDatabase()
    # Canadian province/territory abbreviations mapped to full names.
    CA_PROVINCES = {
        "AB": "Alberta",
        "BC": "British Columbia",
        "MB": "Manitoba",
        "NB": "New Brunswick",
        "NL": "Newfoundland and Labrador",
        "NT": "Northwest Territories",
        "NS": "Nova Scotia",
        "NU": "Nunavut",
        "ON": "Ontario",
        "PE": "Prince Edward Island",
        "QC": "Quebec",
        "SK": "Saskatchewan",
        "YT": "Yukon Territories"
    }
def look_up_zip(self, zip, country, formatted=False):
    """Look up ZIP/postal code *zip* for *country* ("US" or "CA").

    Returns the raw lookup record, or the result of ``self.format_place``
    when *formatted* is True.

    Raises:
        ValueError: for an unsupported *country* code.  (Previously this
        path crashed with UnboundLocalError on ``return info``.)
    """
    # NOTE: parameter name `zip` shadows the builtin but is kept unchanged
    # for interface compatibility with existing callers.
    if country == "US":
        info = uszipcode.SearchEngine(simple_zipcode=True).by_zipcode(zip)
        if formatted:
            info = self.format_place(zip, info.major_city, info.state)
    elif country == "CA":
        info = PostalCodeDatabase()[zip]
        if formatted:
            info = self.format_place(zip, info.city, info.province)
    else:
        # Bug fix: `info` was never assigned on this path, so the original
        # code raised UnboundLocalError; fail explicitly instead.
        raise ValueError("unsupported country code: %r" % (country,))
    return info
def zip_to_city(cityzip):
    """Return the major-city name for *cityzip*.

    Uses the module-level ``search()`` factory (presumably a uszipcode
    SearchEngine — confirm); the returned record exposes ``.major_city``.
    """
    return search().by_zipcode(cityzip).major_city
# NOTE(review): the bare `return` below appears to belong to a truncated
# enclosing function in this pasted chunk; it is not valid at module level.
return
# ---- Tail of an argparse-driven main()-style function; the function header
# ---- and the branch containing the next two lines are outside this chunk.
# gets the followers of all the retrieved user ids 'depth' number of times
collect_user_followers(args.depth, twpy_api, working_dir, args.filename, user_ids)
if args.mode == 'search':
    # Authenticate against the Twitter API using the supplied creds file.
    twpy_api = auth.get_access_creds(args.creds)
    if not twpy_api:
        print('Error: Twitter developer access credentials denied')
        return
    working_dir = get_directory_of_file(args.filename)
    # gets the first 50 zip codes by city and state
    zip_search = SearchEngine()
    zipcodes = zip_search.by_city_and_state(args.city, args.state, returns=50)
    user_ids = []
    user_followers = []
    # gets the user ids at each geo-location for the retrieved zip codes
    bar = pyprind.ProgPercent(len(zipcodes), track_time=True, title='Finding user ids')
    for zipcode in zipcodes:
        bar.update(item_id='zip code:' + str(zipcode.zipcode) + '\t')
        user_ids.extend(get_user_ids(twpy_api, zipcode.lat, zipcode.lng, args.radius))
    # Deduplicate before persisting the collected ids.
    write_json(args.filename, list(set(user_ids)))
if args.mode == 'netx':
    # Load previously collected follower data for graph (networkx) processing.
    user_followers = read_json(args.in_filename)
    pythonify_dict(user_followers)
    print("Number of followers: " + str(len(user_followers)))
    output_filename = args.out_filename + '.json'
    # NOTE(review): this branch continues past the end of this chunk.