def autocomplete_check(self, site_id):
    # Request the feedback page and assert the form has autocomplete
    # disabled; takes self because it uses the test client and FX_UA.
    r = self.client.get(reverse('feedback.sad'), HTTP_USER_AGENT=(
        self.FX_UA % '20.0'), SITE_ID=site_id, follow=True)
    doc = pyquery.PyQuery(r.content)
    form = doc('#feedbackform form')
    assert form
    eq_(form.attr('autocomplete'), 'off')
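# Sketch: driving the check above from a Django test case. The class name,
# user-agent template, and site ids are hypothetical; the reverse/eq_/pyquery
# imports are assumed at module level.
from django.test import TestCase

class AutocompleteTest(TestCase):
    FX_UA = 'Mozilla/5.0 (X11; Linux x86_64; rv:20.0) Gecko/20100101 Firefox/%s'
    # reuse the module-level helper as a test method
    autocomplete_check = autocomplete_check

    def test_autocomplete_off(self):
        for site_id in (1, 2):
            self.autocomplete_check(site_id)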
def getSHA(url):
    """Return the SHA256 checksum published for the file at *url*.

    Scrapes the directory listing for a CHECKSUM file and parses it;
    returns '' if no checksum can be found. urlread and LiveUSBError
    are helpers from the surrounding project.
    """
    baseurl = '/'.join(url.split('/')[:-1])
    filename = url.split('/')[-1]
    try:
        d = pyquery.PyQuery(urlread(baseurl))
    except LiveUSBError:
        return ''
    checksum = ''
    for i in d.items('a'):
        # guard against anchors with no href attribute
        if i.attr('href') and 'CHECKSUM' in i.attr('href'):
            checksum = urlread(baseurl + '/' + i.attr('href'))
            break
    for line in checksum.split('\n'):
        m = re.match(r'^SHA256 \(([^)]+)\) = ([a-f0-9]+)$', line)
        if m and m.group(1) == filename:
            return m.group(2)
    return ''
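# Usage sketch: verify a downloaded image against the published checksum.
# The function name and path handling here are hypothetical; getSHA and
# urlread are as above.
import hashlib

def verify_download(url, path):
    expected = getSHA(url)
    if not expected:
        return False  # no CHECKSUM file published alongside the image
    with open(path, 'rb') as f:
        return hashlib.sha256(f.read()).hexdigest() == expected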
def crawl(self, url):
    # The method name and the opening request are reconstructed; the
    # original fragment begins inside the HTTPError handler below.
    try:
        resp = urllib2.urlopen(url)
    except urllib2.HTTPError as ex:
        if ex.code == 404:
            return
        # this is a slight problem: it shouldn't happen, but it does
        # sometimes, so keeping track is useful to see how often it happens
        self.failed += 1
        return
    except urllib2.URLError:
        self.failed += 1
        return
    try:
        content = resp.read()
        doc = pq.PyQuery(content)
        # do our best to ignore pages that are not relevant (music,
        # movies, other pages that don't have links to apps in them)
        if not self.is_page_valid(url, doc):
            return
        # keep a log of URLs processed
        sys.stderr.write(url + "\n")
        # fetch the links in this page by regular expression; we are
        # interested in app links and publisher links
        all_links = [
            a.attrib['href']
            for a in doc('a')
            if re.search(r'\/(details|developer)[?]', a.attrib.get('href', ''))
            # (the original filter condition continued past this point)
        ]
    except Exception:
        # handler supplied only so the truncated fragment parses
        return
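# A self-contained sketch of the link-filtering step above; the HTML and
# URLs are made up for illustration.
import re
import pyquery as pq

sample = ('<a href="/store/apps/details?id=com.example.app">app</a>'
          '<a href="/store/search?q=music">other</a>')
doc = pq.PyQuery(sample)
links = [a.attrib['href'] for a in doc('a')
         if re.search(r'\/(details|developer)[?]', a.attrib.get('href', ''))]
assert links == ['/store/apps/details?id=com.example.app']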
def getSpins(url, source):
    """Scrape the spins listed at *url* and return their details.

    urlread and getSpinDetails are helpers from the surrounding project.
    """
    d = pyquery.PyQuery(urlread(url))
    spins = []
    for i in d('div').filter('.high').items('span'):
        spinUrl = url + i.siblings()('a').attr('href')
        spin = getSpinDetails(spinUrl, source)
        spin['summary'] = i.html()
        spins.append(spin)
    return spins
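# Usage sketch; the URL and source tag are hypothetical, and
# urlread/getSpinDetails are assumed from the surrounding project.
if __name__ == '__main__':
    for spin in getSpins('https://spins.fedoraproject.org/', 'fedora'):
        print(spin['summary'])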