from io import StringIO

import podcastparser
from nose.tools import assert_raises  # assumed helper, as in podcastparser's own tests


def test_fail_parse(feed):
    with assert_raises(podcastparser.FeedParseError):
        podcastparser.parse('file://example.com/feed.xml', StringIO(feed))
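For contrast with the failure test above, a minimal sketch of a successful parse, assuming 'http://example.com/feed.xml' stands in for a real feed URL: podcastparser.parse takes the feed URL and any file-like stream and returns a plain dict with keys such as 'title' and 'episodes'.

import urllib.request

import podcastparser

feedurl = 'http://example.com/feed.xml'  # placeholder, substitute a real feed
parsed = podcastparser.parse(feedurl, urllib.request.urlopen(feedurl))

print(parsed['title'])
for episode in parsed['episodes']:
    print(episode['title'])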
# First, try the feed URL found by autodiscovery (ad is gpodder's
# FeedAutodiscovery result for the original URL)
try:
    self._parse_feed(ad._resolved_url, None, None, False)
    return Result(NEW_LOCATION, ad._resolved_url)
except Exception:
    logger.warning('Feed autodiscovery failed', exc_info=True)

# Second, try to resolve the URL
url = self._resolve_url(url)
if url:
    return Result(NEW_LOCATION, url)

# Reset the stream so podcastparser can give it a go
data.seek(0)

try:
    feed = podcastparser.parse(url, data)
    feed['url'] = url
except ValueError as e:
    raise InvalidFeed('Could not parse feed: {msg}'.format(msg=e))

if is_local:
    # Local files have no HTTP headers
    feed['headers'] = {}
    return Result(UPDATED_FEED, feed)
else:
    feed['headers'] = stream.headers
    return self._check_statuscode(stream, feed)
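Catching ValueError here also covers malformed feeds, since podcastparser raises FeedParseError, a ValueError subclass, on unparseable input. A standalone sketch of the same pattern, without gpodder's InvalidFeed wrapper:

from io import StringIO

import podcastparser

try:
    podcastparser.parse('file://example.com/feed.xml', StringIO('not a feed'))
except ValueError as e:
    print('Could not parse feed: {msg}'.format(msg=e))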
page = 2
remaining_episodes = max_episodes - len(self.parsed['episodes'])
while ('paged_feed_next' in self.parsed and
        page < self.PAGED_FEED_MAX_PAGES and
        remaining_episodes > 0):
    # Take the next page from the paged feed
    url = self.parsed['paged_feed_next']
    del self.parsed['paged_feed_next']

    if not url:
        break

    try:
        logger.debug('Downloading page %d from %s', page, url)
        stream = util.urlopen(url)
        parsed = podcastparser.parse(url, stream, remaining_episodes)
        added_episodes = len(parsed['episodes'])
        remaining_episodes -= added_episodes
        logger.debug('Page %d contains %d additional episodes', page,
                     added_episodes)
        self.parsed['episodes'].extend(parsed['episodes'])

        # Next iteration if we still have a next page
        if 'paged_feed_next' in parsed:
            self.parsed['paged_feed_next'] = parsed['paged_feed_next']
    except Exception as e:
        logger.warning('Error while fetching feed page %d from %s: %s',
                       page, url, e)
        # Give up, don't try to download additional pages here
        break

    page += 1
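The third argument passed to podcastparser.parse in the loop above is its max_episodes parameter, which caps how many episodes are parsed from each page so the combined feed never exceeds the overall limit. A minimal sketch, again with a placeholder URL:

import urllib.request

import podcastparser

feedurl = 'http://example.com/feed.xml'  # placeholder, substitute a real feed
parsed = podcastparser.parse(feedurl, urllib.request.urlopen(feedurl),
                             max_episodes=5)

# The parser stops after max_episodes entries, so at most 5 are returned
assert len(parsed['episodes']) <= 5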