# NOTE: the `def` line below is inferred from the documented parameters and may
# not match the original source exactly.
def get_episode(self, season=None, episode=None, filepath=None,
                no_create=False, absolute_number=None, should_cache=True):
    """Return the TVEpisode matching the given filters.

    :param season:
    :type season: int
    :param episode:
    :type episode: int
    :param filepath:
    :type filepath: str
    :param no_create:
    :type no_create: bool
    :param absolute_number:
    :type absolute_number: int
    :param should_cache:
    :type should_cache: bool
    :return:
    :rtype: TVEpisode
    """
    season = try_int(season, None)
    episode = try_int(episode, None)
    absolute_number = try_int(absolute_number, None)

    # if we get an anime, get the real season and episode from the absolute number
    if self.is_anime and absolute_number and not season and not episode:
        main_db_con = db.DBConnection()
        sql = b'SELECT season, episode FROM tv_episodes WHERE showid = ? AND absolute_number = ? AND season != 0'
        sql_results = main_db_con.select(sql, [self.indexerid, absolute_number])

        if len(sql_results) == 1:
            episode = int(sql_results[0][b'episode'])
            season = int(sql_results[0][b'season'])
            logger.log(u'Found episode by absolute number {absolute} which is {ep}'.format(
                absolute=absolute_number,
                ep=episode_num(season, episode)), logger.DEBUG)
        elif len(sql_results) > 1:
            logger.log(u'Multiple entries for absolute number: {absolute} in show: {name} found'.format(
                absolute=absolute_number, name=self.name), logger.ERROR)
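
# --- Illustrative sketch (not part of the original source) ---
# The anime branch above resolves (season, episode) from an absolute number via
# a parameterized SQL query. Below is a self-contained sqlite3 sketch of that
# same query against a throwaway in-memory table; the project's db.DBConnection
# wrapper is not reproduced here and the sample row is made up.
import sqlite3

def _find_by_absolute_number_sketch(showid, absolute_number):
    con = sqlite3.connect(':memory:')
    con.execute('CREATE TABLE tv_episodes '
                '(showid INTEGER, season INTEGER, episode INTEGER, absolute_number INTEGER)')
    con.execute('INSERT INTO tv_episodes VALUES (1, 2, 3, 28)')  # sample row only
    cur = con.execute(
        'SELECT season, episode FROM tv_episodes '
        'WHERE showid = ? AND absolute_number = ? AND season != 0',
        [showid, absolute_number])
    return cur.fetchall()

# _find_by_absolute_number_sketch(1, 28) -> [(2, 3)]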
def get_overview(self, ep_status):
    """Get the Overview status from the Episode status.

    :param ep_status: an Episode status
    :type ep_status: int
    :return: an Overview status
    :rtype: int
    """
    ep_status = try_int(ep_status) or UNKNOWN

    if ep_status == WANTED:
        return Overview.WANTED
    elif ep_status in (UNAIRED, UNKNOWN):
        return Overview.UNAIRED
    elif ep_status in (SKIPPED, IGNORED):
        return Overview.SKIPPED
    elif ep_status in Quality.ARCHIVED:
        return Overview.GOOD
    elif ep_status in Quality.FAILED:
        return Overview.WANTED
    elif ep_status in Quality.SNATCHED:
        return Overview.SNATCHED
    elif ep_status in Quality.SNATCHED_PROPER:
        return Overview.SNATCHED_PROPER
    elif ep_status in Quality.SNATCHED_BEST:
        return Overview.SNATCHED_BEST
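
# --- Illustrative sketch (not part of the original source) ---
# Both methods above rely on a `try_int` helper that coerces a value to int
# without raising. A minimal sketch assuming only the try_int(value, default)
# call pattern seen in these snippets; the project's real helper may differ.
def _try_int_sketch(candidate, default=0):
    """Return int(candidate), or `default` when conversion fails."""
    try:
        return int(candidate)
    except (TypeError, ValueError):
        return default

# _try_int_sketch('7')        -> 7
# _try_int_sketch(None, None) -> None   (mirrors try_int(season, None) above)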

# Fragment (no enclosing `def` in this excerpt): populate an episode object's
# fields from a tv_episodes database row.
self.status = int(sql_results[0][b'status'] or -1)

# don't overwrite my location
if sql_results[0][b'location']:
    self.location = ek(os.path.normpath, sql_results[0][b'location'])

if sql_results[0][b'file_size']:
    self.file_size = int(sql_results[0][b'file_size'])
else:
    self.file_size = 0

self.indexerid = int(sql_results[0][b'indexerid'])
self.indexer = int(sql_results[0][b'indexer'])

xem_refresh(self.show.indexerid, self.show.indexer)

self.scene_season = try_int(sql_results[0][b'scene_season'], 0)
self.scene_episode = try_int(sql_results[0][b'scene_episode'], 0)
self.scene_absolute_number = try_int(sql_results[0][b'scene_absolute_number'], 0)

if self.scene_absolute_number == 0:
    self.scene_absolute_number = get_scene_absolute_numbering(
        self.show.indexerid,
        self.show.indexer,
        self.absolute_number
    )

if self.scene_season == 0 or self.scene_episode == 0:
    self.scene_season, self.scene_episode = get_scene_numbering(
        self.show.indexerid,
        self.show.indexer,
        self.season, self.episode
    )

if sql_results[0][b'release_name'] is not None:
    # assumed completion: keep the release name stored in the database row
    self.release_name = sql_results[0][b'release_name']

# Fragment: parsing a provider's HTML results table.
# Column headers: Catégorie, Nom (name), DL (download), Com, Taille (size), C, Seed, Leech, Share
labels = [process_column_header(label) for label in torrent_rows[0]('td')]

# Skip column headers
for result in torrent_rows[1:]:
    cells = result('td')
    if len(cells) < len(labels):
        continue

    try:
        title = cells[labels.index('Nom')].get_text(strip=True)
        download_url = cells[labels.index('DL')].find('a')['href']
        if not all([title, download_url]):
            continue

        seeders = try_int(cells[labels.index('Seed')].get_text(strip=True))
        leechers = try_int(cells[labels.index('Leech')].get_text(strip=True))

        # Filter unseeded torrents
        if seeders < min(self.minseed, 1) or leechers < min(self.minleech, 0):
            if mode != 'RSS':
                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: "
                           u"{} (S:{} L:{})".format(title, seeders, leechers), logger.DEBUG)
            continue

        torrent_size = cells[labels.index('Taille')].get_text(strip=True)
        size = convert_size(torrent_size, units=units) or -1

        item = {'title': title, 'link': download_url, 'size': size,
                'seeders': seeders, 'leechers': leechers, 'hash': None}
        if mode != 'RSS':
            logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers),
                       logger.DEBUG)

        items.append(item)
    except StandardError:
        continue
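
# --- Illustrative sketch (not part of the original source) ---
# The loop above locates columns by header text and then indexes each row's
# cells with `labels.index(...)`. A self-contained BeautifulSoup sketch of that
# pattern on a tiny made-up table; the real provider's HTML differs.
from bs4 import BeautifulSoup

_HTML = """
<table>
  <tr><td>Nom</td><td>Seed</td><td>Leech</td></tr>
  <tr><td>Some.Show.S01E01</td><td>12</td><td>3</td></tr>
</table>
"""

def _parse_rows_sketch(html=_HTML):
    soup = BeautifulSoup(html, 'html.parser')
    rows = soup('tr')
    labels = [cell.get_text(strip=True) for cell in rows[0]('td')]
    results = []
    for row in rows[1:]:
        cells = row('td')
        if len(cells) < len(labels):
            continue
        results.append({
            'title': cells[labels.index('Nom')].get_text(strip=True),
            'seeders': int(cells[labels.index('Seed')].get_text(strip=True)),
            'leechers': int(cells[labels.index('Leech')].get_text(strip=True)),
        })
    return results  # [{'title': 'Some.Show.S01E01', 'seeders': 12, 'leechers': 3}]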

# Fragment: querying a provider's JSON API and filtering the results.
# (the `if` branch matching this `else:` is not part of this excerpt)
else:
    search_params['page'] = 'last_seriebrowse'

results = []
search_url = self.urls['search']

try:
    jdata = self.get_url(search_url, params=search_params, returns='json')
except ValueError:
    logger.log('No data returned from provider', logger.DEBUG)
    continue

for torrent in jdata:
    try:
        title = torrent.pop('name', '')
        torrent_id = str(torrent.pop('id', ''))
        if not torrent_id:
            continue

        seeders = try_int(torrent.pop('seeders', ''), 1)
        leechers = try_int(torrent.pop('leechers', ''), 0)
        freeleech = torrent.pop('frileech')
        if self.freeleech and freeleech != 1:
            continue

        size = try_int(torrent.pop('size', ''), 0)
        download_url = self.urls['download'] + torrent_id

        # Filter unseeded torrents
        if seeders < min(self.minseed, 1):
            if mode != 'RSS':
                logger.log("Discarding torrent because it doesn't meet the minimum seeders: "
                           "{0}. Seeders: {1}".format(title, seeders), logger.DEBUG)
            continue

        item = {'title': title, 'link': download_url, 'size': size,
                'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None}
        if mode != 'RSS':