# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
from requests.exceptions import TooManyRedirects
from sqlalchemy import Column, Unicode, DateTime
from dateutil.parser import parse as dateutil_parse
from flexget import plugin, db_schema
from flexget.config_schema import one_or_more
from flexget.entry import Entry
from flexget.event import event
from flexget.manager import Session
from flexget.utils.database import json_synonym
from flexget.utils.requests import Session as RequestSession, TimedLimiter, RequestException
from flexget.utils.tools import parse_filesize
log = logging.getLogger('passthepopcorn')
Base = db_schema.versioned_base('passthepopcorn', 1)
requests = RequestSession()
requests.add_domain_limiter(TimedLimiter('passthepopcorn.me', '5 seconds'))
TAGS = [
'action',
'adventure',
'animation',
'arthouse',
'asian',
'biography',
'camp',
'comedy',
'crime',
'cult',
'documentary',
import logging
from sqlalchemy import Column, Unicode, String, Integer
from flexget import config_schema
from flexget import db_schema
from flexget.entry import EntryUnicodeError
from flexget.event import fire_event, event
from flexget.manager import Session
from flexget.plugin import (get_plugins, task_phases, phase_methods, PluginWarning, PluginError,
DependencyError, plugins as all_plugins, plugin_schemas)
from flexget.utils import requests
from flexget.utils.simple_persistence import SimpleTaskPersistence
log = logging.getLogger('task')
Base = db_schema.versioned_base('feed', 0)
class TaskConfigHash(Base):
    """Stores the config hash for tasks so that we can tell if the config has changed since last run."""

    __tablename__ = 'feed_config_hash'

    id = Column(Integer, primary_key=True)
    # Stored under the legacy column name 'name' for compatibility with the old 'feed' schema.
    task = Column('name', Unicode, index=True, nullable=False)
    hash = Column('hash', String)

    def __repr__(self):
        # BUG FIX: the original returned '' % (self.task, self.hash), which raises
        # TypeError ("not all arguments converted during string formatting") because
        # the format string lost its placeholders. Restore a conventional repr.
        return '<TaskConfigHash(task=%s,hash=%s)>' % (self.task, self.hash)
def config_changed(task):
from flexget.plugin import get_plugin_by_name
from flexget.plugins.parsers import SERIES_ID_TYPES
from flexget.utils import qualities
from flexget.utils.database import quality_property, with_session
from flexget.utils.log import log_once
from flexget.utils.sqlalchemy_utils import (
table_columns, table_exists, drop_tables, table_schema, table_add_column, create_index
)
from flexget.utils.tools import (
merge_dict_from_to, parse_timedelta, parse_episode_identifier, get_config_as_array, chunked
)
SCHEMA_VER = 14
log = logging.getLogger('series')
Base = db_schema.versioned_base('series', SCHEMA_VER)
@db_schema.upgrade('series')
def upgrade(ver, session):
    """Migrate the 'series' database schema toward the current version.

    :param ver: schema version found in the database (``None`` means the tables
        predate schema versioning)
    :param session: SQLAlchemy session used for the migration DDL and queries

    NOTE(review): db_schema upgrade hooks conventionally return the new schema
    version; no return is visible in this fragment — confirm against the full file.
    """
    if ver is None:
        if table_exists('episode_qualities', session):
            # Pre-versioning schema cannot be migrated in place; start fresh.
            log.info('Series database format is too old to upgrade, dropping and recreating tables.')
            # Drop the deprecated data
            drop_tables(['series', 'series_episodes', 'episode_qualities'], session)
            # Create new tables from the current models
            Base.metadata.create_all(bind=session.bind)
        # Upgrade episode_releases table to have a proper count and seed it with appropriate numbers
        columns = table_columns('episode_releases', session)
        if 'proper_count' not in columns:
            log.info('Upgrading episode_releases table to have proper_count column')
            table_add_column('episode_releases', 'proper_count', Integer, session)
import logging
from datetime import datetime, timedelta
from sqlalchemy import Column, String, Unicode, Boolean, Integer, DateTime
from flexget import db_schema, plugin
from flexget.event import event
from flexget.manager import Session
from flexget.utils.database import entry_synonym
log = logging.getLogger('pending_approval')
Base = db_schema.versioned_base('pending_approval', 0)
class PendingEntry(Base):
    # ORM model for entries held back until a user approves them.
    __tablename__ = 'pending_entries'

    id = Column(Integer, primary_key=True, autoincrement=True, nullable=False)
    # Name of the task that produced this entry.
    task_name = Column(Unicode)
    title = Column(Unicode)
    url = Column(String)
    approved = Column(Boolean)
    # Raw JSON payload; 'entry' exposes it as a deserialized Entry via entry_synonym.
    _json = Column('json', Unicode)
    entry = entry_synonym('_json')
    added = Column(DateTime, default=datetime.now)

    def __init__(self, task_name, entry):
        """Create a pending record for *entry* produced by *task_name*."""
        self.task_name = task_name
        # NOTE(review): *entry* is accepted but never stored in this fragment —
        # looks truncated; confirm the remaining assignments (title/url/approved/
        # entry) exist in the full file.
import logging
import re
from datetime import datetime
from sqlalchemy import Column, DateTime, ForeignKey, Index, Integer, Table, Unicode
from sqlalchemy.orm import relationship
from sqlalchemy.orm.exc import NoResultFound
from flexget import db_schema
from flexget.utils.sqlalchemy_utils import get_index_by_name, table_schema
log = logging.getLogger('archive.db')
SCHEMA_VER = 0
Base = db_schema.versioned_base('archive', SCHEMA_VER)
# Many-to-many link table between archived entries and their tags.
archive_tags_table = Table(
    'archive_entry_tags',
    Base.metadata,
    Column('entry_id', Integer, ForeignKey('archive_entry.id')),
    Column('tag_id', Integer, ForeignKey('archive_tag.id')),
    # Composite index speeding up lookups by the (entry, tag) pair.
    Index('ix_archive_tags', 'entry_id', 'tag_id'),
)

# Many-to-many link table between archived entries and their sources
# (presumably the tasks that produced them — confirm against ArchiveSource model).
archive_sources_table = Table(
    'archive_entry_sources',
    Base.metadata,
    Column('entry_id', Integer, ForeignKey('archive_entry.id')),
    Column('source_id', Integer, ForeignKey('archive_source.id')),
    Index('ix_archive_sources', 'entry_id', 'source_id'),
)
from flexget import db_schema, plugin
from flexget.entry import Entry
from flexget.event import event
from flexget.manager import Session
from flexget.utils.database import with_session
from flexget.utils.template import RenderError
from flexget.utils.tools import parse_timedelta
try:
from babelfish import Language
except ImportError:
raise plugin.DependencyError(issued_by='subtitle_queue', missing='babelfish',
message='subtitle_queue requires the babelfish plugin')
log = logging.getLogger('subtitle_queue')
Base = db_schema.versioned_base('subtitle_queue', 0)
#: Video extensions stolen from https://github.com/Diaoul/subliminal/blob/master/subliminal/video.py
# Recognized video file extensions (lower-case, leading dot included).
VIDEO_EXTENSIONS = ('.3g2', '.3gp', '.3gp2', '.3gpp', '.60d', '.ajp', '.asf', '.asx', '.avchd', '.avi', '.bik',
                    '.bix', '.box', '.cam', '.dat', '.divx', '.dmf', '.dv', '.dvr-ms', '.evo', '.flc', '.fli',
                    '.flic', '.flv', '.flx', '.gvi', '.gvp', '.h264', '.m1v', '.m2p', '.m2ts', '.m2v', '.m4e',
                    '.m4v', '.mjp', '.mjpeg', '.mjpg', '.mkv', '.moov', '.mov', '.movhd', '.movie', '.movx', '.mp4',
                    '.mpe', '.mpeg', '.mpg', '.mpv', '.mpv2', '.mxf', '.nsv', '.nut', '.ogg', '.ogm', '.omf', '.ps',
                    '.qt', '.ram', '.rm', '.rmvb', '.swf', '.ts', '.vfw', '.vid', '.video', '.viv', '.vivo', '.vob',
                    '.vro', '.wm', '.wmv', '.wmx', '.wrap', '.wvx', '.wx', '.x264', '.xvid')
# Recognized subtitle file extensions.
SUBTITLE_EXTENSIONS = ('.srt', '.sub', '.smi', '.txt', '.ssa', '.ass', '.mpl')  # Borrowed from Subliminal
association_table = Table('association', Base.metadata,
from datetime import datetime, timedelta
from sqlalchemy import Column, Integer, String, DateTime, Unicode, select, ForeignKey
from sqlalchemy.orm import relation
from flexget import db_schema
from flexget.event import event
from flexget.manager import Session
from flexget.plugin import PluginError
from flexget.utils import json
from flexget.utils.database import entry_synonym
from flexget.utils.sqlalchemy_utils import table_schema, table_add_column
from flexget.utils.tools import parse_timedelta, TimedDict, get_config_hash
log = logging.getLogger('input_cache')
Base = db_schema.versioned_base('input_cache', 1)
@db_schema.upgrade('input_cache')
def upgrade(ver, session):
    """Migrate the 'input_cache' schema from version 0 to 1.

    Version 0 stored cached entries as pickles; version 1 adds a 'json' column
    and re-serializes each cached entry as JSON.

    :param ver: schema version found in the database
    :param session: SQLAlchemy session used for the migration

    NOTE(review): upgrade hooks conventionally return the new schema version;
    confirm the full file sets/returns ver after this loop.
    """
    if ver == 0:
        table = table_schema('input_cache_entry', session)
        table_add_column(table, 'json', Unicode, session)
        # Make sure we get the new schema with the added column
        table = table_schema('input_cache_entry', session)
        for row in session.execute(select([table.c.id, table.c.entry])):
            try:
                # NOTE: unpickling stored blobs is only acceptable because this data
                # was written by this application; never pickle.loads untrusted input.
                p = pickle.loads(row['entry'])
                session.execute(table.update().where(table.c.id == row['id']).values(
                    json=json.dumps(p, encode_datetime=True)))
            # NOTE(review): only KeyError is caught here; pickle.UnpicklingError or
            # ValueError from corrupt blobs would abort the migration — confirm intended.
            except KeyError as e:
                # BUG FIX: message was garbled ('Unable error upgrading ...').
                log.error('Unable to upgrade input_cache pickle object due to %s', e)
from sqlalchemy.orm import relation
from sqlalchemy.schema import ForeignKey
from flexget import plugin, db_schema
from flexget.entry import Entry
from flexget.event import event
from flexget.plugin import PluginError
from flexget.manager import Session
from flexget.utils.database import json_synonym
from flexget.utils.requests import Session as RequestSession, TimedLimiter
from flexget.utils.soup import get_soup
log = logging.getLogger('imdb_list')
IMMUTABLE_LISTS = ['ratings', 'checkins']
Base = db_schema.versioned_base('imdb_list', 0)
MOVIE_TYPES = ['documentary', 'tvmovie', 'video', 'short', 'movie']
SERIES_TYPES = ['tvseries', 'tvepisode', 'tvminiseries']
OTHER_TYPES = ['videogame']
class IMDBListUser(Base):
    # Persists an IMDb user's authenticated session so their lists can be
    # fetched on later runs without logging in again.
    __tablename__ = "imdb_list_user"

    user_id = Column(String, primary_key=True)
    user_name = Column(Unicode)
    # Raw JSON-encoded cookie jar; 'cookies' exposes it as a dict via json_synonym.
    _cookies = Column('cookies', Unicode)
    cookies = json_synonym('_cookies')
    # One-to-many: the user's lists; cascades so lists are removed with the user.
    lists = relation('IMDBListList', backref='imdb_user', cascade='all, delete, delete-orphan')
from requests.exceptions import TooManyRedirects
from sqlalchemy import Column, Unicode, DateTime
from flexget import plugin, db_schema
from flexget.config_schema import one_or_more
from flexget.entry import Entry
from flexget.event import event
from flexget.manager import Session
from flexget.utils.database import json_synonym
from flexget.utils.requests import Session as RequestSession, TimedLimiter, RequestException
from flexget.utils.soup import get_soup
from flexget.utils.tools import parse_filesize
log = logging.getLogger('alpharatio')
Base = db_schema.versioned_base('alpharatio', 0)
requests = RequestSession()
requests.add_domain_limiter(TimedLimiter('alpharatio.cc', '5 seconds'))
# ElementZero confirmed with AlphaRatio sysop 'jasonmaster' that they do want a 5 second limiter
CATEGORIES = {
'tvsd': 'filter_cat[1]',
'tvhd': 'filter_cat[2]',
'tvdvdrip': 'filter_cat[3]',
'tvpacksd': 'filter_cat[4]',
'tvpackhd': 'filter_cat[5]',
'moviesd': 'filter_cat[6]',
'moviehd': 'filter_cat[7]',
'moviepacksd': 'filter_cat[8]',
'moviepackhd': 'filter_cat[9]',
'moviexxx': 'filter_cat[10]',
import os
import re
from loguru import logger
from sqlalchemy import Column, Integer, Unicode
from flexget import options, plugin
from flexget.db_schema import versioned_base
from flexget.entry import Entry
from flexget.event import event
from flexget.manager import Session
logger = logger.bind(name='tail')
Base = versioned_base('tail', 0)
class TailPosition(Base):
    # Remembers how far into a file each (task, filename) pair has been read,
    # so the tail input only emits lines added since the previous run.
    __tablename__ = 'tail'

    id = Column(Integer, primary_key=True)
    task = Column(Unicode)
    filename = Column(Unicode)
    # Last read position within the file (presumably a byte offset — confirm
    # against how InputTail seeks/tells on the file handle).
    position = Column(Integer)
class InputTail:
"""
Parse any text for entries using regular expression.
::