# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# --- Module-level logging setup -------------------------------------------
# DEBUG-level logger writing name/level/timestamp/message lines to stdout.
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
handler = logging.StreamHandler(sys.stdout)
# Renamed from `format`: that name shadowed the `format` builtin.
log_format = "%(name)s %(levelname)s %(asctime)s %(message)s"
formatter = logging.Formatter(log_format)
handler.setFormatter(formatter)
log.addHandler(handler)

# A callable (not a value), so SQLAlchemy evaluates utcnow() afresh for
# each INSERT/UPDATE instead of freezing the import-time timestamp.
now = datetime.datetime.utcnow

metadata = MetaData()

# Table of tool tests to skip, keyed by repository metadata and the
# changeset revision the skip was first recorded for.
SkipToolTest_table = Table(
    "skip_tool_test",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("create_time", DateTime, default=now),
    Column("update_time", DateTime, default=now, onupdate=now),
    Column("repository_metadata_id", Integer, ForeignKey("repository_metadata.id"), index=True),
    Column("initial_changeset_revision", TrimmedString(255), index=True),
    Column("comment", TEXT),
)
def upgrade(migrate_engine):
    """Bind and reflect the module-level metadata, then choose the
    dialect-specific SQL literal for boolean FALSE.

    :param migrate_engine: the engine sqlalchemy-migrate binds us to
    """
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()
    # Initialize: each backend spells a FALSE default differently.
    dialect = migrate_engine.name
    if dialect in ('mysql', 'sqlite'):
        default_false = "0"
    elif dialect in ['postgresql', 'postgres']:
        default_false = "false"
def upgrade():
    """Add a nullable last_changed column to both parser-function tables,
    backfill it for existing rows, then make it NOT NULL.
    (Commands auto generated by Alembic, then adjusted.)
    """
    op.add_column('rss_parser_funcs', sa.Column('last_changed', sa.DateTime(), nullable=True))
    op.add_column('rss_parser_funcs_version', sa.Column('last_changed', sa.DateTime(), autoincrement=False, nullable=True))

    sess = Session(bind=op.get_bind())
    print("Updating date/time stamps for functions.")
    sess.query(RssFeedEntry).update({'last_changed' : datetime.datetime.now()})
    sess.commit()
    print("Update done.")

    # Only tighten the column after every existing row has a value.
    op.alter_column('rss_parser_funcs', 'last_changed', nullable=False)
def is_datetime_field(col):
    """
    Check if a column is DateTime (or implements DateTime)
    :param Column col: the column object to be checked
    :rtype: bool
    """
    # Decorated types expose the wrapped type via `.impl`; check that one.
    # Exact-type comparison (not isinstance) is deliberate here.
    actual_type = col.type.impl if hasattr(col.type, "impl") else col.type
    return type(actual_type) is DateTime
def downgrade():
    """Revert the pystock schema additions from the matching upgrade.

    Drops the fxrates timestamp column and the auxiliary tables.
    """
    # NOTE(review): drop_constraint requires the constraint *name*; None
    # fails on most backends. Fill in the real name from the upgrade step.
    op.drop_constraint(None, 'pystock_currency')
    # Bug fix: op.drop_column takes the column name (a string), not a
    # sa.Column object.
    op.drop_column(u'pystock_fxrates', 'created_on')
    op.drop_table('pystock_book')
    op.drop_table('pystock_user')
    op.drop_table('pystock_exchange')
    op.drop_table('pystock_liability')
    op.drop_table('pystock_company')
def upgrade(migrate_engine):
    # Upgrade operations go here. Don't create your own engine;
    # bind migrate_engine to your metadata.
    meta = MetaData()
    meta.bind = migrate_engine

    # Placement-group tracking table with the usual soft-delete
    # bookkeeping columns (created/updated/deleted timestamps + flag).
    pg_columns = [
        Column('id', Integer, primary_key=True, nullable=False),
        Column('pgid', String(length=255), nullable=False),
        Column('state', String(length=255), nullable=False),
        Column('up', String(length=255), nullable=False),
        Column('acting', String(length=255), nullable=False),
        Column('created_at', DateTime(timezone=False)),
        Column('updated_at', DateTime(timezone=False)),
        Column('deleted_at', DateTime(timezone=False)),
        Column('deleted', Boolean(create_constraint=True, name=None)),
    ]
    placement_groups = Table('placement_groups', meta, *pg_columns)

    # If creation fails part-way, drop whatever was made and re-raise.
    try:
        placement_groups.create()
    except Exception:
        meta.drop_all(tables=[placement_groups])
        raise
# Attribute declarations for a SQLAlchemy model whose `class` statement is
# outside this chunk — presumably a submitted-post/story model; verify
# against the full file.
raw_body = Column(Text, nullable=True, default=None)
url = Column(Text, nullable=True, default=None)
author = Column(String, nullable=True, default=None)
domain = Column(String, nullable=True, default=None)
score = Column(Integer, nullable=True, default=None)
num_comments = Column(Integer, default=0)
# Comma-separated list of child ids
# Currently not used, saving those in case we'll need comment ranking info later on
child_ids = Column(Text)
# Date fields
# `datetime.utcnow` is passed as a callable so each INSERT/UPDATE gets a
# fresh (naive UTC) timestamp, not the import-time value.
date_posted = Column(DateTime, default=datetime.utcnow)
date_updated = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
date_entered_fp = Column(DateTime, default=None)
date_left_fp = Column(DateTime, default=None)
# 0/1 integer flags rather than Boolean columns.
deleted = Column(Integer, default=0)
dead = Column(Integer, default=0)
# PostgreSQL search fields
# Full-text search vectors; how they are populated is not visible here.
title_tsv = Column(TSVECTOR)
body_tsv = Column(TSVECTOR)
# def update
def upgrade():
    """Create the bazeltarget table: one row per Bazel target executed by a
    jobstep, cascading away with its owning step and job."""
    target_columns = [
        sa.Column('id', sa.GUID(), nullable=False),
        sa.Column('step_id', sa.GUID(), nullable=False),
        sa.Column('job_id', sa.GUID(), nullable=False),
        sa.Column('name', sa.Text(), nullable=False),
        sa.Column('status', sa.Enum(), nullable=False),
        sa.Column('result', sa.Enum(), nullable=False),
        sa.Column('duration', sa.Integer(), nullable=True),
        sa.Column('date_created', sa.DateTime(), nullable=False),
        # Rows disappear together with their parent step/job.
        sa.ForeignKeyConstraint(['step_id'], ['jobstep.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['job_id'], ['job.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('bazeltarget', *target_columns)
def __init__(self, md5=None):
    """Track one image by its md5 digest; the counter starts at zero."""
    self.count = 0
    self.md5 = md5
def __repr__(self):
    """Debug representation showing the row id and md5 digest.

    Bug fix: the original format string ``'<img>'`` contained no
    placeholders, so %-formatting two arguments raised
    ``TypeError: not all arguments converted during string formatting``.
    """
    return '<img %s %s>' % (self.id, self.md5)
class Score(Base):
    """A user's score for an image, stamped with a timestamp and a random
    nonce value."""
    __tablename__ = 'scores'

    id = Column(Integer, primary_key=True)
    user = Column(Integer)   # scoring user's id
    image = Column(Integer)  # scored image's id
    score = Column(Integer)
    ts = Column(DateTime)
    # [sic] misspelling of "nonce"; kept unchanged — it names the DB column.
    nouce = Column(Integer)

    def __init__(self, user, image, score=0):
        self.user = user
        self.image = image
        self.score = score
        self.ts = datetime.utcnow()
        # NOTE(review): random.randint is not cryptographically secure; if
        # this nonce guards anything security-sensitive, use `secrets`.
        self.nouce = random.randint(0, 10000)

    def __repr__(self):
        # Bug fix: the original had two %d placeholders for three
        # arguments, which raised TypeError at runtime.
        return '%d, %d, %d>' % (self.user, self.image, self.score)
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: map a session user id to a User row, or None
    if no such user exists."""
    uid = int(user_id)
    return db_session.query(User).filter(User.id == uid).one_or_none()
def upgrade():
    '''
    upgrade method
    '''
    lifetime_columns = [
        sa.Column('id', GUID()),
        sa.Column('scope', sa.String(25)),
        sa.Column('name', sa.String(255)),
        sa.Column('did_type', DIDType.db_type(name='LIFETIME_EXCEPT_DID_TYPE_CHK')),
        sa.Column('account', sa.String(25)),
        sa.Column('comments', sa.String(4000)),
        sa.Column('pattern', sa.String(255)),
        sa.Column('state', LifetimeExceptionsState.db_type(name='LIFETIME_EXCEPT_STATE_CHK')),
        sa.Column('updated_at', sa.DateTime),
        sa.Column('expires_at', sa.DateTime),
        sa.Column('created_at', sa.DateTime),
    ]
    create_table('lifetime_except', *lifetime_columns)

    # Named PK/CHECK constraints are skipped on SQLite (limited ALTER
    # TABLE support there).
    if context.get_context().dialect.name != 'sqlite':
        create_primary_key('LIFETIME_EXCEPT_PK', 'lifetime_except', ['id', 'scope', 'name', 'did_type', 'account'])
        create_check_constraint('LIFETIME_EXCEPT_SCOPE_NN', 'lifetime_except', 'scope is not null')
        create_check_constraint('LIFETIME_EXCEPT_NAME_NN', 'lifetime_except', 'name is not null')
        create_check_constraint('LIFETIME_EXCEPT_DID_TYPE_NN', 'lifetime_except', 'did_type is not null')
def upgrade():
    # Auto generated by Alembic — adjust as needed.
    released_on = sa.Column('released_on', sa.DateTime(), nullable=True)
    op.add_column('nu_outbound_wrappers', released_on)