text stringlengths 0 1.05M | meta dict |
|---|---|
"""Added recruit time to user table
Revision ID: 2e37930ecb54
Revises: 2de0d2488523
Create Date: 2013-08-15 18:35:30.985537
"""
# revision identifiers, used by Alembic.
revision = '2e37930ecb54'
down_revision = '2de0d2488523'
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Dispatch to the per-engine upgrade function (upgrade_engine1, ...).

    Uses an explicit globals() lookup instead of eval(): engine_name comes
    from Alembic's multi-engine configuration, and eval() would execute
    arbitrary code if that value were ever attacker-controlled.  A missing
    engine now raises KeyError with the looked-up name.
    """
    globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
    """Dispatch to the per-engine downgrade function (downgrade_engine1, ...).

    Mirrors upgrade(): a plain globals() lookup replaces eval() so no
    string is ever executed as code.
    """
    globals()["downgrade_%s" % engine_name]()
def upgrade_engine1():
    """Add the nullable integer 'recruited' column to users on engine1."""
    recruited = sa.Column('recruited', sa.Integer(), nullable=True)
    op.add_column('users', recruited)
def downgrade_engine1():
    """Remove the 'recruited' column from users on engine1."""
    op.drop_column('users', 'recruited')
def upgrade_engine2():
    """Add the nullable integer 'recruited' column to users on engine2."""
    recruited = sa.Column('recruited', sa.Integer(), nullable=True)
    op.add_column('users', recruited)
def downgrade_engine2():
    """Remove the 'recruited' column from users on engine2."""
    op.drop_column('users', 'recruited')
def upgrade_engine3():
    """Add the nullable integer 'recruited' column to users on engine3."""
    recruited = sa.Column('recruited', sa.Integer(), nullable=True)
    op.add_column('users', recruited)
def downgrade_engine3():
    """Remove the 'recruited' column from users on engine3."""
    op.drop_column('users', 'recruited')
| {
"repo_name": "atiaxi/chromabot",
"path": "alembic/versions/2e37930ecb54_added_recruit_time_t.py",
"copies": "1",
"size": "1520",
"license": "mit",
"hash": -5289358319553231000,
"line_mean": 23.5161290323,
"line_max": 79,
"alpha_frac": 0.6638157895,
"autogenerated": false,
"ratio": 3.4942528735632186,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46580686630632184,
"avg_score": null,
"num_lines": null
} |
"""Added relationship between groups and instances
Revision ID: 35360c0a6b47
Revises: c16f3da9876
Create Date: 2012-02-08 17:00:43.792045
"""
# downgrade revision identifier, used by Alembic.
revision = '35360c0a6b47'
down_revision = 'c16f3da9876'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Widen groups.name to Unicode(255) and link groups to instances."""
    # Widen the name column first; existing rows are unaffected.
    op.alter_column(u'groups', u'name',
                    type_=sa.Unicode(255),
                    existing_server_default=None,
                    existing_nullable=False)
    # Nullable FK column: pre-existing groups start without an instance.
    instance_fk_col = sa.Column('instance_id', sa.Integer(), nullable=True)
    op.add_column(u'groups', instance_fk_col)
    op.create_foreign_key(u'groups_instance_id_fkey', u'groups',
                          u'instances', ['instance_id'], ['id'],
                          onupdate='CASCADE', ondelete='CASCADE')
def downgrade():
    """Revert: drop the FK and instance_id, shrink name back to Unicode(32)."""
    op.drop_constraint(u'groups_instance_id_fkey', u'groups')
    op.drop_column(u'groups', 'instance_id')
    op.alter_column(u'groups', u'name',
                    type_=sa.Unicode(32),
                    existing_server_default=None,
                    existing_nullable=False)
| {
"repo_name": "asidev/aybu-manager",
"path": "migrations/versions/35360c0a6b47_.py",
"copies": "1",
"size": "1117",
"license": "apache-2.0",
"hash": -8758575051074596000,
"line_mean": 30.0277777778,
"line_max": 67,
"alpha_frac": 0.5881826321,
"autogenerated": false,
"ratio": 3.6266233766233764,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4714806008723376,
"avg_score": null,
"num_lines": null
} |
"""Added roles.
Revision ID: 4453e9c3a9be
Revises: e58b593ca6c9
Create Date: 2017-10-05 18:59:49.721641
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '4453e9c3a9be'
down_revision = 'e58b593ca6c9'
branch_labels = None
depends_on = None
def upgrade():
    """Create the roles table and the roles_users association table."""
    role_items = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=80), nullable=True),
        sa.Column('description', sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name'),
    ]
    op.create_table('roles', *role_items)
    link_items = [
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('role_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['role_id'], ['roles.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
    ]
    op.create_table('roles_users', *link_items)
def downgrade():
    """Drop the association table first, then roles."""
    for table_name in ('roles_users', 'roles'):
        op.drop_table(table_name)
| {
"repo_name": "saseumn/website",
"path": "migrations/versions/4453e9c3a9be_.py",
"copies": "1",
"size": "1141",
"license": "mit",
"hash": -1234480173237632500,
"line_mean": 26.8292682927,
"line_max": 67,
"alpha_frac": 0.6581945662,
"autogenerated": false,
"ratio": 3.2693409742120343,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9405346431137473,
"avg_score": 0.00443782185491208,
"num_lines": 41
} |
"""Added schedule
Revision ID: 37924d95f098
Revises: 41808ec16aa1
Create Date: 2013-08-19 16:21:48.293000
"""
# revision identifiers, used by Alembic.
revision = '37924d95f098'
down_revision = '41808ec16aa1'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the schedule table mapping shows/stations to weekly time slots."""
    schedule_items = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('show_id', sa.Integer(), nullable=True),
        sa.Column('station_id', sa.Integer(), nullable=True),
        sa.Column('day', sa.Integer(), nullable=True),
        sa.Column('start_hour', sa.Integer(), nullable=True),
        sa.Column('start_minute', sa.Integer(), nullable=True),
        sa.Column('length', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['show_id'], ['show.id'], ),
        sa.ForeignKeyConstraint(['station_id'], ['station.id'], ),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('schedule', *schedule_items)
def downgrade():
    """Drop the schedule table."""
    op.drop_table('schedule')
| {
"repo_name": "ebu/radiodns-plugit",
"path": "RadioDns-PlugIt/alembic/versions/37924d95f098_added_schedule.py",
"copies": "1",
"size": "1157",
"license": "bsd-3-clause",
"hash": -9211281909057578000,
"line_mean": 17.9830508475,
"line_max": 63,
"alpha_frac": 0.6300777874,
"autogenerated": false,
"ratio": 3.353623188405797,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9408378122371862,
"avg_score": 0.015064570686787003,
"num_lines": 37
} |
"""added search_vector to items table
Revision ID: 35b2b9f7b64e
Revises: 1c047a351906
Create Date: 2015-02-28 17:00:36.979960
"""
# revision identifiers, used by Alembic.
revision = '35b2b9f7b64e'
down_revision = '1c047a351906'
from alembic import op
import sqlalchemy as sa
from sqlalchemy_searchable import sync_trigger
from sqlalchemy_utils.types import TSVectorType
def upgrade():
    """Add a GIN-indexed TSVector search_vector column to items and install
    the trigger that keeps it in sync with name/description."""
    bind = op.get_bind()
    search_col = sa.Column('search_vector', TSVectorType(), nullable=True)
    op.add_column('items', search_col)
    op.create_index('ix_items_search_vector', 'items', ['search_vector'],
                    unique=False, postgresql_using='gin')
    sync_trigger(bind, 'items', 'search_vector', ['name', 'description'])
def downgrade():
    """Drop the search index and column, then let sync_trigger clean up the
    trigger for the now-removed column (sqlalchemy-searchable recipe)."""
    bind = op.get_bind()
    op.drop_index('ix_items_search_vector', table_name='items')
    op.drop_column('items', 'search_vector')
    sync_trigger(bind, 'items', 'search_vector', ['name', 'description'])
| {
"repo_name": "rosariomgomez/tradyfit",
"path": "vagrant/tradyfit/migrations/versions/35b2b9f7b64e_added_search_vector_to_items_table.py",
"copies": "1",
"size": "1108",
"license": "mit",
"hash": -8169733350559735000,
"line_mean": 32.5757575758,
"line_max": 111,
"alpha_frac": 0.6958483755,
"autogenerated": false,
"ratio": 3.3273273273273274,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9476761450964859,
"avg_score": 0.009282850372493606,
"num_lines": 33
} |
"""Added Section Layout Settings and Site Settings
Revision ID: 698cc06661d6
Revises: 958a9358a256
Create Date: 2016-02-29 00:03:36.777728
"""
# revision identifiers, used by Alembic.
revision = '698cc06661d6'
down_revision = '958a9358a256'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the sites_settings and pages_sections_layout_settings tables."""
    site_setting_items = [
        sa.Column('site_id', sa.Integer(), nullable=False),
        sa.Column('setting', sa.Text(length=40), nullable=False),
        sa.Column('value', sa.Text(length=200), nullable=False),
        sa.ForeignKeyConstraint(['site_id'], ['sites.id'], ),
        sa.PrimaryKeyConstraint('site_id', 'setting'),
    ]
    op.create_table('sites_settings', *site_setting_items)
    op.create_index(op.f('ix_sites_settings_site_id'), 'sites_settings',
                    ['site_id'], unique=False)
    layout_setting_items = [
        sa.Column('page_section_id', sa.Integer(), nullable=False),
        sa.Column('setting', sa.Text(length=40), nullable=False),
        sa.Column('value', sa.Text(length=200), nullable=True),
        sa.Column('image_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['image_id'], ['images.id'], ),
        sa.ForeignKeyConstraint(['page_section_id'], ['pages_sections.id'], ),
        sa.PrimaryKeyConstraint('page_section_id', 'setting'),
    ]
    op.create_table('pages_sections_layout_settings', *layout_setting_items)
def downgrade():
    """Drop both settings tables and the site_id index."""
    op.drop_table('pages_sections_layout_settings')
    op.drop_index(op.f('ix_sites_settings_site_id'),
                  table_name='sites_settings')
    op.drop_table('sites_settings')
| {
"repo_name": "matslindh/kimochi",
"path": "alembic/versions/698cc06661d6_added_section_layout_settings_and_site_.py",
"copies": "1",
"size": "1657",
"license": "mit",
"hash": 6368856217526752000,
"line_mean": 35.0217391304,
"line_max": 99,
"alpha_frac": 0.6813518407,
"autogenerated": false,
"ratio": 3.3678861788617884,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4549238019561789,
"avg_score": null,
"num_lines": null
} |
"""added series to invoice
Revision ID: 2d70356c6c5f
Revises: 77a9fbbe4cd9
Create Date: 2021-01-07 23:38:39.086260
"""
# revision identifiers, used by Alembic.
revision = '2d70356c6c5f'
down_revision = '77a9fbbe4cd9'
from alembic import op
import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class User(Base):
    """Minimal mapping of the users table, used only by this migration."""
    __tablename__ = 'users'
    id = sa.Column(sa.Integer, primary_key=True)
    # Per-user running invoice counter; copied onto each user's new Series.
    next_invoice_num = sa.Column(sa.Integer, nullable=False)
class Series(Base):
    """Minimal mapping of the new series table created by this migration."""
    __tablename__ = 'series'
    id = sa.Column(sa.Integer, primary_key=True)
    user_id = sa.Column(sa.Integer, sa.ForeignKey('users.id'), nullable=False)
    name = sa.Column(sa.String, nullable=False)
    # Counter inherited from User.next_invoice_num at migration time.
    next_invoice_num = sa.Column(sa.Integer, nullable=False)
class Invoice(Base):
    """Minimal mapping of invoices with the new series_id column."""
    __tablename__ = 'invoices'
    id = sa.Column(sa.Integer, primary_key=True)
    owner_id = sa.Column(sa.Integer, sa.ForeignKey('users.id'), nullable=False)
    # Added by this migration; backfilled in upgrade() below.
    series_id = sa.Column(sa.Integer, sa.ForeignKey('series.id'), nullable=False)
def upgrade():
    """Introduce invoice series: create a default 'Normal' series per user
    and point every existing invoice at its owner's series.

    Fixes two defects in the generated script: the new Series rows were
    never flushed (so they had no primary keys), and the Series *object*
    itself was assigned to the integer series_id column instead of its id.
    """
    bind = op.get_bind()
    session = orm.Session(bind=bind)
    ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): adding a NOT NULL column without a server default can
    # fail on some backends when invoices already has rows -- verify on the
    # deployed database; rows are backfilled below in the same transaction.
    op.add_column('invoices', sa.Column('series_id', sa.Integer(), nullable=False))
    # Create a "Normal" series for each user, carrying over the user's
    # invoice counter.
    series = {user.id: Series(user_id=user.id, name='Normal',
                              next_invoice_num=user.next_invoice_num)
              for user in session.query(User)}
    session.add_all(series.values())
    # Flush so the freshly created series rows receive primary keys.
    session.flush()
    # Update invoices with the ids of the newly created series.
    for invoice in session.query(Invoice):
        invoice.series_id = series[invoice.owner_id].id
    session.commit()
    op.create_foreign_key(None, 'invoices', 'series', ['series_id'], ['id'])
    ### end Alembic commands ###
def downgrade():
    """Drop the invoices.series_id foreign key and column.

    Note: the series rows created in upgrade() are left in place.
    """
    ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): drop_constraint with name=None matches the unnamed FK
    # created in upgrade(); backends without a naming convention may require
    # the actual generated constraint name -- verify before running.
    op.drop_constraint(None, 'invoices', type_='foreignkey')
    op.drop_column('invoices', 'series_id')
    ### end Alembic commands ###
| {
"repo_name": "skazi0/yaia",
"path": "migrations/versions/2d70356c6c5f_added_series_to_invoice.py",
"copies": "1",
"size": "2038",
"license": "mit",
"hash": 463199026116752600,
"line_mean": 28.1142857143,
"line_max": 134,
"alpha_frac": 0.6864573111,
"autogenerated": false,
"ratio": 3.351973684210526,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9519616533931965,
"avg_score": 0.0037628922757123023,
"num_lines": 70
} |
"""Added service following tables
Revision ID: 4121a98095b5
Revises: 37924d95f098
Create Date: 2013-08-23 09:00:32.793000
"""
# revision identifiers, used by Alembic.
revision = '4121a98095b5'
down_revision = '37924d95f098'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the generic_service_following_entry table."""
    entry_items = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('active', sa.Boolean(), nullable=True),
        sa.Column('cost', sa.Integer(), nullable=True),
        sa.Column('offset', sa.Integer(), nullable=True),
        # NOTE(review): 'mine_type' looks like a typo for 'mime_type', but
        # the name is preserved to match the deployed schema.
        sa.Column('mine_type', sa.String(length=255), nullable=True),
        sa.Column('channel_id', sa.Integer(), nullable=True),
        sa.Column('station_id', sa.Integer(), nullable=True),
        sa.Column('channel_uri', sa.String(length=255), nullable=True),
        sa.ForeignKeyConstraint(['channel_id'], ['channel.id'], ),
        sa.ForeignKeyConstraint(['station_id'], ['station.id'], ),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('generic_service_following_entry', *entry_items)
def downgrade():
    """Drop the generic_service_following_entry table."""
    op.drop_table('generic_service_following_entry')
| {
"repo_name": "ebu/radiodns-plugit",
"path": "RadioDns-PlugIt/alembic/versions/4121a98095b5_added_service_follow.py",
"copies": "1",
"size": "1300",
"license": "bsd-3-clause",
"hash": 2634011017034615000,
"line_mean": 20.0333333333,
"line_max": 67,
"alpha_frac": 0.6476923077,
"autogenerated": false,
"ratio": 3.3942558746736293,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9470556890608623,
"avg_score": 0.014278258353001321,
"num_lines": 38
} |
"""Added Service Provider
Revision ID: 4891307e6a8c
Revises: e4619239aac
Create Date: 2014-11-21 17:33:21.610477
"""
# revision identifiers, used by Alembic.
revision = '4891307e6a8c'
down_revision = 'e4619239aac'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create service_provider and logo_image tables and link stations to
    their service provider."""
    provider_items = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('codops', sa.String(length=255), nullable=True),
        sa.Column('short_name', sa.String(length=8), nullable=True),
        sa.Column('medium_name', sa.String(length=16), nullable=True),
        sa.Column('long_name', sa.String(length=128), nullable=True),
        sa.Column('short_description', sa.String(length=180), nullable=True),
        sa.Column('long_description', sa.String(length=1200), nullable=True),
        sa.Column('url_default', sa.String(length=255), nullable=True),
        sa.Column('default_language', sa.String(length=5), nullable=True),
        sa.Column('location_country', sa.String(length=5), nullable=True),
        sa.Column('default_logo_image_id', sa.Integer(), nullable=True),
        # use_alter: logo_image is created below, so this FK must be added
        # only after both tables exist (circular reference between them).
        sa.ForeignKeyConstraint(['default_logo_image_id'], ['logo_image.id'],
                                name='fk_default_logo_id', use_alter=True),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('service_provider', *provider_items)
    logo_items = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('orga', sa.Integer(), nullable=True),
        sa.Column('filename', sa.String(length=255), nullable=True),
        sa.Column('url32x32', sa.String(length=255), nullable=True),
        sa.Column('url112x32', sa.String(length=255), nullable=True),
        sa.Column('url128x128', sa.String(length=255), nullable=True),
        sa.Column('url320x240', sa.String(length=255), nullable=True),
        sa.Column('url600x600', sa.String(length=255), nullable=True),
        sa.Column('service_provider_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['service_provider_id'], ['service_provider.id'], ),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('logo_image', *logo_items)
    op.add_column(u'station', sa.Column('service_provider_id', sa.Integer(),
                                        nullable=True))
def downgrade():
    """Remove the station link, then drop logo_image and service_provider."""
    op.drop_column(u'station', 'service_provider_id')
    for table_name in ('logo_image', 'service_provider'):
        op.drop_table(table_name)
| {
"repo_name": "ebu/radiodns-plugit",
"path": "RadioDns-PlugIt/alembic/versions/4891307e6a8c_added_service_provider.py",
"copies": "1",
"size": "2371",
"license": "bsd-3-clause",
"hash": 19878428672184810,
"line_mean": 41.3392857143,
"line_max": 117,
"alpha_frac": 0.6866301139,
"autogenerated": false,
"ratio": 3.297635605006954,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9430871468291986,
"avg_score": 0.010678850122993593,
"num_lines": 56
} |
"""Added Services to Station Model
Revision ID: 423350744c8c
Revises: 519654f4eb9c
Create Date: 2014-12-04 11:28:14.972747
"""
# revision identifiers, used by Alembic.
revision = '423350744c8c'
down_revision = '519654f4eb9c'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add per-service enabled flags and service URLs to station.

    One Boolean *_enabled and one String(255) *_service column for each of
    the RadioEPG, RadioTAG and RadioVIS services, in the same order as the
    autogenerated script.
    """
    for service in ('radioepg', 'radiotag', 'radiovis'):
        op.add_column('station', sa.Column('%s_enabled' % service,
                                           sa.Boolean(), nullable=True))
        op.add_column('station', sa.Column('%s_service' % service,
                                           sa.String(length=255), nullable=True))
def downgrade():
    """Drop the per-service columns, in reverse order of creation."""
    for service in ('radiovis', 'radiotag', 'radioepg'):
        op.drop_column('station', '%s_service' % service)
        op.drop_column('station', '%s_enabled' % service)
| {
"repo_name": "ebu/radiodns-plugit",
"path": "RadioDns-PlugIt/alembic/versions/423350744c8c_added_services_to_station_model.py",
"copies": "1",
"size": "1367",
"license": "bsd-3-clause",
"hash": -4597954878787607600,
"line_mean": 36.9722222222,
"line_max": 97,
"alpha_frac": 0.6986100951,
"autogenerated": false,
"ratio": 3.2860576923076925,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9451150968212028,
"avg_score": 0.006703363839133084,
"num_lines": 36
} |
"""Added session
Revision ID: 1fcee2e6280
Revises: 1925329c798a
Create Date: 2013-11-08 19:24:18.721591
"""
# revision identifiers, used by Alembic.
revision = '1fcee2e6280'
down_revision = '1925329c798a'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the session table and add venue_room.bgcolor."""
    session_items = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('proposal_space_id', sa.Integer(), nullable=False),
        sa.Column('description_text', sa.UnicodeText(), nullable=False),
        sa.Column('description_html', sa.UnicodeText(), nullable=False),
        sa.Column('speaker_bio_text', sa.UnicodeText(), nullable=False),
        sa.Column('speaker_bio_html', sa.UnicodeText(), nullable=False),
        sa.Column('proposal_id', sa.Integer(), nullable=True),
        sa.Column('start_datetime', sa.DateTime(), nullable=False),
        sa.Column('end_datetime', sa.DateTime(), nullable=False),
        sa.Column('venue_room_id', sa.Integer(), nullable=False),
        sa.Column('is_break', sa.Boolean(), nullable=False),
        sa.Column('url_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.Unicode(length=250), nullable=False),
        sa.Column('title', sa.Unicode(length=250), nullable=False),
        sa.ForeignKeyConstraint(['proposal_id'], ['proposal.id'], ),
        sa.ForeignKeyConstraint(['proposal_space_id'], ['proposal_space.id'], ),
        sa.ForeignKeyConstraint(['venue_room_id'], ['venue_room.id'], ),
        sa.PrimaryKeyConstraint('id'),
        # url_id is unique per proposal space, not globally.
        sa.UniqueConstraint('proposal_space_id', 'url_id'),
    ]
    op.create_table('session', *session_items)
    op.add_column('venue_room',
                  sa.Column('bgcolor', sa.Unicode(length=6), nullable=True))
def downgrade():
    """Drop the session table and remove venue_room.bgcolor."""
    op.drop_table('session')
    op.drop_column('venue_room', 'bgcolor')
| {
"repo_name": "jace/failconfunnel",
"path": "alembic/versions/1fcee2e6280_added_session.py",
"copies": "1",
"size": "1987",
"license": "bsd-2-clause",
"hash": -6338683961034127000,
"line_mean": 38.74,
"line_max": 90,
"alpha_frac": 0.6758933065,
"autogenerated": false,
"ratio": 3.4496527777777777,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46255460842777774,
"avg_score": null,
"num_lines": null
} |
"""Added simple movement in the x-y plane with arrow keys.
This is just independent experimentation and not part of the original tutorial."""
import sys,os,math
import pygame as pg
from OpenGL import GL
sys.path.append("..")
sys.path.append("data")
import myframework
from vertices_perspective import VERTICES
# Maps held arrow keys to the (dx, dy) applied to the shader offset each
# frame.  Directions are negated relative to the key -- presumably because
# the offset shifts the scene rather than a camera; verify against the
# vertex shader.
KEYDICT = {pg.K_UP : ( 0,-1),
           pg.K_DOWN : ( 0, 1),
           pg.K_RIGHT : (-1, 0),
           pg.K_LEFT : ( 1, 0)}
class Shader(myframework.BaseShader):
    """Perspective-projection shader whose 2D offset is moved by arrow keys."""

    def __init__(self,vertices,vert_file,frag_file):
        myframework.BaseShader.__init__(self,vertices,vert_file,frag_file)
        # Cull back faces; the vertex data is wound clockwise.
        GL.glEnable(GL.GL_CULL_FACE)
        GL.glCullFace(GL.GL_BACK)
        GL.glFrontFace(GL.GL_CW)
        # x/y offset uploaded as the "offset" uniform; mutated by main().
        self.offset = [-0.5,-0.5]
        self.speed = 0.03  # offset units moved per frame while a key is held

    def setup_uniforms(self):
        """Look up the shader uniforms and upload the perspective matrix once."""
        self.offset_location = GL.glGetUniformLocation(self.shader,"offset")
        self.perspective_matrix_unif = GL.glGetUniformLocation(self.shader,"perspectiveMatrix")
        self.frustum_scale = 1.0
        self.z_near,self.z_far = 0.5,3.0
        self.create_matrix()
        GL.glUseProgram(self.shader)
        GL.glUniformMatrix4fv(self.perspective_matrix_unif,1,GL.GL_FALSE,self.the_matrix)
        GL.glUseProgram(0)

    def create_matrix(self):
        """Build the 4x4 perspective projection matrix as a flat 16-float list."""
        self.the_matrix = [0.0 for i in range(16)]
        self.the_matrix[0] = self.frustum_scale
        self.the_matrix[5] = self.frustum_scale
        self.the_matrix[10] = (self.z_far+self.z_near)/(self.z_near-self.z_far)
        self.the_matrix[14] = (2*self.z_far*self.z_near)/(self.z_near-self.z_far)
        self.the_matrix[11] = -1.0

    def display(self):
        """Clear the screen and draw 36 vertices with position+color attributes."""
        GL.glClearColor(0,0,0,0)
        GL.glClear(GL.GL_COLOR_BUFFER_BIT)
        GL.glUseProgram(self.shader)
        GL.glUniform2f(self.offset_location,*self.offset)
        # Color data starts halfway through the VBO (positions first).
        # NOTE(review): under true division this yields a float, which
        # GLvoidp will reject -- assumes Python 2 integer division; verify.
        color_data = GL.GLvoidp((len(self.vertices)*self.size_float)/2)
        GL.glBindBuffer(GL.GL_ARRAY_BUFFER, self.vbo)
        GL.glEnableVertexAttribArray(0)
        GL.glEnableVertexAttribArray(1)
        GL.glVertexAttribPointer(0,self.vert_comp,GL.GL_FLOAT,GL.GL_FALSE,0,None)
        GL.glVertexAttribPointer(1,self.vert_comp,GL.GL_FLOAT,GL.GL_FALSE,0,color_data)
        GL.glDrawArrays(GL.GL_TRIANGLES,0,36)
        GL.glDisableVertexAttribArray(0)
        GL.glDisableVertexAttribArray(1)
        GL.glUseProgram(0)

    def reshape(self,width,height):
        """Re-upload the perspective matrix adjusted for the new aspect ratio."""
        self.the_matrix[0] = self.frustum_scale/(width/float(height))
        self.the_matrix[5] = self.frustum_scale
        GL.glUseProgram(self.shader)
        GL.glUniformMatrix4fv(self.perspective_matrix_unif,1,GL.GL_FALSE,self.the_matrix)
        GL.glUseProgram(0)
        GL.glViewport(0,0,width,height)
def main():
    """Create the OpenGL window and run the event/draw loop at 60 FPS."""
    pg.init()
    os.environ['SDL_VIDEO_CENTERED'] = '1'
    SCREEN = pg.display.set_mode((500,500),pg.HWSURFACE|pg.OPENGL|pg.DOUBLEBUF|pg.RESIZABLE)
    MyClock = pg.time.Clock()
    MyGL = Shader(VERTICES[:],os.path.join("data","MatrixPerspective.vert"),os.path.join("data","StandardColors.frag"))
    done = False
    while not done:
        for event in pg.event.get():
            # NOTE(review): keys is only (re)assigned while events are being
            # processed; on an event-free first frame the movement loop below
            # would see it unbound -- verify against the original layout.
            keys = pg.key.get_pressed()
            if event.type==pg.QUIT or (event.type==pg.KEYDOWN and event.key==pg.K_ESCAPE):
                done = True
            elif event.type == pg.KEYDOWN:
                # Single key presses are intentionally ignored; continuous
                # movement for held keys is handled below via KEYDICT.
                pass
            elif event.type == pg.VIDEORESIZE:
                MyGL.reshape(*event.size)
        # Apply continuous movement for every currently held arrow key.
        for key in KEYDICT:
            if keys[key]:
                for i in (0,1):
                    MyGL.offset[i] = MyGL.offset[i]+KEYDICT[key][i]*MyGL.speed
        MyGL.display()
        pg.display.flip()
        MyClock.tick(60)
# Script entry point: run the demo, then shut pygame down cleanly.
if __name__ == '__main__':
    main()
    pg.quit()
    sys.exit()
| {
"repo_name": "Mekire/gltut-pygame",
"path": "04_objects_at_rest/04_AspectRatio_interactive.py",
"copies": "1",
"size": "3698",
"license": "mit",
"hash": 1715360839114872300,
"line_mean": 35.2549019608,
"line_max": 119,
"alpha_frac": 0.6208761493,
"autogenerated": false,
"ratio": 3.038619556285949,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.41594957055859494,
"avg_score": null,
"num_lines": null
} |
"""added slug to port
Revision ID: 3ada503c2991
Revises: 38745782554d
Create Date: 2017-06-08 11:41:45.995039
"""
from alembic import op
import sqlalchemy as sa
from alembic import op
from sqlalchemy import Integer, String
from sqlalchemy.sql import table, column, text
# revision identifiers, used by Alembic.
revision = '3ada503c2991'
down_revision = '38745782554d'
branch_labels = None
depends_on = None
def upgrade():
    """Add a mandatory slug column to operation_port, backfill it from the
    translated port names, and re-home certain output port interfaces."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('operation_port', sa.Column('slug', sa.String(length=50), nullable=False))
    # ### end Alembic commands ###
    # Backfill slugs from operation_port_translation (multi-table UPDATE,
    # MySQL syntax).
    op.execute(text(
        ''' UPDATE operation_port, operation_port_translation
            SET operation_port.slug = operation_port_translation.name
            WHERE operation_port_translation.id = operation_port.id '''))
    # Move the OUTPUT ports of the listed operations to interface 19.
    op.execute(text("""
        UPDATE operation_port_interface_operation_port
        SET operation_port_interface_id = 19
        WHERE operation_port_id IN
          (SELECT id FROM operation_port WHERE operation_id IN
            (26, 35, 68, 69, 70, 71, 81) AND type = 'OUTPUT'
          );"""))
def downgrade():
    """Drop the slug column and restore the previous interface assignments."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('operation_port', 'slug')
    # Restore the OUTPUT ports of the listed operations to interface 1.
    # NOTE(review): upgrade() also moved operation 26, which is absent from
    # this list -- confirm whether that asymmetry is intentional.
    op.execute(text("""
        UPDATE operation_port_interface_operation_port
        SET operation_port_interface_id = 1
        WHERE operation_port_id IN
          (SELECT id FROM operation_port WHERE operation_id IN
            (35, 68, 69, 70, 71, 81) AND type = 'OUTPUT'
          );"""))
    # ### end Alembic commands ###
| {
"repo_name": "eubr-bigsea/tahiti",
"path": "migrations/versions/3ada503c2991_added_slug_to_port.py",
"copies": "1",
"size": "1629",
"license": "apache-2.0",
"hash": 5382263895481748000,
"line_mean": 31.58,
"line_max": 92,
"alpha_frac": 0.6464088398,
"autogenerated": false,
"ratio": 3.80607476635514,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49524836061551397,
"avg_score": null,
"num_lines": null
} |
"""Added Source table and many-to-many with Event and Variable.
Revision ID: 362ecdf08386
Revises: 499accc505c3
Create Date: 2013-06-07 09:28:34.281518
"""
# revision identifiers, used by Alembic.
revision = '362ecdf08386'
down_revision = '499accc505c3'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the source table plus its many-to-many link tables to event
    and variable, and relax variable.in_use to nullable."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('source',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=80), nullable=False),
        sa.Column('description', sa.String(length=255), nullable=False),
        sa.Column('url', sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    # NOTE(review): PrimaryKeyConstraint() with no columns gives this
    # association table no primary key -- confirm this matches the model.
    op.create_table('sources_events',
        sa.Column('source_id', sa.Integer(), nullable=True),
        sa.Column('event_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['event_id'], ['event.id'], ),
        sa.ForeignKeyConstraint(['source_id'], ['source.id'], ),
        sa.PrimaryKeyConstraint()
    )
    op.create_table('sources_variables',
        sa.Column('source_id', sa.Integer(), nullable=True),
        sa.Column('variable_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['source_id'], ['source.id'], ),
        sa.ForeignKeyConstraint(['variable_id'], ['variable.id'], ),
        sa.PrimaryKeyConstraint()
    )
    # Allow NULL in variable.in_use (reverted by downgrade()).
    op.alter_column(u'variable', u'in_use',
                    existing_type=sa.BOOLEAN(),
                    nullable=True,
                    existing_server_default=u'true')
    ### end Alembic commands ###
def downgrade():
    """Restore NOT NULL on variable.in_use and drop the source tables."""
    op.alter_column(u'variable', u'in_use',
                    existing_type=sa.BOOLEAN(),
                    nullable=False,
                    existing_server_default=u'true')
    # Association tables first, then the table they reference.
    for table_name in ('sources_variables', 'sources_events', 'source'):
        op.drop_table(table_name)
| {
"repo_name": "msscully/datamart",
"path": "alembic/versions/362ecdf08386_added_source_table_a.py",
"copies": "1",
"size": "1939",
"license": "mit",
"hash": -5699638766684028000,
"line_mean": 33.0175438596,
"line_max": 68,
"alpha_frac": 0.6441464673,
"autogenerated": false,
"ratio": 3.5907407407407406,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.967704210718774,
"avg_score": 0.011569020170600098,
"num_lines": 57
} |
# added specifically to make floating point division apply to code in bar position calculation
from __future__ import division
import libtcodpy as libtcod
import xp_loader
import gzip
from vec2d import Vec2d
from model.attribute import AttributeTag
from ui.frame import Frame
from ui.ui_event import UIEvent, UIEventType
# Displays remaining and queued actions.
class FrameLibraries(Frame):
    """UI frame listing the player's four library slots.

    Draws a horizontal line for each slot; occupied slots show the library
    name in a highlight color, empty slots fall back to 'lib_missing' in the
    line color.
    """

    def __init__(self, root_console_width, root_console_height, frame_manager):
        self.entity_manager = frame_manager.parent_menu.entity_manager
        # Load the gzipped .xp background image for this frame.
        # NOTE(review): backslash path is Windows-only -- consider os.path.join.
        console_bg_xp = gzip.open('assets\\ui\\ui_frame_libraries_bg.xp')
        self.bg_data = xp_loader.load_xp_string(console_bg_xp.read())
        Frame.__init__(self, root_console_width, root_console_height, self.bg_data['width'], self.bg_data['height'], frame_manager)
        # Layer 1 of the .xp holds position keys: red marks where the
        # library list starts, green marks how far each line extends.
        library_start_xy = xp_loader.get_position_key_xy(self.bg_data['layer_data'][1], xp_loader.poskey_color_red)
        self.library_start_xy = Vec2d(library_start_xy[0], library_start_xy[1])
        self.library_line_extent = xp_loader.get_position_key_xy(self.bg_data['layer_data'][1], xp_loader.poskey_color_green)
        #TODO put these in config somewhere
        self.line_char = chr(196)  # horizontal line glyph in CP437
        self.line_bg = libtcod.Color(2, 22, 12)
        self.line_fg = libtcod.Color(6, 130, 60)
        self.libname_fg = libtcod.Color(102, 255, 178)
        libtcod.console_set_default_background(self.console,self.line_bg)
        libtcod.console_set_default_foreground(self.console,self.libname_fg)
        libtcod.console_set_alignment(self.console, libtcod.LEFT)
        # Paint the static background (layer 0) once up front.
        xp_loader.load_layer_to_console(self.console, self.bg_data['layer_data'][0])

    def handle_ui_event(self, event):
        # This frame is display-only; UI events are ignored.
        pass

    def draw(self):
        """Redraw the background and the four library slot lines."""
        libtcod.console_clear(self.console)
        xp_loader.load_layer_to_console(self.console, self.bg_data['layer_data'][0])
        player_libraries = self.entity_manager.get_entity_by_id(self.entity_manager.player_id).get_attribute(AttributeTag.Libraries).data['value']
        for lib in range(4):
            #+1 here because range will go up to but not including the final screen tile needed
            for x in range(self.library_line_extent[0] - self.library_start_xy[0] + 1):
                libtcod.console_put_char_ex(self.console, self.library_start_xy[0] + x, self.library_start_xy[1] + lib, self.line_char, self.line_fg, self.line_bg)
            libname_xy = Vec2d(self.library_start_xy[0], self.library_start_xy[1] + lib)
            #TODO: move to config strings
            # Empty slots show the placeholder in the dimmer line color.
            libname = 'lib_missing'
            print_color = self.line_fg
            if len(player_libraries) > lib:
                print_color = self.libname_fg
                libname = player_libraries[lib].name
            libtcod.console_set_default_foreground(self.console, print_color)
            libtcod.console_print(self.console, libname_xy[0], libname_xy[1], libname)
        libtcod.console_blit(self.console, 0, 0, self.width, self.height, 0, 0, 0)
"repo_name": "RCIX/RogueP",
"path": "ui/game/frame_libraries.py",
"copies": "1",
"size": "2774",
"license": "mit",
"hash": -8444959130486290000,
"line_mean": 39.2173913043,
"line_max": 151,
"alpha_frac": 0.7332372026,
"autogenerated": false,
"ratio": 2.895615866388309,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4128853068988309,
"avg_score": null,
"num_lines": null
} |
"""Added Spec table

Revision ID: 2614c3bbec2a
Revises: 561b1cc23d6
Create Date: 2014-11-19 10:07:53.610437

"""

# revision identifiers, used by Alembic.
import traceback
from appcomposer.models import *

revision = '2614c3bbec2a'
down_revision = '561b1cc23d6'

from alembic import op
import sqlalchemy as sa
from sqlalchemy import text


def upgrade():
    """Create the Specs table and backfill it from Apps.

    One Spec row is created per distinct Apps.spec_url; each App is then
    linked to its Spec via the new Apps.spec_id column and its spec_url
    is cleared.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('Specs',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('url', sa.Unicode(length=500), nullable=False),
        sa.Column('pid', sa.Unicode(length=50), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.add_column(u'Apps', sa.Column('spec_id', sa.Integer(), nullable=True))
    ### end Alembic commands ###

    # Custom migration script.
    # Create a Spec object for every App, unless the Spec exists already.
    connection = op.get_bind()
    apps = connection.execute(text("SELECT id, spec_url, spec_id FROM Apps"))
    for id, spec_url, spec_id in apps:
        try:
            # TODO: There are some invalid apps from when Adapted apps could be created with no URL.
            # They should probably be removed.
            if spec_url is None:
                continue

            # Create Spec if not exist.
            exists = connection.execute(text("SELECT count(*) FROM Specs WHERE url=:spec_url"), spec_url=spec_url)
            exists = exists.fetchone()
            exists = exists[0] > 0
            if not exists:
                connection.execute(text("INSERT INTO Specs (url) VALUES (:spec_url)"), spec_url=spec_url)
            # Point the App at its Spec and clear the now-redundant spec_url.
            connection.execute(text("UPDATE Apps SET spec_id = (SELECT id FROM Specs WHERE url = :spec_url), spec_url = NULL WHERE id=:app_id"), spec_url=spec_url, app_id=id)
        except:
            # Best-effort migration: log the failure and continue with the
            # remaining apps instead of aborting the whole migration.
            traceback.print_exc()
            print "Exception on an app: %r" % id


def downgrade():
    """Copy each App's Spec URL back into Apps.spec_url, then drop the
    spec_id column and the Specs table.
    """
    connection = op.get_bind()
    # Result object is unused; the UPDATE is executed for its side effect.
    apps = connection.execute(text("UPDATE Apps SET spec_url = (SELECT url FROM Specs WHERE id=Apps.spec_id)"))

    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column(u'Apps', 'spec_id')
    op.drop_table('Specs')
    ### end Alembic commands ###
| {
"repo_name": "porduna/appcomposer",
"path": "alembic/versions/2614c3bbec2a_added_spec_table.py",
"copies": "3",
"size": "2217",
"license": "bsd-2-clause",
"hash": -1561859867548810200,
"line_mean": 30.2253521127,
"line_max": 174,
"alpha_frac": 0.6382498872,
"autogenerated": false,
"ratio": 3.5990259740259742,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00682569216700766,
"num_lines": 71
} |
"""Added spec_url to replace spec appvar

Revision ID: 561b1cc23d6
Revises: 4211b3736e90
Create Date: 2014-11-04 09:57:23.710248

"""

# revision identifiers, used by Alembic.
import json
import traceback
from appcomposer.models import App

revision = '561b1cc23d6'
down_revision = '4211b3736e90'

from alembic import op
import sqlalchemy as sa


def upgrade():
    """Move each App's spec out of the 'spec' AppVar into Apps.spec_url.

    Falls back to the 'url' field of the App's JSON data blob for apps
    that have no 'spec' AppVar.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('Apps', sa.Column('spec_url', sa.Unicode(length=600), nullable=True))
    ### end Alembic commands ###

    # To upgrade we need to take every AppVar with spec, remove it, and add it to the App itself.
    op.execute("UPDATE Apps SET spec_url = (SELECT value FROM AppVars WHERE name = 'spec' and app_id = Apps.id)")
    op.execute("DELETE FROM AppVars WHERE name = 'spec'")

    # We will also try to extract the spec from the JSON data when possible.
    connection = op.get_bind()
    oldapp_ids = connection.execute("SELECT id, data FROM Apps WHERE spec_url IS NULL")
    for oldapp_id, data in oldapp_ids:
        try:
            data = json.loads(data)
            spec = data["url"]
            op.execute(
                App.__table__.update().where(App.__table__.c.id==oldapp_id).values({"spec_url": spec})
            )
        except:
            # Best-effort: apps with unparsable data or without a 'url' key
            # are logged and left with spec_url = NULL.
            traceback.print_exc()
            print "Exception on an app: %r" % oldapp_id


def downgrade():
    """Recreate the 'spec' AppVar from Apps.spec_url, then drop the column."""
    # To downgrade we need to add an AppVar with the spec to every App.
    op.execute("INSERT INTO AppVars (name, value) SELECT 'spec', spec_url FROM Apps")
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('Apps', 'spec_url')
    ### end Alembic commands ###
| {
"repo_name": "porduna/appcomposer",
"path": "alembic/versions/561b1cc23d6_added_spec_url_to_re.py",
"copies": "3",
"size": "1699",
"license": "bsd-2-clause",
"hash": 5363048259341549000,
"line_mean": 29.8909090909,
"line_max": 113,
"alpha_frac": 0.6509711595,
"autogenerated": false,
"ratio": 3.524896265560166,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5675867425060166,
"avg_score": null,
"num_lines": null
} |
"""Added SQS table

Revision ID: 4c25c6cb647b
Revises: 465c323b7bf2
Create Date: 2017-01-22 22:24:55.993561

"""

# revision identifiers, used by Alembic.
revision = '4c25c6cb647b'
down_revision = '465c323b7bf2'
branch_labels = None
depends_on = None

from alembic import op
import sqlalchemy as sa


def upgrade(engine_name):
    """Invoke the engine-specific upgrade (e.g. upgrade_data_broker)."""
    handler = globals()["upgrade_%s" % engine_name]
    handler()


def downgrade(engine_name):
    """Invoke the engine-specific downgrade."""
    handler = globals()["downgrade_%s" % engine_name]
    handler()


def upgrade_data_broker():
    """Create the sqs table (at most one queued message per job)."""
    table_args = [
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('sqs_id', sa.Integer(), nullable=False),
        sa.Column('job_id', sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint('sqs_id'),
        # job_id is unique, hence "one message per job".
        sa.UniqueConstraint('job_id', name='uniq_job_id'),
    ]
    op.create_table('sqs', *table_args)


def downgrade_data_broker():
    """Remove the sqs table."""
    op.drop_table('sqs')
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/4c25c6cb647b_added_sqs_table.py",
"copies": "1",
"size": "1084",
"license": "cc0-1.0",
"hash": -4007039563733743000,
"line_mean": 22.0638297872,
"line_max": 63,
"alpha_frac": 0.6688191882,
"autogenerated": false,
"ratio": 3.2650602409638556,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44338794291638556,
"avg_score": null,
"num_lines": null
} |
"""added station overrides

Revision ID: 7045978e05ec
Revises: b1f1f87a08d8
Create Date: 2018-10-15 09:45:24.256760

"""

# revision identifiers, used by Alembic.
revision = '7045978e05ec'
down_revision = 'b1f1f87a08d8'

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql


def upgrade():
    """Add station.parent and station.fk_client plus their foreign keys."""
    op.add_column(
        'station',
        sa.Column('parent', mysql.INTEGER(display_width=11),
                  autoincrement=False, nullable=True))
    op.add_column(
        'station',
        sa.Column('fk_client', mysql.INTEGER(display_width=11),
                  autoincrement=False, nullable=True))
    op.create_foreign_key(u'station_clients_id_fk', 'station', 'clients',
                          ['fk_client'], ['id'], ondelete=u'CASCADE')
    # Self-referential FK: a station may point at a parent station.
    op.create_foreign_key(u'station_station_id_fk', 'station', 'station',
                          ['parent'], ['id'], ondelete=u'CASCADE')


def downgrade():
    """Drop the foreign keys first, then the columns they reference."""
    for fk_name in (u'station_clients_id_fk', u'station_station_id_fk'):
        op.drop_constraint(fk_name, 'station', type_='foreignkey')
    for col_name in ('fk_client', 'parent'):
        op.drop_column('station', col_name)
| {
"repo_name": "ebu/radiodns-plugit",
"path": "RadioDns-PlugIt/alembic/versions/7045978e05ec_added_station_overrides.py",
"copies": "1",
"size": "1077",
"license": "bsd-3-clause",
"hash": -4390575369357088000,
"line_mean": 34.9,
"line_max": 118,
"alpha_frac": 0.7019498607,
"autogenerated": false,
"ratio": 3.121739130434783,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4323688991134783,
"avg_score": null,
"num_lines": null
} |
"""Added Subject table, ExternalID table, and subject.id foreign key to Facts.

Revision ID: 33dee8e1b22c
Revises: 362ecdf08386
Create Date: 2013-06-11 10:49:27.559636

"""

# revision identifiers, used by Alembic.
revision = '33dee8e1b22c'
down_revision = '362ecdf08386'

from alembic import op
import sqlalchemy as sa


def upgrade():
    """Create subject and externalID tables and link facts to subjects."""
    subject_args = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('internal_id', sa.String(length=100), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('internal_id'),
    ]
    op.create_table('subject', *subject_args)

    external_id_args = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=80), nullable=False),
        sa.Column('description', sa.String(length=255), nullable=True),
        sa.Column('subject_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['subject_id'], ['subject.id'], ),
        sa.PrimaryKeyConstraint('id'),
        # At most one external ID of a given name per subject.
        sa.UniqueConstraint('subject_id', 'name', name='_subject_externalid_name_uc'),
    ]
    op.create_table('externalID', *external_id_args)

    op.add_column(u'facts', sa.Column('subject_id', sa.Integer(), nullable=False))


def downgrade():
    """Undo upgrade(): drop the facts link first, then the new tables."""
    op.drop_column(u'facts', 'subject_id')
    op.drop_table('externalID')
    op.drop_table('subject')
| {
"repo_name": "msscully/datamart",
"path": "alembic/versions/33dee8e1b22c_added_subject_table_.py",
"copies": "1",
"size": "1425",
"license": "mit",
"hash": -6413467736652968000,
"line_mean": 32.1395348837,
"line_max": 82,
"alpha_frac": 0.6785964912,
"autogenerated": false,
"ratio": 3.4009546539379474,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4579551145137948,
"avg_score": null,
"num_lines": null
} |
"""Added summary_id to SkirmishAction

Revision ID: 28f8622f68c3
Revises: 391058ac6f40
Create Date: 2013-07-07 16:05:54.554858

"""

# revision identifiers, used by Alembic.
revision = '28f8622f68c3'
down_revision = '391058ac6f40'

from alembic import op
import sqlalchemy as sa


def upgrade(engine_name):
    """Run the upgrade for the given engine (upgrade_engine1/2/3)."""
    # globals() lookup replaces the previous eval() of a constructed string:
    # identical behavior for the names defined here, without evaluating
    # arbitrary code (and matching the dispatch style of newer migrations).
    globals()["upgrade_%s" % engine_name]()


def downgrade(engine_name):
    """Run the downgrade for the given engine (downgrade_engine1/2/3)."""
    globals()["downgrade_%s" % engine_name]()


def _add_summary_id():
    # Shared upgrade body: every engine gets the same nullable column.
    op.add_column('skirmish_actions',
                  sa.Column('summary_id', sa.String(), nullable=True))


def _drop_summary_id():
    # Shared downgrade body.
    op.drop_column('skirmish_actions', 'summary_id')


def upgrade_engine1():
    _add_summary_id()


def downgrade_engine1():
    _drop_summary_id()


def upgrade_engine2():
    _add_summary_id()


def downgrade_engine2():
    _drop_summary_id()


def upgrade_engine3():
    _add_summary_id()


def downgrade_engine3():
    _drop_summary_id()
| {
"repo_name": "atiaxi/chromabot",
"path": "alembic/versions/28f8622f68c3_added_summary_id_to_.py",
"copies": "1",
"size": "1591",
"license": "mit",
"hash": -3121878894910684000,
"line_mean": 24.6612903226,
"line_max": 90,
"alpha_frac": 0.6719044626,
"autogenerated": false,
"ratio": 3.4141630901287554,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45860675527287553,
"avg_score": null,
"num_lines": null
} |
"""Added table data for the prediction stuff

Revision ID: 21f77e60814
Revises: 465365d933e
Create Date: 2015-12-30 16:33:40.491732

"""

# revision identifiers, used by Alembic.
revision = '21f77e60814'
down_revision = '465365d933e'
branch_labels = None
depends_on = None

from alembic import op
import sqlalchemy as sa


def upgrade():
    """Create the prediction-run tables (runs plus per-user entries)."""
    run_args = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('winner_id', sa.Integer(), nullable=True),
        sa.Column('started', sa.DateTime(), nullable=False),
        sa.Column('ended', sa.DateTime(), nullable=True),
        # New runs start open by default.
        sa.Column('open', sa.Boolean(), server_default=sa.text('true'), nullable=False),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('tb_prediction_run', *run_args)

    entry_args = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('prediction_run_id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('prediction', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['prediction_run_id'], ['tb_prediction_run.id'], ),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('tb_prediction_run_entry', *entry_args)


def downgrade():
    """Drop the entry table first (it references tb_prediction_run)."""
    op.drop_table('tb_prediction_run_entry')
    op.drop_table('tb_prediction_run')
| {
"repo_name": "gigglearrows/anniesbot",
"path": "alembic/versions/21f77e60814_added_table_data_for_the_prediction_.py",
"copies": "1",
"size": "1419",
"license": "mit",
"hash": -7835828607382526000,
"line_mean": 31.25,
"line_max": 84,
"alpha_frac": 0.6765327696,
"autogenerated": false,
"ratio": 3.427536231884058,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4604069001484058,
"avg_score": null,
"num_lines": null
} |
"""Added table for front pages

Revision ID: 4d46b88366fc
Revises:
Create Date: 2016-06-08 19:08:11.281686

"""

# revision identifiers, used by Alembic.
revision = '4d46b88366fc'
down_revision = None
branch_labels = None
depends_on = None

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql


def upgrade(engine_name):
    """Dispatch to the upgrade for the given engine (development/test/production)."""
    globals()["upgrade_%s" % engine_name]()


def downgrade(engine_name):
    """Dispatch to the downgrade for the given engine."""
    globals()["downgrade_%s" % engine_name]()


def _create_front_pages():
    # Shared body: the front_pages schema is identical for every engine,
    # so the per-engine upgrades delegate here instead of repeating it.
    op.create_table('front_pages',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('page_data', mysql.MEDIUMTEXT(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )


def _drop_front_pages():
    # Shared downgrade body.
    op.drop_table('front_pages')


def upgrade_development():
    _create_front_pages()


def downgrade_development():
    _drop_front_pages()


def upgrade_test():
    _create_front_pages()


def downgrade_test():
    _drop_front_pages()


def upgrade_production():
    _create_front_pages()


def downgrade_production():
    _drop_front_pages()
| {
"repo_name": "c4fcm/CivilServant",
"path": "alembic/versions/4d46b88366fc_added_table_for_front_pages.py",
"copies": "1",
"size": "2067",
"license": "mit",
"hash": 1457758832660858000,
"line_mean": 25.164556962,
"line_max": 63,
"alpha_frac": 0.6613449444,
"autogenerated": false,
"ratio": 3.6844919786096257,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9731403201294293,
"avg_score": 0.02288674434306647,
"num_lines": 79
} |
"""Added tables for pure banphrases

Revision ID: 15712d19833
Revises: 162dd748b57
Create Date: 2015-12-24 02:17:41.959007

"""

# revision identifiers, used by Alembic.
revision = '15712d19833'
down_revision = '162dd748b57'
branch_labels = None
depends_on = None

from alembic import op
import sqlalchemy as sa


def upgrade():
    """Create tb_banphrase and its per-banphrase stats table."""
    banphrase_args = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=256), nullable=False),
        sa.Column('phrase', sa.String(length=256), nullable=False),
        sa.Column('length', sa.Integer(), nullable=False),
        sa.Column('permanent', sa.Boolean(), nullable=False),
        sa.Column('warning', sa.Boolean(), nullable=False),
        sa.Column('notify', sa.Boolean(), nullable=False),
        sa.Column('case_sensitive', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('tb_banphrase', *banphrase_args)

    # One stats row per banphrase (banphrase_id is both PK and FK).
    data_args = [
        sa.Column('banphrase_id', sa.Integer(), autoincrement=False, nullable=False),
        sa.Column('num_uses', sa.Integer(), nullable=False),
        sa.Column('added_by', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['added_by'], ['tb_user.id'], ),
        sa.ForeignKeyConstraint(['banphrase_id'], ['tb_banphrase.id'], ),
        sa.PrimaryKeyConstraint('banphrase_id'),
    ]
    op.create_table('tb_banphrase_data', *data_args)


def downgrade():
    """Drop the dependent data table before tb_banphrase itself."""
    op.drop_table('tb_banphrase_data')
    op.drop_table('tb_banphrase')
| {
"repo_name": "gigglearrows/anniesbot",
"path": "alembic/versions/15712d19833_added_tables_for_pure_banphrases.py",
"copies": "1",
"size": "1572",
"license": "mit",
"hash": 2323388562591786500,
"line_mean": 32.4468085106,
"line_max": 81,
"alpha_frac": 0.6755725191,
"autogenerated": false,
"ratio": 3.454945054945055,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9581761459290558,
"avg_score": 0.009751222950899505,
"num_lines": 47
} |
"""added tags to logs

Revision ID: 3154c750f093
Revises: 337c8696cf2a
Create Date: 2015-09-07 13:57:53.932576

"""

# revision identifiers, used by Alembic.
revision = '3154c750f093'
down_revision = '337c8696cf2a'

from alembic import op
import sqlalchemy as sa

# Boolean tag columns managed by this migration, in creation order.
_TAG_COLUMNS = (
    'contact_nit',
    'coplan',
    'coteach',
    'general_teacher_tech_help',
    'google_maintenance',
    'google_resources',
    'hardware',
    'jeffpd_publication',
    'teacher_chromebook_help',
)


def upgrade():
    """Add one nullable Boolean tag column per entry in _TAG_COLUMNS."""
    for column_name in _TAG_COLUMNS:
        op.add_column('logs', sa.Column(column_name, sa.Boolean(), nullable=True))


def downgrade():
    """Drop the tag columns, in reverse order of creation."""
    for column_name in reversed(_TAG_COLUMNS):
        op.drop_column('logs', column_name)
| {
"repo_name": "jeffthemaximum/jeffPD",
"path": "migrations/versions/3154c750f093_added_tags_to_logs.py",
"copies": "1",
"size": "1666",
"license": "mit",
"hash": -6248773273340749000,
"line_mean": 38.6666666667,
"line_max": 94,
"alpha_frac": 0.6776710684,
"autogenerated": false,
"ratio": 3.2349514563106796,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.441262252471068,
"avg_score": null,
"num_lines": null
} |
"""Added target table

Revision ID: 28374b800521
Revises: 460741ff1212
Create Date: 2016-08-24 23:55:14.231624

"""

# revision identifiers, used by Alembic.
revision = '28374b800521'
down_revision = '460741ff1212'

from alembic import op
import sqlalchemy as sa


def upgrade():
    """Create the bazeltarget table, linked to job and jobstep.

    NOTE(review): sa.GUID and the value-less sa.Enum() are not stock
    SQLAlchemy types -- presumably the project registers custom types on
    the `sa` namespace; confirm before reusing this pattern elsewhere.
    """
    op.create_table(
        'bazeltarget',
        sa.Column('id', sa.GUID(), nullable=False),
        sa.Column('step_id', sa.GUID(), nullable=False),
        sa.Column('job_id', sa.GUID(), nullable=False),
        sa.Column('name', sa.Text(), nullable=False),
        sa.Column('status', sa.Enum(), nullable=False),
        sa.Column('result', sa.Enum(), nullable=False),
        sa.Column('duration', sa.Integer(), nullable=True),
        sa.Column('date_created', sa.DateTime(), nullable=False),
        # Target rows disappear automatically with their step/job.
        sa.ForeignKeyConstraint(
            ['step_id'], ['jobstep.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(
            ['job_id'], ['job.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )


def downgrade():
    """Remove the bazeltarget table."""
    op.drop_table('bazeltarget')
| {
"repo_name": "dropbox/changes",
"path": "migrations/versions/28374b800521_added_target_table.py",
"copies": "1",
"size": "1062",
"license": "apache-2.0",
"hash": 2559668249549127700,
"line_mean": 27.7027027027,
"line_max": 65,
"alpha_frac": 0.6242937853,
"autogenerated": false,
"ratio": 3.4705882352941178,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45948820205941177,
"avg_score": null,
"num_lines": null
} |
"""added task cells
Revision ID: 167914646830
Revises: 724cde206c17
Create Date: 2018-06-23 07:46:30.221922
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '167914646830'
down_revision = '724cde206c17'
branch_labels = None
depends_on = None
def _get_or_create_table(*args):
    """Return a handle to the table named args[0], creating it if absent.

    args are the (name, *columns) arguments accepted by both
    op.create_table and sa.sql.table.
    """
    ctx = op.get_context()
    con = op.get_bind()
    if ctx.dialect.has_table(con, args[0]):
        # Already present: just build a lightweight table construct.
        return sa.sql.table(*args)
    return op.create_table(*args)
def upgrade():
    """
    To upgrade we need to split the grade_cell and solution_cell database
    entries into a part in base_cell and a part in either grade_cell or
    solution_cell. Because the names are the same for the new and old
    grade_cell and solution_cell tables we create temporary tables
    'grade_cells' and 'solution_cells' and once the transfer has occured
    we drop the old tables and rename the temporary tables to the
    original names.
    """
    # New-schema tables (plural names, so they can coexist with the old
    # singular tables while data is copied over).
    new_grade_table = _get_or_create_table(
        'grade_cells',
        sa.Column('id', sa.VARCHAR(32), nullable=False),
        sa.Column('max_score', sa.Float(), nullable=False),
        sa.Column('cell_type', sa.VARCHAR(8), nullable=False),
    )
    new_solution_table = _get_or_create_table(
        'solution_cells',
        sa.Column('id', sa.VARCHAR(32), nullable=False),
    )
    # Old-schema tables: plain sa.Table handles, used only for SELECTs.
    old_grade_table = sa.Table(
        'grade_cell',
        sa.MetaData(),
        sa.Column('id', sa.VARCHAR(32), nullable=False),
        sa.Column('name', sa.VARCHAR(128), nullable=False),
        sa.Column('max_score', sa.Float(), nullable=False),
        sa.Column('cell_type', sa.VARCHAR(8), nullable=False),
        sa.Column('notebook_id', sa.VARCHAR(32)),
    )
    old_solution_table = sa.Table(
        'solution_cell',
        sa.MetaData(),
        sa.Column('id', sa.VARCHAR(32), nullable=False),
        sa.Column('name', sa.VARCHAR(128), nullable=False),
        sa.Column('max_score', sa.Float(), nullable=False),
        sa.Column('cell_type', sa.VARCHAR(8), nullable=False),
        sa.Column('notebook_id', sa.VARCHAR(32)),
    )
    # Common part shared by grade and solution cells; 'type' discriminates.
    base_cell_table = _get_or_create_table(
        'base_cell',
        sa.Column('id', sa.VARCHAR(32), nullable=False),
        sa.Column('name', sa.VARCHAR(128), nullable=False),
        sa.Column('notebook_id', sa.VARCHAR(32)),
        sa.Column('type', sa.VARCHAR(50))
    )
    connection = op.get_bind()
    results = connection.execute(sa.select([
        old_grade_table.c.name,
        old_grade_table.c.id,
        old_grade_table.c.cell_type,
        old_grade_table.c.notebook_id,
        old_grade_table.c.max_score
    ])).fetchall()
    # copy info to the base_cell table
    base_grade_cells = [
        {
            'name': name,
            'id': cellid,
            'type': 'GradeCell',
            'notebook_id': notebook_id,
        } for name, cellid, _, notebook_id, _ in results]
    op.bulk_insert(base_cell_table, base_grade_cells)
    # copy the grade_cell specific info to the grade_cells temporary database
    # (rows share the same id as their base_cell counterpart)
    grade_cells = [
        {
            'id': cellid,
            'cell_type': celltype,
            'max_score': max_score,
        } for _, cellid, celltype, _, max_score in results]
    op.bulk_insert(new_grade_table, grade_cells)
    # now transfer the solution cells...
    results = connection.execute(sa.select([
        old_solution_table.c.name,
        old_solution_table.c.id,
        old_solution_table.c.notebook_id,
    ])).fetchall()
    # copy info to the base_cell table
    base_solution_cells = [
        {
            'name': name,
            'id': cellid,
            'type': 'SolutionCell',
            'notebook_id': notebook_id,
        } for name, cellid, notebook_id in results]
    op.bulk_insert(base_cell_table, base_solution_cells)
    # copy the solution_cell specific info to the solution_cells
    # temporary database
    solution_cells = [
        {
            'id': cellid,
        } for _, cellid, _ in results]
    op.bulk_insert(new_solution_table, solution_cells)
    # drop the old tables
    op.drop_table(u'grade_cell')
    op.drop_table(u'solution_cell')
def downgrade():
    # Irreversible: upgrade() drops the original grade_cell/solution_cell
    # tables after copying their data, so there is nothing to restore here.
    pass
| {
"repo_name": "jhamrick/nbgrader",
"path": "nbgrader/alembic/versions/167914646830_added_task_cells.py",
"copies": "2",
"size": "4277",
"license": "bsd-3-clause",
"hash": -4809585646483568000,
"line_mean": 28.7013888889,
"line_max": 77,
"alpha_frac": 0.6025251344,
"autogenerated": false,
"ratio": 3.482899022801303,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5085424157201304,
"avg_score": null,
"num_lines": null
} |
"""Added TeamInfo table

Revision ID: c71a1b0747a0
Revises: 2a147893565a
Create Date: 2016-02-27 19:38:42.508684

"""

# revision identifiers, used by Alembic.
revision = 'c71a1b0747a0'
down_revision = '2a147893565a'

from alembic import op
import sqlalchemy as sa


def upgrade(engine_name):
    """Run the upgrade for the given engine (upgrade_engine1/2/3)."""
    # globals() lookup replaces the previous eval() of a constructed string:
    # identical behavior for the names defined here, without evaluating
    # arbitrary code (and matching the dispatch style of newer migrations).
    globals()["upgrade_%s" % engine_name]()


def downgrade(engine_name):
    """Run the downgrade for the given engine (downgrade_engine1/2/3)."""
    globals()["downgrade_%s" % engine_name]()


def _create_team_info():
    # Shared upgrade body: the team_info schema is identical per engine.
    op.create_table('team_info',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('greeting', sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )


def _drop_team_info():
    # Shared downgrade body.
    op.drop_table('team_info')


def upgrade_engine1():
    _create_team_info()


def downgrade_engine1():
    _drop_team_info()


def upgrade_engine2():
    _create_team_info()


def downgrade_engine2():
    _drop_team_info()


def upgrade_engine3():
    _create_team_info()


def downgrade_engine3():
    _drop_team_info()
| {
"repo_name": "atiaxi/chromabot",
"path": "alembic/versions/c71a1b0747a0_added_teaminfo_table.py",
"copies": "1",
"size": "1952",
"license": "mit",
"hash": -7930698694532358000,
"line_mean": 24.3506493506,
"line_max": 63,
"alpha_frac": 0.6521516393,
"autogenerated": false,
"ratio": 3.479500891265597,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4631652530565597,
"avg_score": null,
"num_lines": null
} |
"""Added Term models

Revision ID: 8e49c4425462
Revises: 3da45596c5c1
Create Date: 2016-08-28 17:19:39.131717

"""

# revision identifiers, used by Alembic.
revision = '8e49c4425462'
down_revision = '3da45596c5c1'

from alembic import op
import sqlalchemy as sa


def upgrade():
    """Create the terms table plus its group-assignment and signup tables."""
    terms_args = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('subject_id', sa.Integer(), nullable=True),
        sa.Column('type', sa.Enum('lab', 'project', 'seminary', 'exercises', name='termtype'), nullable=False),
        sa.Column('day', sa.Enum('monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday', name='day'), nullable=False),
        sa.Column('time_from', sa.Time(), nullable=False),
        sa.Column('time_to', sa.Time(), nullable=False),
        sa.ForeignKeyConstraint(['subject_id'], ['subjects.id'], ),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('terms', *terms_args)

    # Many-to-many: which groups a term belongs to.
    terms_groups_args = [
        sa.Column('term_id', sa.Integer(), nullable=False),
        sa.Column('group_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['group_id'], ['groups.id'], ),
        sa.ForeignKeyConstraint(['term_id'], ['terms.id'], ),
        sa.PrimaryKeyConstraint('term_id', 'group_id'),
    ]
    op.create_table('terms_groups', *terms_groups_args)

    # Per-user signups for a term.
    terms_signup_args = [
        sa.Column('term_id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('points', sa.Integer(), nullable=False),
        sa.Column('reason', sa.String(), nullable=False),
        sa.Column('reason_accepted', sa.Boolean(), nullable=False),
        sa.Column('reason_accepted_by', sa.Integer(), nullable=True),
        sa.Column('is_assigned', sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(['reason_accepted_by'], ['users.id'], ),
        sa.ForeignKeyConstraint(['term_id'], ['terms.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('term_id', 'user_id'),
    ]
    op.create_table('terms_signup', *terms_signup_args)


def downgrade():
    """Drop the dependent tables before terms itself."""
    op.drop_table('terms_signup')
    op.drop_table('terms_groups')
    op.drop_table('terms')
| {
"repo_name": "disconnect3d/TermsScheduler",
"path": "backend/src/migrations/versions/8e49c4425462_added_term_models.py",
"copies": "1",
"size": "2194",
"license": "mit",
"hash": -7404360004104402000,
"line_mean": 37.4912280702,
"line_max": 136,
"alpha_frac": 0.6517775752,
"autogenerated": false,
"ratio": 3.3343465045592704,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.448612407975927,
"avg_score": null,
"num_lines": null
} |
"""Added the deck table

Revision ID: 46e47b8f160
Revises: 4f48d3dd0fb
Create Date: 2015-11-04 00:03:57.770809

"""

# revision identifiers, used by Alembic.
revision = '46e47b8f160'
down_revision = '4f48d3dd0fb'
branch_labels = None
depends_on = None

from alembic import op
import sqlalchemy as sa


def upgrade():
    """Create the tb_deck table."""
    deck_args = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=128), nullable=True),
        # 'class' is an SQL keyword, but as a quoted column name it is fine.
        sa.Column('class', sa.String(length=128), nullable=True),
        sa.Column('link', sa.String(length=256), nullable=True),
        sa.Column('first_used', sa.DateTime(), nullable=True),
        sa.Column('last_used', sa.DateTime(), nullable=True),
        sa.Column('times_used', sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('tb_deck', *deck_args)


def downgrade():
    """Remove the tb_deck table."""
    op.drop_table('tb_deck')
| {
"repo_name": "gigglearrows/anniesbot",
"path": "alembic/versions/46e47b8f160_added_the_deck_table.py",
"copies": "1",
"size": "1040",
"license": "mit",
"hash": 5996744944749684000,
"line_mean": 27.1081081081,
"line_max": 63,
"alpha_frac": 0.6740384615,
"autogenerated": false,
"ratio": 3.260188087774295,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9390122708105044,
"avg_score": 0.008820768233850227,
"num_lines": 37
} |
"""Added the status table.
Revision ID: e421977e17
Revises: 208ea31c61b
Create Date: 2015-10-03 16:08:03.843081
"""
# revision identifiers, used by Alembic.
revision = 'e421977e17'
down_revision = '208ea31c61b'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('status',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('character_id', sa.Integer(), nullable=True),
sa.Column('name', sa.String(length=20), nullable=True),
sa.Column('sect', sa.String(length=20), nullable=True),
sa.Column('type', sa.String(length=20), nullable=True),
sa.Column('location_earned', sa.String(length=254), nullable=True),
sa.Column('story', sa.Text(), nullable=True),
sa.ForeignKeyConstraint(['character_id'], ['character.id'], ),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###
def downgrade():
    """Drop the ``status`` table created by this revision."""
    op.drop_table('status')
| {
"repo_name": "sayeghr/harpy-network",
"path": "alembic/versions/e421977e17_added_the_status_table.py",
"copies": "1",
"size": "1227",
"license": "mit",
"hash": -794771918949396500,
"line_mean": 29.675,
"line_max": 71,
"alpha_frac": 0.6731866341,
"autogenerated": false,
"ratio": 3.3801652892561984,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.950642694463619,
"avg_score": 0.009384995744001484,
"num_lines": 40
} |
"""added thing_id to experiment_things
Revision ID: 370964c4a364
Revises: a0cca001e8a7
Create Date: 2018-04-22 11:13:22.370143
"""
# revision identifiers, used by Alembic.
revision = '370964c4a364'
down_revision = 'a0cca001e8a7'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Dispatch to the per-environment upgrade routine for *engine_name*."""
    handler = globals()['upgrade_{0}'.format(engine_name)]
    handler()
def downgrade(engine_name):
    """Dispatch to the per-environment downgrade routine for *engine_name*."""
    handler = globals()['downgrade_{0}'.format(engine_name)]
    handler()
def upgrade_development():
    """Add a nullable, indexed ``thing_id`` column to ``experiment_things``."""
    thing_id_col = sa.Column('thing_id', sa.String(length=256), nullable=True)
    op.add_column('experiment_things', thing_id_col)
    op.create_index(op.f('ix_experiment_things_thing_id'),
                    'experiment_things', ['thing_id'], unique=False)
def downgrade_development():
    """Drop the ``thing_id`` index and column (index first, then column)."""
    op.drop_index(op.f('ix_experiment_things_thing_id'),
                  table_name='experiment_things')
    op.drop_column('experiment_things', 'thing_id')
def upgrade_test():
    """Add a nullable, indexed ``thing_id`` column to ``experiment_things``."""
    thing_id_col = sa.Column('thing_id', sa.String(length=256), nullable=True)
    op.add_column('experiment_things', thing_id_col)
    op.create_index(op.f('ix_experiment_things_thing_id'),
                    'experiment_things', ['thing_id'], unique=False)
def downgrade_test():
    """Drop the ``thing_id`` index and column (index first, then column)."""
    op.drop_index(op.f('ix_experiment_things_thing_id'),
                  table_name='experiment_things')
    op.drop_column('experiment_things', 'thing_id')
def upgrade_production():
    """Add a nullable, indexed ``thing_id`` column to ``experiment_things``."""
    thing_id_col = sa.Column('thing_id', sa.String(length=256), nullable=True)
    op.add_column('experiment_things', thing_id_col)
    op.create_index(op.f('ix_experiment_things_thing_id'),
                    'experiment_things', ['thing_id'], unique=False)
def downgrade_production():
    """Drop the ``thing_id`` index and column (index first, then column)."""
    op.drop_index(op.f('ix_experiment_things_thing_id'),
                  table_name='experiment_things')
    op.drop_column('experiment_things', 'thing_id')
| {
"repo_name": "c4fcm/CivilServant",
"path": "alembic/versions/370964c4a364_added_thing_id_to_experiment_things.py",
"copies": "1",
"size": "2288",
"license": "mit",
"hash": -3613151045142145500,
"line_mean": 31.6857142857,
"line_max": 107,
"alpha_frac": 0.6774475524,
"autogenerated": false,
"ratio": 3.4251497005988023,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46025972529988024,
"avg_score": null,
"num_lines": null
} |
"""Added threshold support.
Revision ID: 4fa888fd7eda
Revises: 3dd7e13527f3
Create Date: 2015-05-05 14:39:24.562388
"""
# revision identifiers, used by Alembic.
revision = '4fa888fd7eda'
down_revision = '3dd7e13527f3'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create ``hashmap_thresholds`` with dialect-aware constraint names.

    PostgreSQL gets the ``*_threshold`` unique-constraint names; every
    other backend keeps the legacy ``*_mapping`` names (hack inherited
    from the original migration so existing schemas keep validating).
    """
    dialect_name = op.get_context().dialect.name
    if dialect_name == 'postgresql':
        field_uc, service_uc = 'uniq_field_threshold', 'uniq_service_threshold'
    else:
        field_uc, service_uc = 'uniq_field_mapping', 'uniq_service_mapping'
    table_args = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('threshold_id', sa.String(length=36), nullable=False),
        sa.Column('level', sa.Numeric(precision=20, scale=8), nullable=True),
        sa.Column('cost', sa.Numeric(precision=20, scale=8), nullable=False),
        sa.Column('map_type',
                  sa.Enum('flat', 'rate', name='enum_map_type'),
                  nullable=False),
        sa.Column('service_id', sa.Integer(), nullable=True),
        sa.Column('field_id', sa.Integer(), nullable=True),
        sa.Column('group_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['field_id'], ['hashmap_fields.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['group_id'], ['hashmap_groups.id'],
                                ondelete='SET NULL'),
        sa.ForeignKeyConstraint(['service_id'], ['hashmap_services.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('threshold_id'),
        sa.UniqueConstraint('level', 'field_id', name=field_uc),
        sa.UniqueConstraint('level', 'service_id', name=service_uc),
    ]
    op.create_table('hashmap_thresholds', *table_args,
                    **{'mysql_charset': 'utf8', 'mysql_engine': 'InnoDB'})
def downgrade():
    """Drop the thresholds table created by this revision."""
    op.drop_table('hashmap_thresholds')
| {
"repo_name": "FNST-OpenStack/cloudkitty",
"path": "cloudkitty/rating/hash/db/sqlalchemy/alembic/versions/4fa888fd7eda_added_threshold_support.py",
"copies": "2",
"size": "1839",
"license": "apache-2.0",
"hash": -3151534792239106000,
"line_mean": 36.5306122449,
"line_max": 73,
"alpha_frac": 0.650353453,
"autogenerated": false,
"ratio": 3.563953488372093,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5214306941372093,
"avg_score": null,
"num_lines": null
} |
"""added timestamp completed to logs
Revision ID: 1abfb1cdc0ea
Revises: 38c25da6e018
Create Date: 2015-10-10 08:56:25.842989
"""
# revision identifiers, used by Alembic.
revision = '1abfb1cdc0ea'
down_revision = '38c25da6e018'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
    """Add created/completed timestamps to ``logs``; re-key the index."""
    for column_name in ('timestamp_completed', 'timestamp_created'):
        op.add_column('logs', sa.Column(column_name, sa.DateTime(),
                                        nullable=True))
    op.create_index('ix_logs_timestamp_created', 'logs',
                    ['timestamp_created'], unique=False)
    op.drop_index('ix_logs_timestamp', 'logs')
def downgrade():
    """Restore the original ``timestamp`` index and drop the new columns."""
    op.create_index('ix_logs_timestamp', 'logs', ['timestamp'], unique=False)
    op.drop_index('ix_logs_timestamp_created', 'logs')
    for column_name in ('timestamp_created', 'timestamp_completed'):
        op.drop_column('logs', column_name)
| {
"repo_name": "jeffthemaximum/jeffPD",
"path": "migrations/versions/1abfb1cdc0ea_added_timestamp_completed_to_logs.py",
"copies": "1",
"size": "1100",
"license": "mit",
"hash": 8079580909490246000,
"line_mean": 33.375,
"line_max": 93,
"alpha_frac": 0.7,
"autogenerated": false,
"ratio": 3.426791277258567,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4626791277258567,
"avg_score": null,
"num_lines": null
} |
"""Added timezones
Revision ID: 40f60062ddd5
Revises: ceddbf7c791c
Create Date: 2017-10-12 20:48:54.112515
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '40f60062ddd5'
down_revision = 'ceddbf7c791c'
branch_labels = None
depends_on = None
def upgrade():
    """Make the user/post datetime columns timezone-aware."""
    def _make_tz_aware(table, column):
        # Same column type, but with timezone support enabled.
        op.alter_column(table_name=table,
                        column_name=column,
                        type_=sa.DateTime(timezone=True))

    _make_tz_aware('users', 'joined_date')
    _make_tz_aware('posts', 'posted_date')
def downgrade():
    """Revert the user/post datetime columns to timezone-naive."""
    def _make_tz_naive(table, column):
        op.alter_column(table_name=table,
                        column_name=column,
                        type_=sa.DateTime(timezone=False))

    _make_tz_naive('users', 'joined_date')
    _make_tz_naive('posts', 'posted_date')
| {
"repo_name": "alethiophile/openakun",
"path": "alembic/versions/40f60062ddd5_added_timezones.py",
"copies": "1",
"size": "1063",
"license": "mit",
"hash": 2407421502505692000,
"line_mean": 22.1086956522,
"line_max": 65,
"alpha_frac": 0.6255879586,
"autogenerated": false,
"ratio": 3.4852459016393444,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9610833860239345,
"avg_score": 0,
"num_lines": 46
} |
"""Added Topology Discovery Table
Revision ID: 388cbc17f111
Revises: 32076c59a4c1
Create Date: 2017-01-04 00:19:06.384325
"""
# revision identifiers, used by Alembic.
revision = '388cbc17f111'
down_revision = '32076c59a4c1'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create ``topology_discovery``, keyed on (host, protocol_interface)."""
    table_args = [
        sa.Column('host', sa.String(255), primary_key=True, nullable=False),
        sa.Column('protocol_interface', sa.String(48), primary_key=True,
                  nullable=False),
        sa.Column('phy_interface', sa.String(48), nullable=True),
        sa.Column('created', sa.DateTime(), nullable=True),
        sa.Column('heartbeat', sa.DateTime(), nullable=True),
        sa.Column('remote_mgmt_addr', sa.String(32), nullable=True),
        sa.Column('remote_system_name', sa.String(128), nullable=True),
        sa.Column('remote_system_desc', sa.String(256), nullable=True),
        sa.Column('remote_port_id_mac', sa.String(17), nullable=True),
        sa.Column('remote_chassis_id_mac', sa.String(17), nullable=True),
        sa.Column('remote_port', sa.String(48), nullable=True),
        sa.Column('remote_evb_cfgd', sa.Boolean(), nullable=True),
        sa.Column('remote_evb_mode', sa.String(16), nullable=True),
        sa.Column('configurations', sa.String(512), nullable=True),
    ]
    op.create_table('topology_discovery', *table_args)
def downgrade():
    """Drop the ``topology_discovery`` table."""
    op.drop_table('topology_discovery')
| {
"repo_name": "CiscoSystems/fabric_enabler",
"path": "dfa/db/migration/versions/388cbc17f111_added_topology_discovery_table.py",
"copies": "1",
"size": "1448",
"license": "apache-2.0",
"hash": -538450905722037700,
"line_mean": 34.3170731707,
"line_max": 76,
"alpha_frac": 0.6581491713,
"autogenerated": false,
"ratio": 3.3364055299539173,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4494554701253917,
"avg_score": null,
"num_lines": null
} |
"""Added total_score to ExamsTaken
Revision ID: 49783ae83566
Revises: 6723848870f0
Create Date: 2017-11-24 15:02:39.633000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '49783ae83566'
down_revision = '6723848870f0'
branch_labels = None
depends_on = None
def upgrade():
    """Add ``exams_taken.total_score``; convert TINYINT flags to Boolean."""
    def _tinyint_to_bool(table, column, nullable):
        # MySQL stores booleans as TINYINT(1); normalize to sa.Boolean.
        op.alter_column(table, column,
                        existing_type=mysql.TINYINT(display_width=1),
                        type_=sa.Boolean(),
                        existing_nullable=nullable)

    _tinyint_to_bool('courses', 'randomize_questions', False)
    op.add_column('exams_taken',
                  sa.Column('total_score', sa.Integer(), nullable=False))
    _tinyint_to_bool('users', 'is_active_premium', True)
    _tinyint_to_bool('users', 'is_confirmed', True)
def downgrade():
    """Drop ``total_score``; restore the TINYINT representation of flags."""
    def _bool_to_tinyint(table, column, nullable):
        op.alter_column(table, column,
                        existing_type=sa.Boolean(),
                        type_=mysql.TINYINT(display_width=1),
                        existing_nullable=nullable)

    _bool_to_tinyint('users', 'is_confirmed', True)
    _bool_to_tinyint('users', 'is_active_premium', True)
    op.drop_column('exams_taken', 'total_score')
    _bool_to_tinyint('courses', 'randomize_questions', False)
| {
"repo_name": "iamOgunyinka/sproot",
"path": "migrations/versions/49783ae83566_added_total_score_to_examstaken.py",
"copies": "1",
"size": "1838",
"license": "apache-2.0",
"hash": 286535817712303680,
"line_mean": 34.3461538462,
"line_max": 88,
"alpha_frac": 0.6158868335,
"autogenerated": false,
"ratio": 3.7663934426229506,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9841408556252875,
"avg_score": 0.008174343974015218,
"num_lines": 52
} |
"""Added transactions table
Revision ID: 5632aa202d89
Revises: 3a47813ce501
Create Date: 2015-03-18 14:54:09.061787
"""
# revision identifiers, used by Alembic.
revision = '5632aa202d89'
down_revision = '4d3ed7925db3'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create ``quark_transactions`` and link IP addresses to it via FK."""
    op.create_table('quark_transactions',
                    sa.Column('created_at', sa.DateTime(), nullable=True),
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.PrimaryKeyConstraint('id'),
                    mysql_engine='InnoDB')
    transaction_col = sa.Column('transaction_id', sa.Integer(), nullable=True)
    op.add_column(u'quark_ip_addresses', transaction_col)
    op.create_foreign_key('fk_quark_ips_transaction_id',
                          'quark_ip_addresses', 'quark_transactions',
                          ['transaction_id'], ['id'])
def downgrade():
    """Unlink IP addresses from transactions, then drop the table."""
    op.drop_constraint('fk_quark_ips_transaction_id', 'quark_ip_addresses',
                       type_='foreignkey')
    op.drop_column(u'quark_ip_addresses', 'transaction_id')
    op.drop_table('quark_transactions')
| {
"repo_name": "asadoughi/quark",
"path": "quark/db/migration/alembic/versions/5632aa202d89_added_transactions_table.py",
"copies": "7",
"size": "1152",
"license": "apache-2.0",
"hash": -2396718754030776300,
"line_mean": 31,
"line_max": 75,
"alpha_frac": 0.5815972222,
"autogenerated": false,
"ratio": 3.7041800643086815,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.778577728650868,
"avg_score": null,
"num_lines": null
} |
"""Added travel_multiplier to Region
Revision ID: 15cb2e66f74
Revises: 4effbf7f3fe7
Create Date: 2015-03-14 10:19:58.780850
"""
# revision identifiers, used by Alembic.
revision = '15cb2e66f74'
down_revision = '4effbf7f3fe7'
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade (e.g. ``upgrade_engine1``).

    Uses an explicit ``globals()`` lookup instead of ``eval`` so that only
    functions actually defined in this module can be invoked (raises
    ``KeyError`` for an unknown engine name instead of evaluating an
    arbitrary expression).
    """
    globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade (e.g. ``downgrade_engine1``).

    Uses an explicit ``globals()`` lookup instead of ``eval`` so that only
    functions actually defined in this module can be invoked (raises
    ``KeyError`` for an unknown engine name instead of evaluating an
    arbitrary expression).
    """
    globals()["downgrade_%s" % engine_name]()
def upgrade_engine1():
    """Add ``regions.travel_multiplier`` (float, NOT NULL, default 1.0)."""
    travel_col = sa.Column('travel_multiplier', sa.Float(),
                           server_default='1.0', nullable=False)
    op.add_column('regions', travel_col)
def downgrade_engine1():
    """Remove ``regions.travel_multiplier``."""
    op.drop_column('regions', 'travel_multiplier')
def upgrade_engine2():
    """Add ``regions.travel_multiplier`` (float, NOT NULL, default 1.0)."""
    travel_col = sa.Column('travel_multiplier', sa.Float(),
                           server_default='1.0', nullable=False)
    op.add_column('regions', travel_col)
def downgrade_engine2():
    """Remove ``regions.travel_multiplier``."""
    op.drop_column('regions', 'travel_multiplier')
def upgrade_engine3():
    """Add ``regions.travel_multiplier`` (float, NOT NULL, default 1.0)."""
    travel_col = sa.Column('travel_multiplier', sa.Float(),
                           server_default='1.0', nullable=False)
    op.add_column('regions', travel_col)
def downgrade_engine3():
    """Remove ``regions.travel_multiplier``."""
    op.drop_column('regions', 'travel_multiplier')
| {
"repo_name": "atiaxi/chromabot",
"path": "alembic/versions/15cb2e66f74_added_travel_multiplier_to_region.py",
"copies": "1",
"size": "1642",
"license": "mit",
"hash": -3003194992372215000,
"line_mean": 25.4838709677,
"line_max": 110,
"alpha_frac": 0.6729598051,
"autogenerated": false,
"ratio": 3.5464362850971924,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47193960901971926,
"avg_score": null,
"num_lines": null
} |
#Added two arguements
def add(a,b):
return a + b
c = add(3, 4)
print c
#Subtraction two arguements
def sub(j,k):
return j - k
l = sub(5, 3)
print l
#Multiplied two arguements
def mul(r,t):
return r * t
e = mul(4,4)
print e
#Divided two arguements
def div(q,w):
return float(q / w)
y = div(2,3)
print y
#Defined hours from seconds
def hours_from_seconds_div(a,b):
return a/b
s = div(86400, 3600)
print s
#Representation of a radius of a circle
def circle_area(r):
return 3.14159265359 * (r**2)
print circle_area(5)
#Representation of a volume of the sphere
def sphere_volume(v):
return 3.14159265359 * 1.333333333333 * (v**3)
print sphere_volume(5)
#Representation of the average of the volumes
def avg_volume(a,b):
return ((1.0/6 * 3.14159265359 * a**3) + (1.0/6 * 3.14159265359 * b**3)) /2
print avg_volume (10,20)
#Representation of the 3 side lengths of a triangle
def area(a,b,c):
n= (a+b+c)/2
return (n*(n-a)*(n-b)*(n-c))**0.5
print area(1, 2, 2.5)
#Making a string an agrument and returnng it as a work with additional space
def right_align(word):
return (80-len(word))*(" ") + word
print right_align( "Hello" )
#String as an agrument that is centered
def center(word):
return (40-len(word))*(" ") + word
print center("Hello")
#Message box
#string as an argument and returns a message box
def msg_box(word):
return "+" + ((len(word)+4)*"-") + "+" + "\n" + "|" + (2*" ") + (word)+ (2*" ") + "|" + "\n" + "+" + ((len(word)+4)*"-") + "+"
print msg_box("Hello")
print msg_box("I eat cats!")
#calling functions
add1= add(5,6)
add2= add(6,3)
sub1= sub(9,3)
sub2= sub(5,4)
mul1= mul(2,3)
mul2= mul(2,4)
div1= div(5,3)
div2= div(7,4)
hoursfromsec1= hours_from_seconds_div(97000,4800)
hoursfromsec2= hours_from_seconds_div(87000,4800)
circlearea1= circle_area(4)
circlearea2= circle_area(9)
spherevolume1= sphere_volume(8)
spherevolume2= sphere_volume(3)
averagevolume1= avg_volume(6,4)
averagevolume2= avg_volume(4,4)
area1= area(1,2,3)
area2= area(4,5,6)
rightalign1= right_align("LOL")
rightalign2= right_align("YEAA")
center1= center("hahaha")
center2= center("What")
msgbox1= msg_box("Poop")
msgbox2= msg_box("yo")
#printing the functions
print msg_box (str(add1))
print msg_box (str(add2))
print msg_box (str(sub1))
print msg_box (str(sub2))
print msg_box (str(mul1))
print msg_box (str(mul2))
print msg_box (str(div1))
print msg_box (str(div2))
print msg_box (str(hoursfromsec1))
print msg_box (str(hoursfromsec2))
print msg_box (str(circlearea1))
print msg_box (str(circlearea2))
print msg_box (str(spherevolume1))
print msg_box (str(spherevolume2))
print msg_box (str(averagevolume1))
print msg_box (str(averagevolume2))
print msg_box (str(area1))
print msg_box (str(area2))
print msg_box (str(rightalign1))
print msg_box (str(rightalign2))
print msg_box (str(center1))
print msg_box (str(center2))
print msg_box (str(msgbox1))
print msg_box (str(msgbox2))
#def is a keyword that indicates that this is a function definition
#print would print out the outcome
| {
"repo_name": "joook1710-cmis/joook1710-cmis-cs2",
"path": "function.py",
"copies": "1",
"size": "3016",
"license": "cc0-1.0",
"hash": 5947570063472755000,
"line_mean": 22.9365079365,
"line_max": 130,
"alpha_frac": 0.6863395225,
"autogenerated": false,
"ratio": 2.4863973619126134,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8403499885125741,
"avg_score": 0.053847399857374384,
"num_lines": 126
} |
#Added two arguments
def add(a,b):
return a + b
print add(7,7)
#Subtracted two arguments
def sub(a,b):
return a - b
print sub(8,4)
#Multiplied two arguments
def mul(a,b):
return a*b
print mul(9,2)
#Divided two arguments
def div(a,b):
return float(a)/b
print div(4,2)
#Conversion of hours to seconds
def hours_from_seconds_div(a,b):
return a/b
print div(86400,3600)
#Representation of a radius of a circle with one argument
def circle_area(r):
return 3.14159265359 * (r**2)
print circle_area(5)
#Representation of a volume of the spere
def sphere_volume(v):
return 3.14159265359 * 1.333333333333 * (v**3)
print sphere_volume(5)
#Representation of the average volumes
def avg_volume(a,b):
return ((1.0/6 * 3.14159265359 * a**3) + (1.0/6 * 3.14159265359 * b**3)) /2
print avg_volume (10,20)
#Area of the three side lengths of a triangle
def area(a,b,c):
n=(a+b+c)/2
return (n*(n-a)*(n-b)*(n-c))**0.5
print area(1,2,2.5)
#String as an argument that returns the word with additional spaces
def right_align(word):
return (80-len(word))* " " + word
print right_align( "Hello" )
#String as an argument that returns the spaces centered on the screen
def center(word):
return (40-len(word))*" "+word
print center("Hello")
#Defineing a function that takes a string as an argument and returning a Message Box
def msg_box(x):
return "+" + ((len(x)+4)*"-") + "+" + "\n" + "|" + (2*" ") + (x)+ (2*" ") + "|" + "\n" + "+" + ((len(x)+4)*"-") + "+"
print msg_box("Hello")
#Returning a message box with the given phrase
print msg_box("I eat cats!")
#Calling functions1
addition1= add(8,7)
subtraction1= sub(6,4)
multiplication1= mul(4,2)
division1= div(8,3)
hours_to_seconds1= div(96400,3600)
area_of_circle1= circle_area(4)
volume_of_sphere1= sphere_volume(7)
average_volume1= avg_volume (20,40)
area_of_three_sides1= area(2,3,3.5)
right_align1= right_align( "Hey" )
center1= center("HaHaHa")
message_box_one1= msg_box("Hi")
message_box_two1= msg_box("I eat burgers!")
#Calling functions 2
addition2= add(3,5)
subtraction2= sub(6,1)
multiplication2= mul(3,2)
division2= div(10,5)
hrs_to_sec2= (76400,3600)
circle_area2= circle_area(3)
volume_of_sphere2= sphere_volume(9)
average_volume2= avg_volume (60,40)
area_of_three_sides2= area(4,6,6.5)
rightalign2= right_align("Bye")
center2= center("Goaway")
messagebox_one2= msg_box("Eww")
messagebox_two2= msg_box("I hate you")
#printing functions1
print msg_box (str(addition1))
print msg_box (str(subtraction1))
print msg_box (str(multiplication1))
print msg_box (str(division1))
print msg_box (str(hours_to_seconds1))
print msg_box (str(area_of_circle1))
print msg_box (str(volume_of_sphere1))
print msg_box (str(average_volume1))
print msg_box (str(area_of_three_sides1))
print msg_box (str(right_align1))
print msg_box (str(center1))
print msg_box (str(message_box_one1))
print msg_box (str(message_box_two1))
#printing functions2
print msg_box (str(addition2))
print msg_box (str(subtraction2))
print msg_box (str(multiplication2))
print msg_box (str(division2))
print msg_box (str(hrs_to_sec2))
print msg_box (str(circle_area2))
print msg_box (str(volume_of_sphere2))
print msg_box (str(average_volume2))
print msg_box (str(area_of_three_sides2))
print msg_box (str(rightalign2))
print msg_box (str(center2))
print msg_box (str(messagebox_one2))
print msg_box (str(messagebox_two2))
| {
"repo_name": "grace1912-cmis/grace1912-cmis-cs2",
"path": "functions.py",
"copies": "1",
"size": "3368",
"license": "cc0-1.0",
"hash": -1872668364780696000,
"line_mean": 28.0344827586,
"line_max": 121,
"alpha_frac": 0.7019002375,
"autogenerated": false,
"ratio": 2.6149068322981366,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.38168070697981366,
"avg_score": null,
"num_lines": null
} |
"""add education and disability for workbench research
Revision ID: f4316bf05797
Revises: 038364a84126
Create Date: 2020-01-09 14:48:57.556654
"""
from alembic import op
import sqlalchemy as sa
import rdr_service.model.utils
from rdr_service.participant_enums import WorkbenchResearcherEducation, WorkbenchResearcherDisability
# revision identifiers, used by Alembic.
revision = 'f4316bf05797'
down_revision = '038364a84126'
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Invoke the per-database upgrade routine (``rdr`` or ``metrics``)."""
    handler = globals()['upgrade_{0}'.format(engine_name)]
    handler()
def downgrade(engine_name):
    """Invoke the per-database downgrade routine (``rdr`` or ``metrics``)."""
    handler = globals()['downgrade_{0}'.format(engine_name)]
    handler()
def upgrade_rdr():
    """Add researcher education/disability columns and widen timestamps.

    Adds nullable ``disability``/``education`` enum columns to both the live
    and history researcher tables, then rewrites the audit timestamp columns
    across the workbench tables as ``DATETIME(6)`` so sub-second precision
    is preserved (MySQL ``DATETIME`` defaults to whole-second precision).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # New nullable enum columns on the live and history researcher tables.
    op.add_column('workbench_researcher', sa.Column('disability',
                  rdr_service.model.utils.Enum(WorkbenchResearcherDisability),
                  nullable=True))
    op.add_column('workbench_researcher', sa.Column('education',
                  rdr_service.model.utils.Enum(WorkbenchResearcherEducation),
                  nullable=True))
    op.add_column('workbench_researcher_history', sa.Column('disability',
                  rdr_service.model.utils.Enum(WorkbenchResearcherDisability),
                  nullable=True))
    op.add_column('workbench_researcher_history', sa.Column('education',
                  rdr_service.model.utils.Enum(WorkbenchResearcherEducation),
                  nullable=True))
    # Raw ALTERs (not op.alter_column) to force microsecond precision on
    # every created/modified-style column in the workbench tables.
    op.execute("ALTER TABLE workbench_researcher CHANGE COLUMN `created` `created` DATETIME(6);")
    op.execute("ALTER TABLE workbench_researcher CHANGE COLUMN `modified` `modified` DATETIME(6);")
    op.execute("ALTER TABLE workbench_researcher_history CHANGE COLUMN `created` `created` DATETIME(6);")
    op.execute("ALTER TABLE workbench_researcher_history CHANGE COLUMN `modified` `modified` DATETIME(6);")
    op.execute("ALTER TABLE workbench_researcher CHANGE COLUMN `creation_time` `creation_time` DATETIME(6);")
    op.execute("ALTER TABLE workbench_researcher CHANGE COLUMN `modified_time` `modified_time` DATETIME(6);")
    op.execute("ALTER TABLE workbench_researcher_history CHANGE COLUMN `creation_time` `creation_time` DATETIME(6);")
    op.execute("ALTER TABLE workbench_researcher_history CHANGE COLUMN `modified_time` `modified_time` DATETIME(6);")
    op.execute("ALTER TABLE workbench_institutional_affiliations CHANGE COLUMN `created` `created` DATETIME(6);")
    op.execute("ALTER TABLE workbench_institutional_affiliations CHANGE COLUMN `modified` `modified` DATETIME(6);")
    op.execute("ALTER TABLE workbench_institutional_affiliations_history CHANGE COLUMN `created` `created` "
               "DATETIME(6);")
    op.execute("ALTER TABLE workbench_institutional_affiliations_history CHANGE COLUMN `modified` `modified` "
               "DATETIME(6);")
    op.execute("ALTER TABLE workbench_workspace CHANGE COLUMN `created` `created` DATETIME(6);")
    op.execute("ALTER TABLE workbench_workspace CHANGE COLUMN `modified` `modified` DATETIME(6);")
    op.execute("ALTER TABLE workbench_workspace_history CHANGE COLUMN `created` `created` DATETIME(6);")
    op.execute("ALTER TABLE workbench_workspace_history CHANGE COLUMN `modified` `modified` DATETIME(6);")
    op.execute("ALTER TABLE workbench_workspace CHANGE COLUMN `creation_time` `creation_time` DATETIME(6);")
    op.execute("ALTER TABLE workbench_workspace CHANGE COLUMN `modified_time` `modified_time` DATETIME(6);")
    op.execute("ALTER TABLE workbench_workspace_history CHANGE COLUMN `creation_time` `creation_time` DATETIME(6);")
    op.execute("ALTER TABLE workbench_workspace_history CHANGE COLUMN `modified_time` `modified_time` DATETIME(6);")
    op.execute("ALTER TABLE workbench_workspace_user CHANGE COLUMN `created` `created` DATETIME(6);")
    op.execute("ALTER TABLE workbench_workspace_user CHANGE COLUMN `modified` `modified` DATETIME(6);")
    op.execute("ALTER TABLE workbench_workspace_user_history CHANGE COLUMN `created` `created` DATETIME(6);")
    op.execute("ALTER TABLE workbench_workspace_user_history CHANGE COLUMN `modified` `modified` DATETIME(6);")
    # ### end Alembic commands ###
def downgrade_rdr():
    """Remove the education/disability columns from both researcher tables.

    The DATETIME(6) precision changes from the upgrade are not reverted.
    """
    for table in ('workbench_researcher_history', 'workbench_researcher'):
        for column in ('education', 'disability'):
            op.drop_column(table, column)
def upgrade_metrics():
    """No schema changes for the metrics database in this revision."""
    pass
def downgrade_metrics():
    """No schema changes for the metrics database in this revision."""
    pass
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/f4316bf05797_add_education_and_disability_for_.py",
"copies": "1",
"size": "5082",
"license": "bsd-3-clause",
"hash": 2932792771540345000,
"line_mean": 51.9375,
"line_max": 120,
"alpha_frac": 0.6727666273,
"autogenerated": false,
"ratio": 3.9303944315545243,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.013735176665876288,
"num_lines": 96
} |
"""added uploaded_images table
Revision ID: 20b9c5b88f9
Revises: 46f4932e624
Create Date: 2015-08-06 15:35:09.129482
"""
# revision identifiers, used by Alembic.
revision = '20b9c5b88f9'
down_revision = '38331aa4875'
from alembic import op
import sqlalchemy as sa
from project.lib.orm.types import TypeEnum
from project.models import UploadedImage
def upgrade():
    """Create ``uploaded_images``, keyed by (uid, img_category)."""
    table_args = [
        sa.Column('uid', sa.VARCHAR(length=40), nullable=False),
        sa.Column('img_category',
                  TypeEnum(UploadedImage.IMG_CATEGORY),
                  nullable=False),
        sa.Column('title', sa.VARCHAR(length=32), nullable=True),
        sa.Column('description', sa.VARCHAR(length=128), nullable=True),
        sa.PrimaryKeyConstraint('uid', 'img_category'),
    ]
    op.create_table('uploaded_images', *table_args)
def downgrade():
    """Drop the ``uploaded_images`` table."""
    op.drop_table('uploaded_images')
| {
"repo_name": "saklar13/Meowth",
"path": "migrations/versions/2015_08_06_20b9_added_uploaded_images_table.py",
"copies": "2",
"size": "1047",
"license": "bsd-3-clause",
"hash": 6103767115889642000,
"line_mean": 25.8461538462,
"line_max": 72,
"alpha_frac": 0.6723973257,
"autogenerated": false,
"ratio": 3.573378839590444,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0034048970613374174,
"num_lines": 39
} |
"""Added User, FacebookAccount
Revision ID: 58533af39fd7
Revises: None
Create Date: 2013-02-04 19:44:58.018873
"""
# revision identifiers, used by Alembic.
revision = '58533af39fd7'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create ``users`` and ``facebook_accounts`` (one account per user)."""
    users_args = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created', sa.DateTime(True), nullable=False),
        sa.Column('updated', sa.DateTime(True), nullable=False),
        sa.Column('nickname', sa.String(length=256), nullable=True),
        sa.Column('email', sa.String(length=256), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'),
    ]
    op.create_table('users', *users_args)
    # user_id is both indexed and unique, enforcing a 1:1 mapping to users.
    facebook_args = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created', sa.DateTime(True), nullable=False),
        sa.Column('updated', sa.DateTime(True), nullable=False),
        sa.Column('uid', sa.String(length=32), nullable=False),
        sa.Column('name', sa.String(length=256), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False, index=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('uid'),
        sa.UniqueConstraint('user_id'),
    ]
    op.create_table('facebook_accounts', *facebook_args)
def downgrade():
    """Drop both tables; child table first to satisfy the FK dependency."""
    for table in ('facebook_accounts', 'users'):
        op.drop_table(table)
| {
"repo_name": "funnyplan/Pager",
"path": "migration/versions/58533af39fd7_added_user_facebooka.py",
"copies": "1",
"size": "1460",
"license": "cc0-1.0",
"hash": 1757702474925226000,
"line_mean": 30.7391304348,
"line_max": 67,
"alpha_frac": 0.6719178082,
"autogenerated": false,
"ratio": 3.4761904761904763,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4648108284390476,
"avg_score": null,
"num_lines": null
} |
"""Added user following relation to user
Revision ID: 1e3be3ff186d
Revises: 29918969a90
Create Date: 2016-10-24 13:02:49.915605
"""
# revision identifiers, used by Alembic.
revision = '1e3be3ff186d'
down_revision = '29918969a90'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create ``user_following`` and seed it from ``user_committee_alerts``."""
    table_args = [
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('committee_id', sa.Integer(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True),
                  server_default=sa.text(u'now()'), nullable=False),
        sa.ForeignKeyConstraint(
            ['committee_id'], ['committee.id'],
            name=op.f('fk_user_following_committee_id_committee'),
            ondelete='CASCADE'),
        sa.ForeignKeyConstraint(
            ['user_id'], ['user.id'],
            name=op.f('fk_user_following_user_id_user'),
            ondelete='CASCADE'),
    ]
    op.create_table('user_following', *table_args)
    op.create_index(op.f('ix_user_following_created_at'), 'user_following',
                    ['created_at'], unique=False)
    # Carry existing alert subscriptions over into the new table.
    op.execute('insert into user_following select * from user_committee_alerts')
def downgrade():
    """Remove the ``user_following`` table, dropping its index first."""
    index_name = op.f('ix_user_following_created_at')
    op.drop_index(index_name, table_name='user_following')
    op.drop_table('user_following')
| {
"repo_name": "Code4SA/pmg-cms-2",
"path": "migrations/versions/1e3be3ff186d_added_user_following_relation_to_user.py",
"copies": "1",
"size": "1343",
"license": "apache-2.0",
"hash": -2373656006607441000,
"line_mean": 37.3714285714,
"line_max": 139,
"alpha_frac": 0.69545793,
"autogenerated": false,
"ratio": 3.3242574257425743,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9477547092041223,
"avg_score": 0.008433652740270388,
"num_lines": 35
} |
"""Added users table
Revision ID: 4503a2e36a01
Revises:
Create Date: 2015-06-01 15:30:52.565284
"""
# revision identifiers, used by Alembic.
revision = '4503a2e36a01'
down_revision = None
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the ``users`` table.

    The column set (encrypted_password, reset_password_token, sign-in
    counters and IPs) mirrors the Rails Devise schema -- presumably this
    schema interoperates with or was ported from a Devise app; confirm.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('users',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('email', sa.String(length=255), nullable=False),
    sa.Column('encrypted_password', sa.String(length=255), nullable=False),
    sa.Column('reset_password_token', sa.String(length=255), nullable=True),
    sa.Column('reset_password_sent_at', sa.DateTime(), nullable=True),
    sa.Column('remember_created_at', sa.DateTime(), nullable=True),
    sa.Column('sign_in_count', sa.Integer(), nullable=False),
    sa.Column('current_sign_in_at', sa.DateTime(), nullable=True),
    sa.Column('last_sign_in_at', sa.DateTime(), nullable=True),
    sa.Column('current_sign_in_ip', sa.String(length=255), nullable=True),
    sa.Column('last_sign_in_ip', sa.String(length=255), nullable=True),
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.Column('locale', sa.SmallInteger(), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    # Both email and reset tokens must be unique per account.
    sa.UniqueConstraint('email'),
    sa.UniqueConstraint('reset_password_token')
    )
    ### end Alembic commands ###
def downgrade():
    """Reverse :func:`upgrade` by removing the ``users`` table."""
    op.drop_table('users')
| {
"repo_name": "vigov5/oshougatsu2015",
"path": "alembic/versions/4503a2e36a01_added_users_table.py",
"copies": "1",
"size": "1611",
"license": "mit",
"hash": 64856040441691700,
"line_mean": 34.0217391304,
"line_max": 76,
"alpha_frac": 0.6809435133,
"autogenerated": false,
"ratio": 3.3773584905660377,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9496238660809082,
"avg_score": 0.01241266861139111,
"num_lines": 46
} |
"""added users table
Revision ID: 46a8aad56
Revises: 35b2b9f7b64e
Create Date: 2015-03-01 07:04:44.409709
"""
# revision identifiers, used by Alembic.
revision = '46a8aad56'
down_revision = '35b2b9f7b64e'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the ``users`` table with unique lookup indexes.

    ``fb_id``, ``email`` and ``username`` each get a unique index so that
    lookups during login/profile resolution stay fast.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('users',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('fb_id', sa.String(length=120), nullable=False),
    sa.Column('email', sa.String(length=64), nullable=False),
    sa.Column('username', sa.String(length=64), nullable=False),
    sa.Column('name', sa.String(length=64), nullable=False),
    sa.Column('avatar_url', sa.String(), nullable=True),
    sa.Column('gender', sa.String(length=30), nullable=True),
    sa.Column('country', sa.String(length=100), nullable=True),
    sa.Column('state', sa.String(length=10), nullable=True),
    sa.Column('city', sa.String(length=60), nullable=True),
    sa.Column('member_since', sa.DateTime(), nullable=True),
    sa.Column('last_seen', sa.DateTime(), nullable=True),
    sa.Column('is_admin', sa.Boolean(), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    # NOTE(review): UNIQUE on avatar_url is unusual -- two users could
    # legitimately share a default avatar URL; confirm this is intended.
    sa.UniqueConstraint('avatar_url')
    )
    op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
    op.create_index(op.f('ix_users_fb_id'), 'users', ['fb_id'], unique=True)
    op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=True)
    ### end Alembic commands ###
def downgrade():
    """Drop the three unique indexes and then the ``users`` table itself."""
    # Same removal order as the autogenerated script: username, fb_id, email.
    for column in ('username', 'fb_id', 'email'):
        op.drop_index(op.f('ix_users_%s' % column), table_name='users')
    op.drop_table('users')
| {
"repo_name": "rosariomgomez/tradyfit",
"path": "vagrant/tradyfit/migrations/versions/46a8aad56_added_users_table.py",
"copies": "1",
"size": "1828",
"license": "mit",
"hash": 8555893766896678000,
"line_mean": 37.0833333333,
"line_max": 82,
"alpha_frac": 0.6635667396,
"autogenerated": false,
"ratio": 3.1355060034305318,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.42990727430305314,
"avg_score": null,
"num_lines": null
} |
"""Added user table and updated other tables accordingly
Revision ID: 1568b7548f2
Revises: 1b64df25658
Create Date: 2015-07-26 02:03:46.590328
"""
# revision identifiers, used by Alembic.
revision = '1568b7548f2'
down_revision = '1b64df25658'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create ``users`` and link ``entries``/``entry_fields`` to it.

    The new ``user_id`` foreign-key columns are nullable so rows created
    before this revision remain valid.
    """
    op.create_table('users',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(), nullable=False),
    sa.Column('updated_at', sa.DateTime(), nullable=False),
    sa.Column('email', sa.String(100), nullable=True),
    # BUG FIX: the original declared 'password_salt' twice, which fails
    # with a duplicate-column error at CREATE TABLE time. The second
    # entry is assumed to be the password hash -- confirm against the
    # User model before running this migration.
    sa.Column('password_salt', sa.LargeBinary, nullable=False),
    sa.Column('password_hash', sa.LargeBinary, nullable=False),
    sa.Column('first_name', sa.String(40)),
    sa.Column('last_name', sa.String(40)),
    sa.PrimaryKeyConstraint('id')
    )
    op.add_column('entries', sa.Column('user_id', sa.Integer(), nullable=True))
    op.create_foreign_key('entries_user_id_fkey', 'entries', 'users', ['user_id'], ['id'])
    op.add_column('entry_fields', sa.Column('user_id', sa.Integer(), nullable=True))
    op.create_foreign_key('entry_fields_user_id_fkey', 'entry_fields', 'users', ['user_id'], ['id'])
def downgrade():
    """Reverse :func:`upgrade`.

    BUG FIX: the original dropped the ``users`` table first, while the
    foreign keys on ``entries``/``entry_fields`` still referenced it --
    that fails on any backend enforcing referential integrity. Remove the
    referencing constraints and columns first, then drop the table.
    """
    op.drop_constraint('entry_fields_user_id_fkey', 'entry_fields', type_='foreignkey')
    op.drop_column('entry_fields', 'user_id')
    op.drop_constraint('entries_user_id_fkey', 'entries', type_='foreignkey')
    op.drop_column('entries', 'user_id')
    op.drop_table('users')
| {
"repo_name": "timesqueezer/mooddiary",
"path": "migrations/versions/1568b7548f2_.py",
"copies": "2",
"size": "1513",
"license": "mit",
"hash": 7675769835843664000,
"line_mean": 36.825,
"line_max": 100,
"alpha_frac": 0.6602775942,
"autogenerated": false,
"ratio": 3.2123142250530785,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9854173683121571,
"avg_score": 0.0036836272263015063,
"num_lines": 40
} |
"""Added venue, room details
Revision ID: 1925329c798a
Revises: 316aaa757c8c
Create Date: 2013-11-05 19:48:59.132327
"""
# revision identifiers, used by Alembic.
revision = '1925329c798a'
down_revision = '316aaa757c8c'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the ``venue`` table and its child ``venue_room`` table."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('venue',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(), nullable=False),
    sa.Column('updated_at', sa.DateTime(), nullable=False),
    sa.Column('proposal_space_id', sa.Integer(), nullable=False),
    sa.Column('description_text', sa.UnicodeText(), nullable=False),
    sa.Column('description_html', sa.UnicodeText(), nullable=False),
    sa.Column('address1', sa.Unicode(length=160), nullable=False),
    sa.Column('address2', sa.Unicode(length=160), nullable=False),
    sa.Column('city', sa.Unicode(length=30), nullable=False),
    sa.Column('state', sa.Unicode(length=30), nullable=False),
    sa.Column('postcode', sa.Unicode(length=20), nullable=False),
    # 2 chars -- presumably an ISO 3166-1 alpha-2 country code; confirm.
    sa.Column('country', sa.Unicode(length=2), nullable=False),
    # Numeric(8, 5) gives ~1 metre of coordinate precision.
    sa.Column('latitude', sa.Numeric(precision=8, scale=5), nullable=True),
    sa.Column('longitude', sa.Numeric(precision=8, scale=5), nullable=True),
    sa.Column('name', sa.Unicode(length=250), nullable=False),
    sa.Column('title', sa.Unicode(length=250), nullable=False),
    sa.ForeignKeyConstraint(['proposal_space_id'], ['proposal_space.id'], ),
    # Venue names are unique within a proposal space, not globally.
    sa.UniqueConstraint('proposal_space_id', 'name'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('venue_room',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(), nullable=False),
    sa.Column('updated_at', sa.DateTime(), nullable=False),
    sa.Column('venue_id', sa.Integer(), nullable=False),
    sa.Column('description_text', sa.UnicodeText(), nullable=False),
    sa.Column('description_html', sa.UnicodeText(), nullable=False),
    sa.Column('name', sa.Unicode(length=250), nullable=False),
    sa.Column('title', sa.Unicode(length=250), nullable=False),
    sa.ForeignKeyConstraint(['venue_id'], ['venue.id'], ),
    sa.PrimaryKeyConstraint('id'),
    # Room names are unique within their venue.
    sa.UniqueConstraint('venue_id', 'name')
    )
    ### end Alembic commands ###
def downgrade():
    """Drop the room table before the venue table it references."""
    for table_name in ('venue_room', 'venue'):
        op.drop_table(table_name)
| {
"repo_name": "jace/failconfunnel",
"path": "alembic/versions/1925329c798a_added_venue_room_det.py",
"copies": "1",
"size": "2473",
"license": "bsd-2-clause",
"hash": -6030646271547781000,
"line_mean": 40.2166666667,
"line_max": 76,
"alpha_frac": 0.6789324707,
"autogenerated": false,
"ratio": 3.4490934449093444,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9569003458556828,
"avg_score": 0.011804491410503192,
"num_lines": 60
} |
"""Added vote tables
Revision ID: 7210849b51fe
Revises: 1c465843735b
Create Date: 2020-09-27 15:47:08.142317
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '7210849b51fe'
down_revision = '1c465843735b'
branch_labels = None
depends_on = None
def upgrade():
    """Create the voting tables and add ``posts.post_type``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('writein_entries',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.Column('anon_id', sa.String(), nullable=True),
    sa.Column('text', sa.String(), nullable=True),
    sa.Column('date_added', sa.DateTime(timezone=True), nullable=False),
    # Exactly one of user_id / anon_id must be set per row.
    sa.CheckConstraint('(user_id is null) != (anon_id is null)', name=op.f('ck_writein_entries_user_or_anon')),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], name=op.f('fk_writein_entries_user_id_users')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_writein_entries'))
    )
    op.create_table('vote_entries',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('post_id', sa.Integer(), nullable=False),
    sa.Column('vote_text', sa.String(), nullable=False),
    sa.Column('killed', sa.Boolean(), nullable=False),
    sa.Column('killed_text', sa.String(), nullable=True),
    sa.ForeignKeyConstraint(['post_id'], ['posts.id'], name=op.f('fk_vote_entries_post_id_posts')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_vote_entries'))
    )
    op.create_table('vote_info',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('post_id', sa.Integer(), nullable=False),
    sa.Column('multivote', sa.Boolean(), nullable=False),
    sa.Column('votes_hidden', sa.Boolean(), nullable=False),
    sa.Column('active', sa.Boolean(), nullable=False),
    sa.Column('time_closed', sa.DateTime(timezone=True), nullable=True),
    sa.ForeignKeyConstraint(['post_id'], ['posts.id'], name=op.f('fk_vote_info_post_id_posts')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_vote_info'))
    )
    op.create_table('user_votes',
    sa.Column('entry_id', sa.Integer(), nullable=False),
    sa.Column('user_id', sa.Integer(), nullable=False),
    sa.Column('anon_id', sa.String(), nullable=False),
    # NOTE(review): this check requires exactly one of user_id/anon_id to
    # be NULL, but both columns are NOT NULL and part of the primary key
    # below -- as written no row can satisfy the constraint. Confirm the
    # intended design before relying on this table.
    sa.CheckConstraint('(user_id is null) != (anon_id is null)', name=op.f('ck_user_votes_user_or_anon')),
    sa.ForeignKeyConstraint(['entry_id'], ['vote_entries.id'], name=op.f('fk_user_votes_entry_id_vote_entries')),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], name=op.f('fk_user_votes_user_id_users')),
    sa.PrimaryKeyConstraint('entry_id', 'user_id', 'anon_id', name=op.f('pk_user_votes'))
    )
    enum_type = sa.Enum('Text', 'Vote', 'Writein', name='posttype')
    # Explicit type creation left disabled by the author -- presumably the
    # add_column below emits the type implicitly on this backend; confirm.
    # enum_type.create(op)
    # Existing posts default to 'Text'.
    op.add_column('posts', sa.Column('post_type', enum_type, nullable=False, server_default='Text'))
    # ### end Alembic commands ###
def downgrade():
    """Reverse :func:`upgrade`: drop the vote tables and ``posts.post_type``.

    Also removes the ``posttype`` enum type, which the original left
    behind in the database (a later re-run of the upgrade would then fail
    with "type posttype already exists" on backends with native enums).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('posts', 'post_type')
    op.drop_table('user_votes')
    op.drop_table('vote_info')
    op.drop_table('vote_entries')
    op.drop_table('writein_entries')
    # ### end Alembic commands ###
    # checkfirst=True keeps this a no-op on backends without native enum
    # types or when the type is already gone.
    sa.Enum(name='posttype').drop(op.get_bind(), checkfirst=True)
| {
"repo_name": "alethiophile/openakun",
"path": "alembic/versions/7210849b51fe_added_vote_tables.py",
"copies": "1",
"size": "3172",
"license": "mit",
"hash": 3727293589787159000,
"line_mean": 43.0555555556,
"line_max": 113,
"alpha_frac": 0.657629256,
"autogenerated": false,
"ratio": 3.175175175175175,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4332804431175175,
"avg_score": null,
"num_lines": null
} |
"""add-ehr-metrics
Revision ID: 4825a0ad42e1
Revises: 3513057132ca
Create Date: 2019-03-04 10:31:53.101508
"""
import model.utils
import sqlalchemy as sa
from alembic import op
from rdr_service.participant_enums import EhrStatus
# revision identifiers, used by Alembic.
revision = "4825a0ad42e1"
down_revision = "3513057132ca"
branch_labels = None
depends_on = None
_PARTICIPANT_VIEW_SQL = """
CREATE OR REPLACE VIEW participant_view AS
SELECT
p.participant_id,
p.sign_up_time,
p.withdrawal_status,
p.withdrawal_time,
p.suspension_status,
p.suspension_time,
hpo.name hpo,
ps.zip_code,
state_code.value state,
recontact_method_code.value recontact_method,
language_code.value language,
TIMESTAMPDIFF(YEAR, ps.date_of_birth, CURDATE()) age_years,
gender_code.value gender,
sex_code.value sex,
sexual_orientation_code.value sexual_orientation,
education_code.value education,
income_code.value income,
ps.enrollment_status,
ps.race,
ps.physical_measurements_status,
ps.physical_measurements_finalized_time,
ps.physical_measurements_time,
ps.physical_measurements_created_site_id,
ps.physical_measurements_finalized_site_id,
ps.consent_for_study_enrollment,
ps.consent_for_study_enrollment_time,
ps.consent_for_electronic_health_records,
ps.consent_for_electronic_health_records_time,
ps.questionnaire_on_overall_health,
ps.questionnaire_on_overall_health_time,
ps.questionnaire_on_lifestyle,
ps.questionnaire_on_lifestyle_time,
ps.questionnaire_on_the_basics,
ps.questionnaire_on_the_basics_time,
ps.questionnaire_on_healthcare_access,
ps.questionnaire_on_healthcare_access_time,
ps.questionnaire_on_medical_history,
ps.questionnaire_on_medical_history_time,
ps.questionnaire_on_medications,
ps.questionnaire_on_medications_time,
ps.questionnaire_on_family_health,
ps.questionnaire_on_family_health_time,
ps.biospecimen_status,
ps.biospecimen_order_time,
ps.biospecimen_source_site_id,
ps.biospecimen_collected_site_id,
ps.biospecimen_processed_site_id,
ps.biospecimen_finalized_site_id,
ps.sample_order_status_1sst8,
ps.sample_order_status_1sst8_time,
ps.sample_order_status_1pst8,
ps.sample_order_status_1pst8_time,
ps.sample_order_status_1hep4,
ps.sample_order_status_1hep4_time,
ps.sample_order_status_1ed04,
ps.sample_order_status_1ed04_time,
ps.sample_order_status_1ed10,
ps.sample_order_status_1ed10_time,
ps.sample_order_status_2ed10,
ps.sample_order_status_2ed10_time,
ps.sample_order_status_1ur10,
ps.sample_order_status_1ur10_time,
ps.sample_order_status_1sal,
ps.sample_order_status_1sal_time,
ps.sample_order_status_1sal2,
ps.sample_order_status_1sal2_time,
ps.sample_order_status_1cfd9,
ps.sample_order_status_1cfd9_time,
ps.sample_order_status_1pxr2,
ps.sample_order_status_1pxr2_time,
ps.sample_status_1sst8,
ps.sample_status_1sst8_time,
ps.sample_status_1pst8,
ps.sample_status_1pst8_time,
ps.sample_status_1hep4,
ps.sample_status_1hep4_time,
ps.sample_status_1ed04,
ps.sample_status_1ed04_time,
ps.sample_status_1ed10,
ps.sample_status_1ed10_time,
ps.sample_status_2ed10,
ps.sample_status_2ed10_time,
ps.sample_status_1ur10,
ps.sample_status_1ur10_time,
ps.sample_status_1sal,
ps.sample_status_1sal_time,
ps.sample_status_1sal2,
ps.sample_status_1sal2_time,
ps.sample_status_1cfd9,
ps.sample_status_1cfd9_time,
ps.sample_status_1pxr2,
ps.sample_status_1pxr2_time,
ps.num_completed_baseline_ppi_modules,
ps.num_completed_ppi_modules,
ps.num_baseline_samples_arrived,
ps.samples_to_isolate_dna,
ps.consent_for_cabor,
ps.consent_for_cabor_time,
ps.ehr_status,
ps.ehr_receipt_time,
ps.ehr_update_time,
(SELECT IFNULL(GROUP_CONCAT(
IF(ac.value = 'WhatRaceEthnicity_RaceEthnicityNoneOfThese',
'NoneOfThese',
TRIM(LEADING 'WhatRaceEthnicity_' FROM
TRIM(LEADING 'PMI_' FROM ac.value)))),
'None')
FROM questionnaire_response qr, questionnaire_response_answer qra,
questionnaire_question qq, code c, code ac
WHERE qra.end_time IS NULL AND
qr.questionnaire_response_id = qra.questionnaire_response_id AND
qra.question_id = qq.questionnaire_question_id AND
qq.code_id = c.code_id AND c.value = 'Race_WhatRaceEthnicity' AND
qr.participant_id = p.participant_id AND
qra.value_code_id = ac.code_id AND
ac.value != 'WhatRaceEthnicity_Hispanic'
) race_codes,
(SELECT COUNT(ac.value)
FROM questionnaire_response qr, questionnaire_response_answer qra,
questionnaire_question qq, code c, code ac
WHERE qra.end_time IS NULL AND
qr.questionnaire_response_id = qra.questionnaire_response_id AND
qra.question_id = qq.questionnaire_question_id AND
qq.code_id = c.code_id AND c.value = 'Race_WhatRaceEthnicity' AND
qr.participant_id = p.participant_id AND
qra.value_code_id = ac.code_id AND
ac.value = 'WhatRaceEthnicity_Hispanic'
) hispanic
FROM
participant p
LEFT OUTER JOIN hpo ON p.hpo_id = hpo.hpo_id
LEFT OUTER JOIN participant_summary ps ON p.participant_id = ps.participant_id
LEFT OUTER JOIN code state_code ON ps.state_id = state_code.code_id
LEFT OUTER JOIN code recontact_method_code ON ps.recontact_method_id = recontact_method_code.code_id
LEFT OUTER JOIN code language_code ON ps.language_id = language_code.code_id
LEFT OUTER JOIN code gender_code ON ps.gender_identity_id = gender_code.code_id
LEFT OUTER JOIN code sex_code ON ps.sex_id = sex_code.code_id
LEFT OUTER JOIN code sexual_orientation_code ON ps.sexual_orientation_id = sexual_orientation_code.code_id
LEFT OUTER JOIN code education_code ON ps.education_id = education_code.code_id
LEFT OUTER JOIN code income_code ON ps.income_id = income_code.code_id
WHERE (ps.email IS NULL OR ps.email NOT LIKE '%@example.com') AND
(hpo.name IS NULL OR hpo.name != 'TEST')
"""
_RAW_PARTICIPANT_VIEW_EXPORT_SQL = """
CREATE OR REPLACE VIEW raw_ppi_participant_view AS
SELECT
p.participant_id,
ps.last_name,
ps.first_name,
ps.email,
p.sign_up_time,
p.suspension_status,
p.suspension_time,
hpo.name hpo,
ps.zip_code,
state_code.value state,
language_code.value language,
ps.date_of_birth,
gender_code.value gender,
sex_code.value sex,
sexual_orientation_code.value sexual_orientation,
education_code.value education,
income_code.value income,
ps.enrollment_status,
ps.race,
ps.physical_measurements_status,
ps.physical_measurements_finalized_time,
ps.physical_measurements_time,
ps.physical_measurements_created_site_id,
ps.physical_measurements_finalized_site_id,
ps.consent_for_study_enrollment,
ps.consent_for_study_enrollment_time,
ps.consent_for_electronic_health_records,
ps.consent_for_electronic_health_records_time,
ps.questionnaire_on_overall_health,
ps.questionnaire_on_overall_health_time,
ps.questionnaire_on_lifestyle,
ps.questionnaire_on_lifestyle_time,
ps.questionnaire_on_the_basics,
ps.questionnaire_on_the_basics_time,
ps.questionnaire_on_healthcare_access,
ps.questionnaire_on_healthcare_access_time,
ps.questionnaire_on_medical_history,
ps.questionnaire_on_medical_history_time,
ps.questionnaire_on_medications,
ps.questionnaire_on_medications_time,
ps.questionnaire_on_family_health,
ps.questionnaire_on_family_health_time,
ps.biospecimen_status,
ps.biospecimen_order_time,
ps.biospecimen_source_site_id,
ps.biospecimen_collected_site_id,
ps.biospecimen_processed_site_id,
ps.biospecimen_finalized_site_id,
ps.sample_order_status_1sst8,
ps.sample_order_status_1sst8_time,
ps.sample_order_status_1pst8,
ps.sample_order_status_1pst8_time,
ps.sample_order_status_1hep4,
ps.sample_order_status_1hep4_time,
ps.sample_order_status_1ed04,
ps.sample_order_status_1ed04_time,
ps.sample_order_status_1ed10,
ps.sample_order_status_1ed10_time,
ps.sample_order_status_2ed10,
ps.sample_order_status_2ed10_time,
ps.sample_order_status_1ur10,
ps.sample_order_status_1ur10_time,
ps.sample_order_status_1sal,
ps.sample_order_status_1sal_time,
ps.sample_order_status_1sal2,
ps.sample_order_status_1sal2_time,
ps.sample_order_status_1cfd9,
ps.sample_order_status_1cfd9_time,
ps.sample_order_status_1pxr2,
ps.sample_order_status_1pxr2_time,
ps.sample_status_1sst8,
ps.sample_status_1sst8_time,
ps.sample_status_1pst8,
ps.sample_status_1pst8_time,
ps.sample_status_1hep4,
ps.sample_status_1hep4_time,
ps.sample_status_1ed04,
ps.sample_status_1ed04_time,
ps.sample_status_1ed10,
ps.sample_status_1ed10_time,
ps.sample_status_2ed10,
ps.sample_status_2ed10_time,
ps.sample_status_1ur10,
ps.sample_status_1ur10_time,
ps.sample_status_1sal,
ps.sample_status_1sal_time,
ps.sample_status_1sal2,
ps.sample_status_1sal2_time,
ps.sample_status_1cfd9,
ps.sample_status_1cfd9_time,
ps.sample_status_1pxr2,
ps.sample_status_1pxr2_time,
ps.num_completed_baseline_ppi_modules,
ps.num_completed_ppi_modules,
ps.num_baseline_samples_arrived,
ps.samples_to_isolate_dna,
ps.consent_for_cabor,
ps.consent_for_cabor_time,
ps.ehr_status,
ps.ehr_receipt_time,
ps.ehr_update_time,
(SELECT IFNULL(GROUP_CONCAT(
IF(ac.value = 'WhatRaceEthnicity_RaceEthnicityNoneOfThese',
'NoneOfThese',
TRIM(LEADING 'WhatRaceEthnicity_' FROM
TRIM(LEADING 'PMI_' FROM ac.value)))),
'None')
FROM questionnaire_response qr, questionnaire_response_answer qra,
questionnaire_question qq, code c, code ac
WHERE qra.end_time IS NULL AND
qr.questionnaire_response_id = qra.questionnaire_response_id AND
qra.question_id = qq.questionnaire_question_id AND
qq.code_id = c.code_id AND c.value = 'Race_WhatRaceEthnicity' AND
qr.participant_id = p.participant_id AND
qra.value_code_id = ac.code_id AND
ac.value != 'WhatRaceEthnicity_Hispanic'
) race_codes,
(SELECT COUNT(ac.value)
FROM questionnaire_response qr, questionnaire_response_answer qra,
questionnaire_question qq, code c, code ac
WHERE qra.end_time IS NULL AND
qr.questionnaire_response_id = qra.questionnaire_response_id AND
qra.question_id = qq.questionnaire_question_id AND
qq.code_id = c.code_id AND c.value = 'Race_WhatRaceEthnicity' AND
qr.participant_id = p.participant_id AND
qra.value_code_id = ac.code_id AND
ac.value = 'WhatRaceEthnicity_Hispanic'
) hispanic
FROM
participant p
LEFT OUTER JOIN hpo ON p.hpo_id = hpo.hpo_id
LEFT OUTER JOIN participant_summary ps ON p.participant_id = ps.participant_id
LEFT OUTER JOIN code state_code ON ps.state_id = state_code.code_id
LEFT OUTER JOIN code recontact_method_code ON ps.recontact_method_id = recontact_method_code.code_id
LEFT OUTER JOIN code language_code ON ps.language_id = language_code.code_id
LEFT OUTER JOIN code gender_code ON ps.gender_identity_id = gender_code.code_id
LEFT OUTER JOIN code sex_code ON ps.sex_id = sex_code.code_id
LEFT OUTER JOIN code sexual_orientation_code ON ps.sexual_orientation_id = sexual_orientation_code.code_id
LEFT OUTER JOIN code education_code ON ps.education_id = education_code.code_id
LEFT OUTER JOIN code income_code ON ps.income_id = income_code.code_id
WHERE p.withdrawal_status = 1 AND # NOT_WITHDRAWN
(ps.email IS NULL OR ps.email NOT LIKE '%@example.com') AND
(hpo.name IS NULL OR hpo.name != 'TEST')
"""
_PPI_PARTICIPANT_VIEW_SQL = """
CREATE OR REPLACE VIEW ppi_participant_view AS
SELECT
p.participant_id,
YEAR(p.sign_up_time) sign_up_year,
p.suspension_status,
YEAR(p.suspension_time) suspension_year,
hpo.name hpo,
/* Deidentify low population zip codes; assumes a 5-digit format. */
IF(LENGTH(ps.zip_code) != 5, 'INVALID',
IF(SUBSTR(ps.zip_code, 1, 3) IN (
'036', '692', '878', '059', '790', '879', '063', '821', '884', '102',
'823', '890', '203', '830', '893', '556', '831'),
'000', SUBSTR(ps.zip_code, 1, 3)
)) deidentified_zip_code,
state_code.value state,
language_code.value language,
LEAST(89, TIMESTAMPDIFF(YEAR, ps.date_of_birth, CURDATE())) age_years,
gender_code.value gender,
sex_code.value sex,
sexual_orientation_code.value sexual_orientation,
education_code.value education,
income_code.value income,
ps.enrollment_status,
ps.race,
ps.physical_measurements_status,
YEAR(ps.physical_measurements_finalized_time) physical_measurements_finalized_year,
YEAR(ps.physical_measurements_time) physical_measurements_year,
ps.physical_measurements_created_site_id,
ps.physical_measurements_finalized_site_id,
ps.consent_for_study_enrollment,
YEAR(ps.consent_for_study_enrollment_time) consent_for_study_enrollment_year,
ps.consent_for_electronic_health_records,
YEAR(ps.consent_for_electronic_health_records_time) consent_for_electronic_health_records_year,
ps.questionnaire_on_overall_health,
YEAR(ps.questionnaire_on_overall_health_time) questionnaire_on_overall_health_year,
ps.questionnaire_on_lifestyle,
YEAR(ps.questionnaire_on_lifestyle_time) questionnaire_on_lifestyle_year,
ps.questionnaire_on_the_basics,
YEAR(ps.questionnaire_on_the_basics_time) questionnaire_on_the_basics_year,
ps.questionnaire_on_healthcare_access,
YEAR(ps.questionnaire_on_healthcare_access_time) questionnaire_on_healthcare_access_year,
ps.questionnaire_on_medical_history,
YEAR(ps.questionnaire_on_medical_history_time) questionnaire_on_medical_history_year,
ps.questionnaire_on_medications,
YEAR(ps.questionnaire_on_medications_time) questionnaire_on_medications_year,
ps.questionnaire_on_family_health,
YEAR(ps.questionnaire_on_family_health_time) questionnaire_on_family_health_year,
ps.biospecimen_status,
YEAR(ps.biospecimen_order_time) biospecimen_order_year,
ps.biospecimen_source_site_id,
ps.biospecimen_collected_site_id,
ps.biospecimen_processed_site_id,
ps.biospecimen_finalized_site_id,
ps.sample_order_status_1sst8,
YEAR(ps.sample_order_status_1sst8_time) sample_order_status_1sst8_year,
ps.sample_order_status_1pst8,
YEAR(ps.sample_order_status_1pst8_time) sample_order_status_1pst8_year,
ps.sample_order_status_1hep4,
YEAR(ps.sample_order_status_1hep4_time) sample_order_status_1hep4_year,
ps.sample_order_status_1ed04,
YEAR(ps.sample_order_status_1ed04_time) sample_order_status_1ed04_year,
ps.sample_order_status_1ed10,
YEAR(ps.sample_order_status_1ed10_time) sample_order_status_1ed10_year,
ps.sample_order_status_2ed10,
YEAR(ps.sample_order_status_2ed10_time) sample_order_status_2ed10_year,
ps.sample_order_status_1ur10,
YEAR(ps.sample_order_status_1ur10_time) sample_order_status_1ur10_year,
ps.sample_order_status_1sal,
YEAR(ps.sample_order_status_1sal_time) sample_order_status_1sal_year,
ps.sample_order_status_1sal2,
YEAR(ps.sample_order_status_1sal2_time) sample_order_status_1sal2_year,
ps.sample_order_status_1cfd9,
YEAR(ps.sample_order_status_1cfd9_time) sample_order_status_1cfd9_year,
ps.sample_order_status_1pxr2,
YEAR(ps.sample_order_status_1pxr2_time) sample_order_status_1pxr2_year,
ps.sample_status_1sst8,
YEAR(ps.sample_status_1sst8_time) sample_status_1sst8_year,
ps.sample_status_1pst8,
YEAR(ps.sample_status_1pst8_time) sample_status_1pst8_year,
ps.sample_status_1hep4,
YEAR(ps.sample_status_1hep4_time) sample_status_1hep4_year,
ps.sample_status_1ed04,
YEAR(ps.sample_status_1ed04_time) sample_status_1ed04_year,
ps.sample_status_1ed10,
YEAR(ps.sample_status_1ed10_time) sample_status_1ed10_year,
ps.sample_status_2ed10,
YEAR(ps.sample_status_2ed10_time) sample_status_2ed10_year,
ps.sample_status_1ur10,
YEAR(ps.sample_status_1ur10_time) sample_status_1ur10_year,
ps.sample_status_1sal,
YEAR(ps.sample_status_1sal_time) sample_status_1sal_year,
ps.sample_status_1sal2,
YEAR(ps.sample_status_1sal2_time) sample_status_1sal2_year,
ps.sample_status_1cfd9,
YEAR(ps.sample_status_1cfd9_time) sample_status_1cfd9_year,
ps.sample_status_1pxr2,
YEAR(ps.sample_status_1pxr2_time) sample_status_1pxr2_year,
ps.num_completed_baseline_ppi_modules,
ps.num_completed_ppi_modules,
ps.num_baseline_samples_arrived,
ps.samples_to_isolate_dna,
ps.consent_for_cabor,
YEAR(ps.consent_for_cabor_time) consent_for_cabor_year,
ps.ehr_status,
YEAR(ps.ehr_receipt_time) ehr_receipt_year,
YEAR(ps.ehr_update_time) ehr_update_year,
(SELECT IFNULL(GROUP_CONCAT(
IF(ac.value = 'WhatRaceEthnicity_RaceEthnicityNoneOfThese',
'NoneOfThese',
TRIM(LEADING 'WhatRaceEthnicity_' FROM
TRIM(LEADING 'PMI_' FROM ac.value)))),
'None')
FROM questionnaire_response qr, questionnaire_response_answer qra,
questionnaire_question qq, code c, code ac
WHERE qra.end_time IS NULL AND
qr.questionnaire_response_id = qra.questionnaire_response_id AND
qra.question_id = qq.questionnaire_question_id AND
qq.code_id = c.code_id AND c.value = 'Race_WhatRaceEthnicity' AND
qr.participant_id = p.participant_id AND
qra.value_code_id = ac.code_id AND
ac.value != 'WhatRaceEthnicity_Hispanic'
) race_codes,
(SELECT COUNT(ac.value)
FROM questionnaire_response qr, questionnaire_response_answer qra,
questionnaire_question qq, code c, code ac
WHERE qra.end_time IS NULL AND
qr.questionnaire_response_id = qra.questionnaire_response_id AND
qra.question_id = qq.questionnaire_question_id AND
qq.code_id = c.code_id AND c.value = 'Race_WhatRaceEthnicity' AND
qr.participant_id = p.participant_id AND
qra.value_code_id = ac.code_id AND
ac.value = 'WhatRaceEthnicity_Hispanic'
) hispanic
FROM
participant p
LEFT OUTER JOIN hpo ON p.hpo_id = hpo.hpo_id
LEFT OUTER JOIN participant_summary ps ON p.participant_id = ps.participant_id
LEFT OUTER JOIN code state_code ON ps.state_id = state_code.code_id
LEFT OUTER JOIN code recontact_method_code ON ps.recontact_method_id = recontact_method_code.code_id
LEFT OUTER JOIN code language_code ON ps.language_id = language_code.code_id
LEFT OUTER JOIN code gender_code ON ps.gender_identity_id = gender_code.code_id
LEFT OUTER JOIN code sex_code ON ps.sex_id = sex_code.code_id
LEFT OUTER JOIN code sexual_orientation_code ON ps.sexual_orientation_id = sexual_orientation_code.code_id
LEFT OUTER JOIN code education_code ON ps.education_id = education_code.code_id
LEFT OUTER JOIN code income_code ON ps.income_id = income_code.code_id
WHERE p.withdrawal_status = 1 AND # NOT_WITHDRAWN
(ps.email IS NULL OR ps.email NOT LIKE '%@example.com') AND
(hpo.name IS NULL OR hpo.name != 'TEST')
"""
def upgrade(engine_name):
    """Dispatch to the per-engine upgrade routine (rdr or metrics)."""
    handler = globals()["upgrade_" + engine_name]
    handler()
def downgrade(engine_name):
    """Dispatch to the per-engine downgrade routine (rdr or metrics)."""
    handler = globals()["downgrade_" + engine_name]
    handler()
def upgrade_rdr():
    """Apply the EHR-metrics schema changes to the RDR database.

    Creates the ``ehr_receipt`` event table, adds the three EHR columns to
    ``participant_summary`` and re-creates the participant views so they
    expose those columns.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "ehr_receipt",
        sa.Column("ehr_receipt_id", sa.Integer(), nullable=False),
        sa.Column("recorded_time", model.utils.UTCDateTime(), nullable=False),
        sa.Column("received_time", model.utils.UTCDateTime(), nullable=False),
        sa.Column("participant_id", sa.Integer(), nullable=False),
        sa.Column("site_id", sa.Integer(), nullable=False),
        # Receipts are bookkeeping: remove them with their participant/site.
        sa.ForeignKeyConstraint(["participant_id"], ["participant.participant_id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["site_id"], ["site.site_id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("ehr_receipt_id"),
    )
    op.create_index(op.f("ix_ehr_receipt_recorded_time"), "ehr_receipt", ["recorded_time"], unique=False)
    # New columns are nullable so existing rows stay valid (default NULL).
    op.add_column("participant_summary", sa.Column("ehr_receipt_time", model.utils.UTCDateTime(), nullable=True))
    op.add_column("participant_summary", sa.Column("ehr_status", model.utils.Enum(EhrStatus), nullable=True))
    op.add_column("participant_summary", sa.Column("ehr_update_time", model.utils.UTCDateTime(), nullable=True))
    # ### end Alembic commands ###
    # The view SQL uses CREATE OR REPLACE VIEW, so no explicit drop needed.
    op.execute(_PARTICIPANT_VIEW_SQL)
    op.execute(_RAW_PARTICIPANT_VIEW_EXPORT_SQL)
    op.execute(_PPI_PARTICIPANT_VIEW_SQL)
def downgrade_rdr():
    """Remove the EHR columns and the ``ehr_receipt`` table.

    NOTE(review): this does not restore the previous view definitions --
    after the columns are dropped, the views re-created by upgrade_rdr
    still reference ps.ehr_status etc. and will be broken (or the column
    drops themselves may fail on backends that track view dependencies).
    Confirm before relying on this downgrade.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("participant_summary", "ehr_update_time")
    op.drop_column("participant_summary", "ehr_status")
    op.drop_column("participant_summary", "ehr_receipt_time")
    op.drop_index(op.f("ix_ehr_receipt_recorded_time"), table_name="ehr_receipt")
    op.drop_table("ehr_receipt")
    # ### end Alembic commands ###
def upgrade_metrics():
    """This revision makes no changes to the metrics database."""
def downgrade_metrics():
    """This revision makes no changes to the metrics database."""
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/4825a0ad42e1_add_ehr_metrics.py",
"copies": "1",
"size": "21362",
"license": "bsd-3-clause",
"hash": -2934578967365020000,
"line_mean": 40.0807692308,
"line_max": 113,
"alpha_frac": 0.7151483943,
"autogenerated": false,
"ratio": 2.8754879526181183,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.40906363469181184,
"avg_score": null,
"num_lines": null
} |
"""" add electrical labels to each device port
"""
import phidl.device_layout as pd
import pp
from omegaconf.listconfig import ListConfig
from phidl.device_layout import Label
from pp.component import Component, ComponentReference
from pp.port import Port
from typing import Optional, Union
def add_label(component, text, position=(0, 0), layer=pp.LAYER.LABEL):
    """Attach a text label to ``component`` (in place) and return it.

    Args:
        component: component to annotate.
        text: label text.
        position: (x, y) coordinates of the label.
        layer: GDS layer tuple for the label.
    """
    gds_layer, gds_texttype = pd._parse_layer(layer)
    component.add(
        pd.Label(
            text=text,
            position=position,
            anchor="o",
            layer=gds_layer,
            texttype=gds_texttype,
        )
    )
    return component
def add_labels(component):
    """Return a new component wrapping ``component`` with one electrical
    test label per DC port."""
    labelled = pp.Component()
    dc_ports = component.get_ports_list(port_type="dc")
    labelled.add(component.ref())
    for index, port in enumerate(dc_ports):
        labelled.add(
            get_input_label_electrical(port, index, component_name=component.name)
        )
    return labelled
def get_optical_text(
    port: Port,
    gc: Union[ComponentReference, Component],
    gc_index: Optional[int] = None,
    component_name: Optional[str] = None,
) -> str:
    """Return the test-label text for an optical port.

    The label encodes polarization, wavelength in nm, the owning component
    name, an optional grating-coupler index and the port name, e.g.
    ``opt_te_1550_(mzi)_0_W0``.

    Args:
        port: optical port being labelled.
        gc: grating coupler (component or reference) whose ``polarization``
            and ``wavelength`` properties are encoded in the label.
        gc_index: grating-coupler index, included only when an int is given.
        component_name: overrides the name derived from ``port.parent``.

    Raises:
        AssertionError: if polarization is not te/tm or the wavelength is
            outside (1000, 2000) nm.
    """
    polarization = gc.get_property("polarization")
    wavelength_nm = gc.get_property("wavelength")
    assert polarization in [
        "te",
        "tm",
    ], f"Not valid polarization {polarization} in [te, tm]"
    assert (
        isinstance(wavelength_nm, (int, float)) and 1000 < wavelength_nm < 2000
    ), f"{wavelength_nm} is Not valid 1000 < wavelength < 2000"
    if component_name:
        name = component_name
    elif isinstance(port.parent, pp.Component):
        # Fix: isinstance (was `type(...) == pp.Component`) so Component
        # subclasses take the `.name` path instead of crashing on ref_cell.
        name = port.parent.name
    else:
        name = port.parent.ref_cell.name
    if isinstance(gc_index, int):
        return f"opt_{polarization}_{int(wavelength_nm)}_({name})_{gc_index}_{port.name}"
    return f"opt_{polarization}_{int(wavelength_nm)}_({name})_{port.name}"
def get_input_label(
    port: Port,
    gc: ComponentReference,
    gc_index: Optional[int] = None,
    gc_port_name: str = "W0",
    layer_label: ListConfig = pp.LAYER.LABEL,
    component_name: Optional[str] = None,
) -> Label:
    """Build the T&M label placed at a grating coupler's input port.

    The label text (from :func:`get_optical_text`) lets test & measurement
    match grating-coupler coordinates back to the component.
    """
    label_text = get_optical_text(
        port=port, gc=gc, gc_index=gc_index, component_name=component_name
    )
    if gc_port_name is None:
        # Fall back to the coupler's first port when no name was given.
        gc_port_name = list(gc.ports.values())[0].name
    gds_layer, gds_texttype = pd._parse_layer(layer_label)
    return pd.Label(
        text=label_text,
        position=gc.ports[gc_port_name].midpoint,
        anchor="o",
        layer=gds_layer,
        texttype=gds_texttype,
    )
def get_input_label_electrical(
    port, index=0, component_name=None, layer_label=pp.LAYER.LABEL
):
    """Build the T&M label for an electrical (DC) port.

    The label text (``elec_<index>_(<name>)_<port>``) lets test & measurement
    match pad coordinates back to the component.

    Args:
        port: electrical port being labelled.
        index: ordinal of the port within the component.
        component_name: overrides the name derived from ``port.parent``.
        layer_label: GDS layer tuple for the label.
    """
    if component_name:
        name = component_name
    elif isinstance(port.parent, pp.Component):
        # Fix: isinstance (was `type(...) == pp.Component`) so Component
        # subclasses take the `.name` path instead of crashing on ref_cell.
        name = port.parent.name
    else:
        name = port.parent.ref_cell.name
    # f-string replaces the legacy str.format call; output is identical.
    text = f"elec_{index}_({name})_{port.name}"
    layer, texttype = pd._parse_layer(layer_label)
    return pd.Label(
        text=text, position=port.midpoint, anchor="o", layer=layer, texttype=texttype,
    )
def _demo_input_label():
    """Print a sample optical input label for a circular bend."""
    bend = pp.c.bend_circular()
    coupler = pp.c.grating_coupler_elliptical_te()
    demo_label = get_input_label(
        port=bend.ports["W0"], gc=coupler, layer_label=pp.LAYER.LABEL
    )
    print(demo_label)
if __name__ == "__main__":
from pp.components.electrical.pad import pad
c = pad(width=10, height=10)
print(c.ports)
c2 = add_labels(c)
pp.show(c2)
| {
"repo_name": "psiq/gdsfactory",
"path": "pp/add_labels.py",
"copies": "1",
"size": "3893",
"license": "mit",
"hash": -8168506198754939000,
"line_mean": 25.8482758621,
"line_max": 86,
"alpha_frac": 0.6362702286,
"autogenerated": false,
"ratio": 3.417910447761194,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9552557059420184,
"avg_score": 0.00032472338820203526,
"num_lines": 145
} |
"""Add electron lifetime
"""
import sympy
import pytz
import hax
from pax import configuration, units
from sympy.parsing.sympy_parser import parse_expr
from cax import config
from cax.task import Task
# Pax detector configurations, loaded once at import time (TPC and muon veto).
PAX_CONFIG = configuration.load_configuration('XENON1T')
PAX_CONFIG_MV = configuration.load_configuration('XENON1T_MV')
class CorrectionBase(Task):
    """Base class for corrections.

    Child classes can set the following class attributes:
        key: run doc setting this correction will override (usually processor.XXX,
             you can leave off processor if you want)
        collection_name: collection in the runs db where the correction values are stored.

    We expect documents in the correction contain:
      - calculation_time: timestamp, indicating when YOU made this correction setting
    And optionally also:
      - version: string indicating correction version. If not given, timestamp-based version will be used.
      - function: sympy expression, passed to sympy.evaluate then stored in the 'function' instance attribute
    NOTE: if you can't express your correction as a sympy function you can add a custom handling
    by overriding the 'evaluate' method with whatever you need

    We keep track of the correction versions in the run doc, in processor.correction_versions.
    This is a subsection of processor, so the correction versions used in processing will be stored in the processor's
    metadata.
    """
    # Sentinel defaults; subclasses must override both.
    key = 'not_set'
    collection_name = 'not_set'
    version = 'not_set'

    def __init__(self):
        # NOTE: the collection handle is fetched before the sanity checks, so a
        # subclass missing collection_name will query for 'not_set' first.
        self.correction_collection = config.mongo_collection(self.collection_name)
        if self.key == 'not_set':
            raise ValueError("You must set a correction key attribute")
        if self.collection_name == 'not_set':
            raise ValueError("You must set a correction collection_name attribute")
        Task.__init__(self)

    def each_run(self):
        """Apply the latest correction value/version to the current run doc,
        skipping runs that are unfinished or already at the latest version."""
        if 'end' not in self.run_doc:
            # Run is still in progress, don't compute the correction
            return
        if not config.DATABASE_LOG:
            # This setting apparently means we should do nothing?
            return
        # Fetch the latest correction settings.
        # We can't do this in init: cax is a long-running application, and corrections may change while it is running.
        self.correction_doc = cdoc = self.correction_collection.find_one(sort=(('calculation_time', -1), ))
        self.version = cdoc.get('version', str(cdoc['calculation_time']))
        # Check if this correction's version correction has already been applied. If so, skip this run.
        classname = self.__class__.__name__
        this_run_version = self.run_doc.get('processor', {}).get('correction_versions', {}).get(classname, 'not_set')
        self.log.debug("Checking if version %s == %s" % (this_run_version, self.version))
        if this_run_version == self.version:
            # No change was made in the correction, nothing to do for this run.
            self.log.debug("Skipping, no change with version %s" % this_run_version)
            return
        # Get the correction sympy function, if one is set
        if 'function' in cdoc:
            self.function = parse_expr(cdoc['function'])
        # We have to (re)compute the correction setting value. This is done in the evaluate method.
        # There used to be an extra check for self.key: {'$exists': False} in the query, but now that we allow
        # automatic updates of correction values by cax this is no longer appropriate.
        try:
            # NOTE(review): self.collection (the runs collection) is presumably
            # provided by the Task base class -- confirm.
            self.collection.find_one_and_update({'_id': self.run_doc['_id']},
                                                {'$set': {self.key: self.evaluate(),
                                                          'processor.correction_versions.' + classname: self.version}})
        except RuntimeError as e:
            self.log.exception(e)

    def evaluate(self):
        """Compute and return the correction value for self.run_doc.

        Subclasses must override this.
        """
        raise NotImplementedError

    def evaluate_function(self, **kwargs):
        """Evaluate the sympy function of this correction with the given kwargs"""
        return self.function.evalf(subs=kwargs)
class AddDriftVelocity(CorrectionBase):
    """Compute the liquid drift velocity for each run from the measured
    cathode voltage via the sympy function stored in the 'drift_velocity'
    correction collection."""
    key = 'processor.DEFAULT.drift_velocity_liquid'
    collection_name = 'drift_velocity'

    def evaluate(self):
        run_number = self.run_doc['number']
        # Minimal init of hax. It's ok if hax is inited again with different settings before or after this.
        hax.init(pax_version_policy='loose', main_data_paths=[])
        # Get the cathode voltage in kV
        cathode_kv = hax.slow_control.get('XE1T.GEN_HEINZVMON.PI', run_number).mean()
        # Get the drift velocity
        value = float(self.evaluate_function(v=cathode_kv))
        self.log.info("Run %d: calculated drift velocity of %0.3f km/sec" % (run_number, value))
        # Convert to pax's internal units before storing.
        return value * units.km / units.s
class AddGains(CorrectionBase):
    """Add PMT gains to each run.

    Self-trigger runs get gains evaluated from the sympy function stored in
    the 'gains' correction collection; muon-veto runs get a fixed 1e6 for
    every PMT and LED runs a gain of 1.
    """
    key = 'processor.DEFAULT.gains'
    collection_name = 'gains'
    correction_units = units.V  # should be 1

    def evaluate(self):
        """Make an array of all PMT gains."""
        start = self.run_doc['start']
        timestamp = start.replace(tzinfo=pytz.utc).timestamp()
        if self.run_doc['reader']['self_trigger']:
            self.log.info("Run %d: gains computing" % self.run_doc['number'])
            gains = self.get_gains(timestamp)
        elif self.run_doc['detector'] == 'muon_veto':
            self.log.info("Run %d: using 1e6 as gain for MV" % self.run_doc['number'])
            gains = len(PAX_CONFIG_MV['DEFAULT']['pmts']) * [1e6]
        else:
            self.log.info("Run %d: using 1 as gain for LED" % self.run_doc['number'])
            gains = len(PAX_CONFIG['DEFAULT']['pmts']) * [1]
        return gains

    def get_gains(self, timestamp):
        """Return the list of per-PMT gains at the given time.

        :param timestamp: UNIX timestamp in UTC at which to evaluate the
            gain function.
        """
        pmt = sympy.symbols('pmt', integer=True)
        t = sympy.symbols('t')
        # Minimal init of hax. It's ok if hax is inited again with different settings before or after this.
        hax.init(pax_version_policy='loose', main_data_paths=[])
        gains = []
        for i in range(len(PAX_CONFIG['DEFAULT']['pmts'])):
            # Fix: evaluate at the timestamp argument instead of silently
            # recomputing it from self.run_doc (the parameter was ignored;
            # the value is the same, but the function now honors its contract).
            gain = self.function.evalf(subs={pmt: i,
                                             t: timestamp,
                                             't0': 0
                                             })
            gains.append(float(gain) * self.correction_units)
        return gains
class SetNeuralNetwork(CorrectionBase):
    """Pick the neural network file matching the run number."""
    key = "processor.NeuralNet|PosRecNeuralNet.neural_net_file"
    collection_name = 'neural_network'

    def evaluate(self):
        number = self.run_doc['number']
        # The correction doc holds half-open [min, max) run ranges.
        for entry in self.correction_doc['correction']:
            if entry['min'] <= number < entry['max']:
                return entry['value']
        return None
class SetFieldDistortion(CorrectionBase):
    """Pick the field distortion map matching the run number."""
    key = 'processor.WaveformSimulator.rz_position_distortion_map'
    collection_name = 'field_distortion'

    def evaluate(self):
        number = self.run_doc['number']
        # The correction doc holds half-open [min, max) run ranges.
        for entry in self.correction_doc['correction']:
            if entry['min'] <= number < entry['max']:
                return entry['value']
        return None
class SetLightCollectionEfficiency(CorrectionBase):
    """Pick the S1 light-collection-efficiency map matching the run number."""
    key = 'processor.WaveformSimulator.s1_light_yield_map'
    collection_name = 'light_collection_efficiency'

    def evaluate(self):
        number = self.run_doc['number']
        # The correction doc holds half-open [min, max) run ranges.
        for entry in self.correction_doc['correction']:
            if entry['min'] <= number < entry['max']:
                return entry['value']
        return None
class SetS2xyMap(CorrectionBase):
    """Pick the S2 (x, y) light-yield map matching the run number."""
    key = 'processor.WaveformSimulator.s2_light_yield_map'
    collection_name = 's2_xy_map'

    def evaluate(self):
        number = self.run_doc['number']
        # The correction doc holds half-open [min, max) run ranges.
        for entry in self.correction_doc['correction']:
            if entry['min'] <= number < entry['max']:
                return entry['value']
        return None
| {
"repo_name": "XENON1T/cax",
"path": "cax/tasks/corrections.py",
"copies": "1",
"size": "8398",
"license": "isc",
"hash": 8844547248270687000,
"line_mean": 40.7810945274,
"line_max": 119,
"alpha_frac": 0.6299118838,
"autogenerated": false,
"ratio": 3.991444866920152,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5121356750720152,
"avg_score": null,
"num_lines": null
} |
"""add_email_and_verified_institution_to_workbench_researcher
Revision ID: 64e68e221460
Revises: f5520e9dde27
Create Date: 2020-02-18 12:05:18.783714
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '64e68e221460'
down_revision = 'f5520e9dde27'
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade function (e.g. ``upgrade_rdr``)."""
    handler = globals()[f"upgrade_{engine_name}"]
    handler()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade function (e.g. ``downgrade_rdr``)."""
    handler = globals()[f"downgrade_{engine_name}"]
    handler()
def upgrade_rdr():
    """Add is_verified to the institutional-affiliation tables and email to
    the researcher tables (and their history mirrors)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('workbench_institutional_affiliations', sa.Column('is_verified', sa.Boolean(), nullable=True))
    op.add_column('workbench_institutional_affiliations_history', sa.Column('is_verified', sa.Boolean(), nullable=True))
    op.add_column('workbench_researcher', sa.Column('email', sa.String(length=250), nullable=True))
    op.add_column('workbench_researcher_history', sa.Column('email', sa.String(length=250), nullable=True))
    # ### end Alembic commands ###
def downgrade_rdr():
    """Drop the columns added by upgrade_rdr (reverse order of creation)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('workbench_researcher_history', 'email')
    op.drop_column('workbench_researcher', 'email')
    op.drop_column('workbench_institutional_affiliations_history', 'is_verified')
    op.drop_column('workbench_institutional_affiliations', 'is_verified')
    # ### end Alembic commands ###
def upgrade_metrics():
    """This revision makes no changes to the metrics database."""
def downgrade_metrics():
    """This revision makes no changes to the metrics database."""
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/64e68e221460_add_email_and_verified_institution_to_.py",
"copies": "1",
"size": "1719",
"license": "bsd-3-clause",
"hash": 1068102314247900000,
"line_mean": 30.2545454545,
"line_max": 120,
"alpha_frac": 0.695753345,
"autogenerated": false,
"ratio": 3.397233201581028,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9492877457445583,
"avg_score": 0.02002181782708888,
"num_lines": 55
} |
"""add email column
Revision ID: 02bcf59a82c9
Revises:
Create Date: 2017-09-17 20:37:18.323728
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '02bcf59a82c9'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the roles and users tables, with unique indexes on users.email
    and users.username."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('roles',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=64), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('name')
    )
    op.create_table('users',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('email', sa.String(length=64), nullable=True),
    sa.Column('username', sa.String(length=64), nullable=True),
    sa.Column('role_id', sa.Integer(), nullable=True),
    sa.Column('password_hash', sa.String(length=128), nullable=True),
    sa.Column('confirmed', sa.Boolean(), nullable=True),
    sa.ForeignKeyConstraint(['role_id'], ['roles.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # Unique indexes double as fast lookups for login by email or username.
    op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
    op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=True)
    # ### end Alembic commands ###
def downgrade():
    """Drop the users indexes and the users/roles tables (reverse order, so
    the users->roles foreign key is removed before roles)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_users_username'), table_name='users')
    op.drop_index(op.f('ix_users_email'), table_name='users')
    op.drop_table('users')
    op.drop_table('roles')
    # ### end Alembic commands ###
| {
"repo_name": "mikkylok/mikky.lu",
"path": "migrations/versions/02bcf59a82c9_add_email_column.py",
"copies": "1",
"size": "1555",
"license": "mit",
"hash": 4243794847522219000,
"line_mean": 31.3958333333,
"line_max": 82,
"alpha_frac": 0.6559485531,
"autogenerated": false,
"ratio": 3.294491525423729,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44504400785237286,
"avg_score": null,
"num_lines": null
} |
"""Add embed application model
Revision ID: 4811ae4298e3
Revises: 51d493c4d3e1
Create Date: 2015-07-31 15:01:02.135850
"""
# revision identifiers, used by Alembic.
revision = '4811ae4298e3'
down_revision = '51d493c4d3e1'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the EmbedApplications table and its lookup indexes (identifier
    is unique; the rest support sorting/filtering)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('EmbedApplications',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('url', sa.Unicode(length=255), nullable=False),
    sa.Column('name', sa.Unicode(length=100), nullable=False),
    sa.Column('owner_id', sa.Integer(), nullable=True),
    sa.Column('identifier', sa.Unicode(length=36), nullable=False),
    sa.Column('creation', sa.DateTime(), nullable=False),
    sa.Column('last_update', sa.DateTime(), nullable=False),
    sa.ForeignKeyConstraint(['owner_id'], ['GoLabOAuthUsers.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(u'ix_EmbedApplications_creation', 'EmbedApplications', ['creation'], unique=False)
    op.create_index(u'ix_EmbedApplications_identifier', 'EmbedApplications', ['identifier'], unique=True)
    op.create_index(u'ix_EmbedApplications_last_update', 'EmbedApplications', ['last_update'], unique=False)
    op.create_index(u'ix_EmbedApplications_name', 'EmbedApplications', ['name'], unique=False)
    op.create_index(u'ix_EmbedApplications_url', 'EmbedApplications', ['url'], unique=False)
    ### end Alembic commands ###
def downgrade():
    """Drop the EmbedApplications indexes and table."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(u'ix_EmbedApplications_url', table_name='EmbedApplications')
    op.drop_index(u'ix_EmbedApplications_name', table_name='EmbedApplications')
    op.drop_index(u'ix_EmbedApplications_last_update', table_name='EmbedApplications')
    op.drop_index(u'ix_EmbedApplications_identifier', table_name='EmbedApplications')
    op.drop_index(u'ix_EmbedApplications_creation', table_name='EmbedApplications')
    op.drop_table('EmbedApplications')
    ### end Alembic commands ###
| {
"repo_name": "go-lab/appcomposer",
"path": "alembic/versions/4811ae4298e3_add_embed_application_model.py",
"copies": "3",
"size": "2032",
"license": "bsd-2-clause",
"hash": -7919287928071256000,
"line_mean": 43.1739130435,
"line_max": 108,
"alpha_frac": 0.7160433071,
"autogenerated": false,
"ratio": 3.503448275862069,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5719491582962068,
"avg_score": null,
"num_lines": null
} |
"""add emergency models
Revision ID: 98ecbc1e5b66
Revises: 8e7a2acd24b8
Create Date: 2019-04-26 14:07:15.245678
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '98ecbc1e5b66'
down_revision = '8e7a2acd24b8'
branch_labels = None
depends_on = None
def upgrade():
    """Create the emergency_timeline table and its child timeline_log table
    (linked by timeline_log.timeline_id)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('emergency_timeline',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_by', sa.Unicode(length=9), nullable=False),
    sa.Column('room', sa.Unicode(length=9), nullable=False),
    sa.Column('title', sa.Unicode(length=128), nullable=False),
    sa.Column('is_closed', sa.BOOLEAN(), nullable=False),
    sa.Column('ctime', sa.DateTime(), nullable=False),
    sa.Column('utime', sa.DateTime(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('timeline_log',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('timeline_id', sa.Integer(), nullable=False),
    sa.Column('created_by', sa.Unicode(length=9), nullable=False),
    sa.Column('ctime', sa.DateTime(), nullable=False),
    sa.Column('utime', sa.DateTime(), nullable=False),
    sa.Column('entry', sa.Text(), nullable=False),
    sa.ForeignKeyConstraint(['timeline_id'], ['emergency_timeline.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop both tables; the child timeline_log goes first because of its
    foreign key to emergency_timeline."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('timeline_log')
    op.drop_table('emergency_timeline')
    # ### end Alembic commands ###
| {
"repo_name": "beproud/beproudbot",
"path": "src/alembic/migrations/versions/98ecbc1e5b66_add_emergency_models.py",
"copies": "1",
"size": "1598",
"license": "mit",
"hash": -5968449500678388000,
"line_mean": 33,
"line_max": 74,
"alpha_frac": 0.6714643304,
"autogenerated": false,
"ratio": 3.3291666666666666,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9461678702766728,
"avg_score": 0.007790458859987684,
"num_lines": 47
} |
"""Add end notifications
Revision ID: 081d4c97060a
Revises: 7aabedfb5e3a
Create Date: 2019-02-27 15:23:31.582776
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '081d4c97060a'
down_revision = '7aabedfb5e3a'
branch_labels = None
depends_on = None
def upgrade():
    """Add end-notification columns to roombooking reservations and rooms.

    The NOT NULL boolean columns are added with a server_default so existing
    rows are backfilled, then the default is removed so future inserts must
    set the value explicitly.
    """
    op.add_column('reservations',
                  sa.Column('end_notification_sent', sa.Boolean(), nullable=False, server_default='false'),
                  schema='roombooking')
    op.alter_column('reservations', 'end_notification_sent', server_default=None, schema='roombooking')
    op.add_column('rooms', sa.Column('end_notification_daily', sa.Integer()), schema='roombooking')
    op.add_column('rooms', sa.Column('end_notification_weekly', sa.Integer()), schema='roombooking')
    op.add_column('rooms', sa.Column('end_notification_monthly', sa.Integer()), schema='roombooking')
    op.add_column('rooms',
                  sa.Column('end_notifications_enabled', sa.Boolean(), nullable=False, server_default='true'),
                  schema='roombooking')
    op.alter_column('rooms', 'end_notifications_enabled', server_default=None, schema='roombooking')
def downgrade():
    """Drop all end-notification columns added by upgrade()."""
    op.drop_column('reservations', 'end_notification_sent', schema='roombooking')
    op.drop_column('rooms', 'end_notification_daily', schema='roombooking')
    op.drop_column('rooms', 'end_notification_weekly', schema='roombooking')
    op.drop_column('rooms', 'end_notification_monthly', schema='roombooking')
    op.drop_column('rooms', 'end_notifications_enabled', schema='roombooking')
| {
"repo_name": "pferreir/indico",
"path": "indico/migrations/versions/20190227_1523_081d4c97060a_add_end_notifications.py",
"copies": "7",
"size": "1601",
"license": "mit",
"hash": -769335640972611000,
"line_mean": 41.1315789474,
"line_max": 110,
"alpha_frac": 0.6958151156,
"autogenerated": false,
"ratio": 3.443010752688172,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0020969651737242014,
"num_lines": 38
} |
"""add enrolling status
Revision ID: 2be6f6d054e8
Revises: f17f0686ea6b
Create Date: 2018-03-21 13:57:41.685020
"""
import model.utils
import sqlalchemy as sa
from alembic import op
from rdr_service.model.site_enums import EnrollingStatus
# revision identifiers, used by Alembic.
revision = "2be6f6d054e8"
down_revision = "f17f0686ea6b"
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade function (e.g. ``upgrade_rdr``)."""
    handler = globals()[f"upgrade_{engine_name}"]
    handler()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade function (e.g. ``downgrade_rdr``)."""
    handler = globals()[f"downgrade_{engine_name}"]
    handler()
def upgrade_rdr():
    """Add the nullable enrolling_status enum column to the site table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("site", sa.Column("enrolling_status", model.utils.Enum(EnrollingStatus), nullable=True))
    # ### end Alembic commands ###
def downgrade_rdr():
    """Remove the enrolling_status column from the site table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("site", "enrolling_status")
    # ### end Alembic commands ###
def upgrade_metrics():
    """This revision makes no changes to the metrics database."""
def downgrade_metrics():
    """This revision makes no changes to the metrics database."""
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/2be6f6d054e8_add_enrolling_status.py",
"copies": "1",
"size": "1200",
"license": "bsd-3-clause",
"hash": 8340926055682165000,
"line_mean": 23,
"line_max": 106,
"alpha_frac": 0.6783333333,
"autogenerated": false,
"ratio": 3.4285714285714284,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9605970182432176,
"avg_score": 0.00018691588785046728,
"num_lines": 50
} |
"""add enrollment site to participant table
Revision ID: c069abb92cc0
Revises: df7c40b6a209
Create Date: 2020-08-18 20:59:18.237666
"""
from alembic import op
import sqlalchemy as sa
import rdr_service.model.utils
from rdr_service.participant_enums import PhysicalMeasurementsStatus, QuestionnaireStatus, OrderStatus
from rdr_service.participant_enums import WithdrawalStatus, WithdrawalReason, SuspensionStatus, QuestionnaireDefinitionStatus
from rdr_service.participant_enums import EnrollmentStatus, Race, SampleStatus, OrganizationType, BiobankOrderStatus
from rdr_service.participant_enums import OrderShipmentTrackingStatus, OrderShipmentStatus
from rdr_service.participant_enums import MetricSetType, MetricsKey, GenderIdentity
from rdr_service.model.base import add_table_history_table, drop_table_history_table
from rdr_service.model.code import CodeType
from rdr_service.model.site_enums import SiteStatus, EnrollingStatus, DigitalSchedulingStatus, ObsoleteStatus
# revision identifiers, used by Alembic.
revision = 'c069abb92cc0'
down_revision = 'df7c40b6a209'
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade function (e.g. ``upgrade_rdr``)."""
    handler = globals()[f"upgrade_{engine_name}"]
    handler()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade function (e.g. ``downgrade_rdr``)."""
    handler = globals()[f"downgrade_{engine_name}"]
    handler()
def upgrade_rdr():
    """Add enrollment_site_id (FK to site) to participant and
    participant_history, and a free-text site_type column to site."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Passing None as the constraint name lets Alembic auto-generate it from
    # the naming convention (if one is configured).
    op.add_column('participant', sa.Column('enrollment_site_id', sa.Integer(), nullable=True))
    op.create_foreign_key(None, 'participant', 'site', ['enrollment_site_id'], ['site_id'])
    op.add_column('participant_history', sa.Column('enrollment_site_id', sa.Integer(), nullable=True))
    op.create_foreign_key(None, 'participant_history', 'site', ['enrollment_site_id'], ['site_id'])
    op.add_column('site', sa.Column('site_type', sa.String(length=255), nullable=True))
    # ### end Alembic commands ###
def downgrade_rdr():
    """Reverse upgrade_rdr: drop the FKs and columns.

    NOTE(review): op.drop_constraint(None, ...) is an autogenerate leftover --
    Alembic cannot drop an unnamed constraint, so this will fail at runtime
    unless the actual FK constraint name is filled in. Confirm and fix.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('site', 'site_type')
    op.drop_constraint(None, 'participant_history', type_='foreignkey')
    op.drop_column('participant_history', 'enrollment_site_id')
    op.drop_constraint(None, 'participant', type_='foreignkey')
    op.drop_column('participant', 'enrollment_site_id')
    # ### end Alembic commands ###
def upgrade_metrics():
    """This revision makes no changes to the metrics database."""
def downgrade_metrics():
    """This revision makes no changes to the metrics database."""
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/c069abb92cc0_add_enrollment_site_to_participant_table.py",
"copies": "1",
"size": "2559",
"license": "bsd-3-clause",
"hash": 6976407588853596000,
"line_mean": 36.0869565217,
"line_max": 125,
"alpha_frac": 0.7342711997,
"autogenerated": false,
"ratio": 3.5991561181434597,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48334273178434595,
"avg_score": null,
"num_lines": null
} |
"""add enrollment site to ps
Revision ID: d28bd6bd0a8c
Revises: a233678becb4
Create Date: 2020-10-15 14:17:35.828066
"""
from alembic import op
import sqlalchemy as sa
import rdr_service.model.utils
from rdr_service.participant_enums import PhysicalMeasurementsStatus, QuestionnaireStatus, OrderStatus
from rdr_service.participant_enums import WithdrawalStatus, WithdrawalReason, SuspensionStatus, QuestionnaireDefinitionStatus
from rdr_service.participant_enums import EnrollmentStatus, Race, SampleStatus, OrganizationType, BiobankOrderStatus
from rdr_service.participant_enums import OrderShipmentTrackingStatus, OrderShipmentStatus
from rdr_service.participant_enums import MetricSetType, MetricsKey, GenderIdentity
from rdr_service.model.base import add_table_history_table, drop_table_history_table
from rdr_service.model.code import CodeType
from rdr_service.model.site_enums import SiteStatus, EnrollingStatus, DigitalSchedulingStatus, ObsoleteStatus
# revision identifiers, used by Alembic.
revision = 'd28bd6bd0a8c'
down_revision = 'a233678becb4'
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade function (e.g. ``upgrade_rdr``)."""
    handler = globals()[f"upgrade_{engine_name}"]
    handler()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade function (e.g. ``downgrade_rdr``)."""
    handler = globals()[f"downgrade_{engine_name}"]
    handler()
def upgrade_rdr():
    """Add enrollment_site_id (FK to site) to participant_summary."""
    # ### commands auto generated by Alembic - please adjust! ###
    # None as constraint name lets Alembic auto-generate the FK name.
    op.add_column('participant_summary', sa.Column('enrollment_site_id', sa.Integer(), nullable=True))
    op.create_foreign_key(None, 'participant_summary', 'site', ['enrollment_site_id'], ['site_id'])
    # ### end Alembic commands ###
def downgrade_rdr():
    """Drop the enrollment_site_id FK and column from participant_summary.

    NOTE(review): op.drop_constraint(None, ...) is an autogenerate leftover --
    Alembic cannot drop an unnamed constraint, so this will fail at runtime
    unless the actual FK constraint name is filled in. Confirm and fix.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(None, 'participant_summary', type_='foreignkey')
    op.drop_column('participant_summary', 'enrollment_site_id')
    # ### end Alembic commands ###
def upgrade_metrics():
    """This revision makes no changes to the metrics database."""
def downgrade_metrics():
    """This revision makes no changes to the metrics database."""
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/d28bd6bd0a8c_add_enrollment_site_to_ps.py",
"copies": "1",
"size": "2108",
"license": "bsd-3-clause",
"hash": 914825404124579300,
"line_mean": 33,
"line_max": 125,
"alpha_frac": 0.745256167,
"autogenerated": false,
"ratio": 3.6282271944922546,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9781363848667571,
"avg_score": 0.018423902564936694,
"num_lines": 62
} |
"""add enrollment status time to participant summary
Revision ID: c7c4b2f17f46
Revises: 147b312fe1c1
Create Date: 2018-10-31 11:34:00.788777
"""
import model.utils
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "c7c4b2f17f46"
down_revision = "147b312fe1c1"
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade function (e.g. ``upgrade_rdr``)."""
    handler = globals()[f"upgrade_{engine_name}"]
    handler()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade function (e.g. ``downgrade_rdr``)."""
    handler = globals()[f"downgrade_{engine_name}"]
    handler()
def upgrade_rdr():
    """Add the three enrollment-status timestamp columns to
    participant_summary (member, core-ordered-sample, core-stored-sample)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        "participant_summary",
        sa.Column("enrollment_status_core_ordered_sample_time", model.utils.UTCDateTime(), nullable=True),
    )
    op.add_column(
        "participant_summary",
        sa.Column("enrollment_status_core_stored_sample_time", model.utils.UTCDateTime(), nullable=True),
    )
    op.add_column(
        "participant_summary", sa.Column("enrollment_status_member_time", model.utils.UTCDateTime(), nullable=True)
    )
    # ### end Alembic commands ###
def downgrade_rdr():
    """Drop the enrollment-status timestamp columns added by upgrade_rdr."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("participant_summary", "enrollment_status_member_time")
    op.drop_column("participant_summary", "enrollment_status_core_stored_sample_time")
    op.drop_column("participant_summary", "enrollment_status_core_ordered_sample_time")
    # ### end Alembic commands ###
def upgrade_metrics():
    """This revision makes no changes to the metrics database."""
def downgrade_metrics():
    """This revision makes no changes to the metrics database."""
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/c7c4b2f17f46_add_enrollment_status_time_to_.py",
"copies": "1",
"size": "1733",
"license": "bsd-3-clause",
"hash": 391299010633236100,
"line_mean": 27.8833333333,
"line_max": 115,
"alpha_frac": 0.679746105,
"autogenerated": false,
"ratio": 3.595435684647303,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47751817896473026,
"avg_score": null,
"num_lines": null
} |
"""add entries to lots table for DOS
Revision ID: 430
Revises: 420
Create Date: 2015-11-16 11:34:41.412730
"""
# revision identifiers, used by Alembic.
revision = '430'
down_revision = '420'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column
def upgrade():
    """Insert the four Digital Outcomes and Specialists lots and link each of
    them to the digital-outcomes-and-specialists framework."""
    lots_table = table(
        'lots',
        column('name', sa.String),
        column('slug', sa.String),
        column('one_service_limit', sa.Boolean)
    )
    lot_records = [
        ('Digital outcomes', 'digital-outcomes', True),
        ('Digital specialists', 'digital-specialists', True),
        ('User research studios', 'user-research-studios', False),
        ('User research participants', 'user-research-participants', True),
    ]
    op.bulk_insert(lots_table, [
        {'name': name, 'slug': slug, 'one_service_limit': limit}
        for name, slug, limit in lot_records
    ])

    bind = op.get_bind()
    framework_rows = list(bind.execute(
        "SELECT id FROM frameworks WHERE slug = 'digital-outcomes-and-specialists'"
    ).fetchall())
    lot_rows = list(bind.execute(
        "SELECT id FROM lots WHERE slug in ('digital-outcomes',"
        "'digital-specialists', 'user-research-studios',"
        " 'user-research-participants')"
    ).fetchall())
    if not framework_rows:
        raise Exception("Framework not found")
    framework_id = framework_rows[0]["id"]
    for lot_row in lot_rows:
        op.execute("INSERT INTO framework_lots (framework_id, lot_id) VALUES({}, {})".format(
            framework_id, lot_row["id"]))
def downgrade():
    """Remove the DOS framework_lots links and the four DOS lot records.

    Raises:
        Exception: if the digital-outcomes-and-specialists framework row is
            missing, mirroring the explicit check in upgrade().
    """
    conn = op.get_bind()
    res = conn.execute("SELECT id FROM frameworks WHERE slug = 'digital-outcomes-and-specialists'")
    framework = list(res.fetchall())
    # Fail with a clear message instead of an IndexError on framework[0]
    # below when the framework row is absent (consistent with upgrade()).
    if len(framework) == 0:
        raise Exception("Framework not found")
    op.execute("""
        DELETE FROM framework_lots WHERE framework_id={}
    """.format(framework[0]['id']))
    op.execute("""
        DELETE from lots WHERE slug in ('digital-outcomes', 'digital-specialists',
        'user-research-studios', 'user-research-participants');
    """)
| {
"repo_name": "alphagov/digitalmarketplace-api",
"path": "migrations/versions/430_add_entries_to_lots_table_for_dos.py",
"copies": "1",
"size": "2077",
"license": "mit",
"hash": -8205103368155994000,
"line_mean": 30.4696969697,
"line_max": 99,
"alpha_frac": 0.6186807896,
"autogenerated": false,
"ratio": 3.7155635062611805,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9819317387665065,
"avg_score": 0.002985381639223049,
"num_lines": 66
} |
"""Add Envelope Functionality
Revision ID: b3c0c76ac2e6
Revises: c7f05adcf7d9
Create Date: 2018-04-06 20:17:17.556595
"""
# revision identifiers, used by Alembic.
revision = "b3c0c76ac2e6"
down_revision = "c7f05adcf7d9"
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the envelope_templates and envelopes tables."""
    template_columns = (
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("agency_ein", sa.String(length=4), nullable=True),
        sa.Column("title", sa.String(), nullable=False),
        sa.Column("template_name", sa.String(), nullable=False),
        sa.ForeignKeyConstraint(["agency_ein"], ["agencies.ein"]),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table("envelope_templates", *template_columns)

    # envelopes shares its primary key with responses (one-to-one extension).
    envelope_columns = (
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("latex", sa.String(), nullable=True),
        sa.ForeignKeyConstraint(["id"], ["responses.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table("envelopes", *envelope_columns)
def downgrade():
    """Drop the envelope tables, dependent table first."""
    for table_name in ("envelopes", "envelope_templates"):
        op.drop_table(table_name)
| {
"repo_name": "CityOfNewYork/NYCOpenRecords",
"path": "migrations/versions/b3c0c76ac2e6_add_envelope_functionality.py",
"copies": "1",
"size": "1238",
"license": "apache-2.0",
"hash": -2084686863915391700,
"line_mean": 28.4761904762,
"line_max": 68,
"alpha_frac": 0.640549273,
"autogenerated": false,
"ratio": 3.4677871148459385,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9587264841638523,
"avg_score": 0.004214309241483154,
"num_lines": 42
} |
"""Add epoch column for smarter refetch tracking
Revision ID: d5dba845418b
Revises: 2d10f39bfbfd
Create Date: 2020-04-26 09:51:40.312747
"""
# revision identifiers, used by Alembic.
revision = 'd5dba845418b'
down_revision = '2d10f39bfbfd'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy_utils.types import TSVectorType
from sqlalchemy_searchable import make_searchable
import sqlalchemy_utils
# Patch in knowledge of the citext type, so it reflects properly.
from sqlalchemy.dialects.postgresql.base import ischema_names
import citext
import queue
import datetime
from sqlalchemy.dialects.postgresql import ENUM
from sqlalchemy.dialects.postgresql import JSON
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.dialects.postgresql import TSVECTOR
ischema_names['citext'] = citext.CIText
def upgrade():
    """Add a nullable integer 'epoch' column to both web-page tables and
    their *_version shadow tables."""
    # Long timeout: these ALTERs touch very large tables.
    op.execute("SET statement_timeout TO 144000000;")
    for table_name in ('raw_web_pages', 'web_pages'):
        op.add_column(table_name, sa.Column('epoch', sa.Integer(), nullable=True))
        op.add_column(table_name + '_version',
                      sa.Column('epoch', sa.Integer(), autoincrement=False, nullable=True))
def downgrade():
    """Drop every 'epoch' column added by upgrade(), in reverse order."""
    for table_name in ('web_pages', 'raw_web_pages'):
        op.drop_column(table_name + '_version', 'epoch')
        op.drop_column(table_name, 'epoch')
| {
"repo_name": "fake-name/ReadableWebProxy",
"path": "alembic/versions/2020-04-26_d5dba845418b_add_epoch_column_for_smarter_refetch_.py",
"copies": "1",
"size": "1729",
"license": "bsd-3-clause",
"hash": 3645558356344198000,
"line_mean": 32.25,
"line_max": 112,
"alpha_frac": 0.7374204743,
"autogenerated": false,
"ratio": 3.5,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9682369848787611,
"avg_score": 0.011010125102477782,
"num_lines": 52
} |
# Add error handling to Exercise P3.10. If the user provides an invalid unit for the
# altitude, print an error message and end the program.
from sys import exit

# Freezing and boiling points of water at sea level for each temperature scale.
FREEZE_CELSIUS = 0
FREEZE_FAHRENHEIT = 32
BOIL_CELSIUS = 100
BOIL_FAHRENHEIT = 212
# Boiling point drops one degree per this much altitude (values from P3.10).
METERS_PER_DEGREE = 300
FEET_PER_DEGREE = 1000

temperature = float(input("Enter the temperature: "))
# 'scale' instead of 'type': never shadow the builtin type().
scale = str(input("Enter C for celsius or F for fahrenheit: "))
altitude = float(input("Enter the altitude above sea level: "))
units = str(input("Enter M for meters or F for feet: "))

# Compute the boiling-point drop once; the original duplicated this logic in
# both temperature-scale branches with identical divisors.
if units == "M":
    boiling_drop = altitude / METERS_PER_DEGREE
elif units == "F":
    boiling_drop = altitude / FEET_PER_DEGREE
else:
    exit("Invalid unit for the altitude. Exiting...")

# Any scale other than "C" is treated as Fahrenheit, as in the original.
if scale == "C":
    freeze_point = FREEZE_CELSIUS
    boil_point = BOIL_CELSIUS - boiling_drop
else:
    freeze_point = FREEZE_FAHRENHEIT
    boil_point = BOIL_FAHRENHEIT - boiling_drop

if temperature <= freeze_point:
    print("Solid")
elif temperature >= boil_point:
    print("Gas")
else:
    print("Liquid")
"repo_name": "futurepr0n/Books-solutions",
"path": "Python-For-Everyone-Horstmann/Chapter3-Decisions/P3.11.py",
"copies": "1",
"size": "1229",
"license": "mit",
"hash": -5177496381354171000,
"line_mean": 22.6538461538,
"line_max": 84,
"alpha_frac": 0.6159479251,
"autogenerated": false,
"ratio": 3.330623306233062,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4446571231333062,
"avg_score": null,
"num_lines": null
} |
"""Add ErrorLog table.
Revision ID: 9715822acf6c
Revises: 860cb49889a9
Create Date: 2017-02-27 10:15:42.372833
"""
# revision identifiers, used by Alembic.
revision = '9715822acf6c'
down_revision = '860cb49889a9'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the errorlog table and repoint pageviews at it via id_errorlog."""
    errorlog_columns = (
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('function', sa.String(length=100), nullable=True),
        sa.Column('location', sa.String(length=255), nullable=True),
        sa.Column('message', sa.Text(), nullable=True),
        sa.Column('traceback', sa.Text(), nullable=True),
        sa.Column('version', sa.String(length=100), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table('errorlog', *errorlog_columns)
    # Batch mode so the column swap works on backends with limited ALTER support.
    with op.batch_alter_table('pageviews') as batch:
        batch.add_column(sa.Column('id_errorlog', sa.Integer(), nullable=True))
        batch.drop_column('error_message')
def downgrade():
    """Restore pageviews.error_message, remove id_errorlog, drop errorlog."""
    with op.batch_alter_table('pageviews') as batch:
        batch.add_column(sa.Column('error_message', sa.Text(), nullable=True))
        batch.drop_column('id_errorlog')
    op.drop_table('errorlog')
| {
"repo_name": "airbnb/knowledge-repo",
"path": "knowledge_repo/app/migrations/versions/9715822acf6c_add_errorlog_table.py",
"copies": "1",
"size": "1213",
"license": "apache-2.0",
"hash": 6429767738368144000,
"line_mean": 30.1025641026,
"line_max": 82,
"alpha_frac": 0.6628194559,
"autogenerated": false,
"ratio": 3.269541778975741,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.941600609892185,
"avg_score": 0.003271027190778306,
"num_lines": 39
} |
"""addErrorsPresent
Revision ID: a17156edb8a0
Revises: cdcaa9c693e3
Create Date: 2016-04-08 11:40:49.599000
"""
# revision identifiers, used by Alembic.
revision = 'a17156edb8a0'
down_revision = 'a87f915e4b62'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Run the upgrade routine for the given engine, upgrade_<engine_name>()."""
    handler_name = "upgrade_%s" % engine_name
    globals()[handler_name]()
def downgrade(engine_name):
    """Run the downgrade routine for the given engine, downgrade_<engine_name>()."""
    handler_name = "downgrade_%s" % engine_name
    globals()[handler_name]()
def upgrade_error_data():
    """Add the nullable errors_present flag to file_status."""
    flag_column = sa.Column('errors_present', sa.Boolean(), nullable=True)
    op.add_column('file_status', flag_column)
def downgrade_error_data():
    """Remove the errors_present flag from file_status."""
    op.drop_column('file_status', 'errors_present')
def upgrade_job_tracker():
    """No job_tracker schema changes in this revision."""
def downgrade_job_tracker():
    """No job_tracker schema changes to revert in this revision."""
def upgrade_user_manager():
    """No user_manager schema changes in this revision."""
def downgrade_user_manager():
    """No user_manager schema changes to revert in this revision."""
| {
"repo_name": "fedspendingtransparency/data-act-core",
"path": "dataactcore/migrations/versions/a17156edb8a0_adderrorspresent.py",
"copies": "1",
"size": "1393",
"license": "cc0-1.0",
"hash": -5968881429426676000,
"line_mean": 20.765625,
"line_max": 90,
"alpha_frac": 0.664034458,
"autogenerated": false,
"ratio": 3.6754617414248023,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48394961994248026,
"avg_score": null,
"num_lines": null
} |
"""add_error_table_fks
Revision ID: 0c857b50962a
Revises: 51679e7bfe01
Create Date: 2016-09-26 21:42:17.896556
"""
# revision identifiers, used by Alembic.
revision = '0c857b50962a'
down_revision = '51679e7bfe01'
branch_labels = None
depends_on = None
from alembic import op
def upgrade(engine_name):
    """Dispatch to upgrade_<engine_name>() for the given engine."""
    handler_name = "upgrade_%s" % engine_name
    globals()[handler_name]()
def downgrade(engine_name):
    """Dispatch to downgrade_<engine_name>() for the given engine."""
    handler_name = "downgrade_%s" % engine_name
    globals()[handler_name]()
def upgrade_data_broker():
    """Add foreign keys from error_metadata to file_type and rule_severity."""
    foreign_keys = (
        ('fk_target_file_type_file_status_id', 'file_type', ['target_file_type_id'], ['file_type_id']),
        ('fk_error_severity_id', 'rule_severity', ['severity_id'], ['rule_severity_id']),
        ('fk_file_type_file_status_id', 'file_type', ['file_type_id'], ['file_type_id']),
    )
    for constraint_name, referent_table, local_cols, remote_cols in foreign_keys:
        op.create_foreign_key(constraint_name, 'error_metadata', referent_table,
                              local_cols, remote_cols)
def downgrade_data_broker():
    """Drop the error_metadata foreign keys added by upgrade_data_broker()."""
    for constraint_name in (
        'fk_file_type_file_status_id',
        'fk_error_severity_id',
        'fk_target_file_type_file_status_id',
    ):
        op.drop_constraint(constraint_name, 'error_metadata', type_='foreignkey')
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/0c857b50962a_add_error_table_fks.py",
"copies": "2",
"size": "1347",
"license": "cc0-1.0",
"hash": 3923069668894865400,
"line_mean": 30.3255813953,
"line_max": 137,
"alpha_frac": 0.6837416481,
"autogenerated": false,
"ratio": 3.147196261682243,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48309379097822436,
"avg_score": null,
"num_lines": null
} |
"""Add event_id to Announcements and sponsor Levels
Revision ID: 4a435efab65
Revises: 177a65486a0
Create Date: 2015-04-28 00:07:35.365746
"""
# revision identifiers, used by Alembic.
revision = '4a435efab65'
down_revision = '4fb5af01b5'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add a NOT NULL event_id foreign key to announcements and sponsor_levels.

    Existing rows are backfilled with the most recent event before the
    column is tightened to NOT NULL.
    """
    # Phase 1: add the nullable column plus its FK to both tables.
    for table_name in ('announcements', 'sponsor_levels'):
        op.add_column(table_name, sa.Column('event_id', sa.Integer(), nullable=True))
        op.create_foreign_key(op.f(table_name + '_event_id_fkey'),
                              table_name, 'events', ['event_id'], ['id'])
    # Phase 2 (data migration): point existing rows at the latest event.
    for table_name in ('announcements', 'sponsor_levels'):
        op.execute(
            'UPDATE {} SET event_id=('
            'SELECT id FROM events ORDER BY id DESC LIMIT 1'
            ')'.format(table_name)
        )
    # Phase 3: now that every row has a value, require one.
    for table_name in ('announcements', 'sponsor_levels'):
        op.alter_column(table_name, 'event_id',
                        existing_type=sa.INTEGER(),
                        nullable=False)
def downgrade():
    """Reverse upgrade(): relax event_id to nullable, then drop FK and column."""
    for table_name in ('sponsor_levels', 'announcements'):
        op.alter_column(table_name, 'event_id',
                        existing_type=sa.INTEGER(),
                        nullable=True)
    for table_name in ('sponsor_levels', 'announcements'):
        op.drop_constraint(op.f(table_name + '_event_id_fkey'), table_name,
                           type_='foreignkey')
        op.drop_column(table_name, 'event_id')
| {
"repo_name": "djds23/pygotham-1",
"path": "migrations/versions/4a435efab65_add_event_id_to_announcements_and_.py",
"copies": "3",
"size": "2134",
"license": "bsd-3-clause",
"hash": -2158877892630555000,
"line_mean": 34.5666666667,
"line_max": 113,
"alpha_frac": 0.6298031865,
"autogenerated": false,
"ratio": 3.550748752079867,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.011801729148334316,
"num_lines": 60
} |
"""Add event log.
Revision ID: d17d4d4fd1ee
Revises: 95779b509fe4
Create Date: 2016-07-22 12:21:14.296489
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'd17d4d4fd1ee'
down_revision = '95779b509fe4'
def upgrade():
    """Create the event_log table with an index on its action column."""
    event_log_columns = (
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('action', sa.Unicode(length=255), nullable=True),
        sa.Column('source_ip', sa.Unicode(length=255), nullable=True),
        sa.Column('path', sa.Unicode(), nullable=True),
        sa.Column('query', postgresql.JSONB(), nullable=True),
        sa.Column('data', postgresql.JSONB(), nullable=True),
        sa.Column('role_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table('event_log', *event_log_columns)
    op.create_index(op.f('ix_event_log_action'), 'event_log', ['action'], unique=False)
def downgrade():
    """Drop the event_log action index and then the table itself."""
    op.drop_index(op.f('ix_event_log_action'), table_name='event_log')
    op.drop_table('event_log')
| {
"repo_name": "pudo/aleph",
"path": "aleph/migrate/versions/d17d4d4fd1ee_add_event_log.py",
"copies": "4",
"size": "1225",
"license": "mit",
"hash": -7103205565923469000,
"line_mean": 33.0277777778,
"line_max": 87,
"alpha_frac": 0.6579591837,
"autogenerated": false,
"ratio": 3.232189973614776,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0008207070707070707,
"num_lines": 36
} |
"""Add event roles table
Revision ID: f1eee7b4880a
Revises:
Create Date: 2017-09-05 14:45:28.673606
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = 'f1eee7b4880a'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the events.roles table and its events.role_members join table."""
    role_columns = (
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('event_id', sa.Integer(), nullable=False, index=True),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('code', sa.String(), nullable=False),
        sa.Column('color', sa.String(), nullable=False),
        sa.Index(None, 'event_id', 'code', unique=True),
        # Role codes are stored upper-case; enforced at the DB level.
        sa.CheckConstraint('code = upper(code)', name='uppercase_code'),
        sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table('roles', *role_columns, schema='events')

    member_columns = (
        sa.Column('role_id', sa.Integer(), nullable=False, index=True),
        sa.Column('user_id', sa.Integer(), nullable=False, index=True),
        sa.ForeignKeyConstraint(['role_id'], ['events.roles.id']),
        sa.ForeignKeyConstraint(['user_id'], ['users.users.id']),
        sa.PrimaryKeyConstraint('role_id', 'user_id'),
    )
    op.create_table('role_members', *member_columns, schema='events')
def downgrade():
    """Drop the role tables, join table first."""
    for table_name in ('role_members', 'roles'):
        op.drop_table(table_name, schema='events')
| {
"repo_name": "indico/indico",
"path": "indico/migrations/versions/20170905_1445_f1eee7b4880a_add_event_roles_table.py",
"copies": "7",
"size": "1423",
"license": "mit",
"hash": -5614272291211560000,
"line_mean": 29.9347826087,
"line_max": 72,
"alpha_frac": 0.6268446943,
"autogenerated": false,
"ratio": 3.4877450980392157,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.7614589792339215,
"avg_score": null,
"num_lines": null
} |
"""add events
Revision ID: 1ae579134fd
Revises: 1bd59c36b7b
Create Date: 2013-10-26 17:46:21.489618
"""
# revision identifiers, used by Alembic.
revision = '1ae579134fd'
down_revision = '1bd59c36b7b'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the events table and the events_users association table."""
    event_columns = (
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('slug', sa.String(length=50), nullable=True),
        sa.Column('begins', sa.Date(), nullable=True),
        sa.Column('ends', sa.Date(), nullable=True),
        sa.Column('number_of_recipients', sa.SmallInteger(), nullable=True),
        sa.Column('suggested_limit', sa.Float(), nullable=True),
        sa.Column('active', sa.Boolean(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('slug'),
    )
    op.create_table('events', *event_columns)

    association_columns = (
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('event_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['event_id'], ['events.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        # Empty PK, as in the auto-generated original.
        sa.PrimaryKeyConstraint(),
    )
    op.create_table('events_users', *association_columns)
def downgrade():
    """Drop the association table first, then events."""
    for table_name in ('events_users', 'events'):
        op.drop_table(table_name)
| {
"repo_name": "dirn/Secret-Santa",
"path": "migrations/versions/1ae579134fd_add_events.py",
"copies": "1",
"size": "1407",
"license": "bsd-3-clause",
"hash": -6131925884669122000,
"line_mean": 30.2666666667,
"line_max": 72,
"alpha_frac": 0.6602700782,
"autogenerated": false,
"ratio": 3.398550724637681,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4558820802837681,
"avg_score": null,
"num_lines": null
} |
"""add events table
Revision ID: 34133c318c56
Revises: 1710dfa54dbd
Create Date: 2014-02-08 16:04:20.919954
"""
# revision identifiers, used by Alembic.
revision = '34133c318c56'
down_revision = '1710dfa54dbd'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the event table."""
    event_columns = (
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('race_id', sa.Integer(), nullable=True),
        sa.Column('player', sa.Integer(), nullable=True),
        sa.Column('timestamp', sa.Numeric(precision=7, scale=1), nullable=True),
        sa.Column('lap', sa.Integer(), nullable=True),
        sa.Column('event_type', sa.Enum('Lap', 'Item', 'Collision', 'Pass', 'Shortcut', 'Tag', 'Fall', 'Reverse', name='event_type'), nullable=True),
        sa.Column('event_subtype', sa.String(), nullable=True),
        sa.Column('event_info', sa.String(), nullable=True),
        # Self-referencing FK: an event may link to another event.
        sa.Column('linked_event_id', sa.Integer(), nullable=True),
        sa.Column('image_url', sa.VARCHAR(length=1024), nullable=True),
        sa.ForeignKeyConstraint(['linked_event_id'], ['event.id'], ),
        sa.ForeignKeyConstraint(['race_id'], ['race.id'], ),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table('event', *event_columns)
def downgrade():
    """Drop the event table."""
    op.drop_table('event')
| {
"repo_name": "kartyboyz/n64-storage-flask",
"path": "migrations/versions/34133c318c56_add_events_table.py",
"copies": "1",
"size": "1392",
"license": "mit",
"hash": 3652565531179057000,
"line_mean": 33.8,
"line_max": 145,
"alpha_frac": 0.6609195402,
"autogenerated": false,
"ratio": 3.36231884057971,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.452323838077971,
"avg_score": null,
"num_lines": null
} |
"""Add expected file source and handling options.
Revision ID: 29c521f4449c
Revises: 2ad667bb4592
Create Date: 2013-04-03 22:13:32.034983
"""
# revision identifiers, used by Alembic.
revision = '29c521f4449c'
down_revision = '2ad667bb4592'
from alembic import op
import sqlalchemy as sa
output_type_type = sa.Enum(u'diff', u'image', u'text', name=u'output_type')
source_type = sa.Enum(u'file', u'stderr', u'stdout', name=u'source')
def upgrade():
    """Create the enum types, add the source/output columns to testcase, and
    make expected_id optional."""
    bind = op.get_bind()
    # Enum types must exist before columns that reference them are added.
    output_type_type.create(bind, checkfirst=False)
    source_type.create(bind, checkfirst=False)
    new_columns = (
        sa.Column('source', source_type, server_default='stdout', nullable=False),
        sa.Column('output_filename', sa.Unicode(), nullable=True),
        sa.Column('output_type', output_type_type, server_default='diff', nullable=False),
    )
    for new_column in new_columns:
        op.add_column('testcase', new_column)
    op.alter_column('testcase', u'expected_id',
                    existing_type=sa.INTEGER(),
                    nullable=True)
def downgrade():
    """Reverse upgrade(): restore NOT NULL expected_id, drop the new columns,
    then drop the enum types."""
    op.alter_column('testcase', u'expected_id',
                    existing_type=sa.INTEGER(),
                    nullable=False)
    for column_name in ('output_type', 'output_filename', 'source'):
        op.drop_column('testcase', column_name)
    bind = op.get_bind()
    source_type.drop(bind, checkfirst=False)
    output_type_type.drop(bind, checkfirst=False)
| {
"repo_name": "ucsb-cs/submit",
"path": "submit/migrations/versions/29c521f4449c_add_expected_file_so.py",
"copies": "1",
"size": "1663",
"license": "bsd-2-clause",
"hash": -6278748922898100000,
"line_mean": 33.6458333333,
"line_max": 76,
"alpha_frac": 0.6416115454,
"autogenerated": false,
"ratio": 3.4502074688796682,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4591819014279668,
"avg_score": null,
"num_lines": null
} |
"""Add Expedition table and relationship
Revision ID: ecf3973e1118
Revises: 62f3d26f34ea
Create Date: 2016-01-21 22:34:38.311542
"""
# revision identifiers, used by Alembic.
revision = 'ecf3973e1118'
down_revision = '62f3d26f34ea'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
    """Create the expedition table and the admiral_j_expedition join table."""
    expedition_columns = (
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('resources_id', sa.Integer(), nullable=True),
        sa.Column('constraints', postgresql.JSONB(), nullable=True),
        sa.ForeignKeyConstraint(['resources_id'], ['resources.id'], ),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table('expedition', *expedition_columns)

    join_columns = (
        sa.Column('adm_id', sa.Integer(), nullable=True),
        sa.Column('expedition_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['adm_id'], ['admiral.id'], ),
        sa.ForeignKeyConstraint(['expedition_id'], ['expedition.id'], ),
    )
    op.create_table('admiral_j_expedition', *join_columns)
def downgrade():
    """Drop the join table before expedition (FK dependency order)."""
    for table_name in ('admiral_j_expedition', 'expedition'):
        op.drop_table(table_name)
| {
"repo_name": "KanColleTool/kcsrv",
"path": "migrations/versions/ecf3973e1118_add_expedition_table_and_relationship.py",
"copies": "1",
"size": "1229",
"license": "mit",
"hash": -8914400695824412000,
"line_mean": 30.5128205128,
"line_max": 67,
"alpha_frac": 0.688364524,
"autogenerated": false,
"ratio": 3.3487738419618527,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4537138365961853,
"avg_score": null,
"num_lines": null
} |
"""add external identity tables
Revision ID: 24ab8d11f014
Revises: 2bb1ba973f0b
Create Date: 2011-11-10 23:18:19.446844
"""
from __future__ import unicode_literals
import sqlalchemy as sa
from alembic import op
# downgrade revision identifier, used by Alembic.
revision = "24ab8d11f014"
down_revision = "2bb1ba973f0b"
def upgrade():
    """Create the external_identities table (per-provider tokens for users)."""
    identity_columns = (
        sa.Column("external_id", sa.Unicode(255), primary_key=True),
        sa.Column("external_user_name", sa.Unicode(50), default=""),
        # Composite PK member; follows the local user on rename/delete.
        sa.Column(
            "local_user_name",
            sa.Unicode(50),
            sa.ForeignKey("users.user_name", onupdate="CASCADE", ondelete="CASCADE"),
            primary_key=True,
        ),
        sa.Column("provider_name", sa.Unicode(50), default="", primary_key=True),
        sa.Column("access_token", sa.Unicode(255), default=""),
        sa.Column("alt_token", sa.Unicode(255), default=""),
        sa.Column("token_secret", sa.Unicode(255), default=""),
    )
    op.create_table("external_identities", *identity_columns)
def downgrade():
    """Drop the external_identities table."""
    op.drop_table("external_identities")
| {
"repo_name": "ergo/ziggurat_foundations",
"path": "ziggurat_foundations/migrations/versions/24ab8d11f014_external_identities.py",
"copies": "1",
"size": "1071",
"license": "bsd-3-clause",
"hash": 9150653764453551000,
"line_mean": 27.9459459459,
"line_max": 85,
"alpha_frac": 0.6367880486,
"autogenerated": false,
"ratio": 3.3892405063291138,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45260285549291135,
"avg_score": null,
"num_lines": null
} |
"""add external id to participant
Revision ID: 147b312fe1c1
Revises: 9b2ec162bfde
Create Date: 2018-10-24 11:01:57.219090
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "147b312fe1c1"
down_revision = "9b2ec162bfde"
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Run the engine-specific upgrade routine, upgrade_<engine_name>()."""
    handler_name = "upgrade_%s" % engine_name
    globals()[handler_name]()
def downgrade(engine_name):
    """Run the engine-specific downgrade routine, downgrade_<engine_name>()."""
    handler_name = "downgrade_%s" % engine_name
    globals()[handler_name]()
def upgrade_rdr():
    """Add a unique external_id column to participant and participant_history."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("participant", sa.Column("external_id", sa.BigInteger(), nullable=True))
    # NOTE(review): constraint name is None, so the backend auto-names it.
    # downgrade_rdr() then tries drop_constraint(None, ...), which presumably
    # cannot resolve that auto-generated name — confirm against Alembic's
    # drop_constraint requirements / the project's naming convention.
    op.create_unique_constraint(None, "participant", ["external_id"])
    op.add_column("participant_history", sa.Column("external_id", sa.BigInteger(), nullable=True))
    # ### end Alembic commands ###
def downgrade_rdr():
    """Drop external_id from participant_history and participant."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("participant_history", "external_id")
    # NOTE(review): drop_constraint with name=None is generally rejected by
    # Alembic unless a naming convention supplies the name — TODO confirm this
    # downgrade actually runs; the constraint was created unnamed in upgrade_rdr().
    op.drop_constraint(None, "participant", type_="unique")
    op.drop_column("participant", "external_id")
    # ### end Alembic commands ###
def upgrade_metrics():
    """No metrics schema changes are needed for this revision."""
def downgrade_metrics():
    """No metrics schema changes to revert for this revision."""
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/147b312fe1c1_add_external_id_to_participant.py",
"copies": "1",
"size": "1405",
"license": "bsd-3-clause",
"hash": -5168182029271392000,
"line_mean": 26.5490196078,
"line_max": 98,
"alpha_frac": 0.6725978648,
"autogenerated": false,
"ratio": 3.53904282115869,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9709573036832218,
"avg_score": 0.0004135298252945312,
"num_lines": 51
} |
"""Add extra attendees to CustomForm.
Revision ID: 825dad56a2c3
Revises: 7525fd3b67d5
Create Date: 2018-01-13 16:19:50.651900
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '825dad56a2c3'
down_revision = '009352d11348'
def upgrade():
    """Add an introductions counter to custom_form and custom_form_result,
    backfilling existing results with 0."""
    for table_name in ('custom_form', 'custom_form_result'):
        op.add_column(table_name, sa.Column('introductions', sa.Integer(),
                                            nullable=True))
    connection = op.get_bind()
    connection.execute("""
    UPDATE custom_form_result
    SET introductions = 0
    WHERE introductions is NULL
    """)
def downgrade():
    """Drop the introductions columns added by upgrade()."""
    for table_name in ('custom_form_result', 'custom_form'):
        op.drop_column(table_name, 'introductions')
| {
"repo_name": "viaict/viaduct",
"path": "migrations/versions/2018_01_13_825dad56a2c3_add_extra_attendees_to_customform.py",
"copies": "1",
"size": "1077",
"license": "mit",
"hash": -6332718407557932000,
"line_mean": 28.9166666667,
"line_max": 79,
"alpha_frac": 0.6165273909,
"autogenerated": false,
"ratio": 3.8464285714285715,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4962955962328572,
"avg_score": null,
"num_lines": null
} |
"""Add failing tables
Revision ID: a3997837668c
Revises: dfc939482d8d
Create Date: 2017-10-15 15:03:08.130965
"""
# revision identifiers, used by Alembic.
revision = 'a3997837668c'
down_revision = 'dfc939482d8d'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create RepositoryAppCheckUrls and RepositoryAppFailures (with their
    indexes) and add SSL/Flash status columns to RepositoryApps."""
    check_url_columns = (
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('repository_app_id', sa.Integer(), nullable=False),
        sa.Column('url', sa.Unicode(length=1024), nullable=False),
        sa.Column('url_hash', sa.Unicode(length=255), nullable=False),
        sa.Column('supports_ssl', sa.Boolean(), nullable=True),
        sa.Column('working', sa.Boolean(), nullable=True),
        sa.Column('contains_flash', sa.Boolean(), nullable=True),
        sa.Column('last_update', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['repository_app_id'], ['RepositoryApps.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('repository_app_id', 'url_hash'),
    )
    op.create_table('RepositoryAppCheckUrls', *check_url_columns)
    for column_name in ('contains_flash', 'last_update', 'supports_ssl', 'url_hash'):
        op.create_index(op.f('ix_RepositoryAppCheckUrls_' + column_name),
                        'RepositoryAppCheckUrls', [column_name], unique=False)
    # 'url' is indexed via a 255-character prefix (mysql_length) only.
    op.create_index('ix_RepositoryAppCheckUrls_url_shortened', 'RepositoryAppCheckUrls',
                    ['url'], unique=False, mysql_length={'url': 255})
    op.create_index(op.f('ix_RepositoryAppCheckUrls_working'), 'RepositoryAppCheckUrls',
                    ['working'], unique=False)

    failure_columns = (
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('repository_app_check_url_id', sa.Integer(), nullable=True),
        sa.Column('current', sa.Boolean(), nullable=True),
        sa.Column('started', sa.DateTime(), nullable=True),
        sa.Column('ended', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['repository_app_check_url_id'], ['RepositoryAppCheckUrls.id'], ),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table('RepositoryAppFailures', *failure_columns)
    for column_name in ('current', 'ended', 'started'):
        op.create_index(op.f('ix_RepositoryAppFailures_' + column_name),
                        'RepositoryAppFailures', [column_name], unique=False)

    for column_name in ('contains_flash', 'supports_ssl'):
        op.add_column(u'RepositoryApps', sa.Column(column_name, sa.Boolean(), nullable=True))
    for column_name in ('contains_flash', 'supports_ssl'):
        op.create_index(op.f('ix_RepositoryApps_' + column_name),
                        'RepositoryApps', [column_name], unique=False)
def downgrade():
    """Revert the failure-tracking schema: remove the flash/SSL columns
    from RepositoryApps and drop the two check-url/failure tables."""
    for name in ('supports_ssl', 'contains_flash'):
        op.drop_index(op.f('ix_RepositoryApps_%s' % name), table_name='RepositoryApps')
    for name in ('supports_ssl', 'contains_flash'):
        op.drop_column(u'RepositoryApps', name)
    for name in ('started', 'ended', 'current'):
        op.drop_index(op.f('ix_RepositoryAppFailures_%s' % name), table_name='RepositoryAppFailures')
    op.drop_table('RepositoryAppFailures')
    op.drop_index(op.f('ix_RepositoryAppCheckUrls_working'), table_name='RepositoryAppCheckUrls')
    # This index was created with a plain (non-op.f) name in upgrade().
    op.drop_index('ix_RepositoryAppCheckUrls_url_shortened', table_name='RepositoryAppCheckUrls')
    for name in ('url_hash', 'supports_ssl', 'last_update', 'contains_flash'):
        op.drop_index(op.f('ix_RepositoryAppCheckUrls_%s' % name), table_name='RepositoryAppCheckUrls')
    op.drop_table('RepositoryAppCheckUrls')
    # ### end Alembic commands ###
| {
"repo_name": "porduna/appcomposer",
"path": "alembic/versions/a3997837668c_add_failing_tables.py",
"copies": "3",
"size": "4444",
"license": "bsd-2-clause",
"hash": -5431394034482506000,
"line_mean": 59.0540540541,
"line_max": 138,
"alpha_frac": 0.7229972997,
"autogenerated": false,
"ratio": 3.3489073097211755,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.008688096587700192,
"num_lines": 74
} |
"""Add FailureReason
Revision ID: 1c5907e309f1
Revises: 4a12e7f0159d
Create Date: 2014-06-02 15:31:02.991394
"""
# revision identifiers, used by Alembic.
revision = '1c5907e309f1'
down_revision = '4a12e7f0159d'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the ``failurereason`` table and its lookup indexes.

    Each row ties a failure ``reason`` string to a jobstep plus the
    denormalized job/build/project it belongs to; every FK cascades on
    delete so reasons disappear with their owners.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'failurereason',
        # NOTE(review): sa.GUID is not stock SQLAlchemy -- presumably a
        # project-registered type; confirm it is patched in before running.
        sa.Column('id', sa.GUID(), nullable=False),
        sa.Column('step_id', sa.GUID(), nullable=False),
        sa.Column('job_id', sa.GUID(), nullable=False),
        sa.Column('build_id', sa.GUID(), nullable=False),
        sa.Column('project_id', sa.GUID(), nullable=False),
        sa.Column('reason', sa.String(length=32), nullable=False),
        sa.Column('date_created', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['build_id'], ['build.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['job_id'], ['job.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['project_id'], ['project.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['step_id'], ['jobstep.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        # A given step may carry each reason at most once.
        sa.UniqueConstraint('step_id', 'reason', name='unq_failurereason_key')
    )
    # Secondary indexes for the common lookup paths.
    op.create_index('idx_failurereason_build_id', 'failurereason', ['build_id'], unique=False)
    op.create_index('idx_failurereason_job_id', 'failurereason', ['job_id'], unique=False)
    op.create_index('idx_failurereason_project_id', 'failurereason', ['project_id'], unique=False)
def downgrade():
    """Drop the ``failurereason`` table created by upgrade()."""
    target = 'failurereason'
    op.drop_table(target)
| {
"repo_name": "alex/changes",
"path": "migrations/versions/1c5907e309f1_add_failurereason.py",
"copies": "4",
"size": "1593",
"license": "apache-2.0",
"hash": 7627041655368663000,
"line_mean": 37.8536585366,
"line_max": 98,
"alpha_frac": 0.6578782172,
"autogenerated": false,
"ratio": 3.425806451612903,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.004114433445405971,
"num_lines": 41
} |
"""add fall and reverse
Revision ID: 566b0cc2f2c
Revises: 20c9101ad47f
Create Date: 2014-04-09 21:30:57.061767
"""
# revision identifiers, used by Alembic.
revision = '566b0cc2f2c'
down_revision = '20c9101ad47f'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Intentionally a no-op.

    The auto-generated ALTER of ``event.event_type`` (adding the 'Fall'
    and 'Reverse' enum members) was left commented out by the author,
    so this revision changes nothing.
    """
    pass
def downgrade():
    """Intentionally a no-op; the matching enum reversal was left
    commented out by the author, so there is nothing to revert."""
    pass
| {
"repo_name": "kartyboyz/n64-storage-flask",
"path": "migrations/versions/566b0cc2f2c_add_fall_and_reverse.py",
"copies": "1",
"size": "1061",
"license": "mit",
"hash": 6618206265198975000,
"line_mean": 32.15625,
"line_max": 128,
"alpha_frac": 0.6211121583,
"autogenerated": false,
"ratio": 3.1671641791044776,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9237153020535933,
"avg_score": 0.010224663373709,
"num_lines": 32
} |
"""Add FastCache for avoiding retrieving the app_url in every single update
Revision ID: 471e6f7722a7
Revises: 6d09f595667
Create Date: 2015-04-14 07:40:57.006143
"""
# revision identifiers, used by Alembic.
revision = '471e6f7722a7'
down_revision = '6d09f595667'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create ``TranslationFastCaches``: one row per app_url caching its
    translation URL and original messages, uniquely indexed on app_url."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('TranslationFastCaches',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('app_url', sa.Unicode(length=255), nullable=True),
    sa.Column('translation_url', sa.Unicode(length=255), nullable=True),
    sa.Column('original_messages', sa.UnicodeText(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # Unique: at most one cache row per application URL.
    op.create_index(u'ix_TranslationFastCaches_app_url', 'TranslationFastCaches', ['app_url'], unique=True)
    ### end Alembic commands ###
def downgrade():
    """Drop the TranslationFastCaches table together with its unique index."""
    table_name = 'TranslationFastCaches'
    op.drop_index(u'ix_TranslationFastCaches_app_url', table_name=table_name)
    op.drop_table(table_name)
| {
"repo_name": "go-lab/appcomposer",
"path": "alembic/versions/471e6f7722a7_add_fastcache_for_avoiding_retrieving_.py",
"copies": "3",
"size": "1130",
"license": "bsd-2-clause",
"hash": -3582429246096965000,
"line_mean": 32.2352941176,
"line_max": 107,
"alpha_frac": 0.7123893805,
"autogenerated": false,
"ratio": 3.323529411764706,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5535918792264707,
"avg_score": null,
"num_lines": null
} |
"""AddFeeds
Revision ID: 6620d1c95ab9
Revises: bdce5a60ee7d
Create Date: 2017-08-12 14:52:19.067761
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '6620d1c95ab9'
down_revision = 'bdce5a60ee7d'
branch_labels = None
depends_on = None
feed_table_name = 'feeds'
feed_items_table_name = 'feed_items'
def upgrade():
    """Create the ``feeds`` table and its ``feed_items`` child table.

    NOTE(review): feed_items.feed_id is a plain Integer with no
    ForeignKeyConstraint to feeds.id -- presumably integrity is enforced
    at the application layer; confirm before tightening.
    """
    op.create_table(
        feed_table_name,
        sa.Column('id', sa.Integer, primary_key=True, autoincrement=True),
        sa.Column('name', sa.String(256), nullable=False),
        sa.Column('url', sa.String(2048), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('last_updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('last_attempted', sa.DateTime(timezone=True), nullable=False)
    )
    op.create_table(
        feed_items_table_name,
        sa.Column('id', sa.Integer, primary_key=True, autoincrement=True),
        sa.Column('feed_id', sa.Integer, nullable=False),
        sa.Column('title', sa.String(256), nullable=False),
        sa.Column('url', sa.String(2048), nullable=False),
        sa.Column('body', sa.Text(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('published_at', sa.DateTime(timezone=True), nullable=False),
        # read_at stays NULL until the item is marked read.
        sa.Column('read_at', sa.DateTime(timezone=True), nullable=True)
    )
def downgrade():
    """Remove both feed tables, dropping the child table first."""
    for table in (feed_items_table_name, feed_table_name):
        op.drop_table(table)
| {
"repo_name": "charlesj/Apollo",
"path": "database/versions/6620d1c95ab9_addfeeds.py",
"copies": "1",
"size": "1544",
"license": "mit",
"hash": -7582440735881333000,
"line_mean": 31.8510638298,
"line_max": 81,
"alpha_frac": 0.6670984456,
"autogenerated": false,
"ratio": 3.20997920997921,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.437707765557921,
"avg_score": null,
"num_lines": null
} |
"""Add fetch ref column
Revision ID: 725816dc500
Revises: 38104b4c1b84
Create Date: 2014-05-31 14:51:08.078616
"""
# revision identifiers, used by Alembic.
revision = '725816dc500'
down_revision = '38104b4c1b84'
import warnings
from alembic import op
import sqlalchemy as sa
from gertty.dbsupport import sqlite_alter_columns
def upgrade():
    """Add fetch_auth/fetch_ref to ``revision`` and backfill existing rows.

    The columns are first added nullable, every existing row is
    backfilled from its change/revision numbers, and only then are the
    columns tightened to NOT NULL via the sqlite alter helper.
    """
    with warnings.catch_warnings():
        # Suppress warnings emitted while adding the columns here;
        # the original reason was not recorded -- presumably SQLAlchemy
        # type/DDL warnings under sqlite.
        warnings.simplefilter("ignore")
        op.add_column('revision', sa.Column('fetch_auth', sa.Boolean()))
        op.add_column('revision', sa.Column('fetch_ref', sa.String(length=255)))
    conn = op.get_bind()
    res = conn.execute('select r.key, r.number, c.number from revision r, "change" c where r.change_key=c.key')
    for (rkey, rnumber, cnumber) in res.fetchall():
        q = sa.text('update revision set fetch_auth=:auth, fetch_ref=:ref where "key"=:key')
        # Gerrit ref layout: refs/changes/<last 2 digits of change#>/<change#>/<patchset#>
        ref = 'refs/changes/%s/%s/%s' % (str(cnumber)[-2:], cnumber, rnumber)
        res = conn.execute(q, key=rkey, ref=ref, auth=False)
    # Now that every row has values, make both columns NOT NULL.
    sqlite_alter_columns('revision', [
        sa.Column('fetch_auth', sa.Boolean(), nullable=False),
        sa.Column('fetch_ref', sa.String(length=255), nullable=False)
    ])
def downgrade():
    """Drop the two fetch-related columns added by this revision."""
    for name in ('fetch_auth', 'fetch_ref'):
        op.drop_column('revision', name)
| {
"repo_name": "aspiers/gertty",
"path": "gertty/alembic/versions/725816dc500_add_fetch_ref_column.py",
"copies": "1",
"size": "1291",
"license": "apache-2.0",
"hash": 8670674877061080000,
"line_mean": 30.487804878,
"line_max": 111,
"alpha_frac": 0.6646010844,
"autogenerated": false,
"ratio": 3.133495145631068,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.42980962300310677,
"avg_score": null,
"num_lines": null
} |
"""add field deleted, updated_at, user_id in Vacancy
Revision ID: 38331aa4875
Revises: 46f4932e624
Create Date: 2015-08-07 16:37:12.968236
"""
# revision identifiers, used by Alembic.
import datetime
revision = '38331aa4875'
down_revision = '46f4932e624'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql.expression import false
def upgrade():
    """Add deleted/updated_at/user_id columns to vacancies plus a users FK."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('vacancies', sa.Column('deleted', sa.Boolean(), nullable=False,
                                         server_default=sa.DefaultClause(false())))
    # NOTE(review): str(datetime.datetime.now()) is evaluated once, when
    # this migration module is imported, so the server default is a fixed
    # timestamp rather than a DB-side now() -- confirm this is intended.
    op.add_column('vacancies', sa.Column('updated_at', sa.DateTime(), nullable=True,
                                         server_default=sa.DefaultClause(str(datetime.datetime.now()))))
    op.add_column('vacancies', sa.Column('user_id', sa.Integer(), nullable=True))
    # Unnamed FK: name is generated by the active naming convention.
    op.create_foreign_key(None, 'vacancies', 'users', ['user_id'], ['id'])
    ### end Alembic commands ###
def downgrade():
    """Remove the three columns introduced by this revision."""
    for name in ('user_id', 'updated_at', 'deleted'):
        op.drop_column('vacancies', name)
| {
"repo_name": "uaprom-summer-2015/Meowth",
"path": "migrations/versions/2015_08_07_3833_add_field_deleted_updated_at_user_id_in_.py",
"copies": "2",
"size": "1226",
"license": "bsd-3-clause",
"hash": 640873795420560500,
"line_mean": 33.0555555556,
"line_max": 104,
"alpha_frac": 0.6508972268,
"autogenerated": false,
"ratio": 3.492877492877493,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5143774719677493,
"avg_score": null,
"num_lines": null
} |
"""Add Fields and FieldEntries tables
Revision ID: 75e8ab9a0014
Revises: 0366ba6575ca
Create Date: 2020-08-19 00:36:17.579497
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "75e8ab9a0014"
down_revision = "0366ba6575ca"
branch_labels = None
depends_on = None
def upgrade():
    """Create the ``fields`` definition table and the ``field_entries``
    table holding per-user / per-team values for those fields."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "fields",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sa.Text(), nullable=True),
        sa.Column("type", sa.String(length=80), nullable=True),
        sa.Column("field_type", sa.String(length=80), nullable=True),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("required", sa.Boolean(), nullable=True),
        sa.Column("public", sa.Boolean(), nullable=True),
        sa.Column("editable", sa.Boolean(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "field_entries",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("type", sa.String(length=80), nullable=True),
        sa.Column("value", sa.JSON(), nullable=True),
        sa.Column("field_id", sa.Integer(), nullable=True),
        sa.Column("user_id", sa.Integer(), nullable=True),
        sa.Column("team_id", sa.Integer(), nullable=True),
        # Entries vanish when their field, owning user, or team is deleted.
        sa.ForeignKeyConstraint(["field_id"], ["fields.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["team_id"], ["teams.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop both tables, child (field_entries) before parent (fields)."""
    for table in ("field_entries", "fields"):
        op.drop_table(table)
| {
"repo_name": "CTFd/CTFd",
"path": "migrations/versions/75e8ab9a0014_add_fields_and_fieldentries_tables.py",
"copies": "4",
"size": "1867",
"license": "apache-2.0",
"hash": 8510370309069660000,
"line_mean": 34.2264150943,
"line_max": 81,
"alpha_frac": 0.6282806642,
"autogenerated": false,
"ratio": 3.625242718446602,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0002300966405890474,
"num_lines": 53
} |
"""add fields for workbench api
Revision ID: 038364a84126
Revises: 27b812b403cc
Create Date: 2020-01-08 14:06:44.251432
"""
from alembic import op
import sqlalchemy as sa
import rdr_service.model.utils
from rdr_service.participant_enums import WorkbenchResearcherSexAtBirth, WorkbenchResearcherSexualOrientation
# revision identifiers, used by Alembic.
revision = '038364a84126'
down_revision = '27b812b403cc'
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade (e.g. ``upgrade_rdr``)."""
    globals()[f"upgrade_{engine_name}"]()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade (e.g. ``downgrade_rdr``)."""
    globals()[f"downgrade_{engine_name}"]()
def upgrade_rdr():
    """Widen researcher demographics and add workspace free-text fields.

    - ethnicity switches from varchar(80) to smallint on both researcher
      tables (raw ALTERs below).
    - sex_at_birth / sexual_orientation enum columns are added to
      workbench_researcher and its history table.
    - three 500-char study-description columns are added to
      workbench_workspace and its history table.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.execute("ALTER TABLE `workbench_researcher` MODIFY `ethnicity` smallint(6);")
    op.execute("ALTER TABLE `workbench_researcher_history` MODIFY `ethnicity` smallint(6);")
    op.add_column('workbench_researcher', sa.Column('sex_at_birth',
                                                    rdr_service.model.utils.Enum(WorkbenchResearcherSexAtBirth),
                                                    nullable=True))
    op.add_column('workbench_researcher', sa.Column('sexual_orientation',
                                                    rdr_service.model.utils.Enum(WorkbenchResearcherSexualOrientation),
                                                    nullable=True))
    op.add_column('workbench_researcher_history', sa.Column('sex_at_birth',
                                                            rdr_service.model.utils.Enum(WorkbenchResearcherSexAtBirth),
                                                            nullable=True))
    op.add_column('workbench_researcher_history', sa.Column('sexual_orientation',
                                                            rdr_service.model.utils.Enum(
                                                                WorkbenchResearcherSexualOrientation),
                                                            nullable=True))
    op.add_column('workbench_workspace', sa.Column('findings_from_study', sa.String(length=500), nullable=True))
    op.add_column('workbench_workspace', sa.Column('intend_to_study', sa.String(length=500), nullable=True))
    op.add_column('workbench_workspace', sa.Column('reason_for_investigation', sa.String(length=500), nullable=True))
    op.add_column('workbench_workspace_history', sa.Column('findings_from_study', sa.String(length=500), nullable=True))
    op.add_column('workbench_workspace_history', sa.Column('intend_to_study', sa.String(length=500), nullable=True))
    op.add_column('workbench_workspace_history', sa.Column('reason_for_investigation', sa.String(length=500),
                                                           nullable=True))
    # ### end Alembic commands ###
def downgrade_rdr():
    """Drop the columns added in upgrade_rdr() and restore ethnicity to
    varchar(80) on both researcher tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('workbench_workspace_history', 'reason_for_investigation')
    op.drop_column('workbench_workspace_history', 'intend_to_study')
    op.drop_column('workbench_workspace_history', 'findings_from_study')
    op.drop_column('workbench_workspace', 'reason_for_investigation')
    op.drop_column('workbench_workspace', 'intend_to_study')
    op.drop_column('workbench_workspace', 'findings_from_study')
    op.drop_column('workbench_researcher_history', 'sexual_orientation')
    op.drop_column('workbench_researcher_history', 'sex_at_birth')
    op.drop_column('workbench_researcher', 'sexual_orientation')
    op.drop_column('workbench_researcher', 'sex_at_birth')
    # Revert the ethnicity type change (smallint -> varchar).
    op.execute("ALTER TABLE `workbench_researcher` MODIFY `ethnicity` varchar(80);")
    op.execute("ALTER TABLE `workbench_researcher_history` MODIFY `ethnicity` varchar(80);")
    # ### end Alembic commands ###
def upgrade_metrics():
    """No schema changes for the metrics database in this revision."""
    pass
def downgrade_metrics():
    """Nothing to revert for the metrics database in this revision."""
    pass
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/038364a84126_add_fields_for_workbench_api.py",
"copies": "1",
"size": "4057",
"license": "bsd-3-clause",
"hash": -8822634046017681000,
"line_mean": 46.1744186047,
"line_max": 120,
"alpha_frac": 0.6275573084,
"autogenerated": false,
"ratio": 3.849146110056926,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9905911533171659,
"avg_score": 0.014158377057053525,
"num_lines": 86
} |
"""Add fields on SVM Classifier at scikit learn.
Revision ID: 4c1ebd8863a8
Revises: ba0fe62f7174
Create Date: 2019-11-04 16:20:48.481455
"""
from alembic import op
import sqlalchemy as sa
from alembic import context
from alembic import op
from sqlalchemy import String, Integer, Text
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import table, column, text
import json
# revision identifiers, used by Alembic.
revision = '4c1ebd8863a8'
down_revision = 'ba0fe62f7174'
branch_labels = None
depends_on = None
SCIKIT_LEARN_PLATAFORM_ID = 4
ID_OPERATION = 4031
def _insert_operation_form_field():
    """Bulk-insert the six new SVM form fields (ids 4178-4183) into
    operation_form_field; all belong to form 4011 (EXECUTION scope)."""
    tb = table(
        'operation_form_field',
        column('id', Integer),
        column('name', String),
        column('type', String),
        column('required', Integer),
        column('order', Integer),
        column('default', Text),
        column('suggested_widget', String),
        column('values_url', String),
        column('values', String),
        column('scope', String),
        column('form_id', Integer),
        column('enable_conditions', String),
    )
    columns = ('id', 'name', 'type', 'required', 'order', 'default',
               'suggested_widget', 'values_url', 'values', 'scope', 'form_id',
               'enable_conditions')
    data = [
        (4178, 'gamma', 'DECIMAL', 0, 7, 'auto', 'decimal', None, None, 'EXECUTION', 4011, None),
        (4179, 'coef0', 'DECIMAL', 0, 8, 0.0, 'decimal', None, None, 'EXECUTION', 4011, None),
        (4180, 'shrinking', 'INTEGER', 0, 9, 1, 'checkbox', None, None, 'EXECUTION', 4011, None),
        (4181, 'probability', 'INTEGER', 0, 10, 0, 'checkbox', None, None, 'EXECUTION', 4011, None),
        (4182, 'cache_size', 'DECIMAL', 0, 11, 200, 'decimal', None, None, 'EXECUTION', 4011, None),
        # decision_function_shape is a dropdown with a fixed ovr/ovo value set.
        (4183, 'decision_function_shape', 'TEXT', 0, 12, 'ovr', 'dropdown', None,
         json.dumps([
             {'key': 'ovr', 'value': 'ovr'},
             {'key': 'ovo', 'value': 'ovo'}
         ]),
         'EXECUTION', 4011, None)
    ]
    # Pair each data tuple with the column names to build insertable dicts.
    rows = [dict(list(zip(columns, row))) for row in data]
    op.bulk_insert(tb, rows)
def _insert_operation_form_field_translation():
    """Bulk-insert en/pt labels and help texts for form fields 4178-4183."""
    tb = table(
        'operation_form_field_translation',
        column('id', Integer),
        column('locale', String),
        column('label', String),
        column('help', String), )
    columns = ('id', 'locale', 'label', 'help')
    data = [
        (4178, 'en', 'Kernel coefficient.', 'Kernel coefficient for "rbf", "poly" and "sigmoid".'),
        (4178, 'pt', 'Coeficiente do kernel.', 'Coeficiente do kernel para as métricas "rbf", "poly" e "sigmoid.'),
        (4179, 'en', 'Independent term in kernel function.', 'Independent term in kernel function. It is only significant in "poly" and "sigmoid".'),
        (4179, 'pt', 'Termo independente da função do kernel.', 'Termp independente da função do kernel. É significativo apenas nas métricas "poly" e "sigmoid".'),
        (4180, 'en', 'Use the shrinking heuristic.', 'Whether to use the shrinking heuristic.'),
        (4180, 'pt', 'Usar a heurística shrinking.', 'Necessidade de usar a heurística shrinking'),
        (4181, 'en', 'Enable probability estimates.', 'Whether to enable probability estimates.'),
        (4181, 'pt', 'Habilitar estimativas probailísticas.', 'Habilitar estimativas probailísticas.'),
        (4182, 'en', 'Size of the kernel cache (in MB).', 'Specify the size of the kernel cache (in MB).'),
        (4182, 'pt', 'Tamanho do cache em MB.', 'Especificação do tamanho do cache em MB.'),
        (4183, 'en', 'Return decision.', 'Return (one-vs-rest) "ovr" or (one-vs-one) "ovo" decision funcion.'),
        (4183, 'pt', 'Decisão de retorno.' , 'Decisão de retorno "ovr" (one-vs-rest : um por todos) ou "ovo" (one-vs-one: um por um).')
    ]
    # Pair each data tuple with the column names to build insertable dicts.
    rows = [dict(list(zip(columns, row))) for row in data]
    op.bulk_insert(tb, rows)
# (apply, revert) pairs: element [0] runs on upgrade, element [1] on
# downgrade. Each element may be a callable, a SQL string, or a list of
# SQL strings (see upgrade()/downgrade() below).
all_commands = [
    (_insert_operation_form_field,
     'DELETE FROM operation_form_field WHERE id BETWEEN 4178 AND 4183'),
    (_insert_operation_form_field_translation,
     'DELETE FROM operation_form_field_translation WHERE id BETWEEN 4178 AND 4183')
]
def upgrade():
    """Run every registered apply-command with FK checks temporarily off.

    Rolls the session back and re-raises on any failure; commits only
    after all commands succeed.
    """
    session = sessionmaker(bind=context.get_context().bind)()
    connection = session.connection()
    try:
        connection.execute('SET FOREIGN_KEY_CHECKS=0;')
        for apply_cmd, _revert_cmd in all_commands:
            if isinstance(apply_cmd, str):
                connection.execute(apply_cmd)
            elif isinstance(apply_cmd, list):
                for statement in apply_cmd:
                    connection.execute(statement)
            else:
                apply_cmd()
        connection.execute('SET FOREIGN_KEY_CHECKS=1;')
    except BaseException:  # equivalent to the bare except: roll back, re-raise
        session.rollback()
        raise
    session.commit()
def downgrade():
    """Run every registered revert-command in reverse registration order,
    with FK checks temporarily off; roll back and re-raise on failure."""
    session = sessionmaker(bind=context.get_context().bind)()
    connection = session.connection()
    try:
        connection.execute('SET FOREIGN_KEY_CHECKS=0;')
        for _apply_cmd, revert_cmd in reversed(all_commands):
            if isinstance(revert_cmd, str):
                connection.execute(revert_cmd)
            elif isinstance(revert_cmd, list):
                for statement in revert_cmd:
                    connection.execute(statement)
            else:
                revert_cmd()
        connection.execute('SET FOREIGN_KEY_CHECKS=1;')
    except BaseException:  # equivalent to the bare except: roll back, re-raise
        session.rollback()
        raise
    session.commit()
| {
"repo_name": "eubr-bigsea/tahiti",
"path": "migrations/versions/4c1ebd8863a8_add_fields_on_svm_classifier_at_scikit_.py",
"copies": "1",
"size": "5405",
"license": "apache-2.0",
"hash": 532660604140538900,
"line_mean": 36.4305555556,
"line_max": 163,
"alpha_frac": 0.5988868275,
"autogenerated": false,
"ratio": 3.4684684684684686,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9496217766244389,
"avg_score": 0.014227505944815807,
"num_lines": 144
} |
"""Add fields to ItemAudit table to support a future revamp of the issue system.
Mostly not using these new fields yet.
Action Instructions will hold information for how the user can fix the issue.
Background Information will hold information on why something is a problem, likely with links to AWS documentation for the user to read more.
Origin will hold the statement causing the issue. Hopefully the UI can use this to highlight the offending part of an item policy.
Origin Summary will hold a summary of the Origin. A JSON Policy statement may be summarized as something like "S3 READ FROM * TO s3:mybucket".
Class UUID will be used so that the text (itemaudit.issue, itemaudit.notes) can be changed in the future without losing justifications.
Revision ID: c9dd06c919ac
Revises: b8ccf5b8089b
Create Date: 2017-09-05 17:21:08.162000
"""
# revision identifiers, used by Alembic.
revision = 'c9dd06c919ac'
down_revision = 'b8ccf5b8089b'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add the issue-revamp metadata columns to itemaudit (all nullable)."""
    for name in ('action_instructions', 'background_info', 'origin', 'origin_summary'):
        op.add_column('itemaudit', sa.Column(name, sa.Text(), nullable=True))
    # class_uuid keys issue text so wording can change without losing justifications.
    op.add_column('itemaudit', sa.Column('class_uuid', sa.VARCHAR(length=32), nullable=True))
def downgrade():
    """Remove the issue-metadata columns added by this revision."""
    op.drop_column('itemaudit', 'action_instructions')
    op.drop_column('itemaudit', 'background_info')
    op.drop_column('itemaudit', 'class_uuid')
    op.drop_column('itemaudit', 'origin')
op.drop_column('itemaudit', 'origin_summary') | {
"repo_name": "markofu/security_monkey",
"path": "migrations/versions/c9dd06c919ac_.py",
"copies": "3",
"size": "1710",
"license": "apache-2.0",
"hash": 4885280905147781000,
"line_mean": 44.0263157895,
"line_max": 143,
"alpha_frac": 0.7467836257,
"autogenerated": false,
"ratio": 3.3661417322834644,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5612925357983465,
"avg_score": null,
"num_lines": null
} |
"""addFileData
Revision ID: 7ee5e2cfb9cd
Revises: b8cf7fa342bb
Create Date: 2016-04-06 09:30:37.743000
"""
# revision identifiers, used by Alembic.
revision = '7ee5e2cfb9cd'
down_revision = 'b8cf7fa342bb'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Dispatch to the per-database upgrade function for *engine_name*."""
    globals()["upgrade_{}".format(engine_name)]()
def downgrade(engine_name):
    """Dispatch to the per-database downgrade function for *engine_name*."""
    globals()["downgrade_{}".format(engine_name)]()
def upgrade_error_data():
    """No changes to the error_data database in this revision."""
    pass
def downgrade_error_data():
    """Nothing to revert for the error_data database in this revision."""
    pass
def upgrade_job_tracker():
    """Add nullable file_size and number_of_rows columns to job_status."""
    for name in ('file_size', 'number_of_rows'):
        op.add_column('job_status', sa.Column(name, sa.Integer(), nullable=True))
def downgrade_job_tracker():
    """Drop the two file-metadata columns from job_status."""
    for name in ('number_of_rows', 'file_size'):
        op.drop_column('job_status', name)
def upgrade_user_manager():
    """No changes to the user_manager database in this revision."""
    pass
def downgrade_user_manager():
    """Nothing to revert for the user_manager database in this revision."""
    pass
| {
"repo_name": "fedspendingtransparency/data-act-core",
"path": "dataactcore/migrations/versions/7ee5e2cfb9cd_addfiledata.py",
"copies": "1",
"size": "1516",
"license": "cc0-1.0",
"hash": 7631710345375738000,
"line_mean": 22.3230769231,
"line_max": 89,
"alpha_frac": 0.6609498681,
"autogenerated": false,
"ratio": 3.550351288056206,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47113011561562057,
"avg_score": null,
"num_lines": null
} |
"""add file format to file generation table
Revision ID: 3c16b4d8506c
Revises: 0a36baf32413
Create Date: 2019-07-23 14:02:47.454725
"""
# revision identifiers, used by Alembic.
revision = '3c16b4d8506c'
down_revision = '0a36baf32413'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade(engine_name):
    """Dispatch to the per-database upgrade function for *engine_name*."""
    globals()["upgrade_{}".format(engine_name)]()
def downgrade(engine_name):
    """Dispatch to the per-database downgrade function for *engine_name*."""
    globals()["downgrade_{}".format(engine_name)]()
def upgrade_data_broker():
    """Add a csv/txt ``file_format`` enum column to file_generation.

    Added nullable with a 'csv' server default, backfilled for existing
    rows, then tightened to NOT NULL and indexed.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    generation_file_formats = postgresql.ENUM('csv', 'txt', name='generation_file_formats')
    generation_file_formats.create(op.get_bind())
    op.add_column('file_generation', sa.Column('file_format', sa.Enum('csv', 'txt', name='generation_file_formats'), server_default='csv', nullable=True))
    # Backfill pre-existing rows before making the column NOT NULL.
    op.execute("""
        UPDATE file_generation
        SET file_format = 'csv'
    """)
    op.alter_column('file_generation', 'file_format', nullable=False)
    op.create_index(op.f('ix_file_generation_file_format'), 'file_generation', ['file_format'], unique=False)
    # ### end Alembic commands ###
def downgrade_data_broker():
    """Drop the file_format column, its index, and the backing enum type."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_file_generation_file_format'), table_name='file_generation')
    op.drop_column('file_generation', 'file_format')
    op.execute("""
        DROP TYPE generation_file_formats
    """)
    # NOTE(review): this also resets is_cached_file for every row on
    # downgrade, which looks unrelated to this revision -- confirm intent.
    op.execute("""
        UPDATE file_generation
        SET is_cached_file = False
    """)
    # ### end Alembic commands ###
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/3c16b4d8506c_add_file_format_to_file_generation_table.py",
"copies": "1",
"size": "1685",
"license": "cc0-1.0",
"hash": -9053915860865724000,
"line_mean": 29.0892857143,
"line_max": 154,
"alpha_frac": 0.659347181,
"autogenerated": false,
"ratio": 3.5774946921443735,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9627763744522636,
"avg_score": 0.021815625724347473,
"num_lines": 56
} |
"""add_file_generation_task
Revision ID: 7d4f322c7661
Revises: 31876fecc214
Create Date: 2016-09-02 12:08:21.113516
"""
# revision identifiers, used by Alembic.
revision = '7d4f322c7661'
down_revision = '31876fecc214'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade(engine_name):
    """Dispatch to the per-database upgrade function for *engine_name*."""
    globals()["upgrade_{}".format(engine_name)]()
def downgrade(engine_name):
    """Dispatch to the per-database downgrade function for *engine_name*."""
    globals()["downgrade_{}".format(engine_name)]()
def upgrade_data_broker():
    """Create the ``file_generation_task`` table (uniquely keyed by
    generation_task_key) and drop the superseded ``d_file_metadata``."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('file_generation_task',
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.Column('file_generation_task_id', sa.Integer(), nullable=False),
    sa.Column('generation_task_key', sa.Text(), nullable=True),
    sa.Column('submission_id', sa.Integer(), nullable=True),
    sa.Column('file_type_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['file_type_id'], ['file_type.file_type_id'], name='fk_generation_file_type'),
    sa.ForeignKeyConstraint(['submission_id'], ['submission.submission_id'], name='fk_generation_submission'),
    sa.PrimaryKeyConstraint('file_generation_task_id')
    )
    # Unique index: one generation task per task key.
    op.create_index(op.f('ix_file_generation_task_generation_task_key'), 'file_generation_task', ['generation_task_key'], unique=True)
    op.drop_table('d_file_metadata')
    ### end Alembic commands ###
def downgrade_data_broker():
    """Recreate the legacy ``d_file_metadata`` table and remove
    ``file_generation_task`` (index first, then table)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('d_file_metadata',
    sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
    sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
    sa.Column('d_file_id', sa.INTEGER(), nullable=False),
    sa.Column('type', sa.TEXT(), autoincrement=False, nullable=True),
    sa.Column('submission_id', sa.INTEGER(), autoincrement=False, nullable=True),
    sa.Column('start_date', sa.DATE(), autoincrement=False, nullable=True),
    sa.Column('end_date', sa.DATE(), autoincrement=False, nullable=True),
    sa.Column('status_id', sa.INTEGER(), autoincrement=False, nullable=True),
    sa.Column('error_message', sa.TEXT(), autoincrement=False, nullable=True),
    sa.Column('original_file_name', sa.TEXT(), autoincrement=False, nullable=True),
    sa.Column('upload_file_name', sa.TEXT(), autoincrement=False, nullable=True),
    sa.Column('is_submitted', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True),
    sa.ForeignKeyConstraint(['status_id'], ['job_status.job_status_id'], name='fk_status_id'),
    sa.ForeignKeyConstraint(['submission_id'], ['submission.submission_id'], name='fk_submission_id'),
    sa.PrimaryKeyConstraint('d_file_id', name='d_file_metadata_pkey'),
    # One metadata row per (submission, file type) pair.
    sa.UniqueConstraint('submission_id', 'type', name='_submission_type_uc')
    )
    op.drop_index(op.f('ix_file_generation_task_generation_task_key'), table_name='file_generation_task')
    op.drop_table('file_generation_task')
    ### end Alembic commands ###
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/7d4f322c7661_add_file_generation_task.py",
"copies": "2",
"size": "3141",
"license": "cc0-1.0",
"hash": 4603836465478573000,
"line_mean": 43.2394366197,
"line_max": 134,
"alpha_frac": 0.699777141,
"autogenerated": false,
"ratio": 3.497772828507795,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.025185764798145188,
"num_lines": 71
} |
"""Add file paths to genomic gc validation metrics
Revision ID: d5d97368b14d
Revises: 4507ede4f552
Create Date: 2020-07-24 14:14:21.478839
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd5d97368b14d'
down_revision = '4507ede4f552'
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade, e.g. upgrade_rdr()."""
    handler = globals()["upgrade_%s" % engine_name]
    handler()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade, e.g. downgrade_rdr()."""
    handler = globals()["downgrade_%s" % engine_name]
    handler()
def upgrade_rdr():
    """Add the GC-validation file-path columns (all nullable VARCHAR(255)).

    Column order matches the original auto-generated migration.
    """
    path_columns = (
        'crai_path',
        'cram_md5_path',
        'cram_path',
        'hf_vcf_md5_path',
        'hf_vcf_path',
        'hf_vcf_tbi_path',
        'idat_green_md5_path',
        'idat_green_path',
        'idat_red_md5_path',
        'idat_red_path',
        'raw_vcf_md5_path',
        'raw_vcf_path',
        'raw_vcf_tbi_path',
        'vcf_md5_path',
        'vcf_path',
    )
    for column_name in path_columns:
        op.add_column(
            'genomic_gc_validation_metrics',
            sa.Column(column_name, sa.String(length=255), nullable=True),
        )
def downgrade_rdr():
    """Drop the GC-validation file-path columns added by upgrade_rdr().

    Drop order is the reverse of the order they were added, matching the
    original auto-generated migration.
    """
    path_columns = (
        'vcf_path',
        'vcf_md5_path',
        'raw_vcf_tbi_path',
        'raw_vcf_path',
        'raw_vcf_md5_path',
        'idat_red_path',
        'idat_red_md5_path',
        'idat_green_path',
        'idat_green_md5_path',
        'hf_vcf_tbi_path',
        'hf_vcf_path',
        'hf_vcf_md5_path',
        'cram_path',
        'cram_md5_path',
        'crai_path',
    )
    for column_name in path_columns:
        op.drop_column('genomic_gc_validation_metrics', column_name)
def upgrade_metrics():
    """No schema changes for the metrics database in this revision."""
    pass
def downgrade_metrics():
    """No schema changes for the metrics database in this revision."""
    pass
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/d5d97368b14d_add_file_paths_to_genomic_gc_validation_.py",
"copies": "1",
"size": "3798",
"license": "bsd-3-clause",
"hash": 2730617545783588000,
"line_mean": 48.3246753247,
"line_max": 122,
"alpha_frac": 0.7043180621,
"autogenerated": false,
"ratio": 2.983503534956795,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4187821597056795,
"avg_score": null,
"num_lines": null
} |
"""add file relation
Revision ID: 52f4346513a8
Revises: d7067cd4169
Create Date: 2014-04-02 16:33:16.710455
"""
# revision identifiers, used by Alembic.
revision = '52f4346513a8'
down_revision = 'd7067cd4169'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
    """Create the grano_file table that stores uploaded file blobs."""
    file_columns = [
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('file_name', sa.Unicode(), nullable=True),
        sa.Column('mime_type', sa.Unicode(), nullable=True),
        sa.Column('project_id', sa.Integer(), nullable=True),
        sa.Column('author_id', sa.Integer(), nullable=True),
        # File contents live in the database row itself.
        sa.Column('data', sa.LargeBinary(), nullable=True),
        sa.ForeignKeyConstraint(['author_id'], ['grano_account.id'], ),
        sa.ForeignKeyConstraint(['project_id'], ['grano_project.id'], ),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('grano_file', *file_columns)
    # A drop of the legacy "role" table was considered here; kept disabled.
    #op.drop_table('role')
def downgrade():
    """Remove the grano_file table created by upgrade()."""
    op.drop_table('grano_file')
| {
"repo_name": "clkao/grano",
"path": "alembic/versions/52f4346513a8_add_file_relation.py",
"copies": "1",
"size": "1062",
"license": "mit",
"hash": -1350846510885262800,
"line_mean": 29.3428571429,
"line_max": 68,
"alpha_frac": 0.6826741996,
"autogenerated": false,
"ratio": 3.2181818181818183,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44008560177818185,
"avg_score": null,
"num_lines": null
} |
""" add file uuids
Revision ID: 1e0ff2269c43
Revises: 47ecebadac41
Create Date: 2015-02-04 17:39:38.648599
"""
# revision identifiers, used by Alembic.
revision = '1e0ff2269c43'
down_revision = '47ecebadac41'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
    """Replace file.path with a required, uniquely-indexed uuid column."""
    table = 'file'
    op.add_column(table, sa.Column('uuid', postgresql.UUID(as_uuid=True)))
    # Backfill: the uuid is the path's basename (text before the first '.').
    op.execute('''UPDATE file SET uuid = uuid(split_part(path, '.', 1));''')
    op.alter_column(table, 'uuid', nullable=False)
    op.create_index(op.f('ix_file_uuid'), table, ['uuid'], unique=True)
    op.drop_constraint(u'file_path_key', table, type_='unique')
    op.drop_column(table, 'path')
def downgrade():
    """Restore file.path (uuid + original extension) and drop the uuid."""
    table = 'file'
    op.add_column(table, sa.Column('path', sa.VARCHAR(length=128), autoincrement=False))
    # Rebuild the path from the uuid plus the filename's extension.
    op.execute('''UPDATE file SET path = concat(uuid, substring(filename from '\.[^.]*$'));''')
    op.alter_column(table, 'path', nullable=False)
    op.create_unique_constraint(u'file_path_key', table, ['path'])
    op.drop_index(op.f('ix_file_uuid'), table_name=table)
    op.drop_column(table, 'uuid')
| {
"repo_name": "pyfidelity/rest-seed",
"path": "backend/backrest/migrations/versions/1e0ff2269c43_add_file_uuids.py",
"copies": "1",
"size": "1117",
"license": "bsd-2-clause",
"hash": -2415287159661793300,
"line_mean": 32.8484848485,
"line_max": 95,
"alpha_frac": 0.6696508505,
"autogenerated": false,
"ratio": 3.02710027100271,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.419675112150271,
"avg_score": null,
"num_lines": null
} |
"""Add financial accounts and transactions
Revision ID: 0f5e47ad79bf
Revises: 2cd16ae229a1
Create Date: 2018-06-09 18:21:26.287275
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import ARRAY
# revision identifiers, used by Alembic.
revision = '0f5e47ad79bf'
down_revision = '2cd16ae229a1'
branch_labels = None
depends_on = None
# Table names shared by upgrade() and downgrade() below.
financial_accounts_table_name = 'financial_accounts'
financial_transactions_table_name = 'financial_transactions'
def upgrade():
    """Create the financial accounts and transactions tables."""

    def _audit_columns():
        # Shared trailing audit columns: created/updated required,
        # deleted_at nullable (soft delete).
        return [
            sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
            sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
            sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
        ]

    op.create_table(
        financial_accounts_table_name,
        sa.Column('id', sa.Integer, primary_key=True, autoincrement=True),
        sa.Column('name', sa.String(256), nullable=False),
        sa.Column('type', sa.String(32), nullable=False),
        sa.Column('description', sa.Text(), nullable=False),
        *_audit_columns()
    )
    op.create_table(
        financial_transactions_table_name,
        sa.Column('id', sa.Integer, primary_key=True, autoincrement=True),
        sa.Column('account_id', sa.Integer, nullable=False),
        sa.Column('occurred_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('amount', sa.Float(precision=4), nullable=False),
        sa.Column('name', sa.String(256), nullable=False),
        sa.Column('tags', ARRAY(sa.String(256)), default=[]),
        sa.Column('notes', sa.Text(), nullable=False),
        *_audit_columns()
    )
def downgrade():
    # Drop both tables created by this revision.
    op.drop_table(financial_accounts_table_name)
    op.drop_table(financial_transactions_table_name) | {
"repo_name": "charlesj/Apollo",
"path": "database/versions/0f5e47ad79bf_add_financial_accounts_and_transactions.py",
"copies": "1",
"size": "1929",
"license": "mit",
"hash": 3524037043406542300,
"line_mean": 36.8431372549,
"line_max": 77,
"alpha_frac": 0.6879212027,
"autogenerated": false,
"ratio": 3.4141592920353983,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9593659177490551,
"avg_score": 0.0016842634489693314,
"num_lines": 51
} |
"""add fingerprint_path to genomic_set_member
Revision ID: f73a5e7b1822
Revises: 0d5e58df7917
Create Date: 2020-11-09 12:17:28.477732
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'f73a5e7b1822'
down_revision = '0d5e58df7917'
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade, e.g. upgrade_rdr()."""
    handler = globals()["upgrade_%s" % engine_name]
    handler()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade, e.g. downgrade_rdr()."""
    handler = globals()["downgrade_%s" % engine_name]
    handler()
def upgrade_rdr():
    """Add fingerprint_path to the set-member table and its history table."""
    for table_name in ('genomic_set_member', 'genomic_set_member_history'):
        op.add_column(
            table_name,
            sa.Column('fingerprint_path', sa.String(length=255), nullable=True),
        )
def downgrade_rdr():
    """Remove fingerprint_path from both tables touched by upgrade_rdr()."""
    for table_name in ('genomic_set_member', 'genomic_set_member_history'):
        op.drop_column(table_name, 'fingerprint_path')
def upgrade_metrics():
    """No schema changes for the metrics database in this revision."""
    pass
def downgrade_metrics():
    """No schema changes for the metrics database in this revision."""
    pass
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/f73a5e7b1822_add_fingerprint_path_to_genomic_set_.py",
"copies": "1",
"size": "1348",
"license": "bsd-3-clause",
"hash": -8008895209632775000,
"line_mean": 25.96,
"line_max": 116,
"alpha_frac": 0.6787833828,
"autogenerated": false,
"ratio": 3.3366336633663365,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9413644914118975,
"avg_score": 0.02035442640947228,
"num_lines": 50
} |
"""Add first structure
Revision ID: 41daf2bf458b
Revises: 552742dd018b
Create Date: 2015-02-09 15:24:54.607237
"""
# revision identifiers, used by Alembic.
revision = '41daf2bf458b'
down_revision = '552742dd018b'
import hashlib
from alembic import op
import sqlalchemy as sa
# Lightweight table definition used only by op.bulk_insert() in upgrade();
# it lists just the columns needed for the seeded admin row, not the full
# 'users' schema created below.
metadata = sa.MetaData()
users_table = sa.Table('users', metadata,
    sa.Column('id', sa.Integer),
    sa.Column('email', sa.Unicode(length=200)),
    sa.Column('full_name', sa.Unicode(length=200)),
    sa.Column('password', sa.Unicode(length=200)),
    sa.Column('active', sa.Boolean)
)
def upgrade():
    """Create the initial entities/tokens/users tables and seed an admin user."""
    op.create_table('entities',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.Unicode(length=200), nullable=False),
        # Logo stored inline, up to 16 MiB.
        sa.Column('logo', sa.LargeBinary(length=16777216), nullable=False),
        sa.Column('logo_ext', sa.Unicode(length=4), nullable=False),
        sa.Column('base_url', sa.Unicode(length=200), nullable=False),
        sa.Column('link_url', sa.Unicode(length=300), nullable=False),
        sa.Column('google_analytics_number', sa.Unicode(length=30), nullable=True),
        sa.Column('start_port_number', sa.Integer(), nullable=True),
        sa.Column('end_port_number', sa.Integer(), nullable=True),
        sa.Column('deployed', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('tokens',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('token', sa.String(length=200), nullable=False),
        sa.Column('date', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('email', sa.Unicode(length=200), nullable=False),
        sa.Column('password', sa.Unicode(length=200), nullable=False),
        sa.Column('full_name', sa.Unicode(length=200), nullable=False),
        sa.Column('active', sa.Boolean(), nullable=False),
        sa.Column('token_id', sa.Integer(), nullable=True),
        sa.Column('entity_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['entity_id'], ['entities.id'], ),
        sa.ForeignKeyConstraint(['token_id'], ['tokens.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # NOTE(review): Python 2 only — `unicode` and sha1 of a str literal both
    # fail on Python 3. Also seeds an unsalted SHA-1 hash of a well-known
    # default password; confirm this admin account is rotated after deploy.
    op.bulk_insert(users_table, [
        {'id':1, 'email': u'admin@admin.com', 'full_name' : u'Administrator', 'password' : unicode(hashlib.sha1('password').hexdigest()), 'active' : True },
    ])
def downgrade():
    """Drop the seeded tables; users goes first since it references the others."""
    for table_name in ('users', 'tokens', 'entities'):
        op.drop_table(table_name)
| {
"repo_name": "porduna/weblabdeusto",
"path": "tools/wcloud/alembic/versions/41daf2bf458b_add_first_structure.py",
"copies": "4",
"size": "2564",
"license": "bsd-2-clause",
"hash": 5076644707450121000,
"line_mean": 36.1594202899,
"line_max": 161,
"alpha_frac": 0.6626365055,
"autogenerated": false,
"ratio": 3.3385416666666665,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6001178172166668,
"avg_score": null,
"num_lines": null
} |
"""Add first tables
Revision ID: 4d58a25d196b
Revises: 121482e6f837
Create Date: 2015-12-27 20:08:17.082959
"""
# revision identifiers, used by Alembic.
revision = '4d58a25d196b'
down_revision = '121482e6f837'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the initial inet_ether and users tables."""
    op.create_table(
        'inet_ether',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('mac', sa.String(length=32), nullable=False),
        sa.Column('ip', sa.String(length=32), nullable=True),
        sa.Column('access_type', sa.String(length=8), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('login', sa.String(length=32), nullable=True),
        sa.Column('passwd', sa.String(length=128), nullable=True),
        sa.Column('status', sa.Integer(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
def downgrade():
    """Drop the tables created by upgrade(), in reverse creation order."""
    for table_name in ('users', 'inet_ether'):
        op.drop_table(table_name)
| {
"repo_name": "procool/mygw",
"path": "globals/alembic/versions/4d58a25d196b_add_first_tables.py",
"copies": "1",
"size": "1214",
"license": "bsd-2-clause",
"hash": 4397480432864277500,
"line_mean": 28.6097560976,
"line_max": 65,
"alpha_frac": 0.6663920923,
"autogenerated": false,
"ratio": 3.263440860215054,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44298329525150537,
"avg_score": null,
"num_lines": null
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.