text stringlengths 0 1.05M | meta dict |
|---|---|
"""add category & post slug
Revision ID: 62362116d
Revises: 523db3c1572
Create Date: 2015-09-05 13:23:32.492785
"""
# revision identifiers, used by Alembic.
revision = '62362116d'
down_revision = '523db3c1572'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add a nullable, uniquely-indexed ``slug`` column to categories and posts."""
    def slug_column():
        # A fresh Column object per table; Column instances are not reusable.
        return sa.Column('slug', sa.String(length=128), nullable=True)
    op.add_column('blog_category', slug_column())
    op.create_index(op.f('ix_blog_category_slug'), 'blog_category', ['slug'], unique=True)
    op.add_column('blog_post', slug_column())
    op.create_index(op.f('ix_blog_post_slug'), 'blog_post', ['slug'], unique=True)
def downgrade():
    """Drop the slug indexes and columns added by this revision."""
    for table in ('blog_post', 'blog_category'):
        op.drop_index(op.f('ix_%s_slug' % table), table_name=table)
        op.drop_column(table, 'slug')
| {
"repo_name": "fdgogogo/fangs",
"path": "backend/migrations/versions/62362116d_add_category_post_slug.py",
"copies": "1",
"size": "1075",
"license": "mit",
"hash": -721277974038514600,
"line_mean": 32.59375,
"line_max": 91,
"alpha_frac": 0.6744186047,
"autogenerated": false,
"ratio": 3.0979827089337175,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9237671408401934,
"avg_score": 0.006945981046356735,
"num_lines": 32
} |
"""Add category roles table
Revision ID: 4d263fa78830
Revises: 6a185fdcd4ee
Create Date: 2020-02-04 12:01:02.554724
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '4d263fa78830'
down_revision = '6a185fdcd4ee'
branch_labels = None
depends_on = None
def upgrade():
    """Create the per-category ``roles`` table and its member association."""
    role_items = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('category_id', sa.Integer(), nullable=False, index=True),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('code', sa.String(), nullable=False),
        sa.Column('color', sa.String(), nullable=False),
        # One role code per category; index name left to the naming convention.
        sa.Index(None, 'category_id', 'code', unique=True),
        sa.CheckConstraint('code = upper(code)', name='uppercase_code'),
        sa.ForeignKeyConstraint(['category_id'], ['categories.categories.id']),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('roles', *role_items, schema='categories')
    member_items = [
        sa.Column('role_id', sa.Integer(), nullable=False, index=True),
        sa.Column('user_id', sa.Integer(), nullable=False, index=True),
        sa.ForeignKeyConstraint(['role_id'], ['categories.roles.id']),
        sa.ForeignKeyConstraint(['user_id'], ['users.users.id']),
        sa.PrimaryKeyConstraint('role_id', 'user_id'),
    ]
    op.create_table('role_members', *member_items, schema='categories')
def downgrade():
    """Drop the role tables — members first, since they reference roles."""
    for table_name in ('role_members', 'roles'):
        op.drop_table(table_name, schema='categories')
| {
"repo_name": "indico/indico",
"path": "indico/migrations/versions/20200204_1201_4d263fa78830_add_category_roles_table.py",
"copies": "5",
"size": "1486",
"license": "mit",
"hash": 4441157934161903000,
"line_mean": 31.3043478261,
"line_max": 79,
"alpha_frac": 0.6406460296,
"autogenerated": false,
"ratio": 3.5465393794749405,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 46
} |
"""add category
Revision ID: 2f1a00b10f2a
Revises: 2d06f01737c2
Create Date: 2017-04-26 12:52:53.676000
"""
# revision identifiers, used by Alembic.
revision = '2f1a00b10f2a'
down_revision = '2d06f01737c2'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create ``categories`` and the post<->category association table.

    Bug fix: the original (auto-generated, then hand-edited) revision had
    the two foreign keys swapped — ``category_id`` referenced ``posts.id``
    and ``post_id`` referenced ``categories.id``. Each column now references
    the table it names.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('categories',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=64), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('registrations',
    sa.Column('post_id', sa.Integer(), nullable=True),
    sa.Column('category_id', sa.Integer(), nullable=True),
    # fixed: category_id -> categories.id, post_id -> posts.id
    sa.ForeignKeyConstraint(['category_id'], ['categories.id'], ),
    sa.ForeignKeyConstraint(['post_id'], ['posts.id'], )
    )
    ### end Alembic commands ###
def downgrade():
    """Drop the association table first, then ``categories``."""
    for table in ('registrations', 'categories'):
        op.drop_table(table)
| {
"repo_name": "dreamyeah/dreamsflask",
"path": "migrations/versions/2f1a00b10f2a_add_category.py",
"copies": "1",
"size": "1079",
"license": "mit",
"hash": -2324169045352570000,
"line_mean": 16.6610169492,
"line_max": 63,
"alpha_frac": 0.6320667285,
"autogenerated": false,
"ratio": 3.2696969696969695,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.933481604231235,
"avg_score": 0.01338953117692396,
"num_lines": 37
} |
"""add cati to questionnaire response
Revision ID: 8ab7f6708ea3
Revises: b43e5f5c2905
Create Date: 2020-10-22 09:58:18.379524
"""
from alembic import op
import sqlalchemy as sa
import rdr_service.model.utils
from rdr_service.participant_enums import PhysicalMeasurementsStatus, QuestionnaireStatus, OrderStatus
from rdr_service.participant_enums import WithdrawalStatus, WithdrawalReason, SuspensionStatus, QuestionnaireDefinitionStatus
from rdr_service.participant_enums import EnrollmentStatus, Race, SampleStatus, OrganizationType, BiobankOrderStatus
from rdr_service.participant_enums import OrderShipmentTrackingStatus, OrderShipmentStatus
from rdr_service.participant_enums import MetricSetType, MetricsKey, GenderIdentity
from rdr_service.model.base import add_table_history_table, drop_table_history_table
from rdr_service.model.code import CodeType
from rdr_service.model.site_enums import SiteStatus, EnrollingStatus, DigitalSchedulingStatus, ObsoleteStatus
# revision identifiers, used by Alembic.
revision = '8ab7f6708ea3'
down_revision = 'b43e5f5c2905'
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade (e.g. ``upgrade_rdr``)."""
    handler_name = "upgrade_%s" % engine_name
    globals()[handler_name]()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade (e.g. ``downgrade_rdr``)."""
    handler_name = "downgrade_%s" % engine_name
    globals()[handler_name]()
def upgrade_rdr():
    """Record the non-participant (e.g. CATI interviewer) who authored a
    questionnaire response."""
    author_column = sa.Column('non_participant_author', sa.String(length=80),
                              nullable=True)
    op.add_column('questionnaire_response', author_column)
def downgrade_rdr():
    """Remove the ``non_participant_author`` column again."""
    table, column = 'questionnaire_response', 'non_participant_author'
    op.drop_column(table, column)
def upgrade_metrics():
    """No metrics-schema changes are needed for this revision."""
def downgrade_metrics():
    """Nothing to revert in the metrics schema for this revision."""
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/8ab7f6708ea3_add_cati_to_questionnaire_response.py",
"copies": "1",
"size": "1967",
"license": "bsd-3-clause",
"hash": -2965992015907104300,
"line_mean": 31.7833333333,
"line_max": 125,
"alpha_frac": 0.7539400102,
"autogenerated": false,
"ratio": 3.5959780621572213,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4849918072357221,
"avg_score": null,
"num_lines": null
} |
"""add cert_expiry column
Revision ID: 31a7742584
Revises: 5730af27a86
Create Date: 2017-09-07 22:50:57.260545
"""
# revision identifiers, used by Alembic.
revision = '31a7742584'
down_revision = '5730af27a86'
from alembic import op
import sqlalchemy as sa
from ca import app,db
def upgrade():
    """Add the nullable ``cert_expire_date`` column to ``request``.

    On SQLite the table is rebuilt by hand (rename -> recreate -> copy ->
    drop) because SQLite's ALTER TABLE cannot add the column alongside the
    existing UNIQUE ``cert_sn`` the way this schema needs. The statement
    order below is load-bearing — do not reorder.
    """
    # SQLite can't add a unique column via ALTER TABLE
    if "sqlite" == db.get_engine(app).name:
        conn=op.get_bind()
        # 1) move the old table aside
        conn.execute("ALTER TABLE request rename to request_old;")
        # 2) recreate it with the new cert_expire_date column
        conn.execute("CREATE TABLE request ( id VARCHAR(80) NOT NULL, email VARCHAR(120), generation_date DATE, cert_sn INT UNIQUE, cert_expire_date Date , PRIMARY KEY (id));")
        # 3) copy the data over, leaving the new column NULL
        conn.execute("INSERT INTO request SELECT id, email, generation_date, cert_sn, NULL from request_old;")
        # 4) discard the renamed original
        conn.execute("DROP TABLE request_old;")
    else:
        op.add_column('request', sa.Column('cert_expire_date', sa.Date(), nullable=True, unique=False))
def downgrade():
    """Remove ``cert_expire_date``, rebuilding the table on SQLite.

    Mirror image of :func:`upgrade`: SQLite lacks DROP COLUMN (at the time
    this was written), so the table is renamed, recreated without the
    column, repopulated, and the old copy dropped.
    """
    # SQLite doesn't support "DROP COLUMN"
    if "sqlite" == db.get_engine(app).name:
        conn=op.get_bind()
        conn.execute("ALTER TABLE request RENAME TO request_old;")
        # Recreate the pre-migration schema (no cert_expire_date)
        conn.execute("CREATE TABLE request ( id VARCHAR(80) NOT NULL, email VARCHAR(120), generation_date DATE, cert_sn INT UNIQUE, PRIMARY KEY (id));")
        conn.execute("INSERT INTO request SELECT id, email, generation_date, cert_sn from request_old;")
        conn.execute("DROP TABLE request_old;")
    else:
        op.drop_column('request', 'cert_expire_date')
| {
"repo_name": "freifunk-berlin/ca.berlin.freifunk.net",
"path": "migrations/versions/31a7742584_add_cert_expiry_column.py",
"copies": "1",
"size": "1493",
"license": "mit",
"hash": -374412712810373200,
"line_mean": 37.2820512821,
"line_max": 176,
"alpha_frac": 0.6811788346,
"autogenerated": false,
"ratio": 3.5295508274231677,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9536738812335884,
"avg_score": 0.034798169937456644,
"num_lines": 39
} |
"""Add certified_files_history table and update certify_history table
Revision ID: 52b3c1a122ce
Revises: 204e2cf584cd
Create Date: 2017-06-05 12:37:10.724212
"""
# revision identifiers, used by Alembic.
revision = '52b3c1a122ce'
down_revision = '204e2cf584cd'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Run the engine-specific upgrade, e.g. ``upgrade_data_broker``."""
    target = "upgrade_%s" % engine_name
    globals()[target]()
def downgrade(engine_name):
    """Run the engine-specific downgrade, e.g. ``downgrade_data_broker``."""
    target = "downgrade_%s" % engine_name
    globals()[target]()
def upgrade_data_broker():
    """Create ``certified_files_history`` and relax the certify_history FK.

    The new table snapshots, per certification event, which files (and
    warning files / narratives) belonged to a submission. The existing
    certify_history -> submission FK is recreated WITHOUT its previous
    ON DELETE CASCADE, so certification history now survives submission
    deletion (the downgrade restores the cascade).
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('certified_files_history',
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.Column('certified_files_history_id', sa.Integer(), nullable=False),
    sa.Column('certify_history_id', sa.Integer(), nullable=True),
    sa.Column('submission_id', sa.Integer(), nullable=True),
    sa.Column('filename', sa.Text(), nullable=True),
    sa.Column('file_type_id', sa.Integer(), nullable=True),
    sa.Column('warning_filename', sa.Text(), nullable=True),
    sa.Column('narrative', sa.Text(), nullable=True),
    sa.ForeignKeyConstraint(['certify_history_id'], ['certify_history.certify_history_id'], name='fk_certify_history_certified_files_id'),
    sa.ForeignKeyConstraint(['file_type_id'], ['file_type.file_type_id'], name='fk_certified_files_history_file_type'),
    sa.ForeignKeyConstraint(['submission_id'], ['submission.submission_id'], name='fk_certified_files_history_submission_id'),
    sa.PrimaryKeyConstraint('certified_files_history_id')
    )
    # Swap the FK for one without ON DELETE CASCADE (same name, new behavior)
    op.drop_constraint('fk_certify_history_submission_id', 'certify_history', type_='foreignkey')
    op.create_foreign_key('fk_certify_history_submission_id', 'certify_history', 'submission', ['submission_id'], ['submission_id'])
    ### end Alembic commands ###
def downgrade_data_broker():
    """Drop the history table and restore the cascading certify_history FK."""
    # The history table references certify_history, so remove it up front;
    # then recreate the submission FK with its original ON DELETE CASCADE.
    op.drop_table('certified_files_history')
    op.drop_constraint('fk_certify_history_submission_id', 'certify_history',
                       type_='foreignkey')
    op.create_foreign_key('fk_certify_history_submission_id', 'certify_history',
                          'submission', ['submission_id'], ['submission_id'],
                          ondelete='CASCADE')
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/52b3c1a122ce_add_certified_files_history_table_and_update_certify_history_table.py",
"copies": "1",
"size": "2380",
"license": "cc0-1.0",
"hash": 907267701580667000,
"line_mean": 40.0344827586,
"line_max": 152,
"alpha_frac": 0.687394958,
"autogenerated": false,
"ratio": 3.5051546391752577,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.958159571269846,
"avg_score": 0.02219077689535948,
"num_lines": 58
} |
"""Add certifying user to submission and certification history table
Revision ID: d7e2e541f6d6
Revises: 21d297a08e18
Create Date: 2017-03-27 10:29:36.234172
"""
# revision identifiers, used by Alembic.
revision = 'd7e2e541f6d6'
down_revision = '21d297a08e18'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Look up and invoke the upgrade function for *engine_name*."""
    upgrade_fn = globals()["upgrade_%s" % engine_name]
    upgrade_fn()
def downgrade(engine_name):
    """Look up and invoke the downgrade function for *engine_name*."""
    downgrade_fn = globals()["downgrade_%s" % engine_name]
    downgrade_fn()
def upgrade_data_broker():
    """Track who certified a submission.

    Adds a ``certify_history`` audit table (one row per certification) and a
    ``certifying_user_id`` column on ``submission``. User FKs use SET NULL so
    history rows outlive deleted users; the submission FK cascades so history
    disappears with its submission.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('certify_history',
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.Column('certify_history_id', sa.Integer(), nullable=False),
    sa.Column('submission_id', sa.Integer(), nullable=True),
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['submission_id'], ['submission.submission_id'], name='fk_certify_history_submission_id', ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['user_id'], ['users.user_id'], name='fk_certify_history_user', ondelete='SET NULL'),
    sa.PrimaryKeyConstraint('certify_history_id')
    )
    op.add_column('submission', sa.Column('certifying_user_id', sa.Integer(), nullable=True))
    op.create_foreign_key('fk_submission_certifying_user', 'submission', 'users', ['certifying_user_id'], ['user_id'], ondelete='SET NULL')
    ### end Alembic commands ###
def downgrade_data_broker():
    """Remove the certifying-user column/FK and the certify_history table."""
    # The FK must be dropped before its column can go.
    op.drop_constraint('fk_submission_certifying_user', 'submission',
                       type_='foreignkey')
    op.drop_column('submission', 'certifying_user_id')
    op.drop_table('certify_history')
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/d7e2e541f6d6_add_certifying_user_to_submission_and_.py",
"copies": "1",
"size": "1852",
"license": "cc0-1.0",
"hash": 2730912510827796500,
"line_mean": 33.9433962264,
"line_max": 142,
"alpha_frac": 0.6830453564,
"autogenerated": false,
"ratio": 3.4616822429906544,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46447275993906545,
"avg_score": null,
"num_lines": null
} |
"""Add cfda program table
Revision ID: 844f2d3a614a
Revises: f15485f0092b
Create Date: 2017-05-15 17:04:50.407153
"""
# revision identifiers, used by Alembic.
revision = '844f2d3a614a'
down_revision = 'f15485f0092b'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Delegate to the per-engine upgrade implementation."""
    fn = globals()["upgrade_%s" % engine_name]
    fn()
def downgrade(engine_name):
    """Delegate to the per-engine downgrade implementation."""
    fn = globals()["downgrade_%s" % engine_name]
    fn()
def upgrade_data_broker():
    """Create the ``cfda_program`` reference table.

    One row per CFDA assistance listing. Apart from the bookkeeping/key
    columns, every field is free-form text straight from the CFDA export,
    so they are generated from a name list rather than spelled out.
    """
    # Free-form Text columns, in the original (auto-generated) column order.
    text_fields = (
        'program_title', 'popular_name', 'federal_agency', 'authorization',
        'objectives', 'types_of_assistance', 'uses_and_use_restrictions',
        'applicant_eligibility', 'beneficiary_eligibility',
        'credentials_documentation', 'preapplication_coordination',
        'application_procedures', 'award_procedure', 'deadlines',
        'range_of_approval_disapproval_time', 'website_address',
        'formula_and_matching_requirements',
        'length_and_time_phasing_of_assistance', 'reports', 'audits',
        'records', 'account_identification', 'obligations',
        'range_and_average_of_financial_assistance', 'appeals', 'renewals',
        'program_accomplishments', 'regulations_guidelines_and_literature',
        'regional_or_local_office', 'headquarters_office', 'related_programs',
        'examples_of_funded_projects', 'criteria_for_selecting_proposals',
        'url', 'recovery', 'omb_agency_code', 'omb_bureau_code',
        'published_date', 'archived_date',
    )
    items = [
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('cfda_program_id', sa.Integer(), nullable=False),
        sa.Column('program_number', sa.Float(), nullable=False),
    ]
    items.extend(sa.Column(name, sa.Text(), nullable=True) for name in text_fields)
    items.append(sa.PrimaryKeyConstraint('cfda_program_id'))
    op.create_table('cfda_program', *items)
def downgrade_data_broker():
    """Drop the CFDA reference table."""
    table_name = 'cfda_program'
    op.drop_table(table_name)
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/844f2d3a614a_add_cfda_program_table.py",
"copies": "1",
"size": "3424",
"license": "cc0-1.0",
"hash": -4236656436547167700,
"line_mean": 41.8,
"line_max": 85,
"alpha_frac": 0.6807827103,
"autogenerated": false,
"ratio": 3.2828379674017256,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44636206777017257,
"avg_score": null,
"num_lines": null
} |
"""Add challenger and opponent score
Revision ID: 1ce0eb6f5f54
Revises: d172bd5e57c1
Create Date: 2019-01-03 18:42:42.514602
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '1ce0eb6f5f54'
down_revision = 'd172bd5e57c1'
branch_labels = None
depends_on = None
def upgrade():
    """Record per-side scores on regular and tournament games."""
    new_columns = (
        ('games', 'challenger_score'),
        ('games', 'opponent_score'),
        ('tournament_games', 'bot_a_score'),
        ('tournament_games', 'bot_b_score'),
    )
    for table, column in new_columns:
        op.add_column(table, sa.Column(column, sa.Integer(), nullable=True))
def downgrade():
    """Drop the score columns, in reverse order of their creation."""
    removed_columns = (
        ('tournament_games', 'bot_b_score'),
        ('tournament_games', 'bot_a_score'),
        ('games', 'opponent_score'),
        ('games', 'challenger_score'),
    )
    for table, column in removed_columns:
        op.drop_column(table, column)
| {
"repo_name": "mitpokerbots/scrimmage",
"path": "migrations/versions/1ce0eb6f5f54_.py",
"copies": "1",
"size": "1115",
"license": "mit",
"hash": -5599842963999184000,
"line_mean": 31.7941176471,
"line_max": 92,
"alpha_frac": 0.6869955157,
"autogenerated": false,
"ratio": 3.123249299719888,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9303661824964005,
"avg_score": 0.0013165980911765348,
"num_lines": 34
} |
"""Add challenges
Revision ID: 49be2190c22d
Revises: 33de9025cc63
Create Date: 2017-11-08 17:34:20.134831+00:00
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '49be2190c22d'
down_revision = 'cfaf0d9a46cc'
branch_labels = None
depends_on = None
def upgrade():
    """Create the challenge tables and link games to challenges.

    ``challenge`` is one issued challenge; ``challenge_participant`` holds
    per-user stats for it; ``game`` gains a nullable ``challenge_id`` FK.
    User-id columns use MEDIUMINT(8) UNSIGNED to match the existing MySQL
    ``user.id`` type. All FKs cascade on delete.
    """
    op.create_table(
        "challenge",
        sa.Column("id", sa.Integer, primary_key=True),
        sa.Column("created", sa.DateTime,
                  nullable=False,
                  server_default=sa.sql.func.now()),
        # NULL until the challenge is over
        sa.Column("finished", sa.DateTime,
                  nullable=True),
        sa.Column("num_games", sa.Integer,
                  default=0,
                  nullable=False),
        sa.Column("status",
                  sa.Enum("created", "playing_game", "finished"),
                  default="created",
                  nullable=False),
        sa.Column("most_recent_game_task",
                  sa.DateTime,
                  nullable=True),
        sa.Column("issuer",
                  mysql.MEDIUMINT(display_width=8, unsigned=True),
                  nullable=False),
        # NULL until a winner is decided
        sa.Column("winner",
                  mysql.MEDIUMINT(display_width=8, unsigned=True),
                  nullable=True),
        sa.ForeignKeyConstraint(['issuer'], ['user.id'],
                                name='challenge_issuer_fk',
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['winner'], ['user.id'],
                                name='challenge_winner_fk',
                                ondelete='CASCADE'),
    )
    # Per-user participation stats; composite PK (challenge_id, user_id).
    op.create_table(
        "challenge_participant",
        sa.Column("challenge_id",
                  sa.Integer,
                  primary_key=True),
        sa.Column("user_id",
                  mysql.MEDIUMINT(display_width=8, unsigned=True),
                  primary_key=True),
        sa.Column("points",
                  sa.Integer(),
                  default=0,
                  nullable=False),
        sa.Column("ships_produced",
                  sa.Integer(),
                  default=0,
                  nullable=False),
        sa.Column("attacks_made",
                  sa.Integer(),
                  default=0,
                  nullable=False),
        sa.ForeignKeyConstraint(['challenge_id'], ['challenge.id'],
                                name='challenge_participant_fk',
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'],
                                name='challenge_participant_ibfk_2',
                                ondelete='CASCADE'),
    )
    # Attach games to challenges (nullable: most games have no challenge).
    op.add_column(
        "game",
        sa.Column("challenge_id",
                  sa.Integer,
                  nullable=True),
    )
    op.create_foreign_key('game_challenge_fk',
                          'game',
                          'challenge',
                          ['challenge_id'],
                          ['id'],
                          ondelete='CASCADE')
def downgrade():
    """Detach games from challenges, then drop the challenge tables."""
    # FK before its column; participant rows reference challenge, so the
    # tables go in dependency order.
    op.drop_constraint("game_challenge_fk", "game", "foreignkey")
    op.drop_column("game", "challenge_id")
    for table in ("challenge_participant", "challenge"):
        op.drop_table(table)
| {
"repo_name": "HaliteChallenge/Halite-II",
"path": "apiserver/alembic/versions/201711081734_49be2190c22d_add_challenges.py",
"copies": "1",
"size": "3282",
"license": "mit",
"hash": -2521048153644443000,
"line_mean": 31.82,
"line_max": 68,
"alpha_frac": 0.4820231566,
"autogenerated": false,
"ratio": 4.514442916093535,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5496466072693535,
"avg_score": null,
"num_lines": null
} |
"""add_change_request
Revision ID: 694bcef51b94
Revises: f78a2be4ddf9
Create Date: 2018-01-15 13:53:12.680830
"""
from alembic import op
import sqlalchemy as sa
import zeus
# revision identifiers, used by Alembic.
revision = "694bcef51b94"
down_revision = "f78a2be4ddf9"
branch_labels = ()
depends_on = None
def upgrade():
    """Create the ``change_request`` table plus its indexes.

    A change request (e.g. a pull request) is unique per repository either
    by ``number`` or by (``provider``, ``external_id``). Revision links are
    composite FKs on (repository_id, sha) into ``revision``.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "change_request",
        sa.Column("number", sa.Integer(), nullable=False),
        sa.Column("parent_revision_sha", sa.String(length=40), nullable=False),
        # head may be unknown while the CR is open
        sa.Column("head_revision_sha", sa.String(length=40), nullable=True),
        sa.Column("message", sa.String(), nullable=False),
        sa.Column("author_id", zeus.db.types.guid.GUID(), nullable=True),
        sa.Column("provider", sa.String(), nullable=True),
        sa.Column("external_id", sa.String(length=64), nullable=True),
        sa.Column("url", sa.String(), nullable=True),
        sa.Column("data", zeus.db.types.json.JSONEncodedDict(), nullable=True),
        sa.Column("date_updated", sa.TIMESTAMP(timezone=True), nullable=True),
        sa.Column("repository_id", zeus.db.types.guid.GUID(), nullable=False),
        sa.Column("id", zeus.db.types.guid.GUID(), nullable=False),
        sa.Column(
            "date_created",
            sa.TIMESTAMP(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(["author_id"], ["author.id"]),
        sa.ForeignKeyConstraint(
            ["repository_id", "head_revision_sha"],
            ["revision.repository_id", "revision.sha"],
        ),
        sa.ForeignKeyConstraint(
            ["repository_id", "parent_revision_sha"],
            ["revision.repository_id", "revision.sha"],
        ),
        sa.ForeignKeyConstraint(
            ["repository_id"], ["repository.id"], ondelete="CASCADE"
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("repository_id", "number", name="unq_cr_number"),
        sa.UniqueConstraint(
            "repository_id", "provider", "external_id", name="unq_cr_provider"
        ),
    )
    # Lookup indexes for revision joins and the common filter columns.
    op.create_index(
        "idx_cr_head_revision",
        "change_request",
        ["repository_id", "head_revision_sha"],
        unique=False,
    )
    op.create_index(
        "idx_cr_parent_revision",
        "change_request",
        ["repository_id", "parent_revision_sha"],
        unique=False,
    )
    op.create_index(
        op.f("ix_change_request_author_id"),
        "change_request",
        ["author_id"],
        unique=False,
    )
    op.create_index(
        op.f("ix_change_request_repository_id"),
        "change_request",
        ["repository_id"],
        unique=False,
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the four change_request indexes, then the table itself."""
    index_names = (
        op.f("ix_change_request_repository_id"),
        op.f("ix_change_request_author_id"),
        "idx_cr_parent_revision",
        "idx_cr_head_revision",
    )
    for index_name in index_names:
        op.drop_index(index_name, table_name="change_request")
    op.drop_table("change_request")
| {
"repo_name": "getsentry/zeus",
"path": "zeus/migrations/694bcef51b94_add_change_request.py",
"copies": "1",
"size": "3288",
"license": "apache-2.0",
"hash": -8872191650415611000,
"line_mean": 32.5510204082,
"line_max": 87,
"alpha_frac": 0.6040145985,
"autogenerated": false,
"ratio": 3.581699346405229,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4685713944905229,
"avg_score": null,
"num_lines": null
} |
"""add characters and work_characters table
Revision ID: 29bdbb279ab
Revises: 123119b63b1
Create Date: 2014-10-31 01:09:54.927692
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '29bdbb279ab'
down_revision = '123119b63b1'
def upgrade():
    """Create ``characters`` (self-referencing) and the work<->character link."""
    character_items = (
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('original_character_id', sa.Integer(), nullable=True),
        # id doubles as a FK into the nameables hierarchy
        sa.ForeignKeyConstraint(['id'], ['nameables.id']),
        # original_character_id points back into characters itself
        sa.ForeignKeyConstraint(['original_character_id'], ['characters.id']),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table('characters', *character_items)
    link_items = (
        sa.Column('work_id', sa.Integer(), nullable=False),
        sa.Column('character_id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(['character_id'], ['characters.id']),
        sa.ForeignKeyConstraint(['work_id'], ['works.id']),
        sa.PrimaryKeyConstraint('work_id', 'character_id'),
    )
    op.create_table('work_characters', *link_items)
def downgrade():
    """Remove the character tables in dependency order."""
    for table in ('work_characters', 'characters'):
        op.drop_table(table)
| {
"repo_name": "clicheio/cliche",
"path": "cliche/migrations/versions/29bdbb279ab_add_characters_and_work_characters_table.py",
"copies": "2",
"size": "1185",
"license": "mit",
"hash": -3352938647296249300,
"line_mean": 28.625,
"line_max": 78,
"alpha_frac": 0.6540084388,
"autogenerated": false,
"ratio": 3.6349693251533743,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5288977763953374,
"avg_score": null,
"num_lines": null
} |
# TODO: add an import-time check that the third-party 'requests' package is installed
import requests
from .models import *
from .endpoints import *
from .errors import *
from functools import partial
from json import dumps
import os
import binascii
class API(object):
    """Thin HTTP client for the (unofficial) Vine API.

    One method per entry in ``ENDPOINTS`` is generated at construction time;
    every generated method funnels into :meth:`api_call`. Constants such as
    ``ENDPOINTS``, ``HEADERS``, ``API_HOST`` and ``PROTOCOL`` come from the
    sibling ``endpoints`` module.
    """

    def __init__(self, username=None, password=None, device_token=None, DEBUG=False):
        # Credentials are optional; when both are given we log in immediately
        # and keep the resulting user (login() is a generated endpoint method).
        self.username = username
        self.password = password
        self._session_id = None
        self.DEBUG = DEBUG
        # Random hex token unless the caller pins one (stable device identity).
        self.device_token = device_token or binascii.b2a_hex(os.urandom(32))
        self._make_dynamic_methods()
        if self.username and self.password:
            self.user = self.login(
                username=username, password=password, device_token=self.device_token)

    def _make_dynamic_methods(self):
        """Attach one callable per known endpoint, named after the endpoint."""
        for endpoint in list(ENDPOINTS.keys()):
            def _inner(endpoint, *args, **kwargs):
                return self.api_call(endpoint, *args, **kwargs)
            _inner.__name__ = endpoint
            # partial() pins the endpoint name, avoiding the classic
            # late-binding-closure bug across loop iterations.
            setattr(self, _inner.__name__, partial(_inner, endpoint))

    def build_request_url(self, protocol, host, endpoint):
        """Join protocol, host and endpoint path into a request URL."""
        url = '%s://%s/%s' % (protocol, host, endpoint)
        # encode url params
        return url

    def api_call(self, endpoint, *args, **kwargs):
        """Validate kwargs for *endpoint*, perform the request, and wrap the
        response in the endpoint's model class (if it declares one)."""
        metadata = ENDPOINTS[endpoint]
        params = self.check_params(metadata, kwargs)
        response = self.do_request(metadata, params)
        if metadata['model'] is None:
            return response
        else:
            model = metadata['model'].from_json(response)
            model.connect_api(self)
            return model

    def check_params(self, metadata, kwargs):
        """Split kwargs into URL parameters and body/query data.

        Raises ParameterError when any of the endpoint's ``url_params`` or
        ``required_params`` are missing. Returns
        ``{'url': [...], 'data': {...}}``. NOTE: mutates *kwargs* (deletes
        consumed URL params).
        """
        missing_params = []
        url_params = []
        # page, size and anchor are data_params for get requests
        for param in metadata['url_params']:
            p = kwargs.get(param)
            if p is None:
                missing_params.append(param)
            else:
                url_params.append(p)
                del kwargs[param]
        if missing_params:
            raise ParameterError(
                'Missing URL parameters: [%s]' % ', '.join(missing_params))
        # url_params shouldnt have default params, I guess
        data_params = kwargs
        if metadata.get('default_params', []) != []:
            # caller kwargs win over endpoint defaults
            default_params = dict(metadata['default_params'])
            data_params = dict(list(default_params.items()) + list(kwargs.items()))
        missing_params = []
        for param in metadata['required_params']:
            p = data_params.get(param)
            if p is None:
                missing_params.append(param)
        if missing_params:
            raise ParameterError(
                'Missing required parameters: [%s]' % ', '.join(missing_params))
        # Check for unsupported params?
        return {'url': url_params, 'data': data_params}

    def do_request(self, metadata, params):
        """Execute the HTTP request described by *metadata* and *params*.

        Returns the upload key (for upload endpoints), or the ``data`` field
        of Vine's JSON envelope. Raises VineError on API-level or HTTP-level
        failure.
        """
        headers = HEADERS.copy()
        # Interpolate positional URL params into the endpoint template.
        if params['url'] != []:
            endpoint = metadata['endpoint'] % tuple(params['url'])
        else:
            endpoint = metadata['endpoint']
        host = API_HOST
        # Upload methods, change host to specific host
        if metadata.get('host'):
            host = metadata['host']
        url = self.build_request_url(PROTOCOL, host, endpoint)
        built_params = built_data = None
        built_data = data = params['data']
        if metadata['request_type'] == 'get':
            built_params = data
        elif metadata['request_type'] == 'post':
            if metadata.get('json'):
                built_data = dumps(data)
                headers['Content-Type'] = 'application/json; charset=utf-8'
            elif data.get('filename'):
                # Content type is inferred from the file extension only.
                if data['filename'].split('.')[-1] == 'mp4':
                    headers['Content-Type'] = 'video/mp4'
                else:
                    headers['Content-Type'] = 'image/jpeg'
                # NOTE(review): file handle is never explicitly closed —
                # relies on requests/GC to release it.
                built_data = open(data['filename'], 'rb')
        if self._session_id:
            headers['vine-session-id'] = self._session_id
        if(self.DEBUG):
            # Route traffic through a local mitmproxy for inspection.
            # pip install mitmproxy
            # mitmproxy
            http_proxy = "http://localhost:8080"
            https_proxy = "http://localhost:8080"
            proxies = {
                "http": http_proxy,
                "https": https_proxy,
            }
            # cafile='~/.mitmproxy/mitmproxy-ca-cert.pem'
            cafile = False
            response = requests.request(
                metadata['request_type'], url, params=built_params, data=built_data, headers=headers, verify=cafile, proxies=proxies)
            print('REQUESTED: %s [%s]' % (url, response.status_code))
        else:
            response = requests.request(
                metadata['request_type'], url, params=built_params, data=built_data, headers=headers)
        # Upload endpoints answer with the key in a header, not a JSON body.
        if response.headers.get('X-Upload-Key'):
            return response.headers['X-Upload-Key']
        if response.status_code in [200, 400, 404, 420]:
            try:
                json = response.json()
            # NOTE(review): bare except — would be safer as `except ValueError:`
            except:
                raise VineError(
                    1000, 'Vine replied with non-json content:\n' + response.text)
            if json['success'] is not True:
                raise VineError(json['code'], json['error'])
            return json['data']
        else:
            raise VineError(response.status_code, response.text)

    def authenticate(self, user):
        """Adopt *user*'s session key/id for subsequent requests."""
        self.user = user
        self._session_id = user.key
        self._user_id = user.id
| {
"repo_name": "davoclavo/vinepy",
"path": "vinepy/api.py",
"copies": "1",
"size": "5507",
"license": "mit",
"hash": 6199355496939054000,
"line_mean": 32.7852760736,
"line_max": 133,
"alpha_frac": 0.555111676,
"autogenerated": false,
"ratio": 4.229646697388633,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5284758373388633,
"avg_score": null,
"num_lines": null
} |
"""Add Checklists
Revision ID: cdb2997e1c4b
Revises: 9422e46956fb
Create Date: 2017-11-24 18:56:32.227650
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'cdb2997e1c4b'
down_revision = '9422e46956fb'
branch_labels = None
depends_on = None
# Table names shared by upgrade() and downgrade() so the two stay in sync.
checklists_table_name = 'checklists'
checklist_items_table_name = 'checklist_items'
checklist_completion_table_name = 'checklist_completions'
checklist_completion_items_table_name = 'checklist_completion_items'
def upgrade():
    """Create the checklist schema: definitions, items, and completion logs."""

    def _pk():
        # Fresh surrogate-key column per call; a Column object may only
        # belong to a single table.
        return sa.Column('id', sa.Integer, primary_key=True, autoincrement=True)

    def _timestamps():
        # Audit timestamps shared by every table in this revision; built
        # fresh on each call for the same reason as _pk().
        return [
            sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
            sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
            sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
        ]

    op.create_table(
        checklists_table_name,
        _pk(),
        sa.Column('name', sa.String(256), nullable=False),
        sa.Column('type', sa.String(32), nullable=False),
        sa.Column('description', sa.Text(), nullable=False),
        *_timestamps()
    )
    op.create_table(
        checklist_items_table_name,
        _pk(),
        sa.Column('checklist_id', sa.Integer, nullable=False),
        sa.Column('name', sa.String(256), nullable=False),
        sa.Column('type', sa.String(32), nullable=False),
        sa.Column('description', sa.Text(), nullable=False),
        *_timestamps()
    )
    op.create_table(
        checklist_completion_table_name,
        _pk(),
        sa.Column('checklist_id', sa.Integer, nullable=False),
        sa.Column('notes', sa.Text(), nullable=False),
        *_timestamps()
    )
    op.create_table(
        checklist_completion_items_table_name,
        _pk(),
        sa.Column('checklist_completion_id', sa.Integer, nullable=False),
        sa.Column('checklist_item_id', sa.Integer, nullable=False),
        sa.Column('completed', sa.Integer, nullable=False),
        *_timestamps()
    )
def downgrade():
    """Drop every table created by this revision (same order as upgrade)."""
    for table_name in (
        checklists_table_name,
        checklist_items_table_name,
        checklist_completion_table_name,
        checklist_completion_items_table_name,
    ):
        op.drop_table(table_name)
| {
"repo_name": "charlesj/Apollo",
"path": "database/versions/cdb2997e1c4b_add_checklists.py",
"copies": "1",
"size": "2961",
"license": "mit",
"hash": -5991917717618695000,
"line_mean": 39.5616438356,
"line_max": 76,
"alpha_frac": 0.6788247214,
"autogenerated": false,
"ratio": 3.4191685912240186,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4597993312624019,
"avg_score": null,
"num_lines": null
} |
"""Add cited references to the front matter of each markdown page.
"""
import os
import re
import sys
import yaml
def get_frontmatter(f):
    """Parse and return the YAML front matter of markdown file *f* as a dict."""
    with open(f) as fp:
        raw = fp.read()
    # The front matter is the text between the first pair of "---" delimiters.
    yaml_block = raw.partition("---")[2].partition("---")[0]
    return yaml.safe_load(yaml_block)
def find_cites(f):
    """Return the sorted, de-duplicated citation keys used in file *f*."""
    with open(f) as fp:
        # Liquid-style tags look like {{ site.data.refs.<key>.<field> }}.
        tags = re.findall(r"{{(.+?)}}", fp.read())
    # The citation key is the fourth dot-separated component of the tag.
    keys = {tag.split(".")[3] for tag in tags if "site.data.refs" in tag}
    return sorted(keys)
def replace_frontmatter(f, d):
    """Overwrite the front matter of markdown file *f* with dict *d*."""
    with open(f) as fp:
        # Everything after the second "---\n" delimiter is the page body.
        body = fp.read().partition("---\n")[2].partition("---\n")[2]
    with open(f, "w") as out:
        out.write("---\n")
        yaml.safe_dump(d, out)
        out.write(f"---\n{body}")
def add_refs():
    """Annotate every markdown post with the reference keys it cites."""
    post_dir = "../../_posts"
    for name in os.listdir(post_dir):
        if ".md" not in name:
            continue
        path = f"{post_dir}/{name}"
        front = get_frontmatter(path)
        cites = find_cites(path)
        if cites:
            front["include_references"] = True
            front["references"] = cites
        # Rewrite the front matter even when nothing was cited, matching
        # the original behavior of normalizing every post's header.
        replace_frontmatter(path, front)
if __name__ == "__main__":
    # Entry point: scan every post and record its cited reference keys.
    add_refs()
| {
"repo_name": "sammosummo/sammosummo.github.io",
"path": "assets/scripts/add_refs.py",
"copies": "1",
"size": "1324",
"license": "mit",
"hash": 5774135024968989000,
"line_mean": 20.3548387097,
"line_max": 69,
"alpha_frac": 0.5151057402,
"autogenerated": false,
"ratio": 3.2135922330097086,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9224217686471357,
"avg_score": 0.0008960573476702509,
"num_lines": 62
} |
"""Add classification_type"""
# revision identifiers, used by Alembic.
revision = 'd616b0a80feb'
down_revision = 'ce2812d3ceb5'
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
def upgrade():
    """Add problem.classification_type and backfill multi-class problems."""
    labels = ('binary', 'multi-label', 'multi-class')
    # The PostgreSQL enum type must exist before any column can reference it.
    enum_type = postgresql.ENUM(*labels, name='classification_type_enum')
    enum_type.create(op.get_bind())
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        'problem',
        sa.Column(
            'classification_type',
            sa.Enum(*labels, name='classification_type_enum'),
            server_default='binary',
            nullable=True,
        ),
    )
    # Problems that carry two or more labels are necessarily multi-class.
    op.execute('''
        UPDATE "problem" SET classification_type = 'multi-class' WHERE (SELECT COUNT(*) FROM "problem_label" WHERE problem_id = problem.id) >= 2
    ''')
    # ### end Alembic commands ###
def downgrade():
    """Drop the classification_type column and its backing enum type."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('problem', 'classification_type')
    # The column must be gone before the enum type itself can be dropped.
    postgresql.ENUM(
        'binary', 'multi-label', 'multi-class', name='classification_type_enum'
    ).drop(op.get_bind())
    # ### end Alembic commands ###
| {
"repo_name": "planbrothers/ml-annotate",
"path": "annotator/migrations/versions/d616b0a80feb_add_classification_type.py",
"copies": "1",
"size": "1209",
"license": "mit",
"hash": -5674323465546837000,
"line_mean": 42.1785714286,
"line_max": 184,
"alpha_frac": 0.6972704715,
"autogenerated": false,
"ratio": 3.778125,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49753954715,
"avg_score": null,
"num_lines": null
} |
"""add class mixins
Revision ID: 3f80f0c8adf1
Revises: 427f38ad688f
Create Date: 2015-08-31 12:59:49.789227
"""
# revision identifiers, used by Alembic.
revision = '3f80f0c8adf1'
down_revision = '427f38ad688f'
from alembic import op
import sqlalchemy as sa
from purchasing.data.models import TRIGGER_TUPLES
def upgrade():
    """Add created/updated audit columns and user foreign keys across tables.

    The search-view triggers are dropped first so the DDL below does not
    fire them; downgrade() recreates the triggers.
    """
    conn = op.get_bind()
    ### commands auto generated by Alembic - please adjust! ###
    # Drop every variant of the tsvector refresh triggers before altering
    # the tables they watch.  `when` is unpacked but unused here; it is
    # only needed when the triggers are recreated in downgrade().
    for table, column, when in TRIGGER_TUPLES:
        conn.execute(sa.sql.text('''
            DROP TRIGGER IF EXISTS tsv_{table}_{column}_trigger_insert_update ON {table}
        '''.format(table=table, column=column)))
        conn.execute(sa.sql.text('''
            DROP TRIGGER IF EXISTS tsv_{table}_{column}_trigger_delete ON {table}
        '''.format(table=table, column=column)))
        conn.execute(sa.sql.text('''
            DROP TRIGGER IF EXISTS tsv_{table}_{column}_trigger ON {table}
        '''.format(table=table, column=column)))
    # --- audit columns + user FKs, table by table (auto-generated).
    # Most tables gain all four columns; some gain only a subset.
    # NOTE(review): contract, contract_note and contract_stage receive only
    # the *_by_id columns — presumably created_at/updated_at already exist
    # on those tables; confirm against the earlier revisions.
    op.add_column('app_status', sa.Column('created_at', sa.DateTime(), nullable=True))
    op.add_column('app_status', sa.Column('created_by_id', sa.Integer(), nullable=True))
    op.add_column('app_status', sa.Column('updated_at', sa.DateTime(), nullable=True))
    op.add_column('app_status', sa.Column('updated_by_id', sa.Integer(), nullable=True))
    op.create_foreign_key('created_by_id_fkey', 'app_status', 'users', ['created_by_id'], ['id'])
    op.create_foreign_key('updated_by_id_fkey', 'app_status', 'users', ['updated_by_id'], ['id'])
    op.add_column('category', sa.Column('created_at', sa.DateTime(), nullable=True))
    op.add_column('category', sa.Column('created_by_id', sa.Integer(), nullable=True))
    op.add_column('category', sa.Column('updated_at', sa.DateTime(), nullable=True))
    op.add_column('category', sa.Column('updated_by_id', sa.Integer(), nullable=True))
    op.create_foreign_key('created_by_id_fkey', 'category', 'users', ['created_by_id'], ['id'])
    op.create_foreign_key('updated_by_id_fkey', 'category', 'users', ['updated_by_id'], ['id'])
    op.add_column('company', sa.Column('created_at', sa.DateTime(), nullable=True))
    op.add_column('company', sa.Column('created_by_id', sa.Integer(), nullable=True))
    op.add_column('company', sa.Column('updated_at', sa.DateTime(), nullable=True))
    op.add_column('company', sa.Column('updated_by_id', sa.Integer(), nullable=True))
    op.create_foreign_key('updated_by_id_fkey', 'company', 'users', ['updated_by_id'], ['id'])
    op.create_foreign_key('created_by_id_fkey', 'company', 'users', ['created_by_id'], ['id'])
    op.add_column('company_contact', sa.Column('created_at', sa.DateTime(), nullable=True))
    op.add_column('company_contact', sa.Column('created_by_id', sa.Integer(), nullable=True))
    op.add_column('company_contact', sa.Column('updated_at', sa.DateTime(), nullable=True))
    op.add_column('company_contact', sa.Column('updated_by_id', sa.Integer(), nullable=True))
    op.create_foreign_key('updated_by_id_fkey', 'company_contact', 'users', ['updated_by_id'], ['id'])
    op.create_foreign_key('created_by_id_fkey', 'company_contact', 'users', ['created_by_id'], ['id'])
    op.add_column('contract', sa.Column('created_by_id', sa.Integer(), nullable=True))
    op.add_column('contract', sa.Column('updated_by_id', sa.Integer(), nullable=True))
    op.create_foreign_key('created_by_id_fkey', 'contract', 'users', ['created_by_id'], ['id'])
    op.create_foreign_key('updated_by_id_fkey', 'contract', 'users', ['updated_by_id'], ['id'])
    op.add_column('contract_note', sa.Column('created_by_id', sa.Integer(), nullable=True))
    op.add_column('contract_note', sa.Column('updated_by_id', sa.Integer(), nullable=True))
    op.create_foreign_key('created_by_id_fkey', 'contract_note', 'users', ['created_by_id'], ['id'])
    op.create_foreign_key('updated_by_id_fkey', 'contract_note', 'users', ['updated_by_id'], ['id'])
    op.add_column('contract_property', sa.Column('created_at', sa.DateTime(), nullable=True))
    op.add_column('contract_property', sa.Column('created_by_id', sa.Integer(), nullable=True))
    op.add_column('contract_property', sa.Column('updated_at', sa.DateTime(), nullable=True))
    op.add_column('contract_property', sa.Column('updated_by_id', sa.Integer(), nullable=True))
    op.create_foreign_key('updated_by_id_fkey', 'contract_property', 'users', ['updated_by_id'], ['id'])
    op.create_foreign_key('created_by_id_fkey', 'contract_property', 'users', ['created_by_id'], ['id'])
    op.add_column('contract_stage', sa.Column('created_by_id', sa.Integer(), nullable=True))
    op.add_column('contract_stage', sa.Column('updated_by_id', sa.Integer(), nullable=True))
    op.create_foreign_key('created_by_id_fkey', 'contract_stage', 'users', ['created_by_id'], ['id'])
    op.create_foreign_key('updated_by_id_fkey', 'contract_stage', 'users', ['updated_by_id'], ['id'])
    op.add_column('contract_stage_action_item', sa.Column('created_at', sa.DateTime(), nullable=True))
    op.add_column('contract_stage_action_item', sa.Column('created_by_id', sa.Integer(), nullable=True))
    op.add_column('contract_stage_action_item', sa.Column('updated_at', sa.DateTime(), nullable=True))
    op.add_column('contract_stage_action_item', sa.Column('updated_by_id', sa.Integer(), nullable=True))
    op.create_foreign_key('created_by_id_fkey', 'contract_stage_action_item', 'users', ['created_by_id'], ['id'])
    op.create_foreign_key('updated_by_id_fkey', 'contract_stage_action_item', 'users', ['updated_by_id'], ['id'])
    op.add_column('contract_type', sa.Column('created_at', sa.DateTime(), nullable=True))
    op.add_column('contract_type', sa.Column('created_by_id', sa.Integer(), nullable=True))
    op.add_column('contract_type', sa.Column('updated_at', sa.DateTime(), nullable=True))
    op.add_column('contract_type', sa.Column('updated_by_id', sa.Integer(), nullable=True))
    op.create_foreign_key('created_by_id_fkey', 'contract_type', 'users', ['created_by_id'], ['id'])
    op.create_foreign_key('updated_by_id_fkey', 'contract_type', 'users', ['updated_by_id'], ['id'])
    op.add_column('department', sa.Column('created_at', sa.DateTime(), nullable=True))
    op.add_column('department', sa.Column('created_by_id', sa.Integer(), nullable=True))
    op.add_column('department', sa.Column('updated_at', sa.DateTime(), nullable=True))
    op.add_column('department', sa.Column('updated_by_id', sa.Integer(), nullable=True))
    op.create_foreign_key('created_by_id_fkey', 'department', 'users', ['created_by_id'], ['id'])
    op.create_foreign_key('updated_by_id_fkey', 'department', 'users', ['updated_by_id'], ['id'])
    op.add_column('document', sa.Column('created_at', sa.DateTime(), nullable=True))
    op.add_column('document', sa.Column('created_by_id', sa.Integer(), nullable=True))
    op.add_column('document', sa.Column('updated_at', sa.DateTime(), nullable=True))
    op.add_column('document', sa.Column('updated_by_id', sa.Integer(), nullable=True))
    op.create_foreign_key('updated_by_id_fkey', 'document', 'users', ['updated_by_id'], ['id'])
    op.create_foreign_key('created_by_id_fkey', 'document', 'users', ['created_by_id'], ['id'])
    op.add_column('flow', sa.Column('created_at', sa.DateTime(), nullable=True))
    op.add_column('flow', sa.Column('created_by_id', sa.Integer(), nullable=True))
    op.add_column('flow', sa.Column('updated_at', sa.DateTime(), nullable=True))
    op.add_column('flow', sa.Column('updated_by_id', sa.Integer(), nullable=True))
    op.create_foreign_key('created_by_id_fkey', 'flow', 'users', ['created_by_id'], ['id'])
    op.create_foreign_key('updated_by_id_fkey', 'flow', 'users', ['updated_by_id'], ['id'])
    op.add_column('line_item', sa.Column('created_at', sa.DateTime(), nullable=True))
    op.add_column('line_item', sa.Column('created_by_id', sa.Integer(), nullable=True))
    op.add_column('line_item', sa.Column('updated_at', sa.DateTime(), nullable=True))
    op.add_column('line_item', sa.Column('updated_by_id', sa.Integer(), nullable=True))
    op.create_foreign_key('updated_by_id_fkey', 'line_item', 'users', ['updated_by_id'], ['id'])
    op.create_foreign_key('created_by_id_fkey', 'line_item', 'users', ['created_by_id'], ['id'])
    # opportunity only gains the "updated" pair here.
    op.add_column('opportunity', sa.Column('updated_at', sa.DateTime(), nullable=True))
    op.add_column('opportunity', sa.Column('updated_by_id', sa.Integer(), nullable=True))
    op.create_foreign_key('updated_by_id_fkey', 'opportunity', 'users', ['updated_by_id'], ['id'])
    op.add_column('opportunity_document', sa.Column('created_at', sa.DateTime(), nullable=True))
    op.add_column('opportunity_document', sa.Column('created_by_id', sa.Integer(), nullable=True))
    op.add_column('opportunity_document', sa.Column('updated_at', sa.DateTime(), nullable=True))
    op.add_column('opportunity_document', sa.Column('updated_by_id', sa.Integer(), nullable=True))
    op.create_foreign_key('updated_by_id_fkey', 'opportunity_document', 'users', ['updated_by_id'], ['id'])
    op.create_foreign_key('created_by_id_fkey', 'opportunity_document', 'users', ['created_by_id'], ['id'])
    op.add_column('roles', sa.Column('created_at', sa.DateTime(), nullable=True))
    op.add_column('roles', sa.Column('created_by_id', sa.Integer(), nullable=True))
    op.add_column('roles', sa.Column('updated_at', sa.DateTime(), nullable=True))
    op.add_column('roles', sa.Column('updated_by_id', sa.Integer(), nullable=True))
    op.create_foreign_key('created_by_id_fkey', 'roles', 'users', ['created_by_id'], ['id'])
    op.create_foreign_key('updated_by_id_fkey', 'roles', 'users', ['updated_by_id'], ['id'])
    op.add_column('stage', sa.Column('created_at', sa.DateTime(), nullable=True))
    op.add_column('stage', sa.Column('created_by_id', sa.Integer(), nullable=True))
    op.add_column('stage', sa.Column('updated_at', sa.DateTime(), nullable=True))
    op.add_column('stage', sa.Column('updated_by_id', sa.Integer(), nullable=True))
    op.create_foreign_key('created_by_id_fkey', 'stage', 'users', ['created_by_id'], ['id'])
    op.create_foreign_key('updated_by_id_fkey', 'stage', 'users', ['updated_by_id'], ['id'])
    op.add_column('stage_property', sa.Column('created_at', sa.DateTime(), nullable=True))
    op.add_column('stage_property', sa.Column('created_by_id', sa.Integer(), nullable=True))
    op.add_column('stage_property', sa.Column('updated_at', sa.DateTime(), nullable=True))
    op.add_column('stage_property', sa.Column('updated_by_id', sa.Integer(), nullable=True))
    op.create_foreign_key('created_by_id_fkey', 'stage_property', 'users', ['created_by_id'], ['id'])
    op.create_foreign_key('updated_by_id_fkey', 'stage_property', 'users', ['updated_by_id'], ['id'])
    # users references itself for its audit FKs; no created_at added here.
    op.add_column('users', sa.Column('created_by_id', sa.Integer(), nullable=True))
    op.add_column('users', sa.Column('updated_at', sa.DateTime(), nullable=True))
    op.add_column('users', sa.Column('updated_by_id', sa.Integer(), nullable=True))
    op.create_foreign_key('updated_by_id_fkey', 'users', 'users', ['updated_by_id'], ['id'])
    op.create_foreign_key('created_by_id_fkey', 'users', 'users', ['created_by_id'], ['id'])
    op.add_column('vendor', sa.Column('created_by_id', sa.Integer(), nullable=True))
    op.add_column('vendor', sa.Column('updated_at', sa.DateTime(), nullable=True))
    op.add_column('vendor', sa.Column('updated_by_id', sa.Integer(), nullable=True))
    op.create_foreign_key('created_by_id_fkey', 'vendor', 'users', ['created_by_id'], ['id'])
    op.create_foreign_key('updated_by_id_fkey', 'vendor', 'users', ['updated_by_id'], ['id'])
    ### end Alembic commands ###
def downgrade():
    """Remove the audit columns/FKs added by upgrade() and restore triggers.

    Constraints are dropped before their columns; tables are handled in the
    reverse of the order upgrade() touched them.
    """
    conn = op.get_bind()
    ### commands auto generated by Alembic - please adjust! ###
    # Recreate the two search-view refresh triggers for each watched column
    # after first removing any leftover copies.  `when` supplies the
    # per-trigger WHEN clause captured in TRIGGER_TUPLES.
    for table, column, when in TRIGGER_TUPLES:
        conn.execute(sa.sql.text('''
            DROP TRIGGER IF EXISTS tsv_{table}_{column}_trigger_insert_update ON {table}
        '''.format(table=table, column=column)))
        conn.execute(sa.sql.text('''
            DROP TRIGGER IF EXISTS tsv_{table}_{column}_trigger_delete ON {table}
        '''.format(table=table, column=column)))
        conn.execute(sa.sql.text('''
            CREATE TRIGGER tsv_{table}_{column}_trigger_insert_update AFTER INSERT OR UPDATE OF {column}
            ON {table}
            FOR EACH ROW
            {when}
            EXECUTE PROCEDURE trig_refresh_search_view()
        '''.format(table=table, column=column, when=when)))
        conn.execute(sa.sql.text('''
            CREATE TRIGGER tsv_{table}_{column}_trigger_delete AFTER DELETE
            ON {table}
            FOR EACH ROW
            EXECUTE PROCEDURE trig_refresh_search_view()
        '''.format(table=table, column=column)))
    # --- drop FKs then columns, table by table (auto-generated).
    op.drop_constraint('updated_by_id_fkey', 'vendor', type_='foreignkey')
    op.drop_constraint('created_by_id_fkey', 'vendor', type_='foreignkey')
    op.drop_column('vendor', 'updated_by_id')
    op.drop_column('vendor', 'updated_at')
    op.drop_column('vendor', 'created_by_id')
    op.drop_constraint('created_by_id_fkey', 'users', type_='foreignkey')
    op.drop_constraint('updated_by_id_fkey', 'users', type_='foreignkey')
    op.drop_column('users', 'updated_by_id')
    op.drop_column('users', 'updated_at')
    op.drop_column('users', 'created_by_id')
    op.drop_constraint('updated_by_id_fkey', 'stage_property', type_='foreignkey')
    op.drop_constraint('created_by_id_fkey', 'stage_property', type_='foreignkey')
    op.drop_column('stage_property', 'updated_by_id')
    op.drop_column('stage_property', 'updated_at')
    op.drop_column('stage_property', 'created_by_id')
    op.drop_column('stage_property', 'created_at')
    op.drop_constraint('updated_by_id_fkey', 'stage', type_='foreignkey')
    op.drop_constraint('created_by_id_fkey', 'stage', type_='foreignkey')
    op.drop_column('stage', 'updated_by_id')
    op.drop_column('stage', 'updated_at')
    op.drop_column('stage', 'created_by_id')
    op.drop_column('stage', 'created_at')
    op.drop_constraint('updated_by_id_fkey', 'roles', type_='foreignkey')
    op.drop_constraint('created_by_id_fkey', 'roles', type_='foreignkey')
    op.drop_column('roles', 'updated_by_id')
    op.drop_column('roles', 'updated_at')
    op.drop_column('roles', 'created_by_id')
    op.drop_column('roles', 'created_at')
    op.drop_constraint('created_by_id_fkey', 'opportunity_document', type_='foreignkey')
    op.drop_constraint('updated_by_id_fkey', 'opportunity_document', type_='foreignkey')
    op.drop_column('opportunity_document', 'updated_by_id')
    op.drop_column('opportunity_document', 'updated_at')
    op.drop_column('opportunity_document', 'created_by_id')
    op.drop_column('opportunity_document', 'created_at')
    op.drop_constraint('updated_by_id_fkey', 'opportunity', type_='foreignkey')
    op.drop_column('opportunity', 'updated_by_id')
    op.drop_column('opportunity', 'updated_at')
    op.drop_constraint('created_by_id_fkey', 'line_item', type_='foreignkey')
    op.drop_constraint('updated_by_id_fkey', 'line_item', type_='foreignkey')
    op.drop_column('line_item', 'updated_by_id')
    op.drop_column('line_item', 'updated_at')
    op.drop_column('line_item', 'created_by_id')
    op.drop_column('line_item', 'created_at')
    op.drop_constraint('updated_by_id_fkey', 'flow', type_='foreignkey')
    op.drop_constraint('created_by_id_fkey', 'flow', type_='foreignkey')
    op.drop_column('flow', 'updated_by_id')
    op.drop_column('flow', 'updated_at')
    op.drop_column('flow', 'created_by_id')
    op.drop_column('flow', 'created_at')
    op.drop_constraint('created_by_id_fkey', 'document', type_='foreignkey')
    op.drop_constraint('updated_by_id_fkey', 'document', type_='foreignkey')
    op.drop_column('document', 'updated_by_id')
    op.drop_column('document', 'updated_at')
    op.drop_column('document', 'created_by_id')
    op.drop_column('document', 'created_at')
    op.drop_constraint('updated_by_id_fkey', 'department', type_='foreignkey')
    op.drop_constraint('created_by_id_fkey', 'department', type_='foreignkey')
    op.drop_column('department', 'updated_by_id')
    op.drop_column('department', 'updated_at')
    op.drop_column('department', 'created_by_id')
    op.drop_column('department', 'created_at')
    op.drop_constraint('updated_by_id_fkey', 'contract_type', type_='foreignkey')
    op.drop_constraint('created_by_id_fkey', 'contract_type', type_='foreignkey')
    op.drop_column('contract_type', 'updated_by_id')
    op.drop_column('contract_type', 'updated_at')
    op.drop_column('contract_type', 'created_by_id')
    op.drop_column('contract_type', 'created_at')
    op.drop_constraint('updated_by_id_fkey', 'contract_stage_action_item', type_='foreignkey')
    op.drop_constraint('created_by_id_fkey', 'contract_stage_action_item', type_='foreignkey')
    op.drop_column('contract_stage_action_item', 'updated_by_id')
    op.drop_column('contract_stage_action_item', 'updated_at')
    op.drop_column('contract_stage_action_item', 'created_by_id')
    op.drop_column('contract_stage_action_item', 'created_at')
    op.drop_constraint('updated_by_id_fkey', 'contract_stage', type_='foreignkey')
    op.drop_constraint('created_by_id_fkey', 'contract_stage', type_='foreignkey')
    op.drop_column('contract_stage', 'updated_by_id')
    op.drop_column('contract_stage', 'created_by_id')
    op.drop_constraint('created_by_id_fkey', 'contract_property', type_='foreignkey')
    op.drop_constraint('updated_by_id_fkey', 'contract_property', type_='foreignkey')
    op.drop_column('contract_property', 'updated_by_id')
    op.drop_column('contract_property', 'updated_at')
    op.drop_column('contract_property', 'created_by_id')
    op.drop_column('contract_property', 'created_at')
    op.drop_constraint('updated_by_id_fkey', 'contract_note', type_='foreignkey')
    op.drop_constraint('created_by_id_fkey', 'contract_note', type_='foreignkey')
    op.drop_column('contract_note', 'updated_by_id')
    op.drop_column('contract_note', 'created_by_id')
    op.drop_constraint('updated_by_id_fkey', 'contract', type_='foreignkey')
    op.drop_constraint('created_by_id_fkey', 'contract', type_='foreignkey')
    op.drop_column('contract', 'updated_by_id')
    op.drop_column('contract', 'created_by_id')
    op.drop_constraint('created_by_id_fkey', 'company_contact', type_='foreignkey')
    op.drop_constraint('updated_by_id_fkey', 'company_contact', type_='foreignkey')
    op.drop_column('company_contact', 'updated_by_id')
    op.drop_column('company_contact', 'updated_at')
    op.drop_column('company_contact', 'created_by_id')
    op.drop_column('company_contact', 'created_at')
    op.drop_constraint('created_by_id_fkey', 'company', type_='foreignkey')
    op.drop_constraint('updated_by_id_fkey', 'company', type_='foreignkey')
    op.drop_column('company', 'updated_by_id')
    op.drop_column('company', 'updated_at')
    op.drop_column('company', 'created_by_id')
    op.drop_column('company', 'created_at')
    op.drop_constraint('updated_by_id_fkey', 'category', type_='foreignkey')
    op.drop_constraint('created_by_id_fkey', 'category', type_='foreignkey')
    op.drop_column('category', 'updated_by_id')
    op.drop_column('category', 'updated_at')
    op.drop_column('category', 'created_by_id')
    op.drop_column('category', 'created_at')
    op.drop_constraint('updated_by_id_fkey', 'app_status', type_='foreignkey')
    op.drop_constraint('created_by_id_fkey', 'app_status', type_='foreignkey')
    op.drop_column('app_status', 'updated_by_id')
    op.drop_column('app_status', 'updated_at')
    op.drop_column('app_status', 'created_by_id')
    op.drop_column('app_status', 'created_at')
    ### end Alembic commands ###
| {
"repo_name": "codeforamerica/pittsburgh-purchasing-suite",
"path": "migrations/versions/3f80f0c8adf1_add_class_mixins.py",
"copies": "3",
"size": "19718",
"license": "bsd-3-clause",
"hash": -6193870083165707000,
"line_mean": 66.0680272109,
"line_max": 113,
"alpha_frac": 0.6599553707,
"autogenerated": false,
"ratio": 3.153870761356366,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5313826132056365,
"avg_score": null,
"num_lines": null
} |
"""Add client and client parameter
Revision ID: 54e60d31349a
Revises: 4c23f9943036
Create Date: 2013-12-23 22:18:59.616842
"""
# revision identifiers, used by Alembic.
revision = '54e60d31349a'
down_revision = '4c23f9943036'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the ExperimentClientParameter table and Experiment.client column."""
    ### commands auto generated by Alembic - please adjust! ###
    table_args = [
        'ExperimentClientParameter',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('experiment_id', sa.Integer(), nullable=False),
        sa.Column('parameter_name', sa.String(length=255), nullable=False),
        sa.Column('parameter_type', sa.String(length=15), nullable=False),
        sa.Column('value', sa.String(length=600), nullable=False),
        sa.ForeignKeyConstraint(['experiment_id'], ['Experiment.id'], ),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table(*table_args, mysql_engine='InnoDB')
    # New nullable column recording which client an experiment uses.
    op.add_column(u'Experiment', sa.Column('client', sa.String(length=255), nullable=True))
    ### end Alembic commands ###
def downgrade():
    """Reverse upgrade(): drop Experiment.client, then the parameter table."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column(u'Experiment', 'client')
    op.drop_table('ExperimentClientParameter')
    ### end Alembic commands ###
| {
"repo_name": "zstars/weblabdeusto",
"path": "server/src/weblab/db/upgrade/regular/versions/54e60d31349a_add_client_and_clien.py",
"copies": "1",
"size": "1225",
"license": "bsd-2-clause",
"hash": -4790025516263877000,
"line_mean": 32.1081081081,
"line_max": 91,
"alpha_frac": 0.6759183673,
"autogenerated": false,
"ratio": 3.5714285714285716,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47473469387285716,
"avg_score": null,
"num_lines": null
} |
"""add cli_tv_corres and cli_wiki_corres table
Revision ID: 26c5e973581
Revises: 6ea60f0db6
Create Date: 2014-11-18 20:36:21.269893
"""
from alembic import op
import sqlalchemy as sa
revision = '26c5e973581'
down_revision = '6ea60f0db6'
def upgrade():
    """Create the cli_tv_corres and cli_wiki_corres correspondence tables."""
    # Maps cliche works to TV Tropes entities via a composite key.
    tv_columns = [
        sa.Column('cli_id', sa.Integer(), nullable=False),
        sa.Column('tv_namespace', sa.String(), nullable=False),
        sa.Column('tv_name', sa.String(), nullable=False),
        sa.Column('confidence', sa.Integer(), nullable=True),
    ]
    tv_constraints = [
        sa.ForeignKeyConstraint(['cli_id'], ['works.id']),
        sa.ForeignKeyConstraint(
            ['tv_namespace', 'tv_name'],
            ['tvtropes_entities.namespace', 'tvtropes_entities.name']
        ),
        sa.PrimaryKeyConstraint('cli_id', 'tv_namespace', 'tv_name'),
    ]
    op.create_table('cli_tv_corres', *(tv_columns + tv_constraints))
    # Maps cliche works to Wikipedia entities.
    wiki_columns = [
        sa.Column('cli_id', sa.Integer(), nullable=False),
        sa.Column('wiki_name', sa.String(), nullable=False),
        sa.Column('confidence', sa.Integer(), nullable=True),
    ]
    wiki_constraints = [
        sa.ForeignKeyConstraint(['cli_id'], ['works.id']),
        sa.ForeignKeyConstraint(['wiki_name'], ['wikipedia_entities.name']),
        sa.PrimaryKeyConstraint('cli_id', 'wiki_name'),
    ]
    op.create_table('cli_wiki_corres', *(wiki_columns + wiki_constraints))
def downgrade():
    """Drop both correspondence tables (reverse of creation order)."""
    for table in ('cli_wiki_corres', 'cli_tv_corres'):
        op.drop_table(table)
| {
"repo_name": "clicheio/cliche",
"path": "cliche/migrations/versions/26c5e973581_add_cli_tv_corres_and_cli_wiki_corres_.py",
"copies": "2",
"size": "1355",
"license": "mit",
"hash": 1534809570174017800,
"line_mean": 30.511627907,
"line_max": 76,
"alpha_frac": 0.6221402214,
"autogenerated": false,
"ratio": 3.321078431372549,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4943218652772549,
"avg_score": null,
"num_lines": null
} |
"""Add collection tables (dataset, reaction_dataset)
Revision ID: 129ff3ce9247
Revises: e32b61e2516f
Create Date: 2019-08-19 16:35:19.100113
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.orm.session import Session
from qcfractal.storage_sockets.models import CollectionORM, DatasetORM, ReactionDatasetORM
# revision identifiers, used by Alembic.
revision = "129ff3ce9247"
down_revision = "e32b61e2516f"
branch_labels = None
depends_on = None
def upgrade():
    """Create the dataset subclass/entry tables and extend collection."""
    print("Start schema migration...")
    ### commands auto generated by Alembic - please adjust! ###

    def _dataset_columns():
        # Column set shared by the two dataset subclasses; built fresh on
        # every call because a Column may only belong to one table.
        return [
            sa.Column("default_benchmark", sa.String(), nullable=True),
            sa.Column("default_keywords", sa.JSON(), nullable=True),
            sa.Column("default_driver", sa.String(), nullable=True),
            sa.Column("default_units", sa.String(), nullable=True),
            sa.Column("alias_keywords", sa.JSON(), nullable=True),
            sa.Column("default_program", sa.String(), nullable=True),
            sa.Column("contributed_values", sa.JSON(), nullable=True),
            sa.Column("history_keys", sa.JSON(), nullable=True),
            sa.Column("history", sa.JSON(), nullable=True),
            sa.Column("id", sa.Integer(), nullable=False),
        ]

    op.create_table(
        "dataset",
        *_dataset_columns(),
        sa.ForeignKeyConstraint(["id"], ["collection.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "reaction_dataset",
        *_dataset_columns(),
        sa.Column("ds_type", sa.String(), nullable=True),
        sa.ForeignKeyConstraint(["id"], ["collection.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "dataset_entry",
        sa.Column("dataset_id", sa.Integer(), nullable=False),
        sa.Column("molecule_id", sa.Integer(), nullable=False),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("comment", sa.String(), nullable=True),
        sa.Column("local_results", sa.JSON(), nullable=True),
        sa.ForeignKeyConstraint(["dataset_id"], ["dataset.id"], ondelete="cascade"),
        sa.ForeignKeyConstraint(["molecule_id"], ["molecule.id"], ondelete="cascade"),
        sa.PrimaryKeyConstraint("dataset_id", "molecule_id"),
    )
    op.create_table(
        "reaction_dataset_entry",
        sa.Column("reaction_dataset_id", sa.Integer(), nullable=False),
        sa.Column("attributes", sa.JSON(), nullable=True),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("reaction_results", sa.JSON(), nullable=True),
        sa.Column("stoichiometry", sa.JSON(), nullable=True),
        sa.Column("extras", sa.JSON(), nullable=True),
        sa.ForeignKeyConstraint(["reaction_dataset_id"], ["reaction_dataset.id"], ondelete="cascade"),
        sa.PrimaryKeyConstraint("reaction_dataset_id", "name"),
    )
    # Discriminator + provenance metadata on the base collection table.
    op.add_column("collection", sa.Column("collection_type", sa.String(), nullable=True))
    op.add_column("collection", sa.Column("provenance", sa.JSON(), nullable=True))
    op.create_index("ix_collection_type", "collection", ["collection_type"], unique=False)
    ### end Alembic commands ###

    # ------------ copy data
    print("Start Data migration...")
    migrate_collections()
def migrate_collections():
    """Move existing Collection rows into their type-specific subtables."""
    session = Session(bind=op.get_bind())

    orm_by_name = {"dataset": DatasetORM, "reactiondataset": ReactionDatasetORM}

    for collection in session.query(CollectionORM):
        print(f"collection: id:{collection.id}, lname: {collection.lname}")

        orm_cls = orm_by_name.get(collection.collection)
        if orm_cls is None:
            # Unknown collection type: leave the row untouched.
            continue

        fields = collection.to_dict(exclude=["id"])
        # Delete the base-table row first; the subclass insert below
        # re-creates it with the polymorphic identity.
        session.query(CollectionORM).filter_by(id=collection.id).delete(synchronize_session=False)
        session.commit()

        dataset = orm_cls(**fields)
        session.add(dataset)
        session.commit()

        dataset.update_relations(**fields)  # will use related attr
        session.commit()

        # print(f'dataset.id: {dataset.id},\n dataset.records: {len(dataset.records)}')
def downgrade():
    """Revert the collection/dataset split: drop the added columns, the
    collection-type index, and the four per-type tables (children first)."""
    op.drop_index("ix_collection_type", table_name="collection")
    for column in ("collection_type", "provenance"):
        op.drop_column("collection", column)
    # Entry tables reference their parent dataset tables, so they go first.
    for table in ("reaction_dataset_entry", "dataset_entry", "reaction_dataset", "dataset"):
        op.drop_table(table)
| {
"repo_name": "psi4/mongo_qcdb",
"path": "qcfractal/alembic/versions/129ff3ce9247_add_collection_tables_dataset_reaction_.py",
"copies": "2",
"size": "5191",
"license": "bsd-3-clause",
"hash": 3008443771780684000,
"line_mean": 40.1984126984,
"line_max": 102,
"alpha_frac": 0.6465035639,
"autogenerated": false,
"ratio": 3.8537490720118783,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0012492258821638885,
"num_lines": 126
} |
"""Add column archive flag
Revision ID: 6cb3c668d65
Revises: 36d972f11e3e
Create Date: 2014-03-03 16:07:47.321713
"""
# revision identifiers, used by Alembic.
revision = '6cb3c668d65'  # this migration's id
down_revision = '36d972f11e3e'  # parent revision in the migration graph
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add archiving flags and card-weighting columns to the column, board
    and card tables, each with a server-side default for existing rows."""
    # (table, column, type, server default) — applied in this order.
    additions = (
        ('column', 'archive', sa.Boolean, 'false'),
        ('board', 'archive', sa.Integer, '0'),
        ('board', 'archived', sa.Boolean, 'false'),
        ('board', 'weighting_cards', sa.Integer, '0'),
        ('board', 'weights', sa.Text, ''),
        ('card', 'weight', sa.Text, ''),
    )
    for table, column, type_, default in additions:
        op.add_column(table, sa.Column(column, type_, server_default=default))
def downgrade():
    """Drop every column added by the matching upgrade(), same order."""
    removals = (
        ('column', 'archive'),
        ('board', 'archive'),
        ('board', 'archived'),
        ('board', 'weighting_cards'),
        ('board', 'weights'),
        ('card', 'weight'),
    )
    for table, column in removals:
        op.drop_column(table, column)
| {
"repo_name": "droodle/kansha",
"path": "kansha/alembic/versions/6cb3c668d65_add_column_archive_flag.py",
"copies": "4",
"size": "1038",
"license": "bsd-3-clause",
"hash": 2316699984747822600,
"line_mean": 31.4375,
"line_max": 88,
"alpha_frac": 0.6820809249,
"autogenerated": false,
"ratio": 3.052941176470588,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5735022101370589,
"avg_score": null,
"num_lines": null
} |
"""add column created_at for farmer, group, agroup
Revision ID: c7b7fb8e6536
Revises: 91b7a3d00a9e
Create Date: 2018-04-12 01:18:01.226407
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'c7b7fb8e6536'  # this migration's id
down_revision = '91b7a3d00a9e'  # parent revision in the migration graph
def upgrade():
    """Add a nullable integer ``created_at`` column to the associate_group,
    farmer and group tables."""
    for table in ('associate_group', 'farmer', 'group'):
        op.add_column(table, sa.Column('created_at', sa.Integer(),
                                       nullable=True))
def downgrade():
    """Drop ``created_at`` from the three tables (reverse of upgrade)."""
    for table in ('group', 'farmer', 'associate_group'):
        op.drop_column(table, 'created_at')
| {
"repo_name": "HaManhDong/pgscm",
"path": "migrations/versions/c7b7fb8e6536_add_created_at_column.py",
"copies": "2",
"size": "1042",
"license": "apache-2.0",
"hash": -8329967926515488000,
"line_mean": 31.5625,
"line_max": 74,
"alpha_frac": 0.6007677543,
"autogenerated": false,
"ratio": 3.508417508417508,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 32
} |
"""Add column for profile picture type to User
Revision ID: f37d509e221c
Revises: c997dc927fbc
Create Date: 2020-09-04 15:43:18.413156
"""
from enum import Enum
import sqlalchemy as sa
from alembic import op
from werkzeug.http import http_date
from indico.core.db.sqlalchemy import PyIntEnum
from indico.util.date_time import now_utc
# revision identifiers, used by Alembic.
revision = 'f37d509e221c'  # this migration's id
down_revision = 'c997dc927fbc'  # parent revision in the migration graph
branch_labels = None
depends_on = None
class _ProfilePictureSource(int, Enum):
    """Local copy of the profile-picture source values, defined inside the
    migration so it stays self-contained if the application enum changes."""

    standard = 0
    identicon = 1
    gravatar = 2
    # upgrade() backfills value 3 for users that already have a stored picture.
    custom = 3
def upgrade():
    """Add users.picture_source, backfill it, and stamp custom pictures.

    The column is created NOT NULL with a temporary server default of 0
    (standard) so existing rows pass the constraint; the default is removed
    immediately afterwards.  Users that already have a picture stored are
    marked as source 3 (custom), and any such row missing a ``lastmod``
    entry in its picture metadata gets one stamped with the current time.
    """
    op.add_column('users',
                  sa.Column('picture_source', PyIntEnum(_ProfilePictureSource), nullable=False, server_default='0'),
                  schema='users')
    op.alter_column('users', 'picture_source', server_default=None, schema='users')
    op.execute('UPDATE users.users SET picture_source = 3 WHERE picture IS NOT NULL')
    lastmod = http_date(now_utc())
    op.execute('''
        UPDATE users.users
        SET picture_metadata = picture_metadata || '{"lastmod": "%s"}'::jsonb
        WHERE picture_source = 3 AND NOT (picture_metadata ? 'lastmod')
    ''' % lastmod)
def downgrade():
    """Drop the picture_source column; picture data and metadata are kept."""
    op.drop_column('users', 'picture_source', schema='users')
| {
"repo_name": "indico/indico",
"path": "indico/migrations/versions/20200904_1543_f37d509e221c_add_user_profile_picture_source_column.py",
"copies": "4",
"size": "1260",
"license": "mit",
"hash": -1705451510387511300,
"line_mean": 26.3913043478,
"line_max": 116,
"alpha_frac": 0.6841269841,
"autogenerated": false,
"ratio": 3.490304709141274,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6174431693241273,
"avg_score": null,
"num_lines": null
} |
"""add column report_consent_removal_date to genomic_set_member
Revision ID: 2e1d3f329efd
Revises: 1ea7864c251e
Create Date: 2020-09-25 15:51:21.977008
"""
from alembic import op
import sqlalchemy as sa
import rdr_service.model.utils
from rdr_service.participant_enums import PhysicalMeasurementsStatus, QuestionnaireStatus, OrderStatus
from rdr_service.participant_enums import WithdrawalStatus, WithdrawalReason, SuspensionStatus, QuestionnaireDefinitionStatus
from rdr_service.participant_enums import EnrollmentStatus, Race, SampleStatus, OrganizationType, BiobankOrderStatus
from rdr_service.participant_enums import OrderShipmentTrackingStatus, OrderShipmentStatus
from rdr_service.participant_enums import MetricSetType, MetricsKey, GenderIdentity
from rdr_service.model.base import add_table_history_table, drop_table_history_table
from rdr_service.model.code import CodeType
from rdr_service.model.site_enums import SiteStatus, EnrollingStatus, DigitalSchedulingStatus, ObsoleteStatus
# revision identifiers, used by Alembic.
revision = '2e1d3f329efd'  # this migration's id
down_revision = '1ea7864c251e'  # parent revision in the migration graph
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade (e.g. ``upgrade_rdr``)."""
    handler = globals()["upgrade_%s" % engine_name]
    handler()


def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade (e.g. ``downgrade_rdr``)."""
    handler = globals()["downgrade_%s" % engine_name]
    handler()
def upgrade_rdr():
    """Add report_consent_removal_date to genomic_set_member and its
    history shadow table."""
    for table in ('genomic_set_member', 'genomic_set_member_history'):
        op.add_column(table,
                      sa.Column('report_consent_removal_date', sa.DateTime(), nullable=True))
def downgrade_rdr():
    """Drop report_consent_removal_date from both tables."""
    for table in ('genomic_set_member', 'genomic_set_member_history'):
        op.drop_column(table, 'report_consent_removal_date')
def upgrade_metrics():
    """No schema changes for the metrics database in this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade_metrics():
    """No schema changes for the metrics database in this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/2e1d3f329efd_add_column_report_consent_removal_date_.py",
"copies": "1",
"size": "2206",
"license": "bsd-3-clause",
"hash": 7623654461418293000,
"line_mean": 34.0158730159,
"line_max": 125,
"alpha_frac": 0.7447869447,
"autogenerated": false,
"ratio": 3.523961661341853,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4768748606041853,
"avg_score": null,
"num_lines": null
} |
"""add_columns_for_feedback_workflows
Revision ID: cab7fdee2895
Revises: 1b3c958942cb
Create Date: 2020-12-02 10:41:37.735608
"""
from alembic import op
import sqlalchemy as sa
import rdr_service.model.utils
from rdr_service.genomic_enums import GenomicContaminationCategory
# revision identifiers, used by Alembic.
revision = 'cab7fdee2895'  # this migration's id
down_revision = '1b3c958942cb'  # parent revision in the migration graph
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade (e.g. ``upgrade_rdr``)."""
    handler = globals()["upgrade_%s" % engine_name]
    handler()


def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade (e.g. ``downgrade_rdr``)."""
    handler = globals()["downgrade_%s" % engine_name]
    handler()
def upgrade_rdr():
    """Schema changes for the AW2F feedback workflow:

    * a contamination_category enum column on the GC validation metrics;
    * ``ignore`` flags on manifest feedback/file, plus a (file_path, ignore)
      unique constraint on manifest files;
    * AW1/AW2/AW2F processed-file id columns on genomic_set_member (and its
      history table), with foreign keys into genomic_file_processed.
    """
    op.add_column('genomic_gc_validation_metrics',
                  sa.Column('contamination_category',
                            rdr_service.model.utils.Enum(GenomicContaminationCategory),
                            nullable=True))
    op.add_column('genomic_manifest_feedback', sa.Column('ignore', sa.SmallInteger(), nullable=False))
    op.add_column('genomic_manifest_file', sa.Column('ignore', sa.SmallInteger(), nullable=False))
    op.create_unique_constraint('_file_path_ignore_uc', 'genomic_manifest_file', ['file_path', 'ignore'])
    for column in ('aw1_file_processed_id', 'aw2_file_processed_id', 'aw2f_file_processed_id'):
        op.add_column('genomic_set_member', sa.Column(column, sa.Integer(), nullable=True))
        op.add_column('genomic_set_member_history', sa.Column(column, sa.Integer(), nullable=True))
    # FK creation order matches the original autogenerated script.
    for column in ('aw2_file_processed_id', 'aw2f_file_processed_id', 'aw1_file_processed_id'):
        op.create_foreign_key(None, 'genomic_set_member', 'genomic_file_processed', [column], ['id'])
def downgrade_rdr():
    """Revert the feedback-workflow schema changes from upgrade_rdr()."""
    # NOTE(review): drop_constraint(None, ...) is an autogenerate artifact —
    # Alembic requires an explicit constraint name to drop, so these three
    # calls look like they will fail at runtime. The auto-generated FK names
    # need to be filled in here; confirm against the live schema.
    op.drop_constraint(None, 'genomic_set_member', type_='foreignkey')
    op.drop_constraint(None, 'genomic_set_member', type_='foreignkey')
    op.drop_constraint(None, 'genomic_set_member', type_='foreignkey')
    op.drop_column('genomic_set_member', 'aw2f_file_processed_id')
    op.drop_column('genomic_set_member_history', 'aw2f_file_processed_id')
    op.drop_column('genomic_set_member', 'aw2_file_processed_id')
    op.drop_column('genomic_set_member_history', 'aw2_file_processed_id')
    op.drop_column('genomic_set_member', 'aw1_file_processed_id')
    op.drop_column('genomic_set_member_history', 'aw1_file_processed_id')
    op.drop_constraint('_file_path_ignore_uc', 'genomic_manifest_file', type_='unique')
    op.drop_column('genomic_manifest_file', 'ignore')
    op.drop_column('genomic_manifest_feedback', 'ignore')
    op.drop_column('genomic_gc_validation_metrics', 'contamination_category')
    # ### end Alembic commands ###
def upgrade_metrics():
    """No schema changes for the metrics database in this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade_metrics():
    """No schema changes for the metrics database in this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/cab7fdee2895_add_columns_for_feedback_workflows.py",
"copies": "1",
"size": "3595",
"license": "bsd-3-clause",
"hash": -8977980445236380000,
"line_mean": 43.3827160494,
"line_max": 120,
"alpha_frac": 0.6773296245,
"autogenerated": false,
"ratio": 3.201246660730187,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4378576285230187,
"avg_score": null,
"num_lines": null
} |
"""Add columns for program codes
Revision ID: 1b741c9123f6
Revises: eefba82b42c5
Create Date: 2019-08-08 11:13:05.922963
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '1b741c9123f6'  # this migration's id
down_revision = 'eefba82b42c5'  # parent revision in the migration graph
branch_labels = None
depends_on = None
def upgrade():
    """Add a NOT NULL ``code`` string column (program codes) to four
    event tables."""
    tables = ('contributions', 'subcontributions', 'session_blocks', 'session_types')
    # Create with a '' server default so existing rows satisfy NOT NULL...
    for table in tables:
        op.add_column(table, sa.Column('code', sa.String(), nullable=False, server_default=''),
                      schema='events')
    # ...then drop the default again so new rows must set it explicitly.
    for table in tables:
        op.alter_column(table, 'code', server_default=None, schema='events')
def downgrade():
    """Drop the ``code`` column from the four tables (reverse order)."""
    for table in ('session_types', 'session_blocks', 'subcontributions', 'contributions'):
        op.drop_column(table, 'code', schema='events')
| {
"repo_name": "pferreir/indico",
"path": "indico/migrations/versions/20190918_1722_1b741c9123f6_add_columns_for_program_codes.py",
"copies": "7",
"size": "1431",
"license": "mit",
"hash": -3970828503271026700,
"line_mean": 39.8857142857,
"line_max": 119,
"alpha_frac": 0.6960167715,
"autogenerated": false,
"ratio": 3.351288056206089,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.7547304827706088,
"avg_score": null,
"num_lines": null
} |
"""add columns to AwardProcurement
Revision ID: cd1025ac9399
Revises: a767facf8ea8
Create Date: 2017-09-18 15:05:52.772251
"""
# revision identifiers, used by Alembic.
revision = 'cd1025ac9399'  # this migration's id
down_revision = 'a767facf8ea8'  # parent revision in the migration graph
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade (e.g. ``upgrade_data_broker``)."""
    handler = globals()["upgrade_%s" % engine_name]
    handler()


def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade."""
    handler = globals()["downgrade_%s" % engine_name]
    handler()
def upgrade_data_broker():
    """Add the two options-value text columns to award_procurement."""
    for column in ('base_and_all_options_value', 'base_exercised_options_val'):
        op.add_column('award_procurement', sa.Column(column, sa.Text(), nullable=True))
def downgrade_data_broker():
    """Drop the two options-value columns again (reverse order)."""
    for column in ('base_exercised_options_val', 'base_and_all_options_value'):
        op.drop_column('award_procurement', column)
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/cd1025ac9399_add_columns_to_AwardProcurement.py",
"copies": "1",
"size": "1073",
"license": "cc0-1.0",
"hash": 4157892748399210000,
"line_mean": 24.5476190476,
"line_max": 105,
"alpha_frac": 0.6999068034,
"autogenerated": false,
"ratio": 3.291411042944785,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4491317846344785,
"avg_score": null,
"num_lines": null
} |
"""Add columns to detached_award_procurement and move data
Revision ID: 1fc4844837cf
Revises: 9960bbbe4d92
Create Date: 2017-09-12 11:19:41.697007
"""
# revision identifiers, used by Alembic.
revision = '1fc4844837cf'  # this migration's id
down_revision = '9960bbbe4d92'  # parent revision in the migration graph
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from datetime import datetime
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade (e.g. ``upgrade_data_broker``)."""
    handler = globals()["upgrade_%s" % engine_name]
    handler()


def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade."""
    handler = globals()["downgrade_%s" % engine_name]
    handler()
def upgrade_data_broker():
    """Add the renamed FPDS value columns and migrate existing data into them.

    The old potential/current value columns are copied into the new
    base_and_all_options / base_exercised_options columns and then cleared.
    """
    for column in ('base_and_all_options_value', 'base_exercised_options_val',
                   'total_obligated_amount'):
        op.add_column('detached_award_procurement', sa.Column(column, sa.Text(), nullable=True))
    op.execute("""
        UPDATE detached_award_procurement
        SET base_and_all_options_value = potential_total_value_awar,
            base_exercised_options_val = current_total_value_award,
            potential_total_value_awar = NULL,
            current_total_value_award = NULL
    """)
def downgrade_data_broker():
    """Copy the values back into the legacy columns, then drop the new ones."""
    op.execute("""
        UPDATE detached_award_procurement
        SET potential_total_value_awar = base_and_all_options_value,
            current_total_value_award = base_exercised_options_val
    """)
    for column in ('total_obligated_amount', 'base_exercised_options_val',
                   'base_and_all_options_value'):
        op.drop_column('detached_award_procurement', column)
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/1fc4844837cf_add_columns_to_detached_award_procurement.py",
"copies": "1",
"size": "1870",
"license": "cc0-1.0",
"hash": 7969601154145411000,
"line_mean": 31.8070175439,
"line_max": 114,
"alpha_frac": 0.6855614973,
"autogenerated": false,
"ratio": 3.4501845018450186,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9524011641963483,
"avg_score": 0.022346871436306977,
"num_lines": 57
} |
"""add columns to gc metrics for wgs
Revision ID: 9a0873b51fe0
Revises: 235693878327
Create Date: 2020-05-05 10:54:18.411657
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '9a0873b51fe0'  # this migration's id
down_revision = '235693878327'  # parent revision in the migration graph
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade (e.g. ``upgrade_rdr``)."""
    handler = globals()["upgrade_%s" % engine_name]
    handler()


def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade (e.g. ``downgrade_rdr``)."""
    handler = globals()["downgrade_%s" % engine_name]
    handler()
def upgrade_rdr():
    """Add WGS file-receipt flag columns (and sex_ploidy) to
    genomic_gc_validation_metrics."""
    received_flags = (
        'crai_md5_received', 'crai_received',
        'cram_md5_received', 'cram_received',
        'hf_vcf_md5_received', 'hf_vcf_received', 'hf_vcf_tbi_received',
        'raw_vcf_md5_received', 'raw_vcf_received', 'raw_vcf_tbi_received',
    )
    for flag in received_flags:
        op.add_column('genomic_gc_validation_metrics',
                      sa.Column(flag, sa.SmallInteger(), nullable=False))
    op.add_column('genomic_gc_validation_metrics',
                  sa.Column('sex_ploidy', sa.String(length=10), nullable=True))
def downgrade_rdr():
    """Drop sex_ploidy and all WGS receipt flags (reverse of upgrade_rdr)."""
    for column in ('sex_ploidy',
                   'raw_vcf_tbi_received', 'raw_vcf_received', 'raw_vcf_md5_received',
                   'hf_vcf_tbi_received', 'hf_vcf_received', 'hf_vcf_md5_received',
                   'cram_received', 'cram_md5_received',
                   'crai_received', 'crai_md5_received'):
        op.drop_column('genomic_gc_validation_metrics', column)
def upgrade_metrics():
    """No schema changes for the metrics database in this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade_metrics():
    """No schema changes for the metrics database in this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/9a0873b51fe0_add_columns_to_gc_metrics_for_wgs.py",
"copies": "1",
"size": "3070",
"license": "bsd-3-clause",
"hash": 8805206493456047000,
"line_mean": 44.1470588235,
"line_max": 120,
"alpha_frac": 0.7071661238,
"autogenerated": false,
"ratio": 3.1487179487179486,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4355884072517948,
"avg_score": null,
"num_lines": null
} |
"""add columns to genomic_set_member_history
Revision ID: 4a4457c6b497
Revises: 5a1b1f7b4761
Create Date: 2019-09-19 15:05:24.773303
"""
from alembic import op
import sqlalchemy as sa
import rdr_service.model.utils
from sqlalchemy.dialects import mysql
from rdr_service.participant_enums import PhysicalMeasurementsStatus, QuestionnaireStatus, OrderStatus
from rdr_service.participant_enums import WithdrawalStatus, WithdrawalReason, SuspensionStatus, QuestionnaireDefinitionStatus
from rdr_service.participant_enums import EnrollmentStatus, Race, SampleStatus, OrganizationType, BiobankOrderStatus
from rdr_service.participant_enums import OrderShipmentTrackingStatus, OrderShipmentStatus
from rdr_service.participant_enums import MetricSetType, MetricsKey, GenderIdentity
from rdr_service.model.base import add_table_history_table, drop_table_history_table
from rdr_service.model.code import CodeType
from rdr_service.model.site_enums import SiteStatus, EnrollingStatus, DigitalSchedulingStatus, ObsoleteStatus
# revision identifiers, used by Alembic.
revision = '4a4457c6b497'  # this migration's id
down_revision = '5a1b1f7b4761'  # parent revision in the migration graph
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade (e.g. ``upgrade_rdr``)."""
    handler = globals()["upgrade_%s" % engine_name]
    handler()


def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade (e.g. ``downgrade_rdr``)."""
    handler = globals()["downgrade_%s" % engine_name]
    handler()
def upgrade_rdr():
    """Add sample_id / sample_type string columns to genomic_set_member_history."""
    for name, length in (('sample_id', 80), ('sample_type', 50)):
        op.add_column('genomic_set_member_history',
                      sa.Column(name, sa.String(length=length), nullable=True))
def downgrade_rdr():
    """Drop the sample columns again (reverse order)."""
    for column in ('sample_type', 'sample_id'):
        op.drop_column('genomic_set_member_history', column)
def upgrade_metrics():
    """No schema changes for the metrics database in this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade_metrics():
    """No schema changes for the metrics database in this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/4a4457c6b497_add_columns_to_genomic_set_member_.py",
"copies": "1",
"size": "2168",
"license": "bsd-3-clause",
"hash": -6552462095227639000,
"line_mean": 33.9677419355,
"line_max": 125,
"alpha_frac": 0.75,
"autogenerated": false,
"ratio": 3.4743589743589745,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4724358974358974,
"avg_score": null,
"num_lines": null
} |
"""Add columns to review questions and review ratings tables.
Revision ID: 2af245be72a6
Revises: 566d5de4e0e5
Create Date: 2017-11-24 11:38:33.292283
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '2af245be72a6'  # this migration's id
down_revision = '566d5de4e0e5'  # parent revision in the migration graph
branch_labels = None
depends_on = None
# (schema, ratings table, questions table) triples this migration touches:
# abstract reviewing and paper reviewing share the same question/rating shape.
tables = (('event_abstracts', 'abstract_review_ratings', 'abstract_review_questions'),
          ('event_paper_reviewing', 'review_ratings', 'review_questions'))
def upgrade():
    """Turn rating-only review questions into generic form fields.

    For both reviewing schemas: rename ``text`` to ``title``, add the
    field_type / is_required / field_data / description columns (each created
    with a temporary server default so existing rows satisfy NOT NULL, then
    the default is dropped), and widen the rating ``value`` column to JSON.
    """
    new_columns = (
        ('field_type', sa.String(), 'rating'),
        ('is_required', sa.Boolean(), 'true'),
        ('field_data', sa.JSON(), '{}'),
        ('description', sa.Text(), ''),
    )
    for schema, ratings_table, questions_table in tables:
        op.alter_column(questions_table, 'text', new_column_name='title', schema=schema)
        for name, column_type, default in new_columns:
            op.add_column(questions_table,
                          sa.Column(name, column_type, nullable=False, server_default=default),
                          schema=schema)
            op.alter_column(questions_table, name, server_default=None, schema=schema)
        op.execute('ALTER TABLE {}.{} ALTER COLUMN "value" TYPE JSON USING to_json(value)'.format(schema,
                                                                                                  ratings_table))
def downgrade():
    """Collapse generic form fields back into rating-only questions.

    Ratings/questions that the narrower model cannot represent (non-rating
    field types, optional questions) are deleted before the ``value`` column
    is converted back to INT and the extra columns are dropped.
    """
    for schema, ratings_table, questions_table in tables:
        op.alter_column(questions_table, 'title', new_column_name='text', schema=schema)
        op.execute("DELETE FROM {0}.{1} WHERE question_id IN(SELECT id FROM {0}.{2} "
                   "WHERE field_type != 'rating' OR NOT is_required)".format(schema, ratings_table, questions_table))
        op.execute(f"DELETE FROM {schema}.{questions_table} WHERE field_type != 'rating'")
        op.execute('ALTER TABLE {}.{} ALTER COLUMN "value" TYPE INT USING value::TEXT::INT'.format(schema,
                                                                                                   ratings_table))
        for column in ('field_type', 'is_required', 'field_data', 'description'):
            op.drop_column(questions_table, column, schema=schema)
| {
"repo_name": "pferreir/indico",
"path": "indico/migrations/versions/20171124_1138_2af245be72a6_review_questions_models.py",
"copies": "3",
"size": "2824",
"license": "mit",
"hash": -4371829818530321400,
"line_mean": 52.2830188679,
"line_max": 117,
"alpha_frac": 0.6274787535,
"autogenerated": false,
"ratio": 3.8162162162162163,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.003394507193788133,
"num_lines": 53
} |
"""add column title in PageChunk
Revision ID: bb087e4e53
Revises: 38331aa4875
Create Date: 2015-08-10 13:29:19.578492
"""
# revision identifiers, used by Alembic.
# NOTE(review): the module docstring says "Revises: 38331aa4875" but the code
# points at '12fe8d110f7'; Alembic uses the code value — confirm which is intended.
revision = 'bb087e4e53'
down_revision = '12fe8d110f7'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add a mandatory, unique ``title`` to pagechunks, seeded from ``name``.

    The column is created nullable first, backfilled from ``name``, and only
    then made NOT NULL so existing rows never violate the constraint.
    """
    op.add_column('pagechunks', sa.Column('title', sa.Text(), nullable=True))
    op.alter_column('pagechunks', 'name',
                    existing_type=sa.TEXT(), nullable=False)
    op.create_unique_constraint('pagechunks_title_key', 'pagechunks', ['title'])
    # Every existing chunk takes its name as the initial title.
    op.execute(
        'update pagechunks set title = name'
    )
    op.alter_column('pagechunks', 'title', nullable=False)
def downgrade():
    """Drop the title column and its unique constraint; relax name to nullable."""
    op.drop_constraint('pagechunks_title_key', 'pagechunks', type_='unique')
    op.alter_column('pagechunks', 'name',
                    existing_type=sa.TEXT(), nullable=True)
    op.drop_column('pagechunks', 'title')
| {
"repo_name": "uaprom-summer-2015/Meowth",
"path": "migrations/versions/2015_08_10_bb08_add_column_title_in_pagechunk.py",
"copies": "2",
"size": "1127",
"license": "bsd-3-clause",
"hash": 67512947643661750,
"line_mean": 28.6578947368,
"line_max": 80,
"alpha_frac": 0.6477373558,
"autogenerated": false,
"ratio": 3.5440251572327046,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5191762513032705,
"avg_score": null,
"num_lines": null
} |
"""add column to dv order history
Revision ID: e66a20e7af08
Revises: d9742926014b
Create Date: 2020-04-02 10:45:14.534093
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e66a20e7af08'  # this migration's id
down_revision = 'd9742926014b'  # parent revision in the migration graph
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade (e.g. ``upgrade_rdr``)."""
    handler = globals()["upgrade_%s" % engine_name]
    handler()


def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade (e.g. ``downgrade_rdr``)."""
    handler = globals()["downgrade_%s" % engine_name]
    handler()
def upgrade_rdr():
    """Add the is_test_sample flag to the DV order history table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('biobank_dv_order_history', sa.Column('is_test_sample', sa.Boolean(), nullable=True))
    # ### end Alembic commands ###


def downgrade_rdr():
    """Drop the is_test_sample flag again."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('biobank_dv_order_history', 'is_test_sample')
    # ### end Alembic commands ###
def upgrade_metrics():
    """No schema changes for the metrics database in this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade_metrics():
    """No schema changes for the metrics database in this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/e66a20e7af08_add_column_to_dv_order_history.py",
"copies": "1",
"size": "1149",
"license": "bsd-3-clause",
"hash": 4740731373696554000,
"line_mean": 22.9375,
"line_max": 103,
"alpha_frac": 0.6649260226,
"autogenerated": false,
"ratio": 3.440119760479042,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46050457830790426,
"avg_score": null,
"num_lines": null
} |
"""Add comment data to post
Revision ID: 2e09867c0c38
Revises: edffeaf7cd85
Create Date: 2016-07-01 22:27:29.462016
"""
# revision identifiers, used by Alembic.
revision = '2e09867c0c38'  # this migration's id
down_revision = 'edffeaf7cd85'  # parent revision in the migration graph
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade(engine_name):
    """Dispatch to the environment-specific upgrade (e.g. ``upgrade_test``)."""
    handler = globals()["upgrade_%s" % engine_name]
    handler()


def downgrade(engine_name):
    """Dispatch to the environment-specific downgrade."""
    handler = globals()["downgrade_%s" % engine_name]
    handler()
def _add_post_comment_columns():
    """Add the comment-cache columns shared by every environment: raw comment
    data, the time comments were last queried, and the post creation time."""
    op.add_column('posts', sa.Column('comment_data', mysql.LONGTEXT(), nullable=True))
    op.add_column('posts', sa.Column('comments_queried_at', sa.DateTime(), nullable=True))
    op.add_column('posts', sa.Column('created_at', sa.DateTime(), nullable=True))


def _drop_post_comment_columns():
    """Drop the comment-cache columns in the reverse of the order added."""
    op.drop_column('posts', 'created_at')
    op.drop_column('posts', 'comments_queried_at')
    op.drop_column('posts', 'comment_data')


# The development/test/production variants all apply the identical schema
# change; they delegate to the shared helpers above instead of triplicating
# the column lists (the original autogenerated copies had drifted risk).

def upgrade_development():
    _add_post_comment_columns()


def downgrade_development():
    _drop_post_comment_columns()


def upgrade_test():
    _add_post_comment_columns()


def downgrade_test():
    _drop_post_comment_columns()


def upgrade_production():
    _add_post_comment_columns()


def downgrade_production():
    _drop_post_comment_columns()
| {
"repo_name": "c4fcm/CivilServant",
"path": "alembic/versions/2e09867c0c38_add_comment_data_to_post.py",
"copies": "1",
"size": "2434",
"license": "mit",
"hash": -7308444354977739000,
"line_mean": 31.0263157895,
"line_max": 90,
"alpha_frac": 0.6721446179,
"autogenerated": false,
"ratio": 3.4921090387374463,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9561190325600075,
"avg_score": 0.0206126662074744,
"num_lines": 76
} |
"""add comment model
Revision ID: 3544b5b6c685
Revises: 2bc76810d9be
Create Date: 2017-05-27 23:36:29.370417
"""
# revision identifiers, used by Alembic.
revision = '3544b5b6c685'
down_revision = '2bc76810d9be'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the ``comments`` table and index its ``timestamp`` column."""
    table_parts = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('body', sa.Text(), nullable=True),
        sa.Column('timestamp', sa.DateTime(), nullable=True),
        sa.Column('author_id', sa.Integer(), nullable=True),
        sa.Column('body_html', sa.Text(), nullable=True),
        sa.Column('disabled', sa.Boolean(), nullable=True),
        sa.Column('post_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['author_id'], ['users.id'], ),
        sa.ForeignKeyConstraint(['post_id'], ['posts.id'], ),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('comments', *table_parts)
    op.create_index(u'ix_comments_timestamp', 'comments', ['timestamp'], unique=False)
def downgrade():
    """Drop the ``comments`` table — index first, then the table itself."""
    comments = 'comments'
    op.drop_index(u'ix_comments_timestamp', table_name=comments)
    op.drop_table(comments)
| {
"repo_name": "delitamakanda/socialite",
"path": "migrations/versions/3544b5b6c685_add_comment_model.py",
"copies": "1",
"size": "1254",
"license": "mit",
"hash": 5437608424366214000,
"line_mean": 31.1538461538,
"line_max": 86,
"alpha_frac": 0.663476874,
"autogenerated": false,
"ratio": 3.370967741935484,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4534444615935484,
"avg_score": null,
"num_lines": null
} |
"""add Comment Model
Revision ID: b5764e2504
Revises: 2a466b0d1ce
Create Date: 2015-08-13 00:56:50.021402
"""
# revision identifiers, used by Alembic.
revision = 'b5764e2504'
down_revision = '2a466b0d1ce'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the ``comments`` table with a non-unique index on ``timestamp``."""
    parts = (
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('body', sa.Text(), nullable=True),
        sa.Column('body_html', sa.Text(), nullable=True),
        sa.Column('timestamp', sa.DateTime(), nullable=True),
        sa.Column('disabled', sa.Boolean(), nullable=True),
        sa.Column('author_id', sa.Integer(), nullable=True),
        sa.Column('post_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['author_id'], ['users.id'], ),
        sa.ForeignKeyConstraint(['post_id'], ['posts.id'], ),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table('comments', *parts)
    op.create_index(op.f('ix_comments_timestamp'), 'comments', ['timestamp'], unique=False)
def downgrade():
    """Remove the ``comments`` table and its timestamp index."""
    target = 'comments'
    op.drop_index(op.f('ix_comments_timestamp'), table_name=target)
    op.drop_table(target)
| {
"repo_name": "wangjun/Flask-blog",
"path": "migrations/versions/b5764e2504_add_comment_model.py",
"copies": "2",
"size": "1250",
"license": "apache-2.0",
"hash": -613709788796704300,
"line_mean": 31.0512820513,
"line_max": 91,
"alpha_frac": 0.664,
"autogenerated": false,
"ratio": 3.3875338753387534,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5051533875338754,
"avg_score": null,
"num_lines": null
} |
"""add comment
Revision ID: 024328124c71
Revises: f045592adab0
Create Date: 2017-10-08 22:59:10.027938
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '024328124c71'
down_revision = 'f045592adab0'
branch_labels = None
depends_on = None
def upgrade():
    """Create the ``comments`` table and index its ``timestamp`` column."""
    pieces = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('body', sa.Text(), nullable=True),
        sa.Column('timestamp', sa.DateTime(), nullable=True),
        sa.Column('author_id', sa.Integer(), nullable=True),
        sa.Column('post_id', sa.Integer(), nullable=True),
        sa.Column('disabled', sa.Boolean(), nullable=True),
        sa.ForeignKeyConstraint(['author_id'], ['users.id'], ),
        sa.ForeignKeyConstraint(['post_id'], ['posts.id'], ),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('comments', *pieces)
    op.create_index(op.f('ix_comments_timestamp'), 'comments', ['timestamp'], unique=False)
def downgrade():
    """Undo the comment migration: drop the index, then the table."""
    table = 'comments'
    op.drop_index(op.f('ix_comments_timestamp'), table_name=table)
    op.drop_table(table)
| {
"repo_name": "mikkylok/mikky.lu",
"path": "migrations/versions/024328124c71_add_comment.py",
"copies": "1",
"size": "1243",
"license": "mit",
"hash": 2137930792398449400,
"line_mean": 30.075,
"line_max": 91,
"alpha_frac": 0.6645213194,
"autogenerated": false,
"ratio": 3.4148351648351647,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4579356484235165,
"avg_score": null,
"num_lines": null
} |
"""add comment
Revision ID: 29cc8fa45322
Revises: cd3b2c4fb282
Create Date: 2017-05-09 10:47:02.844188
"""
# revision identifiers, used by Alembic.
revision = '29cc8fa45322'
down_revision = 'cd3b2c4fb282'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the ``comments`` table (comments belong to articles here)."""
    definition = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('body', sa.Text(), nullable=True),
        sa.Column('body_html', sa.Text(), nullable=True),
        sa.Column('timestamp', sa.DateTime(), nullable=True),
        sa.Column('disabled', sa.Boolean(), nullable=True),
        sa.Column('author_id', sa.Integer(), nullable=True),
        sa.Column('article_id', sa.Integer(), nullable=True),
        # article FK first, matching the auto-generated ordering
        sa.ForeignKeyConstraint(['article_id'], ['articles.id'], ),
        sa.ForeignKeyConstraint(['author_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('comments', *definition)
    op.create_index(op.f('ix_comments_timestamp'), 'comments', ['timestamp'], unique=False)
def downgrade():
    """Drop ``comments`` and its timestamp index."""
    name = 'comments'
    op.drop_index(op.f('ix_comments_timestamp'), table_name=name)
    op.drop_table(name)
| {
"repo_name": "mapan1984/Hidden-Island",
"path": "migrations/versions/29cc8fa45322_add_comment.py",
"copies": "1",
"size": "1267",
"license": "mit",
"hash": 547538123379412350,
"line_mean": 31.4871794872,
"line_max": 91,
"alpha_frac": 0.6629834254,
"autogenerated": false,
"ratio": 3.4059139784946235,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45688974038946234,
"avg_score": null,
"num_lines": null
} |
"""add comment
Revision ID: 8780bcb67912
Revises: 75f16d1e1054
Create Date: 2017-08-22 07:48:28.337730
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '8780bcb67912'
down_revision = '75f16d1e1054'
branch_labels = None
depends_on = None
def upgrade():
    """Create the ``comments`` table (with a ``title``) and its timestamp index."""
    schema = (
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(length=64), nullable=True),
        sa.Column('body', sa.Text(), nullable=True),
        sa.Column('timestamp', sa.DateTime(), nullable=True),
        sa.Column('author_id', sa.Integer(), nullable=True),
        sa.Column('body_html', sa.Text(), nullable=True),
        sa.Column('disabled', sa.Boolean(), nullable=True),
        sa.Column('post_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['author_id'], ['users.id'], ),
        sa.ForeignKeyConstraint(['post_id'], ['posts.id'], ),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table('comments', *schema)
    op.create_index(op.f('ix_comments_timestamp'), 'comments', ['timestamp'], unique=False)
def downgrade():
    """Remove ``comments``: its index first, then the table."""
    comments_table = 'comments'
    op.drop_index(op.f('ix_comments_timestamp'), table_name=comments_table)
    op.drop_table(comments_table)
| {
"repo_name": "weqopy/blog_instance",
"path": "migrations/versions/8780bcb67912_add_comment.py",
"copies": "1",
"size": "1358",
"license": "mit",
"hash": -8163630073170212000,
"line_mean": 31.3333333333,
"line_max": 91,
"alpha_frac": 0.6634756996,
"autogenerated": false,
"ratio": 3.369727047146402,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4533202746746402,
"avg_score": null,
"num_lines": null
} |
"""Add comments
Revision ID: 656d01d1f646
Revises: 6abd67fa89f4
Create Date: 2017-02-09 11:05:12.752000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '656d01d1f646'
down_revision = '6abd67fa89f4'
branch_labels = None
depends_on = None
def upgrade():
    """Create ``posts`` and ``comments`` and retire the old ``post`` table."""
    posts_parts = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('body', sa.Text(), nullable=True),
        sa.Column('body_html', sa.Text(), nullable=True),
        sa.Column('timestamp', sa.DateTime(), nullable=True),
        sa.Column('author_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['author_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('posts', *posts_parts)
    op.create_index(op.f('ix_posts_timestamp'), 'posts', ['timestamp'], unique=False)
    comments_parts = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('body', sa.Text(), nullable=True),
        sa.Column('body_html', sa.Text(), nullable=True),
        sa.Column('timestamp', sa.DateTime(), nullable=True),
        sa.Column('disabled', sa.Boolean(), nullable=True),
        sa.Column('author_id', sa.Integer(), nullable=True),
        sa.Column('post_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['author_id'], ['users.id'], ),
        sa.ForeignKeyConstraint(['post_id'], ['posts.id'], ),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('comments', *comments_parts)
    op.create_index(op.f('ix_comments_timestamp'), 'comments', ['timestamp'], unique=False)
    # the singular 'post' table is superseded by 'posts'
    op.drop_table('post')
def downgrade():
    """Restore the legacy ``post`` table and drop ``posts``/``comments``."""
    op.create_table('post',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('body', sa.TEXT(), nullable=True),
    sa.Column('timestamp', sa.DATETIME(), nullable=True),
    sa.Column('author_id', sa.INTEGER(), nullable=True),
    sa.Column('body_html', sa.TEXT(), nullable=True),
    sa.ForeignKeyConstraint(['author_id'], [u'users.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # drop index then table, comments before posts (FK dependency order)
    for table in ('comments', 'posts'):
        op.drop_index(op.f('ix_%s_timestamp' % table), table_name=table)
        op.drop_table(table)
| {
"repo_name": "hedm0423/flaskdemo",
"path": "migrations/versions/656d01d1f646_add_comments.py",
"copies": "1",
"size": "2299",
"license": "mit",
"hash": -606836389356693900,
"line_mean": 35.4920634921,
"line_max": 91,
"alpha_frac": 0.6502827316,
"autogenerated": false,
"ratio": 3.370967741935484,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9482067999387943,
"avg_score": 0.007836494829508161,
"num_lines": 63
} |
"""add comments table
Revision ID: 147d42584675
Revises: e061a6b8cd6
Create Date: 2016-01-27 14:35:23.637144
"""
# revision identifiers, used by Alembic.
revision = '147d42584675'
down_revision = 'e061a6b8cd6'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the ``comments`` table; every column is required (NOT NULL)."""
    column_spec = [
        ('id', sa.String()),
        ('created_utc', sa.Float()),
        ('parent_id', sa.String()),
        ('name', sa.String()),
        ('submission_id', sa.Integer()),
        ('author', sa.String()),
        ('body', sa.UnicodeText()),
        ('ups', sa.Integer()),
        ('downs', sa.Integer()),
        ('score', sa.Integer()),
    ]
    args = [sa.Column(col_name, col_type, nullable=False)
            for col_name, col_type in column_spec]
    args.append(sa.ForeignKeyConstraint(['submission_id'], ['submissions.id'], ))
    args.append(sa.PrimaryKeyConstraint('id'))
    op.create_table('comments', *args)
def downgrade():
    """Revert the migration by dropping the ``comments`` table."""
    op.drop_table('comments')
| {
"repo_name": "PsyBorgs/redditanalyser",
"path": "migrate/versions/147d42584675_add_comments_table.py",
"copies": "1",
"size": "1252",
"license": "mit",
"hash": 3740480169448699400,
"line_mean": 29.5365853659,
"line_max": 69,
"alpha_frac": 0.6701277955,
"autogenerated": false,
"ratio": 3.46814404432133,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9589104316404544,
"avg_score": 0.009833504683357027,
"num_lines": 41
} |
"""Add comments to site updates
Revision ID: 9270baf773a5
Revises: 7f0e262d6370
Create Date: 2019-01-21 16:41:17.360120
"""
# revision identifiers, used by Alembic.
revision = '9270baf773a5'
down_revision = '7f0e262d6370'
from alembic import op # lgtm[py/unused-import]
import sqlalchemy as sa # lgtm[py/unused-import]
from sqlalchemy.dialects import postgresql
def upgrade():
    """Create the ``siteupdatecomment`` table for comments on site updates."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('siteupdatecomment',
    sa.Column('commentid', sa.Integer(), nullable=False),
    sa.Column('userid', sa.Integer(), nullable=False),
    sa.Column('targetid', sa.Integer(), nullable=False),
    # parentid is nullable: top-level comments have no parent
    sa.Column('parentid', sa.Integer(), nullable=True),
    sa.Column('content', sa.String(length=10000), nullable=False),
    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text(u'now()'), nullable=False),
    sa.Column('hidden_at', postgresql.TIMESTAMP(timezone=True), nullable=True),
    sa.Column('hidden_by', sa.Integer(), nullable=True),
    # hidden_by may only be set when hidden_at is also set
    sa.CheckConstraint(u'hidden_by IS NULL OR hidden_at IS NOT NULL', name='siteupdatecomment_hidden_check'),
    sa.ForeignKeyConstraint(['hidden_by'], ['login.userid'], name='siteupdatecomment_hidden_by_fkey', ondelete='SET NULL'),
    # self-referencing composite FK: a parent comment must belong to the same target update
    sa.ForeignKeyConstraint(['targetid', 'parentid'], ['siteupdatecomment.targetid', 'siteupdatecomment.commentid'], name='siteupdatecomment_parentid_fkey'),
    sa.ForeignKeyConstraint(['targetid'], ['siteupdate.updateid'], name='siteupdatecomment_targetid_fkey'),
    sa.ForeignKeyConstraint(['userid'], ['login.userid'], name='siteupdatecomment_userid_fkey'),
    sa.PrimaryKeyConstraint('commentid'),
    # (targetid, commentid) must be unique so the composite parent FK has a target
    sa.UniqueConstraint('targetid', 'commentid')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the site-update comments table."""
    op.drop_table('siteupdatecomment')
| {
"repo_name": "Weasyl/weasyl",
"path": "libweasyl/libweasyl/alembic/versions/9270baf773a5_add_comments_to_site_updates.py",
"copies": "1",
"size": "1926",
"license": "apache-2.0",
"hash": 3403893047684357600,
"line_mean": 44.8571428571,
"line_max": 157,
"alpha_frac": 0.712357217,
"autogenerated": false,
"ratio": 3.5210237659963437,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47333809829963436,
"avg_score": null,
"num_lines": null
} |
"""add_committee_meeting_attendance_table
Revision ID: 3daa1030c816
Revises: 5647a4255cdc
Create Date: 2015-07-20 13:41:37.293770
"""
# revision identifiers, used by Alembic.
revision = '3daa1030c816'
down_revision = '5647a4255cdc'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create committee meeting attendance records and extend ``event``."""
    op.create_table('committee_meeting_attendance',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('alternate_member', sa.Boolean(), nullable=True),
    sa.Column('chairperson', sa.Boolean(), nullable=False),
    sa.Column('meeting_id', sa.Integer(), nullable=False),
    sa.Column('member_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['meeting_id'], ['event.id'], ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['member_id'], ['member.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    # new bookkeeping columns on event, in the auto-generated order
    event_columns = (
        ('actual_end_time', sa.Time(timezone=True)),
        ('actual_start_time', sa.Time(timezone=True)),
        ('pmg_monitor', sa.String(length=255)),
    )
    for column_name, column_type in event_columns:
        op.add_column('event', sa.Column(column_name, column_type, nullable=True))
def downgrade():
    """Revert: drop the new ``event`` columns, then the attendance table."""
    for column_name in ('pmg_monitor', 'actual_start_time', 'actual_end_time'):
        op.drop_column('event', column_name)
    op.drop_table('committee_meeting_attendance')
| {
"repo_name": "Code4SA/pmg-cms-2",
"path": "migrations/versions/3daa1030c816_add_committee_meeting_attendance_table.py",
"copies": "1",
"size": "1569",
"license": "apache-2.0",
"hash": -6941836718736297000,
"line_mean": 34.6590909091,
"line_max": 97,
"alpha_frac": 0.6876991714,
"autogenerated": false,
"ratio": 3.359743040685225,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4547442212085225,
"avg_score": null,
"num_lines": null
} |
"""add communication methods
Revision ID: 445e50628f6b
Revises: 22b97712d5db
Create Date: 2018-04-09 18:14:44.367390
"""
# revision identifiers, used by Alembic.
revision = "445e50628f6b"
down_revision = "22b97712d5db"
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the ``communication_methods`` association table.

    Both foreign keys point at ``responses.id``: a row links a response to
    another response that records how it was communicated.
    """
    method_type = sa.Enum("letters", "emails", name="communication_method_type")
    op.create_table(
        "communication_methods",
        sa.Column("response_id", sa.Integer(), nullable=False),
        sa.Column("method_id", sa.Integer(), nullable=False),
        sa.Column("method_type", method_type, nullable=True),
        sa.ForeignKeyConstraint(["method_id"], ["responses.id"]),
        sa.ForeignKeyConstraint(["response_id"], ["responses.id"]),
        sa.PrimaryKeyConstraint("response_id", "method_id"),
    )
def downgrade():
    """Drop the ``communication_methods`` association table."""
    op.drop_table("communication_methods")
| {
"repo_name": "CityOfNewYork/NYCOpenRecords",
"path": "migrations/versions/445e50628f6b_add_communication_methods.py",
"copies": "1",
"size": "1084",
"license": "apache-2.0",
"hash": 7857095906998746000,
"line_mean": 27.5263157895,
"line_max": 75,
"alpha_frac": 0.639298893,
"autogenerated": false,
"ratio": 3.712328767123288,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48516276601232877,
"avg_score": null,
"num_lines": null
} |
"""Add complectation models
Revision ID: a24972a141ba
Revises: 4b22022a86f5
Create Date: 2016-11-10 14:25:08.823283
"""
# revision identifiers, used by Alembic.
revision = 'a24972a141ba'
down_revision = '4b22022a86f5'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
    """Create the complectation detail tables (body, dimensions, engine,
    steering, transmission, ...) together with their scraped-``origin``
    shadow tables, and drop the timestamp columns from the complectation
    tables.
    """
    def _index_origin(table):
        # every origin table gets a non-unique index on its 'origin' column
        op.create_index(op.f('ix_%s_origin' % table), table, ['origin'], unique=False)

    lookups = ('bodytype', 'drivetype', 'energysource',
               'enginecylindersposition', 'engineposition',
               'gearboxtype', 'steeramplifier')
    # plain lookup tables: surrogate id + unique name
    for lookup in lookups:
        op.create_table('auto_' + lookup,
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
        )
    # per-origin lookup tables: composite (id, origin) key plus an optional
    # pointer to the canonical row in the plain lookup table
    for lookup in lookups:
        origin_table = 'auto_origin' + lookup
        op.create_table(origin_table,
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('origin', sa.String(), nullable=False),
        sa.Column('real_instance', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['real_instance'], ['auto_%s.id' % lookup], ),
        sa.PrimaryKeyConstraint('id', 'origin'),
        sa.UniqueConstraint('name', 'origin')
        )
        _index_origin(origin_table)
    # detail tables hanging off auto_complectation
    op.create_table('auto_body',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('complectation_id', sa.Integer(), nullable=False),
    sa.Column('body_type_id', sa.Integer(), nullable=True),
    sa.Column('doors', sa.Integer(), nullable=True),
    sa.Column('seats', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['body_type_id'], ['auto_bodytype.id'], ),
    sa.ForeignKeyConstraint(['complectation_id'], ['auto_complectation.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('auto_dimensions',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('complectation_id', sa.Integer(), nullable=False),
    sa.Column('length', sa.Integer(), nullable=True),
    sa.Column('width', sa.Integer(), nullable=True),
    sa.Column('height', sa.Integer(), nullable=True),
    sa.Column('clearance', sa.Integer(), nullable=True),
    sa.Column('curb_weight', sa.Integer(), nullable=True),
    sa.Column('max_allowed_weight', sa.Integer(), nullable=True),
    sa.Column('trunk_volume', sa.Integer(), nullable=True),
    sa.Column('fuel_tank_volume', sa.Integer(), nullable=True),
    sa.Column('wheel_base', sa.Integer(), nullable=True),
    sa.Column('bearing_capacity', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['complectation_id'], ['auto_complectation.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('auto_dynamiccharacteristics',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('complectation_id', sa.Integer(), nullable=False),
    sa.Column('max_velocity', sa.Integer(), nullable=True),
    sa.Column('acceleration_time_to_100', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['complectation_id'], ['auto_complectation.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('auto_engine',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('complectation_id', sa.Integer(), nullable=False),
    sa.Column('position_id', sa.Integer(), nullable=True),
    sa.Column('energy_source_id', sa.Integer(), nullable=True),
    sa.Column('volume', sa.Integer(), nullable=True),
    sa.Column('cylinders', sa.Integer(), nullable=True),
    sa.Column('cylinders_position_id', sa.Integer(), nullable=True),
    sa.Column('valves_count', sa.Integer(), nullable=True),
    sa.Column('co2_emission', sa.Integer(), nullable=True),
    sa.Column('euro_toxicity_norms', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['complectation_id'], ['auto_complectation.id'], ),
    sa.ForeignKeyConstraint(['cylinders_position_id'], ['auto_enginecylindersposition.id'], ),
    sa.ForeignKeyConstraint(['energy_source_id'], ['auto_energysource.id'], ),
    sa.ForeignKeyConstraint(['position_id'], ['auto_engineposition.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('auto_steering',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('complectation_id', sa.Integer(), nullable=False),
    sa.Column('amplifier_id', sa.Integer(), nullable=True),
    sa.Column('spread_diameter', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['amplifier_id'], ['auto_steeramplifier.id'], ),
    sa.ForeignKeyConstraint(['complectation_id'], ['auto_complectation.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('auto_transmission',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('complectation_id', sa.Integer(), nullable=False),
    sa.Column('gearbox_type_id', sa.Integer(), nullable=True),
    sa.Column('gears_count', sa.Integer(), nullable=True),
    sa.Column('drive_type_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['complectation_id'], ['auto_complectation.id'], ),
    sa.ForeignKeyConstraint(['drive_type_id'], ['auto_drivetype.id'], ),
    sa.ForeignKeyConstraint(['gearbox_type_id'], ['auto_gearboxtype.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # one-to-one extensions of auto_engine (shared primary key)
    op.create_table('auto_enginefuelrate',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('mixed', sa.Integer(), nullable=False),
    sa.Column('urban', sa.Integer(), nullable=True),
    sa.Column('extra_urban', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['id'], ['auto_engine.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('auto_enginepower',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('horses', sa.Integer(), nullable=True),
    sa.Column('rotations_start', sa.Integer(), nullable=True),
    sa.Column('rotations_end', sa.Integer(), nullable=True),
    sa.Column('max_torque', sa.Integer(), nullable=True),
    sa.Column('max_torque_rotations_start', sa.Integer(), nullable=True),
    sa.Column('max_torque_rotations_end', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['id'], ['auto_engine.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # origin shadow copies of the detail tables: same payload columns plus
    # (origin, real_instance), composite FKs into the origin lookup tables
    op.create_table('auto_originbody',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('complectation_id', sa.Integer(), nullable=False),
    sa.Column('body_type_id', sa.Integer(), nullable=True),
    sa.Column('doors', sa.Integer(), nullable=True),
    sa.Column('seats', sa.Integer(), nullable=True),
    sa.Column('origin', sa.String(), nullable=False),
    sa.Column('real_instance', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['body_type_id', 'origin'], ['auto_originbodytype.id', 'auto_originbodytype.origin'], ),
    sa.ForeignKeyConstraint(['complectation_id', 'origin'], ['auto_origincomplectation.id', 'auto_origincomplectation.origin'], ),
    sa.ForeignKeyConstraint(['real_instance'], ['auto_body.id'], ),
    sa.PrimaryKeyConstraint('id', 'origin')
    )
    _index_origin('auto_originbody')
    op.create_table('auto_origindimensions',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('complectation_id', sa.Integer(), nullable=False),
    sa.Column('length', sa.Integer(), nullable=True),
    sa.Column('width', sa.Integer(), nullable=True),
    sa.Column('height', sa.Integer(), nullable=True),
    sa.Column('clearance', sa.Integer(), nullable=True),
    sa.Column('curb_weight', sa.Integer(), nullable=True),
    sa.Column('max_allowed_weight', sa.Integer(), nullable=True),
    sa.Column('trunk_volume', sa.Integer(), nullable=True),
    sa.Column('fuel_tank_volume', sa.Integer(), nullable=True),
    sa.Column('wheel_base', sa.Integer(), nullable=True),
    sa.Column('bearing_capacity', sa.Integer(), nullable=True),
    sa.Column('origin', sa.String(), nullable=False),
    sa.Column('real_instance', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['complectation_id', 'origin'], ['auto_origincomplectation.id', 'auto_origincomplectation.origin'], ),
    sa.ForeignKeyConstraint(['real_instance'], ['auto_dimensions.id'], ),
    sa.PrimaryKeyConstraint('id', 'origin')
    )
    _index_origin('auto_origindimensions')
    op.create_table('auto_origindynamiccharacteristics',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('complectation_id', sa.Integer(), nullable=False),
    sa.Column('max_velocity', sa.Integer(), nullable=True),
    sa.Column('acceleration_time_to_100', sa.Integer(), nullable=True),
    sa.Column('origin', sa.String(), nullable=False),
    sa.Column('real_instance', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['complectation_id', 'origin'], ['auto_origincomplectation.id', 'auto_origincomplectation.origin'], ),
    sa.ForeignKeyConstraint(['real_instance'], ['auto_dynamiccharacteristics.id'], ),
    sa.PrimaryKeyConstraint('id', 'origin')
    )
    _index_origin('auto_origindynamiccharacteristics')
    op.create_table('auto_originengine',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('complectation_id', sa.Integer(), nullable=False),
    sa.Column('position_id', sa.Integer(), nullable=True),
    sa.Column('energy_source_id', sa.Integer(), nullable=True),
    sa.Column('volume', sa.Integer(), nullable=True),
    sa.Column('cylinders', sa.Integer(), nullable=True),
    sa.Column('cylinders_position_id', sa.Integer(), nullable=True),
    sa.Column('valves_count', sa.Integer(), nullable=True),
    sa.Column('co2_emission', sa.Integer(), nullable=True),
    sa.Column('euro_toxicity_norms', sa.Integer(), nullable=True),
    sa.Column('origin', sa.String(), nullable=False),
    sa.Column('real_instance', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['complectation_id', 'origin'], ['auto_origincomplectation.id', 'auto_origincomplectation.origin'], ),
    sa.ForeignKeyConstraint(['cylinders_position_id', 'origin'], ['auto_originenginecylindersposition.id', 'auto_originenginecylindersposition.origin'], ),
    sa.ForeignKeyConstraint(['energy_source_id', 'origin'], ['auto_originenergysource.id', 'auto_originenergysource.origin'], ),
    sa.ForeignKeyConstraint(['position_id', 'origin'], ['auto_originengineposition.id', 'auto_originengineposition.origin'], ),
    sa.ForeignKeyConstraint(['real_instance'], ['auto_engine.id'], ),
    sa.PrimaryKeyConstraint('id', 'origin')
    )
    _index_origin('auto_originengine')
    op.create_table('auto_originsteering',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('complectation_id', sa.Integer(), nullable=False),
    sa.Column('amplifier_id', sa.Integer(), nullable=True),
    sa.Column('spread_diameter', sa.Integer(), nullable=True),
    sa.Column('origin', sa.String(), nullable=False),
    sa.Column('real_instance', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['amplifier_id', 'origin'], ['auto_originsteeramplifier.id', 'auto_originsteeramplifier.origin'], ),
    sa.ForeignKeyConstraint(['complectation_id', 'origin'], ['auto_origincomplectation.id', 'auto_origincomplectation.origin'], ),
    sa.ForeignKeyConstraint(['real_instance'], ['auto_steering.id'], ),
    sa.PrimaryKeyConstraint('id', 'origin')
    )
    _index_origin('auto_originsteering')
    op.create_table('auto_origintransmission',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('complectation_id', sa.Integer(), nullable=False),
    sa.Column('gearbox_type_id', sa.Integer(), nullable=True),
    sa.Column('gears_count', sa.Integer(), nullable=True),
    sa.Column('drive_type_id', sa.Integer(), nullable=True),
    sa.Column('origin', sa.String(), nullable=False),
    sa.Column('real_instance', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['complectation_id', 'origin'], ['auto_origincomplectation.id', 'auto_origincomplectation.origin'], ),
    sa.ForeignKeyConstraint(['drive_type_id', 'origin'], ['auto_origindrivetype.id', 'auto_origindrivetype.origin'], ),
    sa.ForeignKeyConstraint(['gearbox_type_id', 'origin'], ['auto_origingearboxtype.id', 'auto_origingearboxtype.origin'], ),
    sa.ForeignKeyConstraint(['real_instance'], ['auto_transmission.id'], ),
    sa.PrimaryKeyConstraint('id', 'origin')
    )
    _index_origin('auto_origintransmission')
    op.create_table('auto_originenginefuelrate',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('mixed', sa.Integer(), nullable=False),
    sa.Column('urban', sa.Integer(), nullable=True),
    sa.Column('extra_urban', sa.Integer(), nullable=True),
    sa.Column('origin', sa.String(), nullable=False),
    sa.Column('real_instance', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['id', 'origin'], ['auto_originengine.id', 'auto_originengine.origin'], ),
    sa.ForeignKeyConstraint(['real_instance'], ['auto_enginefuelrate.id'], ),
    sa.PrimaryKeyConstraint('id', 'origin')
    )
    _index_origin('auto_originenginefuelrate')
    op.create_table('auto_originenginepower',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('horses', sa.Integer(), nullable=True),
    sa.Column('rotations_start', sa.Integer(), nullable=True),
    sa.Column('rotations_end', sa.Integer(), nullable=True),
    sa.Column('max_torque', sa.Integer(), nullable=True),
    sa.Column('max_torque_rotations_start', sa.Integer(), nullable=True),
    sa.Column('max_torque_rotations_end', sa.Integer(), nullable=True),
    sa.Column('origin', sa.String(), nullable=False),
    sa.Column('real_instance', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['id', 'origin'], ['auto_originengine.id', 'auto_originengine.origin'], ),
    sa.ForeignKeyConstraint(['real_instance'], ['auto_enginepower.id'], ),
    sa.PrimaryKeyConstraint('id', 'origin')
    )
    _index_origin('auto_originenginepower')
    # the timestamp bookkeeping columns move off the complectation tables
    op.drop_column('auto_complectation', 'updated_at')
    op.drop_column('auto_complectation', 'created_at')
    op.drop_column('auto_origincomplectation', 'updated_at')
    op.drop_column('auto_origincomplectation', 'created_at')
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Restore the timestamp columns removed by upgrade().
    for table_name in ('auto_origincomplectation', 'auto_complectation'):
        op.add_column(table_name, sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))
        op.add_column(table_name, sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))
    # Origin-scoped tables carry an index on their 'origin' column; the index
    # is dropped before the table itself, matching the original statement order.
    for table_name in (
            'auto_originenginepower',
            'auto_originenginefuelrate',
            'auto_origintransmission',
            'auto_originsteering',
            'auto_originengine',
            'auto_origindynamiccharacteristics',
            'auto_origindimensions',
            'auto_originbody'):
        op.drop_index(op.f('ix_%s_origin' % table_name), table_name=table_name)
        op.drop_table(table_name)
    # Plain entity tables (no extra indexes).
    for table_name in (
            'auto_enginepower', 'auto_enginefuelrate', 'auto_transmission',
            'auto_steering', 'auto_engine', 'auto_dynamiccharacteristics',
            'auto_dimensions', 'auto_body'):
        op.drop_table(table_name)
    # Origin-scoped dictionary tables (indexed on 'origin' as well).
    for table_name in (
            'auto_originsteeramplifier', 'auto_origingearboxtype',
            'auto_originengineposition', 'auto_originenginecylindersposition',
            'auto_originenergysource', 'auto_origindrivetype',
            'auto_originbodytype'):
        op.drop_index(op.f('ix_%s_origin' % table_name), table_name=table_name)
        op.drop_table(table_name)
    # Plain dictionary tables.
    for table_name in (
            'auto_steeramplifier', 'auto_gearboxtype', 'auto_engineposition',
            'auto_enginecylindersposition', 'auto_energysource',
            'auto_drivetype', 'auto_bodytype'):
        op.drop_table(table_name)
    ### end Alembic commands ###
| {
"repo_name": "beslave/auto-collector",
"path": "migrations/versions/a24972a141ba_add_complectation_models.py",
"copies": "1",
"size": "22401",
"license": "mit",
"hash": 6149137853297273000,
"line_mean": 54.5856079404,
"line_max": 155,
"alpha_frac": 0.6872907459,
"autogenerated": false,
"ratio": 3.4142661179698215,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9536868854295169,
"avg_score": 0.0129376019149307,
"num_lines": 403
} |
"""add_completed_failed_jobstate
Revision ID: b3290c1bf67a
Revises: fe30109949aa
Create Date: 2021-06-15 01:41:14.660466
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'b3290c1bf67a'
down_revision = 'fe30109949aa'
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    connection = op.get_bind()
    result = connection.execute('select version()')
    # MySQL releases older than 8 reject 'ALTER TABLE ... DROP CHECK', so the
    # old constraint is only dropped when the server reports a newer version.
    # NOTE(review): this is a lexicographic string compare — it would mis-order
    # a two-digit major version such as '10.1.0'; confirm only MySQL 5.x/8.x
    # servers are targeted.
    if result is not None and result.fetchall()[0][0] > '8.0.0':
        op.execute('ALTER TABLE job_v2 drop check jobstate')
    # Extend the jobstate enum with the two terminal states.
    op.alter_column(
        'job_v2', 'state', nullable=False, comment='state',
        type_=sa.Enum('INVALID', 'STOPPED', 'WAITING', 'STARTED', 'COMPLETED',
                      'FAILED', name='jobstate', native_enum=False))
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    connection = op.get_bind()
    result = connection.execute('select version()')
    # Same MySQL-8 guard as upgrade(): older servers have no 'drop check'.
    # NOTE(review): lexicographic compare — would mis-order '10.x'; confirm
    # only MySQL is targeted.
    if result is not None and result.fetchall()[0][0] > '8.0.0':
        op.execute('ALTER TABLE job_v2 drop check jobstate')
    # Shrink the jobstate enum back to the pre-migration value set.
    op.alter_column(
        'job_v2', 'state', nullable=False, comment='state',
        type_=sa.Enum('INVALID', 'STOPPED', 'WAITING', 'STARTED',
                      name='jobstate', native_enum=False))
    # ### end Alembic commands ###
| {
"repo_name": "bytedance/fedlearner",
"path": "web_console_v2/api/migrations/versions/b3290c1bf67a_add_completed_failed_jobstate.py",
"copies": "1",
"size": "1432",
"license": "apache-2.0",
"hash": -2280304786468540700,
"line_mean": 36.6842105263,
"line_max": 189,
"alpha_frac": 0.6717877095,
"autogenerated": false,
"ratio": 3.3536299765807964,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.952393726741674,
"avg_score": 0.00029608373281138557,
"num_lines": 38
} |
"""Add completed field to requests_log
Revision ID: 30000d39475f
Revises: 39e65f2c8112
Create Date: 2020-01-23 08:28:14.110807
"""
from alembic import op
import sqlalchemy as sa
import model.utils
from rdr_service.participant_enums import PhysicalMeasurementsStatus, QuestionnaireStatus, OrderStatus
from rdr_service.participant_enums import WithdrawalStatus, WithdrawalReason, SuspensionStatus, QuestionnaireDefinitionStatus
from rdr_service.participant_enums import EnrollmentStatus, Race, SampleStatus, OrganizationType, BiobankOrderStatus
from rdr_service.participant_enums import OrderShipmentTrackingStatus, OrderShipmentStatus
from rdr_service.participant_enums import MetricSetType, MetricsKey, GenderIdentity
from rdr_service.model.base import add_table_history_table, drop_table_history_table
from rdr_service.model.code import CodeType
from rdr_service.model.site_enums import SiteStatus, EnrollingStatus, DigitalSchedulingStatus, ObsoleteStatus
# revision identifiers, used by Alembic.
revision = '30000d39475f'
down_revision = '39e65f2c8112'
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade routine for *engine_name*."""
    # Multi-database Alembic layout: handlers are named upgrade_<engine>.
    handler_name = "upgrade_%s" % engine_name
    globals()[handler_name]()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade routine for *engine_name*."""
    # Multi-database Alembic layout: handlers are named downgrade_<engine>.
    handler_name = "downgrade_%s" % engine_name
    globals()[handler_name]()
def upgrade_rdr():
    # ### commands auto generated by Alembic - please adjust! ###
    # Add a completion flag to requests_log; rows that existed before this
    # migration are back-filled as complete.
    flag_column = sa.Column('complete', sa.Boolean(), nullable=True, default=0)
    op.add_column('requests_log', flag_column)
    op.execute('update requests_log set complete = 1')
    # ### end Alembic commands ###
def downgrade_rdr():
    # ### commands auto generated by Alembic - please adjust! ###
    # Reverses upgrade_rdr(): removes the back-filled 'complete' flag.
    op.drop_column('requests_log', 'complete')
    # ### end Alembic commands ###
def upgrade_metrics():
    # ### commands auto generated by Alembic - please adjust! ###
    # No schema changes are needed in the metrics database for this revision.
    pass
    # ### end Alembic commands ###
def downgrade_metrics():
    # ### commands auto generated by Alembic - please adjust! ###
    # No schema changes are needed in the metrics database for this revision.
    pass
    # ### end Alembic commands ###
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/30000d39475f_add_completed_field_to_requests_log.py",
"copies": "1",
"size": "1966",
"license": "bsd-3-clause",
"hash": 3408815629133025300,
"line_mean": 31.2295081967,
"line_max": 125,
"alpha_frac": 0.7466937945,
"autogenerated": false,
"ratio": 3.667910447761194,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9821745020989676,
"avg_score": 0.018571844254303492,
"num_lines": 61
} |
"""Add confessions tables and 1689
Revision ID: 69e765223549
Revises: e7aff6447fbe
Create Date: 2017-10-10 20:13:10.688722
"""
from alembic import op
import sqlalchemy as sa
from pathlib import Path
from json import load
from collections import OrderedDict
# revision identifiers, used by Alembic.
revision = '69e765223549'
down_revision = 'e7aff6447fbe'
branch_labels = None
depends_on = None
# Load the 1689 LBCF JSON document (shipped next to this migration script)
# once at import time. object_pairs_hook=OrderedDict keeps the chapter and
# paragraph ordering from the source document stable; the previous
# ``lambda x: OrderedDict(x)`` was a redundant wrapper around the same call.
with (Path(__file__).resolve().parent / f'{revision}_1689.json').open() as f:
    lbcf_data = load(f, object_pairs_hook=OrderedDict)
def upgrade():
    """Create the confession tables and seed them with the 1689 LBCF."""
    # Schema: a confession, its chapters, and the per-chapter paragraphs.
    confessions_table = op.create_table(
        'confessions',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('command', sa.String, unique=True),
        sa.Column('name', sa.String))
    chapters_table = op.create_table(
        'confession_chapters',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('confession_id', sa.Integer, sa.ForeignKey('confessions.id')),
        sa.Column('chapter_number', sa.Integer),
        sa.Column('title', sa.String))
    paragraphs_table = op.create_table(
        'confession_paragraphs',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('confession_id', sa.Integer, sa.ForeignKey('confessions.id')),
        sa.Column('chapter_number', sa.Integer),
        sa.Column('paragraph_number', sa.Integer),
        sa.Column('text', sa.Text))

    connection = op.get_bind()
    inserted = connection.execute(
        confessions_table.insert(),
        dict(command='1689', name='The 1689 London Baptist Confession of Faith'))
    confession_id = inserted.inserted_primary_key[0]
    # Seed the content loaded from the JSON document at module import time.
    for chapter_key, chapter_data in lbcf_data['chapters'].items():
        number = int(chapter_key)
        connection.execute(
            chapters_table.insert(),
            dict(confession_id=confession_id, chapter_number=number,
                 title=chapter_data['title']))
        paragraph_rows = [
            dict(confession_id=confession_id, chapter_number=number,
                 paragraph_number=int(paragraph_key), text=paragraph_text)
            for paragraph_key, paragraph_text in chapter_data['paragraphs'].items()
        ]
        connection.execute(paragraphs_table.insert(), *paragraph_rows)
def downgrade():
    """Drop the confession tables, children before their parent."""
    for table_name in ('confession_paragraphs', 'confession_chapters',
                       'confessions'):
        op.drop_table(table_name)
| {
"repo_name": "bryanforbes/Erasmus",
"path": "alembic/versions/69e765223549_add_confessions_tables_and_1689.py",
"copies": "1",
"size": "2825",
"license": "bsd-3-clause",
"hash": -7814788955319518000,
"line_mean": 40.5441176471,
"line_max": 99,
"alpha_frac": 0.5408849558,
"autogenerated": false,
"ratio": 4.286798179059181,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0022023629985659264,
"num_lines": 68
} |
"""Add 'confess*' tables
Revision ID: 0f780af615ef
Revises: ac322afa9c4d
Create Date: 2017-10-14 14:23:35.966456
"""
from alembic import op # type: ignore
import sqlalchemy as sa # type: ignore
from typing import Tuple, List, Dict, Any # noqa
from mypy_extensions import TypedDict
from pathlib import Path
from json import load
from collections import OrderedDict
# revision identifiers, used by Alembic.
revision = '0f780af615ef'
down_revision = 'ac322afa9c4d'
branch_labels = None
depends_on = None
class ChapterJSON(TypedDict):
    """Shape of one chapter in a chaptered confession JSON document."""
    title: str
    # paragraph number (as a string key) -> paragraph text
    paragraphs: Dict[str, str]
class ConfessionJSON(TypedDict):
    """Top-level shape of a chaptered confession JSON document."""
    title: str
    # chapter number (as a string key) -> chapter contents
    chapters: Dict[str, ChapterJSON]
class QAJSON(TypedDict):
    """Top-level shape of a question/answer catechism JSON document."""
    title: str
    # each entry is a [question, answer] pair, in document order
    questions: List[List[str]]
def _get_paragraph_records(id: str, data: ConfessionJSON) -> Tuple[List[Dict[str, Any]], List[Dict[str, Any]]]:
chapters = [] # type: List[Dict[str, Any]]
paragraphs = [] # type: List[Dict[str, Any]]
for chapter_str, chapter in data['chapters'].items():
chapter_number = int(chapter_str)
chapters.append({'confess_id': id, 'chapter_number': chapter_number,
'chapter_title': chapter['title']})
paragraphs += [
{'confess_id': id, 'chapter_number': chapter_number,
'paragraph_number': int(paragraph_str), 'text': text}
for paragraph_str, text in chapter['paragraphs'].items()
]
return (chapters, paragraphs)
def _get_qa_records(id: str, data: QAJSON) -> List[Dict[str, Any]]:
qas = [] # type: List[Dict[str, Any]]
for index in range(len(data['questions'])):
question, answer = data['questions'][index]
qas.append({'confess_id': id, 'question_number': index + 1,
'question_text': question, 'answer_text': answer})
return qas
def upgrade():
    """Create the confess_* tables and seed them from the bundled JSON."""
    # Lookup table: how a confession is organised (articles / chapters / Q&A).
    type_table = op.create_table(
        'confess_types',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('value', sa.String(20), unique=True, nullable=False))
    confess_table = op.create_table(
        'confesses',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('command', sa.String, unique=True, nullable=False),
        sa.Column('name', sa.String, nullable=False),
        sa.Column('type_id', sa.Integer, sa.ForeignKey('confess_types.id'),
                  nullable=False))
    chapter_table = op.create_table(
        'confess_chapters',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('confess_id', sa.Integer, sa.ForeignKey('confesses.id'),
                  nullable=False),
        sa.Column('chapter_number', sa.Integer, nullable=False),
        sa.Column('chapter_title', sa.String, nullable=False))
    paragraph_table = op.create_table(
        'confess_paragraphs',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('confess_id', sa.Integer, sa.ForeignKey('confesses.id'),
                  nullable=False),
        sa.Column('chapter_number', sa.Integer),
        sa.Column('paragraph_number', sa.Integer, nullable=False),
        sa.Column('text', sa.Text, nullable=False))
    # Full-text search index (PostgreSQL GIN over to_tsvector).
    op.create_index('confess_paragraphs_text_idx', 'confess_paragraphs',
                    [sa.text("to_tsvector('english', text)")],
                    postgresql_using='gin')
    qa_table = op.create_table(
        'confess_qas',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('confess_id', sa.Integer, sa.ForeignKey('confesses.id'),
                  nullable=False),
        sa.Column('question_number', sa.Integer, nullable=False),
        sa.Column('question_text', sa.Text, nullable=False),
        sa.Column('answer_text', sa.Text, nullable=False))
    op.create_index('confess_qas_text_idx', 'confess_qas',
                    [sa.text("to_tsvector('english', question_text || ' ' || answer_text)")],
                    postgresql_using='gin')
    op.bulk_insert(type_table, [
        {'id': 1, 'value': 'ARTICLES'},
        {'id': 2, 'value': 'CHAPTERS'},
        {'id': 3, 'value': 'QA'}])
    op.bulk_insert(confess_table, [
        {'id': 1, 'command': '1689', 'type_id': 2,
         'name': 'The 1689 London Baptist Confession of Faith'},
        {'id': 2, 'command': 'wcf', 'type_id': 2,
         'name': 'The Westminster Confession of Faith'},
        {'id': 3, 'command': 'wsc', 'type_id': 3,
         'name': 'The Wesminster Shorter Catechism'},
        {'id': 4, 'command': 'wlc', 'type_id': 3,
         'name': 'The Wesminster Longer Catechism'},
        {'id': 5, 'command': 'hc', 'type_id': 3,
         'name': 'The Heidelberg Catechism'}])

    here = Path(__file__).resolve().parent

    def _load(stem):
        # Content documents ship alongside the migration scripts; the
        # object_pairs_hook preserves the documents' key ordering.
        with (here / f'{stem}.json').open() as fp:
            return load(fp, object_pairs_hook=lambda pairs: OrderedDict(pairs))

    lbcf_data = _load('69e765223549_1689')
    wcf_data = _load('a1dbd23261c3_wcf')
    wsc_data = _load(f'{revision}_wsc')
    wlc_data = _load(f'{revision}_wlc')
    hc_data = _load(f'{revision}_hc')

    lbcf_chapters, lbcf_paragraphs = _get_paragraph_records(1, lbcf_data)
    wcf_chapters, wcf_paragraphs = _get_paragraph_records(2, wcf_data)
    op.bulk_insert(chapter_table, lbcf_chapters + wcf_chapters)
    op.bulk_insert(paragraph_table, lbcf_paragraphs + wcf_paragraphs)
    op.bulk_insert(qa_table, _get_qa_records(3, wsc_data))
    op.bulk_insert(qa_table, _get_qa_records(4, wlc_data))
    op.bulk_insert(qa_table, _get_qa_records(5, hc_data))
def downgrade():
    """Drop the confess_* tables in reverse dependency order."""
    # Children first, then confesses, then the type lookup table.
    for table_name in ('confess_qas', 'confess_paragraphs', 'confess_chapters',
                       'confesses', 'confess_types'):
        op.drop_table(table_name)
| {
"repo_name": "bryanforbes/Erasmus",
"path": "alembic/versions/0f780af615ef_add_confess_tables.py",
"copies": "1",
"size": "6510",
"license": "bsd-3-clause",
"hash": -51497230431483520,
"line_mean": 39.6875,
"line_max": 111,
"alpha_frac": 0.5634408602,
"autogenerated": false,
"ratio": 3.4962406015037595,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45596814617037595,
"avg_score": null,
"num_lines": null
} |
"""Add config on relations between federated schedulers and experiments
Revision ID: 35756029a48a
Revises: 4b8e6be32ada
Create Date: 2013-12-30 14:39:42.765444
"""
# revision identifiers, used by Alembic.
revision = '35756029a48a'
down_revision = '4b8e6be32ada'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Replace the bare association table with one that also stores a
    # per-(experiment, scheduler) configuration string.
    columns_and_constraints = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('experiment_id', sa.Integer(), nullable=False),
        sa.Column('scheduler_id', sa.Integer(), nullable=False),
        sa.Column('config', sa.String(length=1024)),
        sa.ForeignKeyConstraint(['experiment_id'], ['Experiment.id'], ),
        sa.ForeignKeyConstraint(['scheduler_id'], ['Scheduler.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('experiment_id', 'scheduler_id'),
    ]
    op.create_table('SchedulerExternalExperimentEntry',
                    *columns_and_constraints,
                    mysql_engine='InnoDB')
    op.drop_table(u'ExperimentExternalScheduler')
    ### end Alembic commands ###
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Recreate the old plain association table, then drop the new entry table.
    columns_and_constraints = [
        sa.Column(u'experiment_id', mysql.INTEGER(display_width=11),
                  autoincrement=False, nullable=False),
        sa.Column(u'scheduler_id', mysql.INTEGER(display_width=11),
                  autoincrement=False, nullable=False),
        sa.ForeignKeyConstraint(['experiment_id'], [u'Experiment.id'],
                                name=u'ExperimentExternalScheduler_ibfk_1'),
        sa.ForeignKeyConstraint(['scheduler_id'], [u'Scheduler.id'],
                                name=u'ExperimentExternalScheduler_ibfk_2'),
        sa.PrimaryKeyConstraint(u'experiment_id', u'scheduler_id'),
    ]
    op.create_table(u'ExperimentExternalScheduler',
                    *columns_and_constraints,
                    mysql_default_charset=u'latin1',
                    mysql_engine=u'InnoDB')
    op.drop_table('SchedulerExternalExperimentEntry')
    ### end Alembic commands ###
| {
"repo_name": "zstars/weblabdeusto",
"path": "server/src/weblab/db/upgrade/regular/versions/35756029a48a_add_config_on_relati.py",
"copies": "1",
"size": "1836",
"license": "bsd-2-clause",
"hash": -5125062208030766000,
"line_mean": 38.9130434783,
"line_max": 111,
"alpha_frac": 0.7189542484,
"autogenerated": false,
"ratio": 3.6141732283464565,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48331274767464566,
"avg_score": null,
"num_lines": null
} |
"""Add connected objects to revisions
Revision ID: 4e989ef86619
Revises: 262bbe790f4c
Create Date: 2016-01-28 11:15:12.300329
"""
# pylint: disable=invalid-name
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '4e989ef86619' # pylint: disable=invalid-name
down_revision = '37b2a060bdd6' # pylint: disable=invalid-name
def upgrade():
    """ Add extra fields and indexes to revisions table """
    # Each revision can now reference a source and a destination object as
    # polymorphic (type, id) pairs, mirroring the existing resource_* columns.
    for prefix in ('source', 'destination'):
        op.add_column('revisions', sa.Column('%s_type' % prefix,
                                             sa.String(length=250)))
        op.add_column('revisions', sa.Column('%s_id' % prefix, sa.Integer()))
    for prefix in ('resource', 'source', 'destination'):
        op.create_index('fk_revisions_%s' % prefix, 'revisions',
                        ['%s_type' % prefix, '%s_id' % prefix], unique=False)
def downgrade():
    """ Remove indexes and fields from revisions """
    for prefix in ('resource', 'source', 'destination'):
        op.drop_index('fk_revisions_%s' % prefix, table_name='revisions')
    for prefix in ('source', 'destination'):
        op.drop_column('revisions', '%s_type' % prefix)
        op.drop_column('revisions', '%s_id' % prefix)
| {
"repo_name": "jmakov/ggrc-core",
"path": "src/ggrc/migrations/versions/20160128111512_4e989ef86619_add_connected_objects_to_revisions.py",
"copies": "3",
"size": "1591",
"license": "apache-2.0",
"hash": -6109603267211870000,
"line_mean": 33.5869565217,
"line_max": 77,
"alpha_frac": 0.6882463859,
"autogenerated": false,
"ratio": 3.4967032967032967,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5684949682603296,
"avg_score": null,
"num_lines": null
} |
"""Add connected objects to revisions
Revision ID: 4e989ef86619
Revises: 37b2a060bdd6
Create Date: 2016-01-28 11:15:12.300329
"""
# pylint: disable=invalid-name
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '4e989ef86619' # pylint: disable=invalid-name
down_revision = '37b2a060bdd6' # pylint: disable=invalid-name
def upgrade():
    """ Add extra fields and indexes to revisions table """
    # Each revision can now reference a source and a destination object as
    # polymorphic (type, id) pairs, mirroring the existing resource_* columns.
    for prefix in ('source', 'destination'):
        op.add_column('revisions', sa.Column('%s_type' % prefix,
                                             sa.String(length=250)))
        op.add_column('revisions', sa.Column('%s_id' % prefix, sa.Integer()))
    for prefix in ('resource', 'source', 'destination'):
        op.create_index('fk_revisions_%s' % prefix, 'revisions',
                        ['%s_type' % prefix, '%s_id' % prefix], unique=False)
def downgrade():
    """ Remove indexes and fields from revisions """
    for prefix in ('resource', 'source', 'destination'):
        op.drop_index('fk_revisions_%s' % prefix, table_name='revisions')
    for prefix in ('source', 'destination'):
        op.drop_column('revisions', '%s_type' % prefix)
        op.drop_column('revisions', '%s_id' % prefix)
| {
"repo_name": "AleksNeStu/ggrc-core",
"path": "src/ggrc/migrations/versions/20160128111512_4e989ef86619_add_connected_objects_to_revisions.py",
"copies": "14",
"size": "1591",
"license": "apache-2.0",
"hash": -405046603849958660,
"line_mean": 33.5869565217,
"line_max": 77,
"alpha_frac": 0.6882463859,
"autogenerated": false,
"ratio": 3.4967032967032967,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0064905750641232616,
"num_lines": 46
} |
"""add consent expired status
Revision ID: 4331eeb400da
Revises: 1c35e6439d1c
Create Date: 2020-07-10 11:54:45.714491
"""
from alembic import op
import sqlalchemy as sa
import rdr_service.model.utils
from rdr_service.participant_enums import ConsentExpireStatus
# revision identifiers, used by Alembic.
revision = '4331eeb400da'
down_revision = '1c35e6439d1c'
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade routine for *engine_name*."""
    # Multi-database Alembic layout: handlers are named upgrade_<engine>.
    handler_name = "upgrade_%s" % engine_name
    globals()[handler_name]()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade routine for *engine_name*."""
    # Multi-database Alembic layout: handlers are named downgrade_<engine>.
    handler_name = "downgrade_%s" % engine_name
    globals()[handler_name]()
def upgrade_rdr():
    # ### commands auto generated by Alembic - please adjust! ###
    # Track EHR consent expiration on participant_summary: a status enum plus
    # received/authored timestamps.
    new_columns = (
        ('ehr_consent_expire_status',
         rdr_service.model.utils.Enum(ConsentExpireStatus)),
        ('ehr_consent_expire_time', rdr_service.model.utils.UTCDateTime()),
        ('ehr_consent_expire_authored', rdr_service.model.utils.UTCDateTime()),
    )
    for name, column_type in new_columns:
        op.add_column('participant_summary',
                      sa.Column(name, column_type, nullable=True))
    # ### end Alembic commands ###
def downgrade_rdr():
    # ### commands auto generated by Alembic - please adjust! ###
    # Remove the EHR consent expiration columns in reverse creation order.
    for name in ('ehr_consent_expire_authored', 'ehr_consent_expire_time',
                 'ehr_consent_expire_status'):
        op.drop_column('participant_summary', name)
    # ### end Alembic commands ###
def upgrade_metrics():
    # ### commands auto generated by Alembic - please adjust! ###
    # No schema changes are needed in the metrics database for this revision.
    pass
    # ### end Alembic commands ###
def downgrade_metrics():
    # ### commands auto generated by Alembic - please adjust! ###
    # No schema changes are needed in the metrics database for this revision.
    pass
    # ### end Alembic commands ###
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/4331eeb400da_add_consent_expired_status.py",
"copies": "1",
"size": "1853",
"license": "bsd-3-clause",
"hash": 7773623756055923000,
"line_mean": 30.9482758621,
"line_max": 117,
"alpha_frac": 0.6470588235,
"autogenerated": false,
"ratio": 3.6765873015873014,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48236461250873014,
"avg_score": null,
"num_lines": null
} |
"""add contact_information table and extend suppliers
Revision ID: 550715127385
Revises: 3d5aabf7d291
Create Date: 2015-04-13 17:42:52.688791
"""
# revision identifiers, used by Alembic.
revision = '550715127385'
down_revision = '3d5aabf7d291'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # One-to-many: a supplier may have several contact_information rows.
    op.create_table(
        'contact_information',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('supplier_id', sa.Integer(), nullable=True),
        sa.Column('contact_name', sa.String(), nullable=False),
        sa.Column('phone_number', sa.String(), nullable=True),
        sa.Column('email', sa.String(), nullable=False),
        sa.Column('website', sa.String(), nullable=True),
        sa.Column('address1', sa.String(), nullable=True),
        sa.Column('address2', sa.String(), nullable=True),
        sa.Column('city', sa.String(), nullable=True),
        sa.Column('country', sa.String(), nullable=True),
        sa.Column('postcode', sa.String(), nullable=False),
        sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.supplier_id'], ),
        sa.PrimaryKeyConstraint('id'))
    # Extra supplier profile fields: JSON clients list plus three strings.
    op.add_column(u'suppliers', sa.Column('clients', postgresql.JSON(), nullable=True))
    for name in ('duns_number', 'esourcing_id', 'description'):
        op.add_column(u'suppliers', sa.Column(name, sa.String(), nullable=True))
    ### end Alembic commands ###
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Drop the supplier profile fields in reverse creation order, then the
    # contact_information table itself.
    for name in ('description', 'esourcing_id', 'duns_number', 'clients'):
        op.drop_column(u'suppliers', name)
    op.drop_table(u'contact_information')
    ### end Alembic commands ###
| {
"repo_name": "alphagov/digitalmarketplace-api",
"path": "migrations/versions/550715127385_add_contact_information_table_and_extend_suppliers.py",
"copies": "3",
"size": "1910",
"license": "mit",
"hash": 339406055021119000,
"line_mean": 38.7916666667,
"line_max": 87,
"alpha_frac": 0.6853403141,
"autogenerated": false,
"ratio": 3.3217391304347825,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.011576847266758158,
"num_lines": 48
} |
"""Add contents_hash
Revision ID: 515f518eff57
Revises: 218fd78e07e8
Create Date: 2017-07-25 15:21:18.613141
"""
# revision identifiers, used by Alembic.
revision = '515f518eff57'
down_revision = '218fd78e07e8'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Track three separate hashes per repository app instead of the single
    # last_processed_hash column.
    hash_columns = ('contents_hash', 'last_processed_contents_hash',
                    'last_processed_downloaded_hash')
    for name in hash_columns:
        op.add_column('RepositoryApps',
                      sa.Column(name, sa.Unicode(length=255), nullable=True))
    for name in hash_columns:
        op.create_index(u'ix_RepositoryApps_%s' % name, 'RepositoryApps',
                        [name], unique=False)
    op.drop_index(u'ix_RepositoryApps_last_processed_hash', table_name='RepositoryApps')
    op.drop_column('RepositoryApps', u'last_processed_hash')
    ### end Alembic commands ###
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Restore the single last_processed_hash column, then remove the three
    # replacement hash columns and their indexes.
    op.add_column('RepositoryApps',
                  sa.Column(u'last_processed_hash', mysql.VARCHAR(length=255),
                            nullable=True))
    op.create_index(u'ix_RepositoryApps_last_processed_hash', 'RepositoryApps',
                    [u'last_processed_hash'], unique=False)
    hash_columns = ('last_processed_downloaded_hash',
                    'last_processed_contents_hash', 'contents_hash')
    for name in hash_columns:
        op.drop_index(u'ix_RepositoryApps_%s' % name, table_name='RepositoryApps')
    for name in hash_columns:
        op.drop_column('RepositoryApps', name)
    ### end Alembic commands ###
| {
"repo_name": "morelab/appcomposer",
"path": "alembic/versions/515f518eff57_add_contents_hash.py",
"copies": "3",
"size": "2113",
"license": "bsd-2-clause",
"hash": 8785403678726433000,
"line_mean": 51.825,
"line_max": 140,
"alpha_frac": 0.7415996214,
"autogenerated": false,
"ratio": 3.317111459968603,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.009522711877842288,
"num_lines": 40
} |
"""Add context columns for RBAC
Revision ID: 4b22d3a098c7
Revises: 52a791eb9a71
Create Date: 2013-06-19 22:39:29.563353
"""
# revision identifiers, used by Alembic.
revision = '4b22d3a098c7'
down_revision = '52a791eb9a71'
from alembic import op
import sqlalchemy as sa
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Every RBAC-scoped table gains a nullable context_id column; the tuple
    # preserves the original alphabetical statement order.
    tables = (
        'categories', 'categorizations', 'control_assessments',
        'control_controls', 'control_risks', 'control_sections', 'controls',
        'cycles', 'data_assets', 'directives', 'documents', 'facilities',
        'helps', 'markets', 'meetings', 'object_documents', 'object_people',
        'options', 'org_groups', 'pbc_lists', 'people', 'population_samples',
        'products', 'program_directives', 'programs', 'projects',
        'relationship_types', 'relationships', 'requests', 'responses',
        'risk_risky_attributes', 'risks', 'risky_attributes', 'sections',
        'system_controls', 'system_systems', 'systems', 'transactions',
    )
    for table in tables:
        op.add_column(table, sa.Column('context_id', sa.Integer(), nullable=True))
    ### end Alembic commands ###
def downgrade():
    """Remove the ``context_id`` column from every table that gained one.

    Columns are dropped in the reverse of the order ``upgrade`` added them.
    """
    tables = (
        'transactions', 'systems', 'system_systems', 'system_controls',
        'sections', 'risky_attributes', 'risks', 'risk_risky_attributes',
        'responses', 'requests', 'relationships', 'relationship_types',
        'projects', 'programs', 'program_directives', 'products',
        'population_samples', 'people', 'pbc_lists', 'org_groups',
        'options', 'object_people', 'object_documents', 'meetings',
        'markets', 'helps', 'facilities', 'documents', 'directives',
        'data_assets', 'cycles', 'controls', 'control_sections',
        'control_risks', 'control_controls', 'control_assessments',
        'categorizations', 'categories',
    )
    for table_name in tables:
        op.drop_column(table_name, 'context_id')
| {
"repo_name": "hamyuan/ggrc-self-test",
"path": "src/migrations/versions/20130619223929_4b22d3a098c7_add_context_columns_.py",
"copies": "3",
"size": "5657",
"license": "apache-2.0",
"hash": 8831557109293343000,
"line_mean": 55.57,
"line_max": 96,
"alpha_frac": 0.678451476,
"autogenerated": false,
"ratio": 3.2605187319884728,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.006132498833914923,
"num_lines": 100
} |
"""Add contributed values table
Revision ID: 159ba85908fd
Revises: d5d88ac1d291
Create Date: 2019-11-01 15:39:50.970246
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.dialects import postgresql
from qcfractal.storage_sockets.models.sql_base import MsgpackExt
from qcfractal.storage_sockets.models.collections_models import ContributedValuesORM
import numpy as np
# revision identifiers, used by Alembic.
revision = "159ba85908fd"
down_revision = "d5d88ac1d291"
branch_labels = None
depends_on = None
def migrate_contributed_values_data():
    """Copy legacy contributed-values JSON blobs into ContributedValuesORM rows."""
    bind = op.get_bind()
    session = orm.Session(bind=bind)

    # Collect (id, contributed_values_data) pairs from both collection tables.
    dataset_rows = session.execute("select id, contributed_values_data from dataset;").fetchall()
    print(f"Migrating datasets with ids: {[row[0] for row in dataset_rows]}")

    reaction_rows = session.execute("select id, contributed_values_data from reaction_dataset;").fetchall()
    print(f"Migrating reaction datasets with ids: {[row[0] for row in reaction_rows]}")

    dataset_rows.extend(reaction_rows)

    for collection_id, contributed in dataset_rows:
        if contributed is None:
            continue
        for dict_values in contributed.values():
            # Legacy format stores values as an {index: value} mapping;
            # split it into parallel numpy arrays (insertion order kept).
            index_labels = list(dict_values["values"].keys())
            raw_values = list(dict_values["values"].values())
            dict_values["values"] = np.array(raw_values)
            dict_values["index"] = np.array(index_labels)
            record = ContributedValuesORM(**dict_values)
            record.collection_id = collection_id
            session.add(record)

    session.commit()
def upgrade():
    """Normalize contributed values into a dedicated ``contributed_values`` table.

    Steps (order matters):
      1. rename the legacy JSON columns so the new ORM attribute name is free,
      2. create the new table,
      3. migrate the legacy data into it,
      4. drop the legacy columns.
    """
    # rename old column with data
    op.alter_column("dataset", "contributed_values", new_column_name="contributed_values_data")
    op.alter_column("reaction_dataset", "contributed_values", new_column_name="contributed_values_data")

    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "contributed_values",
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("collection_id", sa.Integer(), nullable=False),
        sa.Column("citations", sa.JSON(), nullable=True),
        sa.Column("theory_level", sa.JSON(), nullable=False),
        sa.Column("theory_level_details", sa.JSON(), nullable=True),
        sa.Column("comments", sa.String(), nullable=True),
        sa.Column("values", MsgpackExt(), nullable=False),
        sa.Column("index", MsgpackExt(), nullable=False),
        sa.Column("external_url", sa.String(), nullable=True),
        sa.Column("doi", sa.String(), nullable=True),
        sa.Column("units", sa.String(), nullable=False),
        sa.Column("values_structure", sa.JSON(), nullable=True, default=lambda: {}),
        sa.ForeignKeyConstraint(["collection_id"], ["collection.id"], ondelete="cascade"),
        sa.PrimaryKeyConstraint("name", "collection_id"),
    )
    # Tighten values_structure to NOT NULL before any rows exist; the table
    # is empty at this point, so no backfill is needed.
    op.alter_column("contributed_values", "values_structure", server_default=None, nullable=False)
    migrate_contributed_values_data()
    op.drop_column("dataset", "contributed_values_data")
    op.drop_column("reaction_dataset", "contributed_values_data")
def downgrade():
    """Restore the legacy JSON columns and drop the normalized table.

    The data migrated by ``upgrade`` is not copied back, so running this on
    production data loses the contributed values.
    """
    for table_name in ("reaction_dataset", "dataset"):
        op.add_column(
            table_name,
            sa.Column(
                "contributed_values",
                postgresql.JSON(astext_type=sa.Text()),
                autoincrement=False,
                nullable=True,
            ),
        )
    op.drop_table("contributed_values")
| {
"repo_name": "psi4/DatenQM",
"path": "qcfractal/alembic/versions/159ba85908fd_add_contributed_values_table.py",
"copies": "2",
"size": "3667",
"license": "bsd-3-clause",
"hash": -723100594814345300,
"line_mean": 33.9238095238,
"line_max": 116,
"alpha_frac": 0.6593946005,
"autogenerated": false,
"ratio": 3.6235177865612647,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5282912387061264,
"avg_score": null,
"num_lines": null
} |
"""add core minus pm column
Revision ID: 011b5659ae29
Revises: cb4bddc0f5f8
Create Date: 2021-02-25 13:53:35.885347
"""
from alembic import op
import sqlalchemy as sa
import rdr_service.model.utils
from rdr_service.participant_enums import PhysicalMeasurementsStatus, QuestionnaireStatus, OrderStatus
from rdr_service.participant_enums import WithdrawalStatus, WithdrawalReason, SuspensionStatus, QuestionnaireDefinitionStatus
from rdr_service.participant_enums import EnrollmentStatus, Race, SampleStatus, OrganizationType, BiobankOrderStatus
from rdr_service.participant_enums import OrderShipmentTrackingStatus, OrderShipmentStatus
from rdr_service.participant_enums import MetricSetType, MetricsKey, GenderIdentity
from rdr_service.model.base import add_table_history_table, drop_table_history_table
from rdr_service.model.code import CodeType
from rdr_service.model.site_enums import SiteStatus, EnrollingStatus, DigitalSchedulingStatus, ObsoleteStatus
# revision identifiers, used by Alembic.
revision = '011b5659ae29'
down_revision = 'cb4bddc0f5f8'
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade function (e.g. ``upgrade_rdr``)."""
    globals()["upgrade_{}".format(engine_name)]()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade function (e.g. ``downgrade_rdr``)."""
    globals()["downgrade_{}".format(engine_name)]()
def upgrade_rdr():
    """Add the ``enrollment_status_core_minus_pm_time`` timestamp to participant_summary."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('participant_summary', sa.Column('enrollment_status_core_minus_pm_time', rdr_service.model.utils.UTCDateTime(), nullable=True))
    # ### end Alembic commands ###
def downgrade_rdr():
    """Remove the ``enrollment_status_core_minus_pm_time`` column again."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('participant_summary', 'enrollment_status_core_minus_pm_time')
    # ### end Alembic commands ###
def upgrade_metrics():
    """No schema changes for the metrics database in this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
def downgrade_metrics():
    """No schema changes for the metrics database in this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/011b5659ae29_add_core_minus_pm_column.py",
"copies": "1",
"size": "1996",
"license": "bsd-3-clause",
"hash": -8149705162655026000,
"line_mean": 32.2666666667,
"line_max": 145,
"alpha_frac": 0.753507014,
"autogenerated": false,
"ratio": 3.609403254972875,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4862910268972875,
"avg_score": null,
"num_lines": null
} |
"""Add cork tables
Revision ID: 39675efc6d59
Revises: 3053811d54d4
Create Date: 2014-10-26 02:10:19.454966
"""
# revision identifiers, used by Alembic.
revision = '39675efc6d59'
down_revision = '3053811d54d4'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the Cork auth tables: ``roles``, ``users`` and ``register``."""

    def account_columns():
        # Columns shared by the ``users`` and ``register`` tables.  A fresh
        # list is built per call because Column objects cannot be attached
        # to more than one table.
        return [
            sa.Column('role', sa.String(128), sa.ForeignKey('roles.role')),
            sa.Column('hash', sa.String(256), nullable=False),
            sa.Column('email_addr', sa.String(128)),
            sa.Column('desc', sa.String(128)),
            sa.Column('creation_date', sa.String(128), nullable=False),
        ]

    op.create_table(
        'roles',
        sa.Column('role', sa.String(128), primary_key=True),
        sa.Column('level', sa.Integer, nullable=False),
    )
    op.create_table(
        'users',
        sa.Column('username', sa.String(128), primary_key=True),
        *account_columns(),
        sa.Column('last_login', sa.String(128), nullable=False),
    )
    op.create_table(
        'register',
        sa.Column('code', sa.String(128), primary_key=True),
        sa.Column('username', sa.String(128), nullable=False),
        *account_columns(),
    )
def downgrade():
    """Drop the Cork auth tables.

    ``users`` and ``register`` both hold a foreign key to ``roles.role``,
    so ``roles`` must be dropped last.  The original revision dropped
    ``roles`` before ``register``, which fails on databases that enforce
    foreign-key constraints.
    """
    op.drop_table('users')
    op.drop_table('register')
    op.drop_table('roles')
| {
"repo_name": "jlutz777/FreeStore",
"path": "alembic/versions/39675efc6d59_add_cork_tables.py",
"copies": "1",
"size": "1485",
"license": "mit",
"hash": 8949251809118152000,
"line_mean": 29.9375,
"line_max": 71,
"alpha_frac": 0.6127946128,
"autogenerated": false,
"ratio": 3.2709251101321586,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.43837197229321584,
"avg_score": null,
"num_lines": null
} |
"""add correlator config detailed tables
Revision ID: 77c082c87844
Revises: 7463268309ab
Create Date: 2021-01-15 21:26:15.737527+00:00
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '77c082c87844'
down_revision = '7463268309ab'
branch_labels = None
depends_on = None
def upgrade():
    """Create the four detailed correlator-config tables.

    Every table references ``correlator_config_file`` by its config hash.
    """
    op.create_table(
        'correlator_config_active_snap',
        sa.Column('config_hash', sa.String(), nullable=False),
        sa.Column('hostname', sa.String(), nullable=False),
        sa.ForeignKeyConstraint(['config_hash'], ['correlator_config_file.config_hash']),
        sa.PrimaryKeyConstraint('config_hash', 'hostname'),
    )
    op.create_table(
        'correlator_config_input_index',
        sa.Column('config_hash', sa.String(), nullable=False),
        sa.Column('correlator_index', sa.Integer(), nullable=False),
        sa.Column('hostname', sa.String(), nullable=False),
        sa.Column('antenna_index_position', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['config_hash'], ['correlator_config_file.config_hash']),
        sa.PrimaryKeyConstraint('config_hash', 'correlator_index'),
    )
    op.create_table(
        'correlator_config_params',
        sa.Column('config_hash', sa.String(), nullable=False),
        sa.Column('parameter', sa.String(), nullable=False),
        sa.Column('value', sa.String(), nullable=False),
        sa.ForeignKeyConstraint(['config_hash'], ['correlator_config_file.config_hash']),
        sa.PrimaryKeyConstraint('config_hash', 'parameter'),
    )
    op.create_table(
        'correlator_config_phase_switch_index',
        sa.Column('config_hash', sa.String(), nullable=False),
        sa.Column('hostname', sa.String(), nullable=False),
        sa.Column('phase_switch_index', sa.Integer(), nullable=False),
        sa.Column('antpol_index_position', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['config_hash'], ['correlator_config_file.config_hash']),
        sa.PrimaryKeyConstraint('config_hash', 'hostname', 'phase_switch_index'),
    )
def downgrade():
    """Drop the detailed correlator-config tables (reverse creation order)."""
    for table_name in (
        'correlator_config_phase_switch_index',
        'correlator_config_params',
        'correlator_config_input_index',
        'correlator_config_active_snap',
    ):
        op.drop_table(table_name)
| {
"repo_name": "HERA-Team/hera_mc",
"path": "alembic/versions/77c082c87844_add_correlator_config_detailed_tables.py",
"copies": "1",
"size": "2323",
"license": "bsd-2-clause",
"hash": 2667599026912523000,
"line_mean": 41.2363636364,
"line_max": 91,
"alpha_frac": 0.6805854498,
"autogenerated": false,
"ratio": 3.5411585365853657,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4721743986385366,
"avg_score": null,
"num_lines": null
} |
"""add correlator control
Revision ID: 9460cc78cb50
Revises: 40a641ef2f52
Create Date: 2018-11-01 17:34:57.968829+00:00
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '9460cc78cb50'
down_revision = '40a641ef2f52'
branch_labels = None
depends_on = None
def upgrade():
    """Create tables tracking correlator control commands, state, and take-data arguments."""
    op.create_table(
        'correlator_control_command',
        sa.Column('time', sa.BigInteger(), nullable=False),
        sa.Column('command', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('time', 'command'),
    )
    op.create_table(
        'correlator_control_state',
        sa.Column('time', sa.BigInteger(), nullable=False),
        sa.Column('state_type', sa.String(), nullable=False),
        sa.Column('state', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint('time', 'state_type'),
    )
    op.create_table(
        'correlator_take_data_arguments',
        sa.Column('time', sa.BigInteger(), nullable=False),
        sa.Column('command', sa.String(), nullable=False),
        sa.Column('starttime_sec', sa.BigInteger(), nullable=False),
        sa.Column('starttime_ms', sa.Integer(), nullable=False),
        sa.Column('duration', sa.Float(), nullable=False),
        sa.Column('acclen_spectra', sa.Integer(), nullable=False),
        sa.Column('integration_time', sa.Float(), nullable=False),
        sa.Column('tag', sa.String(), nullable=False),
        # Composite FK back to the control command that triggered data taking.
        sa.ForeignKeyConstraint(
            ['time', 'command'],
            ['correlator_control_command.time', 'correlator_control_command.command'],
        ),
        sa.PrimaryKeyConstraint('time', 'command'),
    )
def downgrade():
    """Drop the correlator-control tables; the FK-dependent table goes first."""
    for table_name in (
        'correlator_take_data_arguments',
        'correlator_control_state',
        'correlator_control_command',
    ):
        op.drop_table(table_name)
| {
"repo_name": "HERA-Team/Monitor_and_Control",
"path": "alembic/versions/9460cc78cb50_add_correlator_control.py",
"copies": "2",
"size": "2134",
"license": "bsd-2-clause",
"hash": 330969051075695400,
"line_mean": 41.68,
"line_max": 86,
"alpha_frac": 0.565135895,
"autogenerated": false,
"ratio": 4.151750972762646,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5716886867762646,
"avg_score": null,
"num_lines": null
} |
"""Add county and zip columns to DetachedAwardProcurement table and zip columns to PublishedAwardFinancialAssistance table
Revision ID: 605bcaf99c01
Revises: 0974293b64c3
Create Date: 2017-12-14 12:34:21.808704
"""
# revision identifiers, used by Alembic.
revision = '605bcaf99c01'
down_revision = '0974293b64c3'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade (e.g. ``upgrade_data_broker``)."""
    globals()["upgrade_{}".format(engine_name)]()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade (e.g. ``downgrade_data_broker``)."""
    globals()["downgrade_{}".format(engine_name)]()
def upgrade_data_broker():
    """Add county/zip columns to detached_award_procurement and zip/state
    columns to published_award_financial_assistance (all nullable Text)."""
    procurement_columns = (
        'legal_entity_county_code',
        'legal_entity_county_name',
        'legal_entity_zip5',
        'legal_entity_zip_last4',
        'place_of_perform_county_co',
        'place_of_performance_zip5',
        'place_of_perform_zip_last4',
    )
    for column_name in procurement_columns:
        op.add_column('detached_award_procurement', sa.Column(column_name, sa.Text(), nullable=True))

    assistance_columns = (
        'place_of_perfor_state_code',
        'place_of_performance_zip5',
        'place_of_perform_zip_last4',
    )
    for column_name in assistance_columns:
        op.add_column('published_award_financial_assistance', sa.Column(column_name, sa.Text(), nullable=True))
def downgrade_data_broker():
    """Drop the columns added by ``upgrade_data_broker`` (same reverse order)."""
    assistance_columns = (
        'place_of_performance_zip5',
        'place_of_perform_zip_last4',
        'place_of_perfor_state_code',
    )
    for column_name in assistance_columns:
        op.drop_column('published_award_financial_assistance', column_name)

    procurement_columns = (
        'place_of_performance_zip5',
        'place_of_perform_zip_last4',
        'place_of_perform_county_co',
        'legal_entity_zip_last4',
        'legal_entity_zip5',
        'legal_entity_county_name',
        'legal_entity_county_code',
    )
    for column_name in procurement_columns:
        op.drop_column('detached_award_procurement', column_name)
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/605bcaf99c01_add_county_and_zip_columns_to_.py",
"copies": "1",
"size": "2771",
"license": "cc0-1.0",
"hash": 3821535990633558500,
"line_mean": 46.775862069,
"line_max": 124,
"alpha_frac": 0.7278960664,
"autogenerated": false,
"ratio": 3.1596351197263397,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.43875311861263394,
"avg_score": null,
"num_lines": null
} |
"""add crash table
Revision ID: 8c237d2acbc4
Revises: eec6e5667b87
Create Date: 2020-12-01 15:04:15.172873
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '8c237d2acbc4'
down_revision = 'eec6e5667b87'
branch_labels = None
depends_on = None
def upgrade():
    """Create the crash table, keyed by snapshot (time, trial_id) plus crash_key."""
    columns = [
        sa.Column('time', sa.Integer(), nullable=False),
        sa.Column('trial_id', sa.Integer(), nullable=False),
        sa.Column('crash_key', sa.String(), nullable=False),
        sa.Column('crash_type', sa.String(), nullable=False),
        sa.Column('crash_address', sa.String(), nullable=False),
        sa.Column('crash_state', sa.String(), nullable=False),
        sa.Column('crash_stacktrace', sa.String(), nullable=False),
        sa.Column('crash_testcase', sa.String(), nullable=False),
    ]
    op.create_table(
        'crash',
        *columns,
        # Each crash row belongs to a snapshot of a trial.
        sa.ForeignKeyConstraint(['time', 'trial_id'], ['snapshot.time', 'snapshot.trial_id']),
        sa.PrimaryKeyConstraint('time', 'trial_id', 'crash_key'),
    )
def downgrade():
    """Remove the crash table."""
    op.drop_table('crash')
| {
"repo_name": "google/fuzzbench",
"path": "database/alembic/versions/8c237d2acbc4_add_crash_table.py",
"copies": "1",
"size": "1224",
"license": "apache-2.0",
"hash": -5435424850881376000,
"line_mean": 30.3846153846,
"line_max": 92,
"alpha_frac": 0.6674836601,
"autogenerated": false,
"ratio": 3.3534246575342466,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4520908317634246,
"avg_score": null,
"num_lines": null
} |
"""Add create_at and updated_at to all models
Revision ID: 150e17e5c774
Revises: 734b944fd3a7
Create Date: 2016-08-16 21:31:48.166801
"""
# revision identifiers, used by Alembic.
revision = '150e17e5c774'
down_revision = '734b944fd3a7'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add nullable created_at/updated_at timestamps to every auto_* table."""
    for table_name in ('auto_advertisement', 'auto_brand', 'auto_complectation', 'auto_model'):
        for column_name in ('created_at', 'updated_at'):
            op.add_column(table_name, sa.Column(column_name, sa.DateTime(), nullable=True))
def downgrade():
    """Drop the timestamp columns in the reverse order they were added."""
    for table_name in ('auto_model', 'auto_complectation', 'auto_brand', 'auto_advertisement'):
        for column_name in ('updated_at', 'created_at'):
            op.drop_column(table_name, column_name)
| {
"repo_name": "beslave/auto-collector",
"path": "migrations/versions/150e17e5c774_add_create_at_and_updated_at_to_all_.py",
"copies": "1",
"size": "1692",
"license": "mit",
"hash": 7749794242053784000,
"line_mean": 39.2857142857,
"line_max": 94,
"alpha_frac": 0.6897163121,
"autogenerated": false,
"ratio": 3.1924528301886794,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.43821691422886794,
"avg_score": null,
"num_lines": null
} |
""" Add create_date field to product and set value for existing products.
Revision ID: 67607ed6ab04
Revises: b9dc56c47ef4
Create Date: 2017-06-27 22:46:44.079629
"""
from datetime import datetime
from alembic import op
from sqlalchemy import func
from sqlalchemy.sql import text
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '67607ed6ab04'
down_revision = 'b9dc56c47ef4'
def upgrade():
    """Add ``product.create_date`` and backfill it for existing rows.

    Backfill preference, in order:
      1. earliest purchase-order date referencing the product,
      2. else earliest sales-order date,
      3. else the time this migration runs.
    Finally the column is made NOT NULL.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('product', sa.Column('create_date', sa.DateTime(), nullable=True))
    # Earliest purchase-order date per product.
    results = op.get_bind().execute(text("""
        select prd.id, min(po.order_date) from purchase_order po, product prd, purchase_order_line pol
        where pol.product_id = prd.id and po.id = pol.purchase_order_id
        group by prd.id
    """)).fetchall()
    for r in results:
        sup_id = r[0]
        po_date = r[1]
        # NOTE(review): SQL built with str.format — values come from our own
        # DB (ids/dates), not user input, but bind parameters would be safer.
        sql = "update product set create_date = '{0}' where id={1}".format(po_date, sup_id)
        op.get_bind().execute(text(sql))
    # Earliest sales-order date, used only where no purchase order set a date.
    results = op.get_bind().execute(text("""
        select p.id, min(so.order_date) from sales_order so, sales_order_line sol,
        product p where so.id = sol.sales_order_id and
        sol.product_id = p.id group by p.id;
    """)).fetchall()
    for r in results:
        sup_id = r[0]
        so_date = r[1]
        sql = "update product set create_date = '{0}' where id={1} and create_date is null".format(so_date, sup_id)
        op.get_bind().execute(text(sql))
    # Products with no orders at all fall back to "now".
    op.get_bind().execute(text("update product set create_date = '{0}' where create_date is null".format(datetime.now())))
    op.alter_column('product', 'create_date', existing_type=sa.DateTime(), nullable=False)
    # ### end Alembic commands ###
def downgrade():
    """Drop the backfilled ``create_date`` column (the backfill data is lost)."""
    op.drop_column('product', 'create_date')
| {
"repo_name": "betterlife/psi",
"path": "psi/migrations/versions/42_67607ed6ab04_.py",
"copies": "2",
"size": "1904",
"license": "mit",
"hash": -9000880527301407000,
"line_mean": 34.2592592593,
"line_max": 122,
"alpha_frac": 0.655987395,
"autogenerated": false,
"ratio": 3.299826689774697,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49558140847746973,
"avg_score": null,
"num_lines": null
} |
""" Add create date field to supplier model, make role.is_system and shipping.type_id not null.
Revision ID: e1f806a716b9
Revises: 052340beb7b5
Create Date: 2017-05-25 08:12:35.839903
"""
# revision identifiers, used by Alembic.
revision = 'e1f806a716b9'
down_revision = '052340beb7b5'
from alembic import op
import sqlalchemy as sa
from sqlalchemy import func
from sqlalchemy.sql import text
from datetime import datetime
def upgrade():
    """Tighten role.is_system / shipping.type_id to NOT NULL and add supplier.create_date.

    ``create_date`` is backfilled per supplier from, in order of preference:
      1. earliest sales-order date (via the supplier's products),
      2. else earliest purchase-order date,
      3. else the time this migration runs,
    and is then made NOT NULL.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('role', 'is_system', existing_type=sa.BOOLEAN(),
                    nullable=False, existing_server_default=sa.text(u'true'))
    op.alter_column('shipping', 'type_id', existing_type=sa.INTEGER(), nullable=False)
    op.add_column('supplier', sa.Column('create_date', sa.DateTime(), nullable=True))
    # Earliest sales-order date per supplier.
    results = op.get_bind().execute(text("""
        select sup.id, min(so.order_date) from sales_order so, supplier sup,
        sales_order_line sol, product p where so.id = sol.sales_order_id and
        sol.product_id = p.id and p.supplier_id = sup.id group by sup.id;
    """)).fetchall()
    for r in results:
        sup_id = r[0]
        so_date = r[1]
        # NOTE(review): SQL built with str.format — values come from our own
        # DB (ids/dates), not user input, but bind parameters would be safer.
        op.get_bind().execute(text("update supplier set create_date = '{0}' where id={1}".format(so_date, sup_id)))
    # Earliest purchase-order date, only where no sales order set a date.
    results = op.get_bind().execute(text("""
        select sup.id, min(po.order_date) from purchase_order po, supplier sup
        where po.supplier_id = sup.id group by sup.id
    """)).fetchall()
    for r in results:
        sup_id = r[0]
        po_date = r[1]
        op.get_bind().execute(text("update supplier set create_date = '{0}' where id={1} and create_date is null".format(po_date, sup_id)))
    # Suppliers with no orders at all fall back to "now".
    op.get_bind().execute(text("update supplier set create_date = '{0}' where create_date is null".format(datetime.now())))
    op.alter_column('supplier', 'create_date', existing_type=sa.DateTime(), nullable=False)
    # ### end Alembic commands ###
def downgrade():
    """Revert: drop supplier.create_date and relax the NOT NULL constraints."""
    op.drop_column('supplier', 'create_date')
    op.alter_column('shipping', 'type_id', existing_type=sa.INTEGER(), nullable=True)
    op.alter_column(
        'role',
        'is_system',
        existing_type=sa.BOOLEAN(),
        nullable=True,
        existing_server_default=sa.text(u'true'),
    )
| {
"repo_name": "betterlife/psi",
"path": "psi/migrations/versions/40_e1f806a716b9_.py",
"copies": "2",
"size": "2397",
"license": "mit",
"hash": -3942635319918389000,
"line_mean": 37.6612903226,
"line_max": 139,
"alpha_frac": 0.6470588235,
"autogenerated": false,
"ratio": 3.4,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5047058823499999,
"avg_score": null,
"num_lines": null
} |
"""Add created_by, end_time, start_time, and url to events
Revision ID: d4d0517e8385
Revises: 5f6cb6217ac5
Create Date: 2017-05-16 11:35:58.884254
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd4d0517e8385'
down_revision = '5f6cb6217ac5'
branch_labels = None
depends_on = None
def upgrade():
    """Add created_by, end_time, start_time and url columns to ``event``."""
    new_columns = (
        sa.Column('created_by', sa.String(), nullable=True),
        sa.Column('end_time', sa.TIMESTAMP(timezone=True), nullable=True),
        sa.Column('start_time', sa.TIMESTAMP(timezone=True), nullable=True),
        sa.Column('url', sa.String(), nullable=True),
    )
    for column in new_columns:
        op.add_column('event', column)
def downgrade():
    """Drop the event columns in the reverse order they were added."""
    for column_name in ('url', 'start_time', 'end_time', 'created_by'):
        op.drop_column('event', column_name)
| {
"repo_name": "recursecenter/RSVPBot",
"path": "migrations/versions/d4d0517e8385_.py",
"copies": "1",
"size": "1079",
"license": "mit",
"hash": -6212852838794644000,
"line_mean": 30.7352941176,
"line_max": 95,
"alpha_frac": 0.6756255792,
"autogenerated": false,
"ratio": 3.164222873900293,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4339848453100293,
"avg_score": null,
"num_lines": null
} |
"""add created_utc index to mod_actions
Revision ID: df498825ec28
Revises: e566e8e209d1
Create Date: 2018-02-10 14:53:46.146167
"""
# revision identifiers, used by Alembic.
revision = 'df498825ec28'
down_revision = 'e566e8e209d1'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade (development/test/production)."""
    globals()["upgrade_{}".format(engine_name)]()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade (development/test/production)."""
    globals()["downgrade_{}".format(engine_name)]()
def _create_created_utc_index():
    # Shared implementation: the index is identical across all environments.
    op.create_index(op.f('ix_mod_actions_created_utc_index'), 'mod_actions', ['created_utc'], unique=False)


def _drop_created_utc_index():
    # Shared implementation of the downgrade for all environments.
    op.drop_index(op.f('ix_mod_actions_created_utc_index'), table_name='mod_actions')


def upgrade_development():
    """Add the created_utc index on mod_actions (development DB)."""
    _create_created_utc_index()


def downgrade_development():
    """Drop the created_utc index on mod_actions (development DB)."""
    _drop_created_utc_index()


def upgrade_test():
    """Add the created_utc index on mod_actions (test DB)."""
    _create_created_utc_index()


def downgrade_test():
    """Drop the created_utc index on mod_actions (test DB)."""
    _drop_created_utc_index()


def upgrade_production():
    """Add the created_utc index on mod_actions (production DB)."""
    _create_created_utc_index()


def downgrade_production():
    """Drop the created_utc index on mod_actions (production DB)."""
    _drop_created_utc_index()
| {
"repo_name": "c4fcm/CivilServant",
"path": "alembic/versions/df498825ec28_add_created_utc_index_to_mod_actions.py",
"copies": "1",
"size": "1209",
"license": "mit",
"hash": 7861390526679540000,
"line_mean": 27.1162790698,
"line_max": 107,
"alpha_frac": 0.7088502895,
"autogenerated": false,
"ratio": 2.9851851851851854,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4194035474685185,
"avg_score": null,
"num_lines": null
} |
"""Add CSV file format
Revision ID: 52c2d8ff8e6f
Revises: 5880ee6542b
Create Date: 2013-11-05 13:10:48.463890
"""
# revision identifiers, used by Alembic.
revision = '52c2d8ff8e6f'
down_revision = '5880ee6542b'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add the 'csv' value to the PostgreSQL ``filetype`` enum.

    Requires PostgreSQL >= 9.1 (ALTER TYPE ... ADD VALUE); 9.3+ additionally
    supports IF NOT EXISTS, which makes the migration re-runnable.
    ALTER TYPE ... ADD VALUE cannot run inside a transaction block, hence the
    explicit COMMIT before it.
    """
    # In PostgreSQL < 9.1 there was no ALTER TYPE for enums, so it would have
    # been something like:
    #
    #     ALTER TABLE foo ALTER COLUMN bar TYPE new_type USING bar::text::new_type;
    #
    # However, all my installations are PostgreSQL >= 9.1 and I think the USING
    # syntax is PostgreSQL-specific, so let's ignore that. It would also come
    # with all the hassle of moving old column values into the new column.
    context = op.get_context()
    if context.bind.dialect.name == 'postgresql':
        if context.bind.dialect.server_version_info >= (9, 3):
            op.execute('COMMIT')
            op.execute("ALTER TYPE filetype ADD VALUE IF NOT EXISTS 'csv'")
            return
        if context.bind.dialect.server_version_info >= (9, 1):
            op.execute('COMMIT')
            op.execute("ALTER TYPE filetype ADD VALUE 'csv'")
            return
    raise Exception('Sorry, only PostgreSQL >= 9.1 is supported by this migration')
def downgrade():
    # Intentionally a no-op: PostgreSQL provides no simple way to remove a
    # value from an enum type short of rebuilding the type, and an extra
    # unused enum value is harmless.
    pass
| {
"repo_name": "varda/varda",
"path": "alembic/versions/52c2d8ff8e6f_add_csv_file_format.py",
"copies": "2",
"size": "1274",
"license": "mit",
"hash": -9028606228423017000,
"line_mean": 30.85,
"line_max": 83,
"alpha_frac": 0.6609105181,
"autogenerated": false,
"ratio": 3.64,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0027238612836438924,
"num_lines": 40
} |
'''Add currencies to the database'''
import os
import logging
import optparse
from django.contrib.contenttypes.management import update_all_contenttypes
from django.core.management.base import copy_helper, CommandError, BaseCommand
from django.utils.importlib import import_module
from ccy import currencydb
from jflow.db.instdata.models import DataId
class Command(BaseCommand):
    """Management command that seeds a DataId row for every known currency.

    Python 2-era code (``unicode``, old-style ``option_list`` options).
    """
    option_list = BaseCommand.option_list + (
        optparse.make_option(
            "-d", "--defaultvendor",
            dest="defaultvendor",
            action="append",  # collected into a list; only the first entry is used below
            help="Default Vendor"
        ),
    )
    help = "Add currencies to database"

    def handle(self, *args, **options):
        """Create (or update) a ``DataId`` for each currency in ccy's currency db."""
        # NOTE(review): ``settings`` is never referenced below — presumably
        # imported for its configuration side effects; confirm before removing.
        from jflow.conf import settings
        # Map the string --verbosity option (0/1/2) onto a logging level.
        level = {
            '0': logging.WARN,
            '1': logging.INFO,
            '2': logging.DEBUG
        }[options.get('verbosity', '1')]
        logging.basicConfig(level=level, format="%(name)s: %(levelname)s: %(message)s")
        logger = logging.getLogger('addccy')
        # --defaultvendor is appended to a list; take the first value if given.
        dv = options.get('defaultvendor', None)
        if dv:
            dv = unicode(dv[0])
        ccys = currencydb()
        for c in ccys.values():
            # NOTE(review): ``id`` shadows the builtin here.
            id,created = DataId.objects.get_or_create(code = c.code,
                country = c.default_country,
                default_vendor = dv,
                name = '%s Spot Rate' % c.as_cross('/'),
                tags = 'forex currency spot index')
            # Register vendor tickers: Bloomberg ("blb") and ECB ("ecb").
            id.add_vendor('blb','%s Curncy' % c.code)
            id.add_vendor('ecb',c.code)
            if created:
                logger.info("Created currency %s" % id)
            else:
                logger.info("Modified currency %s" % id)
| {
"repo_name": "lsbardel/flow",
"path": "flow/db/instdata/management/commands/addccy.py",
"copies": "1",
"size": "1885",
"license": "bsd-3-clause",
"hash": 2871136250132723000,
"line_mean": 34.5849056604,
"line_max": 94,
"alpha_frac": 0.5140583554,
"autogenerated": false,
"ratio": 4.488095238095238,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.05268338930616944,
"num_lines": 53
} |
""" add current db state
Revision ID: 2fa92ef6570b
Revises: None
Create Date: 2013-09-16 13:24:47.579179
"""
# revision identifiers, used by Alembic.
# Alembic revision identifiers: first migration in the chain (no parent).
revision = '2fa92ef6570b'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the initial schema: ``principal``, ``content`` and ``file``.

    The three tables carry no foreign keys to each other, so their
    creation order is irrelevant.
    """
    op.create_table(
        'principal',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('active', sa.Boolean(), nullable=True),
        sa.Column('email', sa.Unicode(length=100), nullable=False),
        sa.Column('password', sa.Unicode(length=100), nullable=True),
        sa.Column('firstname', sa.Unicode(), nullable=True),
        sa.Column('lastname', sa.Unicode(), nullable=True),
        sa.Column('creation_date', sa.DateTime(), nullable=False),
        sa.Column('last_login_date', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'),
    )
    op.create_table(
        'content',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('type', sa.String(length=30), nullable=False),
        sa.Column('owner', sa.Unicode(), nullable=True),
        sa.Column('title', sa.Unicode(), nullable=True),
        sa.Column('description', sa.UnicodeText(), nullable=True),
        sa.Column('creation_date', sa.DateTime(), nullable=False),
        sa.Column('modification_date', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'file',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('path', sa.String(length=128), nullable=False),
        sa.Column('filename', sa.Unicode(), nullable=True),
        sa.Column('mimetype', sa.String(), nullable=True),
        sa.Column('size', sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('path'),
    )
def downgrade():
    """Drop every table created by this migration."""
    for table in ('principal', 'content', 'file'):
        op.drop_table(table)
| {
"repo_name": "pyfidelity/rest-seed",
"path": "backend/backrest/migrations/versions/2fa92ef6570b_current_db_state.py",
"copies": "1",
"size": "1899",
"license": "bsd-2-clause",
"hash": -4294951573777660400,
"line_mean": 34.1666666667,
"line_max": 70,
"alpha_frac": 0.6377040548,
"autogenerated": false,
"ratio": 3.6379310344827585,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47756350892827587,
"avg_score": null,
"num_lines": null
} |
"""Add current request to thread local storage.
API:
- :class:`ThreadLocalMiddleware`: add this to MIDDLEWARE_CLASSES
- :func:`get_current_request`
- :func:`get_current_user`: use as model field default
This was added with a single purpose in mind: to automate the setting of
created-by and updated-by model fields in a request context. E.g.::
class MyModel(models.Model):
created_by = models.ForeignKey('auth.User', default=get_current_user)
updated_by = models.ForeignKey('auth.User', default=get_current_user)
@receiver(pre_save, sender=MyModel)
def set_updated_by(sender, instance, **kwargs):
if not instance.updated_by:
user = get_current_user()
if user is not None:
instance.updated_by = user
Note that in a non-request context such as a management command, the
current request won't be saved to thread local storage and both API
functions will return ``None``, hence the check for ``None`` in the
receiver above.
This approach ensures the created-by and updated-by fields are set on
the relevant models regardless of which views they're used in.
"""
import logging
import threading
from .middleware import MiddlewareBase
log = logging.getLogger(__name__)
class _ThreadLocalStorage(threading.local):
def get(self, name, default=None):
if hasattr(self, name):
return getattr(self, name)
log.warning('%s has not been saved to thread local storage', name)
return default
def put(self, name, value):
setattr(self, name, value)
def remove(self, name):
if hasattr(self, name):
delattr(self, name)
_thread_local_storage = _ThreadLocalStorage()
def get_current_request(default=None):
    """Return the request stored for the current thread, or *default*.

    Don't use this unless you have a REALLY good reason.
    """
    return _thread_local_storage.get('request', default)
def get_current_user(default=None):
    """Return the user of the current thread's request, or *default*.

    Don't use this unless you have a REALLY good reason.
    """
    request = _thread_local_storage.get('request')
    return default if request is None else request.user
class ThreadLocalMiddleware(MiddlewareBase):
    """Middleware that exposes the current request via thread local storage.

    The request is stored before the view runs and removed afterwards —
    including on exceptions — so a stale request never leaks into the
    next request served by the same thread.
    """

    def before_view(self, request):
        _thread_local_storage.put('request', request)

    def after_view(self, request, response):
        _thread_local_storage.remove('request')

    def process_exception(self, request, exception):
        # Also clean up when the view raised instead of returning.
        _thread_local_storage.remove('request')
| {
"repo_name": "wylee/django-arcutils",
"path": "arcutils/threadlocals.py",
"copies": "2",
"size": "2444",
"license": "mit",
"hash": -2283542959270811600,
"line_mean": 28.4457831325,
"line_max": 77,
"alpha_frac": 0.6857610475,
"autogenerated": false,
"ratio": 3.9934640522875817,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 83
} |
"""add currenttbl
Revision ID: 7497821c0290
Revises: ed878002d0f3
Create Date: 2019-07-24 16:25:03.095429
"""
# revision identifiers, used by Alembic.
# Alembic revision identifiers: this migration's id and its parent revision.
revision = '7497821c0290'
down_revision = 'ed878002d0f3'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create ParameterTbl, UnitsTbl and CurrentTbl.

    ParameterTbl and UnitsTbl are plain id/name lookup tables.
    CurrentTbl references both of them (plus the pre-existing
    AnalysisTbl) and stores an auto-updating timestamp with a
    value/error pair.
    """
    for lookup_table in ('ParameterTbl', 'UnitsTbl'):
        op.create_table(
            lookup_table,
            sa.Column('id', sa.Integer, primary_key=True),
            sa.Column('name', sa.String(40)))
    op.create_table(
        'CurrentTbl',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('timestamp', sa.TIMESTAMP,
                  server_default=sa.text('CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP')),
        sa.Column('parameterID', sa.Integer, sa.ForeignKey('ParameterTbl.id')),
        sa.Column('analysisID', sa.Integer, sa.ForeignKey('AnalysisTbl.id')),
        sa.Column('unitsID', sa.Integer, sa.ForeignKey('UnitsTbl.id')),
        sa.Column('value', sa.Float(32)),
        sa.Column('error', sa.Float(32)))
def downgrade():
    """Drop the tables, the FK-bearing CurrentTbl first."""
    for table in ('CurrentTbl', 'ParameterTbl', 'UnitsTbl'):
        op.drop_table(table)
| {
"repo_name": "UManPychron/pychron",
"path": "alembic_dvc/versions/7497821c0290_add_currenttbl.py",
"copies": "2",
"size": "1338",
"license": "apache-2.0",
"hash": 3609444669933146600,
"line_mean": 33.3076923077,
"line_max": 103,
"alpha_frac": 0.5822122571,
"autogenerated": false,
"ratio": 3.5871313672922254,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5169343624392225,
"avg_score": null,
"num_lines": null
} |
"""add custom attribute table
Revision ID: 512c71e4d93b
Revises: 3a4ce23d81b0
Create Date: 2014-10-08 22:44:20.424247
"""
# revision identifiers, used by Alembic.
# Alembic revision identifiers: this migration's id and its parent revision.
# NOTE(review): the module docstring says "Revises: 3a4ce23d81b0" but the
# code points at 36950678299f — the value below is what Alembic uses.
revision = '512c71e4d93b'
down_revision = '36950678299f'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the custom attribute definition/value tables and their indexes."""

    def _common_columns():
        # Build fresh Column objects per table; SQLAlchemy Column
        # instances cannot be shared between tables.
        return [
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('modified_by_id', sa.Integer(), nullable=True),
            sa.Column('context_id', sa.Integer(), nullable=True),
            sa.Column('created_at', sa.DateTime(), nullable=True),
            sa.Column('updated_at', sa.DateTime(), nullable=True),
        ]

    op.create_table(
        'custom_attribute_definitions',
        *(_common_columns() + [
            sa.Column('title', sa.String(length=250), nullable=False),
            sa.Column('helptext', sa.String(length=250), nullable=False),
            sa.Column('placeholder', sa.String(length=250), nullable=True),
            sa.Column('definition_type', sa.String(length=250), nullable=False),
            sa.Column('attribute_type', sa.String(length=250), nullable=False),
            sa.Column('multi_choice_options', sa.Text(), nullable=True),
            sa.Column('mandatory', sa.Boolean(), nullable=True),
            sa.PrimaryKeyConstraint('id'),
        ]))
    op.create_index('ix_custom_attributes_title',
                    'custom_attribute_definitions', ['title'], unique=False)
    op.create_table(
        'custom_attribute_values',
        *(_common_columns() + [
            sa.Column('custom_attribute_id', sa.Integer(), nullable=False),
            sa.Column('attributable_id', sa.Integer(), nullable=True),
            sa.Column('attributable_type', sa.String(length=250), nullable=True),
            sa.Column('attribute_value', sa.Text(), nullable=True),
            sa.ForeignKeyConstraint(['custom_attribute_id'],
                                    ['custom_attribute_definitions.id']),
            sa.PrimaryKeyConstraint('id'),
        ]))
    op.create_index('ix_custom_attributes_attributable',
                    'custom_attribute_values',
                    ['attributable_id', 'attributable_type'], unique=False)
def downgrade():
    """Drop both tables; the FK constraint must go first so the parent
    (definitions) table can be removed before the child (values)."""
    op.drop_constraint('custom_attribute_values_ibfk_1',
                       'custom_attribute_values', type_='foreignkey')
    for table in ('custom_attribute_definitions', 'custom_attribute_values'):
        op.drop_table(table)
| {
"repo_name": "vladan-m/ggrc-core",
"path": "src/ggrc/migrations/versions/20141008224420_512c71e4d93b_add_custom_attribute_table.py",
"copies": "2",
"size": "2323",
"license": "apache-2.0",
"hash": 2624945880784460300,
"line_mean": 42.0185185185,
"line_max": 137,
"alpha_frac": 0.7021093414,
"autogenerated": false,
"ratio": 3.38136826783115,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.508347760923115,
"avg_score": null,
"num_lines": null
} |
"""Add custom read permission table..
Revision ID: 21a2938dd9f5
Revises: 7525fd3b67d5
Create Date: 2018-02-10 16:20:52.218339
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
# Alembic revision identifiers: this migration's id and its parent revision.
revision = '21a2938dd9f5'
down_revision = '7525fd3b67d5'
def upgrade():
    """Rework page permissions: add timestamps and a custom-read flag,
    drop the old ``permission`` column and rename the table."""
    for column in (sa.Column('created', sa.DateTime(), nullable=True),
                   sa.Column('modified', sa.DateTime(), nullable=True)):
        op.add_column('page_permission', column)
    op.add_column('page', sa.Column('custom_read_permission', sa.Boolean(),
                                    nullable=True))
    op.drop_column('page_permission', 'permission')
    op.rename_table('page_permission', 'page_read_permission')
def downgrade():
    """Reverse the upgrade: restore the old table name and columns."""
    op.rename_table('page_read_permission', 'page_permission')
    op.add_column(
        'page_permission',
        sa.Column('permission', mysql.INTEGER(display_width=11),
                  autoincrement=False, nullable=True))
    for name in ('modified', 'created'):
        op.drop_column('page_permission', name)
    op.drop_column('page', 'custom_read_permission')
| {
"repo_name": "viaict/viaduct",
"path": "migrations/versions/2018_02_10_21a2938dd9f5_add_custom_read_permission_table.py",
"copies": "1",
"size": "1238",
"license": "mit",
"hash": -7385393051225635000,
"line_mean": 32.4594594595,
"line_max": 75,
"alpha_frac": 0.6494345719,
"autogenerated": false,
"ratio": 3.63049853372434,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.977993310562434,
"avg_score": 0,
"num_lines": 37
} |
"""add data reduction tagging
Revision ID: 9b63a03f7ee
Revises: 30db613c85f8
Create Date: 2014-06-30 14:40:44.103815
"""
# revision identifiers, used by Alembic.
# Alembic revision identifiers: this migration's id and its parent revision.
revision = '9b63a03f7ee'
down_revision = '30db613c85f8'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the data-reduction tag tables and link analyses to them."""
    op.create_table(
        'proc_DataReductionTagTable',
        sa.Column('id', sa.INTEGER, primary_key=True),
        sa.Column('name', sa.String(140)),
        sa.Column('create_date', sa.DateTime),
        sa.Column('comment', sa.BLOB),
        sa.Column('user_id', sa.INTEGER, sa.ForeignKey('gen_UserTable.id')),
        mysql_engine='InnoDB')
    op.create_table(
        'proc_DataReductionTagSetTable',
        sa.Column('id', sa.INTEGER, primary_key=True),
        sa.Column('tag_id', sa.INTEGER,
                  sa.ForeignKey('proc_DataReductionTagTable.id')),
        sa.Column('analysis_id', sa.INTEGER,
                  sa.ForeignKey('meas_AnalysisTable.id')),
        sa.Column('selected_histories_id', sa.INTEGER,
                  sa.ForeignKey('proc_SelectedHistoriesTable.id')),
        mysql_engine='InnoDB')
    op.add_column(
        'meas_AnalysisTable',
        sa.Column('data_reduction_tag_id', sa.INTEGER,
                  sa.ForeignKey('proc_DataReductionTagTable.id')))
def downgrade():
    """Drop every FK constraint first, then the new column and the tag tables."""
    constraints = (
        ('proc_datareductiontagtable_ibfk_1', 'proc_datareductiontagtable'),
        ('proc_datareductiontagsettable_ibfk_1', 'proc_datareductiontagsettable'),
        ('proc_datareductiontagsettable_ibfk_2', 'proc_datareductiontagsettable'),
        ('proc_datareductiontagsettable_ibfk_3', 'proc_datareductiontagsettable'),
        ('meas_analysistable_ibfk_8', 'meas_analysistable'),
    )
    for constraint_name, table_name in constraints:
        op.drop_constraint(constraint_name, table_name, 'foreignkey')
    op.drop_column('meas_analysistable', 'data_reduction_tag_id')
    op.drop_table('proc_DataReductionTagTable')
    op.drop_table('proc_DataReductionTagSetTable')
| {
"repo_name": "USGSDenverPychron/pychron",
"path": "migration/versions/9b63a03f7ee_add_data_reduction_tagging.py",
"copies": "1",
"size": "2356",
"license": "apache-2.0",
"hash": 322931549678670140,
"line_mean": 38.9322033898,
"line_max": 116,
"alpha_frac": 0.5751273345,
"autogenerated": false,
"ratio": 3.97972972972973,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.505485706422973,
"avg_score": null,
"num_lines": null
} |
"""Add data required for analytics
Revision ID: 31ded1f6ad6
Revises: f1ce7950ae8
Create Date: 2014-01-27 19:33:24.306250
"""
# revision identifiers, used by Alembic.
# Alembic revision identifiers: this migration's id and its parent revision.
revision = '31ded1f6ad6'
down_revision = 'f1ce7950ae8'
from alembic import op
import sqlalchemy as sa
import sqlalchemy.sql as sql
metadata = sa.MetaData()
# Lightweight table handle used for the data backfill in upgrade(); only
# the columns the migration reads or writes are declared here.
uue = sa.Table('UserUsedExperiment', metadata,
    sa.Column('id', sa.Integer()),
    sa.Column('start_date', sa.DateTime()),
    sa.Column('start_date_micro', sa.Integer()),
    sa.Column('end_date', sa.DateTime()),
    sa.Column('end_date_micro', sa.Integer()),
    # New fields
    sa.Column('start_date_date', sa.Date()),
    sa.Column('start_date_weekday', sa.Integer()),
    sa.Column('start_date_hour', sa.Integer()),
    sa.Column('session_time_micro', sa.Integer()),
    sa.Column('session_time_seconds', sa.Integer()),
)
def upgrade():
    """Add analytics columns and indexes, then backfill them.

    For every existing use row, derive the date, weekday and hour of the
    session start, plus (when an end date exists) the session duration
    in microseconds and whole seconds.

    BUG FIX: the original duration used ``delta.seconds``, which ignores
    ``delta.days`` and therefore under-counted any session longer than
    24 hours; the full day component is now included.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column(u'UserUsedExperiment', sa.Column('start_date_date', sa.Date(), nullable=True))
    op.add_column(u'UserUsedExperiment', sa.Column('start_date_weekday', sa.Integer(), nullable=True))
    op.add_column(u'UserUsedExperiment', sa.Column('start_date_hour', sa.Integer(), nullable=True))
    op.add_column(u'UserUsedExperiment', sa.Column('session_time_micro', sa.BigInteger(), nullable=True))
    op.add_column(u'UserUsedExperiment', sa.Column('session_time_seconds', sa.Integer(), nullable=True))
    ### end Alembic commands ###
    op.create_index(u'idx_UserUsedExperiment_start_date_date', u'UserUsedExperiment', ['start_date_date', ])
    op.create_index(u'idx_UserUsedExperiment_start_date_weekday', u'UserUsedExperiment', ['start_date_weekday',])
    op.create_index(u'idx_UserUsedExperiment_session_time_micro', u'UserUsedExperiment', ['session_time_micro',])
    op.create_index(u'idx_UserUsedExperiment_start_date_hour', u'UserUsedExperiment', ['start_date_hour', ])
    op.create_index(u'idx_UserUsedExperiment_timetable', u'UserUsedExperiment', ['start_date_weekday', 'start_date_hour'])
    op.create_index(u'idx_UserUsedExperiment_user_experiment', u'UserUsedExperiment', ['user_id', 'experiment_id'])
    op.create_index(u'idx_UserUsedExperiment_user_origin', u'UserUsedExperiment', ['user_id', 'origin'])
    op.create_index('idx_UserUsedExperiment_user_group_permission_id', u'UserUsedExperiment', ['user_id', 'group_permission_id'])
    op.create_index('idx_UserUsedExperiment_user_user_permission_id', u'UserUsedExperiment', ['user_id', 'user_permission_id'])
    op.create_index('idx_UserUsedExperiment_user_role_permission_id', u'UserUsedExperiment', ['user_id', 'role_permission_id'])
    op.create_index(u'idx_UserUsedExperiment_experiment_id_group_id', u'UserUsedExperiment', ['experiment_id', 'group_permission_id'])
    op.create_index(u'idx_UserUsedExperiment_experiment_id_user_id', u'UserUsedExperiment', ['experiment_id', 'user_permission_id'])
    op.create_index(u'idx_UserUsedExperiment_experiment_id_permission_id', u'UserUsedExperiment', ['experiment_id', 'role_permission_id'])
    s = sql.select([
        uue.c.id, uue.c.start_date, uue.c.start_date_micro,
        uue.c.end_date, uue.c.end_date_micro
    ]).order_by(uue.c.id)
    for use in op.get_bind().execute(s):
        use_id = use[uue.c.id]
        # Microseconds are stored in a separate column; merge them back in.
        start_date = use[uue.c.start_date].replace(microsecond=use[uue.c.start_date_micro])
        end_date_col = use[uue.c.end_date]
        kwargs = dict(
            start_date_date=start_date.date(),
            start_date_hour=start_date.hour,
            start_date_weekday=start_date.weekday(),
        )
        if end_date_col:
            end_date = end_date_col.replace(microsecond=use[uue.c.end_date_micro])
            delta = end_date - start_date
            # Include delta.days — .seconds alone caps at one day.
            session_time_micro = ((delta.days * 86400 + delta.seconds) * 1000000
                                  + delta.microseconds)
            kwargs.update(dict(
                session_time_micro=session_time_micro,
                session_time_seconds=session_time_micro // 1000000,
            ))
        update_stmt = uue.update().where(uue.c.id == use_id).values(**kwargs)
        op.execute(update_stmt)
def downgrade():
    """Drop every index and column added by this migration.

    BUG FIX: the original downgrade never dropped the
    ``session_time_seconds`` column that upgrade() added; it is now
    removed as well.
    """
    for index in (
        u'idx_UserUsedExperiment_user_experiment',
        u'idx_UserUsedExperiment_user_origin',
        u'idx_UserUsedExperiment_experiment_id_group_id',
        u'idx_UserUsedExperiment_experiment_id_user_id',
        u'idx_UserUsedExperiment_experiment_id_permission_id',
        'idx_UserUsedExperiment_user_group_permission_id',
        'idx_UserUsedExperiment_user_user_permission_id',
        'idx_UserUsedExperiment_user_role_permission_id',
        u'idx_UserUsedExperiment_timetable',
        u'idx_UserUsedExperiment_start_date_date',
        u'idx_UserUsedExperiment_start_date_weekday',
        u'idx_UserUsedExperiment_session_time_micro',
        u'idx_UserUsedExperiment_start_date_hour',
    ):
        op.drop_index(index, 'UserUsedExperiment')
    for column in ('start_date_hour', 'session_time_seconds',
                   'session_time_micro', 'start_date_weekday',
                   'start_date_date'):
        op.drop_column(u'UserUsedExperiment', column)
| {
"repo_name": "zstars/weblabdeusto",
"path": "server/src/weblab/db/upgrade/regular/versions/31ded1f6ad6_add_data_required_fo.py",
"copies": "1",
"size": "5738",
"license": "bsd-2-clause",
"hash": -9014428567945252000,
"line_mean": 51.1636363636,
"line_max": 140,
"alpha_frac": 0.6917044266,
"autogenerated": false,
"ratio": 3.295807007466973,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9437173940683182,
"avg_score": 0.010067498676758145,
"num_lines": 110
} |
"""add date and featured fields to posts
Revision ID: 191f4dbb7278
Revises: 22a87ce63503
Create Date: 2017-08-17 14:22:51.287803
"""
# revision identifiers, used by Alembic.
# Alembic revision identifiers: this migration's id and its parent revision.
revision = '191f4dbb7278'
down_revision = '22a87ce63503'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add indexed ``date`` and ``featured`` columns to ``post``."""
    op.add_column('post', sa.Column('date', sa.DateTime(timezone=True),
                                    server_default=sa.text(u'now()'),
                                    nullable=False))
    op.add_column('post', sa.Column('featured', sa.Boolean(),
                                    server_default=sa.text(u'false'),
                                    nullable=False))
    for name in ('date', 'featured'):
        op.create_index(op.f('ix_post_' + name), 'post', [name], unique=False)
def downgrade():
    """Remove the indexes and columns added by upgrade()."""
    for name in ('featured', 'date'):
        op.drop_index(op.f('ix_post_' + name), table_name='post')
        op.drop_column('post', name)
| {
"repo_name": "Code4SA/pmg-cms-2",
"path": "migrations/versions/191f4dbb7278_add_date_and_featured_fields_to_posts.py",
"copies": "1",
"size": "1100",
"license": "apache-2.0",
"hash": 2734573142209806300,
"line_mean": 33.375,
"line_max": 122,
"alpha_frac": 0.6745454545,
"autogenerated": false,
"ratio": 3.142857142857143,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4317402597357143,
"avg_score": null,
"num_lines": null
} |
"""Add date
Usage:
add_date.py <input_file> <output_file>
add_date.py (-h | --help)
add_date.py --version
Options:
-h --help Show this help.
--version Show version number.
"""
from docopt import docopt
import csv
import datetime
def excel_date(date1):
    """Convert a datetime into an Excel serial date number (a float).

    Excel's 1900 date system counts days from an epoch of 1899-12-30,
    so 1900-01-01 is serial 1 and the epoch absorbs Excel's fictitious
    1900-02-29 for all modern dates.  The fractional part encodes the
    time of day.

    BUG FIX: the epoch was previously ``datetime(1899 + 4, 12, 31)``
    (i.e. 1903-12-31), which produced serial numbers about four years
    too small.
    """
    epoch = datetime.datetime(1899, 12, 30)
    delta = date1 - epoch
    return float(delta.days) + (float(delta.seconds) / 86400)
def add_date(input_file, output_file):
    """Copy a CSV, inserting an ISO-8601 timestamp at column index 4.

    The input is expected to have three header lines (skipped and not
    copied to the output) followed by data rows where column 1 is the
    year, column 2 the 1-based day of year and column 3 the minutes of
    day.  The timestamp is reconstructed from those three fields.

    Fixes over the original: ``xrange`` (Python 2 only) replaced with
    ``range`` and the accumulator no longer shadows the builtin ``all``.

    :param input_file: path of the CSV file to read
    :param output_file: path of the CSV file to write
    """
    with open(input_file, 'r') as csvinput:
        with open(output_file, 'w') as csvoutput:
            writer = csv.writer(csvoutput, lineterminator='\r')
            reader = csv.reader(csvinput)
            output_rows = []
            # Skip the three header lines.
            for _ in range(3):
                next(reader)
            for row in reader:
                row_year = row[1]
                day_of_year = row[2]
                minutes_of_day = row[3]
                year_start = datetime.datetime(int(row_year), 1, 1)
                day_delta = datetime.timedelta(int(day_of_year) - 1,
                                               minutes=int(minutes_of_day))
                row_date = year_start + day_delta
                row.insert(4, row_date.isoformat())
                output_rows.append(row)
            writer.writerows(output_rows)
if __name__ == "__main__":
    # Parse the CLI per the module docstring (docopt) and run the conversion.
    args = docopt(__doc__, version='Add Date 0.1')
    add_date(args['<input_file>'], args['<output_file>'])
| {
"repo_name": "scimusmn/energy_tools",
"path": "add_date.py",
"copies": "1",
"size": "1669",
"license": "mit",
"hash": -4643393195412888000,
"line_mean": 28.2807017544,
"line_max": 97,
"alpha_frac": 0.5524266028,
"autogenerated": false,
"ratio": 3.5815450643776825,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9631684202484203,
"avg_score": 0.00045749293869594616,
"num_lines": 57
} |
"""Add dates to tas index
Revision ID: 807a203713a4
Revises: bb33cc8f0a3e
Create Date: 2016-11-09 19:47:52.671178
"""
# revision identifiers, used by Alembic.
# Alembic revision identifiers: this migration's id and its parent revision.
revision = '807a203713a4'
down_revision = 'bb33cc8f0a3e'
branch_labels = None
depends_on = None
from alembic import op
def upgrade(engine_name):
    """Dispatch to the engine-specific ``upgrade_<engine_name>`` function."""
    handler = globals()["upgrade_%s" % engine_name]
    handler()
def downgrade(engine_name):
    """Dispatch to the engine-specific ``downgrade_<engine_name>`` function."""
    handler = globals()["downgrade_%s" % engine_name]
    handler()
def upgrade_data_broker():
    """Recreate ix_tas with the internal start/end date columns appended."""
    op.drop_index('ix_tas', table_name='tas_lookup')
    op.create_index(
        'ix_tas', 'tas_lookup',
        ['allocation_transfer_agency', 'agency_identifier',
         'beginning_period_of_availability', 'ending_period_of_availability',
         'availability_type_code', 'main_account_code', 'sub_account_code',
         'internal_start_date', 'internal_end_date'],
        unique=False)
def downgrade_data_broker():
    """Restore ix_tas without the internal start/end date columns."""
    op.drop_index('ix_tas', table_name='tas_lookup')
    op.create_index(
        'ix_tas', 'tas_lookup',
        ['allocation_transfer_agency', 'agency_identifier',
         'beginning_period_of_availability', 'ending_period_of_availability',
         'availability_type_code', 'main_account_code', 'sub_account_code'],
        unique=False)
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/807a203713a4_add_dates_to_tas_index.py",
"copies": "1",
"size": "1332",
"license": "cc0-1.0",
"hash": -7660730719513008000,
"line_mean": 31.487804878,
"line_max": 290,
"alpha_frac": 0.7004504505,
"autogenerated": false,
"ratio": 3.225181598062954,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9281747125681801,
"avg_score": 0.028776984576230555,
"num_lines": 41
} |
"""Add db_name
Revision ID: 2d16d7104dd6
Revises: 41daf2bf458b
Create Date: 2015-02-09 15:25:22.452554
"""
# revision identifiers, used by Alembic.
# Alembic revision identifiers: this migration's id and its parent revision.
revision = '2d16d7104dd6'
down_revision = '41daf2bf458b'
from alembic import op
import sqlalchemy as sa
import sqlalchemy.sql as sql
metadata = sa.MetaData()
# Lightweight table handle for the data backfill in upgrade(); only the
# columns the migration touches are declared.
entities_table = sa.Table('entities', metadata,
    sa.Column('id', sa.Integer),
    sa.Column('name', sa.Unicode(length=200)),
    sa.Column('db_name', sa.Unicode(length=200)),
)
def upgrade():
    """Add ``entities.db_name`` and fill it with ``wcloud<id>`` per row."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('entities',
                  sa.Column('db_name', sa.Unicode(length=200), nullable=True))
    ### end Alembic commands ###
    connection = op.get_bind()
    for row in connection.execute(sql.select([entities_table.c.id])):
        entity_id = row[entities_table.c.id]
        db_name = u'wcloud%s' % entity_id
        print("Entity %s: db_name=%s" % (entity_id, db_name))
        connection.execute(
            entities_table.update()
            .where(entities_table.c.id == entity_id)
            .values(db_name=db_name))
def downgrade():
    """Drop the db_name column again."""
    op.drop_column('entities', 'db_name')
| {
"repo_name": "morelab/weblabdeusto",
"path": "tools/wcloud/alembic/versions/2d16d7104dd6_add_db_name.py",
"copies": "4",
"size": "1298",
"license": "bsd-2-clause",
"hash": -8483873956427481000,
"line_mean": 30.6585365854,
"line_max": 112,
"alpha_frac": 0.6679506934,
"autogenerated": false,
"ratio": 3.228855721393035,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5896806414793035,
"avg_score": null,
"num_lines": null
} |
"""Add `deal_group` table.
Revision ID: 88e45a3c98
Revises: 1f75e6d37e
Create Date: 2015-02-07 01:14:20.398906
"""
# revision identifiers, used by Alembic.
# Alembic revision identifiers: this migration's id and its parent revision.
revision = '88e45a3c98'
down_revision = '1f75e6d37e'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy_utils.types.uuid import UUIDType
def upgrade():
    """Create ``deal_group`` with a unique index on ``owner_id``."""
    op.create_table(
        'deal_group',
        sa.Column('id', UUIDType(binary=False),
                  server_default=sa.text('uuid_generate_v4()'),
                  nullable=False),
        sa.Column('name', sa.Unicode(length=255), nullable=False),
        sa.Column('frozen_magazine_amount', sa.Integer(), nullable=True),
        sa.Column('owner_id', UUIDType(binary=False), nullable=False),
        sa.ForeignKeyConstraint(['owner_id'], ['user.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    # One group per owner.
    op.create_index(op.f('ix_deal_group_owner_id'), 'deal_group',
                    ['owner_id'], unique=True)
def downgrade():
    """Drop the owner index, then the deal_group table."""
    op.drop_index(op.f('ix_deal_group_owner_id'), table_name='deal_group')
    op.drop_table('deal_group')
| {
"repo_name": "wappulehti-apy/diilikone-api",
"path": "diilikone/migrations/versions/88e45a3c98_add_deal_group_table.py",
"copies": "1",
"size": "1170",
"license": "mit",
"hash": 5286903943132835000,
"line_mean": 25,
"line_max": 79,
"alpha_frac": 0.6222222222,
"autogenerated": false,
"ratio": 3.2409972299168976,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.43632194521168977,
"avg_score": null,
"num_lines": null
} |
"""Add `deal` table.
Revision ID: 18a06f2c7d7
Revises: 88e45a3c98
Create Date: 2015-02-07 01:36:24.589027
"""
# revision identifiers, used by Alembic.
# Alembic revision identifiers: this migration's id and its parent revision.
revision = '18a06f2c7d7'
down_revision = '88e45a3c98'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy_utils.types.uuid import UUIDType
def upgrade():
    """Create ``deal`` with indexes on its two foreign key columns."""
    op.create_table(
        'deal',
        sa.Column('id', UUIDType(binary=False),
                  server_default=sa.text('uuid_generate_v4()'),
                  nullable=False),
        sa.Column('size', sa.Integer(), nullable=False),
        sa.Column('deal_group_id', UUIDType(binary=False), nullable=False),
        sa.Column('salesperson_id', UUIDType(binary=False), nullable=False),
        sa.ForeignKeyConstraint(['deal_group_id'], ['deal_group.id'],
                                ondelete='RESTRICT'),
        sa.ForeignKeyConstraint(['salesperson_id'], ['user.id'],
                                ondelete='RESTRICT'),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_deal_deal_group_id'), 'deal',
                    ['deal_group_id'], unique=False)
    # At most one deal per salesperson.
    op.create_index(op.f('ix_deal_salesperson_id'), 'deal',
                    ['salesperson_id'], unique=True)
def downgrade():
    """Drop the deal indexes, then the table itself."""
    for index in ('ix_deal_salesperson_id', 'ix_deal_deal_group_id'):
        op.drop_index(op.f(index), table_name='deal')
    op.drop_table('deal')
| {
"repo_name": "wappulehti-apy/diilikone-api",
"path": "diilikone/migrations/versions/18a06f2c7d7_add_deal_table.py",
"copies": "1",
"size": "1439",
"license": "mit",
"hash": 1655598851521812000,
"line_mean": 27.2156862745,
"line_max": 79,
"alpha_frac": 0.6129256428,
"autogenerated": false,
"ratio": 3.226457399103139,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9329034699667693,
"avg_score": 0.00206966844708898,
"num_lines": 51
} |
"""Add default badge to categories
Revision ID: c997dc927fbc
Revises: 497c61b68050
Create Date: 2020-07-30 14:23:35.808229
"""
import json
import sqlalchemy as sa
from alembic import context, op
from indico.core.db.sqlalchemy.util.management import DEFAULT_BADGE_DATA
# revision identifiers, used by Alembic.
# Alembic revision identifiers: this migration's id and its parent revision.
revision = 'c997dc927fbc'
down_revision = '497c61b68050'
branch_labels = None
depends_on = None
def upgrade():
    # The data backfill below needs a live connection, so static SQL
    # generation (offline mode) cannot be supported.
    if context.is_offline_mode():
        raise Exception('This upgrade is only possible in online mode')
    op.add_column('categories', sa.Column('default_badge_template_id', sa.Integer(), nullable=True),
                  schema='categories')
    op.create_index(None, 'categories', ['default_badge_template_id'], unique=False, schema='categories')
    op.create_foreign_key(None, 'categories', 'designer_templates', ['default_badge_template_id'], ['id'],
                          source_schema='categories', referent_schema='indico')
    conn = op.get_bind()
    # The root category is the only one without a parent.
    root_categ_id = conn.execute('SELECT id FROM categories.categories WHERE parent_id IS NULL').scalar()
    # Insert the default badge template under the root category and keep
    # its id so the root category can point at it.
    badge_id = conn.execute('''
        INSERT INTO indico.designer_templates
            (category_id, title, type, is_system_template, is_clonable, data) VALUES
            (%s , %s , %s , true , true , %s)
        RETURNING id
    ''', (root_categ_id, 'Default badge', 1, json.dumps(DEFAULT_BADGE_DATA))).scalar()
    conn.execute('''
        UPDATE categories.categories
        SET default_badge_template_id = %s
        WHERE id = %s
    ''', (badge_id, root_categ_id))
def downgrade():
    # Reads from a live connection, so offline (SQL-script) mode is not
    # supported.
    if context.is_offline_mode():
        raise Exception('This downgrade is only possible in online mode')
    conn = op.get_bind()
    # Remember the root category's template id before dropping the column,
    # then delete that system template.
    id_ = conn.execute('SELECT default_badge_template_id FROM categories.categories WHERE parent_id IS NULL').scalar()
    op.drop_column('categories', 'default_badge_template_id', schema='categories')
    conn.execute('DELETE FROM indico.designer_templates WHERE is_system_template AND id = %s', (id_,))
| {
"repo_name": "indico/indico",
"path": "indico/migrations/versions/20200805_1423_c997dc927fbc_add_default_badge_to_categories.py",
"copies": "4",
"size": "2053",
"license": "mit",
"hash": -4929249125806346000,
"line_mean": 38.4807692308,
"line_max": 118,
"alpha_frac": 0.6678032148,
"autogenerated": false,
"ratio": 3.6080843585237257,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6275887573323726,
"avg_score": null,
"num_lines": null
} |
"""Add default options
Revision ID: e53e5a4e6213
Revises: fe8905a69c8c
Create Date: 2019-06-09 22:46:55.028111
"""
from alembic import op
from sqlalchemy import orm, Column, Text
from sqlalchemy.ext.declarative import declarative_base
# Declarative base local to this migration so the model below stays
# decoupled from the application's (possibly newer) model definitions.
Base = declarative_base()
# Redefine class
class GlobalConfiguration(Base):
    """Snapshot ORM mapping of the ``global_configuration`` key/value table."""
    __tablename__ = 'global_configuration'
    key = Column(Text, nullable=False, primary_key=True)
    value = Column(Text, nullable=False)
# revision identifiers, used by Alembic.
revision = 'e53e5a4e6213'       # this migration's id
down_revision = 'fe8905a69c8c'  # parent revision in the chain
branch_labels = None
depends_on = None
def upgrade():
    """Insert the default global setting: comments are disabled."""
    session = orm.Session(bind=op.get_bind())
    # No comments allowed by default.
    session.add(GlobalConfiguration(key="comments", value="OFF"))
    session.commit()
def downgrade():
    """Remove the default ``comments`` row created by :func:`upgrade`.

    The previous implementation deleted *every* row in global_configuration,
    which would also destroy configuration keys added after this migration
    ran.  A downgrade must only revert what its upgrade created.
    """
    bind = op.get_bind()
    session = orm.Session(bind=bind)
    for config in session.query(GlobalConfiguration).filter_by(key="comments").all():
        session.delete(config)
    session.commit()
| {
"repo_name": "CrystalKoan/morpheus-api",
"path": "api/model/alembic/versions/e53e5a4e6213_add_default_options.py",
"copies": "1",
"size": "1088",
"license": "bsd-2-clause",
"hash": 4969460354336822000,
"line_mean": 20.3333333333,
"line_max": 65,
"alpha_frac": 0.7077205882,
"autogenerated": false,
"ratio": 3.4430379746835444,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4650758562883544,
"avg_score": null,
"num_lines": null
} |
"""Add default rotation_policy to certs where it's missing
Revision ID: 1db4f82bc780
Revises: 3adfdd6598df
Create Date: 2018-08-03 12:56:44.565230
"""
# revision identifiers, used by Alembic.
revision = "1db4f82bc780"       # this migration's id
down_revision = "3adfdd6598df"  # parent revision in the chain
from alembic import op
from flask import current_app
from logging import Formatter, FileHandler, getLogger
# Module-level logger mirroring upgrade progress into a dedicated log file.
log = getLogger(__name__)
handler = FileHandler(current_app.config.get("LOG_UPGRADE_FILE", "db_upgrade.log"))
handler.setFormatter(
    Formatter(
        "%(asctime)s %(levelname)s: %(message)s " "[in %(pathname)s:%(lineno)d]"
    )
)
# NOTE(review): current_app requires an active Flask application context at
# import time, so this module is only importable from flask-driven commands.
handler.setLevel(current_app.config.get("LOG_LEVEL", "DEBUG"))
log.setLevel(current_app.config.get("LOG_LEVEL", "DEBUG"))
log.addHandler(handler)
def upgrade():
    """Backfill rotation_policy_id with the 'default' policy where it is NULL."""
    connection = op.get_bind()
    result = connection.execute(
        """\
        UPDATE certificates
        SET rotation_policy_id=(SELECT id FROM rotation_policies WHERE name='default')
        WHERE rotation_policy_id IS NULL
        RETURNING id
        """
    )
    # Pass the value as a lazy %-style argument instead of pre-formatting the
    # string: logging only interpolates when the record is actually emitted.
    log.info("Filled rotation_policy for %d certificates", result.rowcount)
def downgrade():
    """Intentional no-op: the backfilled rotation policies are kept."""
| {
"repo_name": "Netflix/lemur",
"path": "lemur/migrations/versions/1db4f82bc780_.py",
"copies": "1",
"size": "1138",
"license": "apache-2.0",
"hash": -2438653320113310700,
"line_mean": 24.2888888889,
"line_max": 89,
"alpha_frac": 0.6924428822,
"autogenerated": false,
"ratio": 3.4589665653495443,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46514094475495443,
"avg_score": null,
"num_lines": null
} |
"""add deleted flag for genomic metrics
Revision ID: ce0d4837ba00
Revises: a41c2f2266cb
Create Date: 2021-02-17 13:31:28.006077
"""
from alembic import op
import sqlalchemy as sa
import rdr_service.model.utils
from rdr_service.participant_enums import PhysicalMeasurementsStatus, QuestionnaireStatus, OrderStatus
from rdr_service.participant_enums import WithdrawalStatus, WithdrawalReason, SuspensionStatus, QuestionnaireDefinitionStatus
from rdr_service.participant_enums import EnrollmentStatus, Race, SampleStatus, OrganizationType, BiobankOrderStatus
from rdr_service.participant_enums import OrderShipmentTrackingStatus, OrderShipmentStatus
from rdr_service.participant_enums import MetricSetType, MetricsKey, GenderIdentity
from rdr_service.model.base import add_table_history_table, drop_table_history_table
from rdr_service.model.code import CodeType
from rdr_service.model.site_enums import SiteStatus, EnrollingStatus, DigitalSchedulingStatus, ObsoleteStatus
# revision identifiers, used by Alembic.
revision = 'ce0d4837ba00'       # this migration's id
down_revision = 'a41c2f2266cb'  # parent revision in the chain
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the engine-specific routine (upgrade_rdr / upgrade_metrics)."""
    handler = globals()["upgrade_" + engine_name]
    handler()
def downgrade(engine_name):
    """Dispatch to the engine-specific routine (downgrade_rdr / downgrade_metrics)."""
    handler = globals()["downgrade_" + engine_name]
    handler()
def upgrade_rdr():
    # ### commands auto generated by Alembic - please adjust! ###
    # One SmallInteger "<file>_deleted" flag per genomic data file type,
    # added in the same order Alembic generated them.
    flag_prefixes = (
        'crai', 'cram', 'cram_md5',
        'hf_vcf', 'hf_vcf_md5', 'hf_vcf_tbi',
        'idat_green', 'idat_green_md5',
        'idat_red', 'idat_red_md5',
        'raw_vcf', 'raw_vcf_md5', 'raw_vcf_tbi',
        'vcf', 'vcf_md5', 'vcf_tbi',
    )
    for prefix in flag_prefixes:
        op.add_column('genomic_gc_validation_metrics',
                      sa.Column(prefix + '_deleted', sa.SmallInteger(), nullable=False))
    # ### end Alembic commands ###
def downgrade_rdr():
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop the "<file>_deleted" flags in the reverse of their creation order.
    flag_prefixes = (
        'crai', 'cram', 'cram_md5',
        'hf_vcf', 'hf_vcf_md5', 'hf_vcf_tbi',
        'idat_green', 'idat_green_md5',
        'idat_red', 'idat_red_md5',
        'raw_vcf', 'raw_vcf_md5', 'raw_vcf_tbi',
        'vcf', 'vcf_md5', 'vcf_tbi',
    )
    for prefix in reversed(flag_prefixes):
        op.drop_column('genomic_gc_validation_metrics', prefix + '_deleted')
    # ### end Alembic commands ###
def upgrade_metrics():
    """No metrics-database changes in this revision."""
def downgrade_metrics():
    """No metrics-database changes in this revision."""
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/ce0d4837ba00_add_deleted_flag_for_genomic_metrics.py",
"copies": "1",
"size": "4812",
"license": "bsd-3-clause",
"hash": -8702397717639826000,
"line_mean": 52.4666666667,
"line_max": 125,
"alpha_frac": 0.7383624273,
"autogenerated": false,
"ratio": 3.2208835341365463,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9389294631398808,
"avg_score": 0.013990266007547497,
"num_lines": 90
} |
"""Add deleted service status
Revision ID: 1460
Revises: 1450
Create Date: 2021-03-09 08:35:41.713530
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = '1460'       # this migration's id
down_revision = '1450'  # parent revision in the chain
def upgrade():
    """Extend the services status CHECK constraint to also accept 'deleted'."""
    # NOTE(review): autocommit_block runs these ALTERs outside the migration's
    # transaction -- presumably required here; confirm before restructuring.
    with op.get_context().autocommit_block():
        op.execute("""
            ALTER TABLE services
            DROP CONSTRAINT ck_services_status,
            ADD CONSTRAINT ck_services_status CHECK(status::text = ANY(ARRAY['disabled', 'enabled', 'published', 'deleted']));
        """)
op.execute("""
ALTER TABLE archived_services
DROP CONSTRAINT ck_archived_services_status,
ADD CONSTRAINT ck_archived_services_status CHECK(status::text = ANY(ARRAY['disabled', 'enabled', 'published', 'deleted']));
""")
def downgrade():
    """Restore the original status CHECK constraints without 'deleted'.

    NOTE(review): this fails if any row already has status = 'deleted';
    such rows must be cleaned up before downgrading.
    """
    with op.get_context().autocommit_block():
        op.execute("""
            ALTER TABLE services
            DROP CONSTRAINT ck_services_status,
            ADD CONSTRAINT ck_services_status CHECK(status::text = ANY(ARRAY['disabled', 'enabled', 'published']));
        """)
        op.execute("""
            ALTER TABLE archived_services
            DROP CONSTRAINT ck_archived_services_status,
            ADD CONSTRAINT ck_archived_services_status CHECK(status::text = ANY(ARRAY['disabled', 'enabled', 'published']));
        """)
| {
"repo_name": "alphagov/digitalmarketplace-api",
"path": "migrations/versions/1460_add_deleted_service_status.py",
"copies": "1",
"size": "1382",
"license": "mit",
"hash": -5350017542688503000,
"line_mean": 33.55,
"line_max": 139,
"alpha_frac": 0.6041968162,
"autogenerated": false,
"ratio": 4.252307692307692,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5356504508507691,
"avg_score": null,
"num_lines": null
} |
"""Add DEP device columns
Revision ID: a3ddaad5c358
Revises: 2808deb9fc62
Create Date: 2018-07-04 21:44:41.549806
"""
# From: http://alembic.zzzcomputing.com/en/latest/cookbook.html#conditional-migration-elements
from alembic import op
import sqlalchemy as sa
import commandment.dbtypes
from alembic import context
# revision identifiers, used by Alembic.
revision = 'a3ddaad5c358'       # this migration's id
down_revision = '2808deb9fc62'  # parent revision in the chain
branch_labels = None
depends_on = None
def upgrade():
    """Apply the schema changes; optional data migrations stay disabled."""
    schema_upgrades()
    # Re-enable if data migrations are ever needed:
    # if context.get_x_argument(as_dictionary=True).get('data', None):
    #     data_upgrades()
def downgrade():
    """Revert the schema changes; optional data migrations stay disabled."""
    # Re-enable if data migrations are ever needed:
    # if context.get_x_argument(as_dictionary=True).get('data', None):
    #     data_downgrades()
    schema_downgrades()
def schema_upgrades():
    """Add the DEP-related device attributes, in Alembic's generated order."""
    new_columns = (
        ('description', sa.String),
        ('asset_tag', sa.String),
        ('color', sa.String),
        ('device_assigned_by', sa.String),
        ('device_assigned_date', sa.DateTime),
        ('device_family', sa.String),
        ('is_dep', sa.Boolean),
        ('os', sa.String),
        ('profile_assign_time', sa.DateTime),
        ('profile_push_time', sa.DateTime),
        ('profile_status', sa.String),
        ('profile_uuid', sa.String),
    )
    for name, column_type in new_columns:
        op.add_column('devices', sa.Column(name, column_type(), nullable=True))
def schema_downgrades():
    """Drop the DEP-related device columns in reverse creation order."""
    added_columns = (
        'description', 'asset_tag', 'color', 'device_assigned_by',
        'device_assigned_date', 'device_family', 'is_dep', 'os',
        'profile_assign_time', 'profile_push_time', 'profile_status',
        'profile_uuid',
    )
    for name in reversed(added_columns):
        op.drop_column('devices', name)
# def data_upgrades():
# """Add any optional data upgrade migrations here!"""
# pass
#
#
# def data_downgrades():
# """Add any optional data downgrade migrations here!"""
# pass
| {
"repo_name": "jessepeterson/commandment",
"path": "commandment/alembic/versions/a3ddaad5c358_add_dep_device_columns.py",
"copies": "1",
"size": "2556",
"license": "mit",
"hash": 1733132700475040500,
"line_mean": 33.5405405405,
"line_max": 94,
"alpha_frac": 0.6819248826,
"autogenerated": false,
"ratio": 3.268542199488491,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4450467082088491,
"avg_score": null,
"num_lines": null
} |
"""add dep profile id to device
Revision ID: d5b32b5cc74e
Revises: 1005dc7dea01
Create Date: 2018-03-13 21:16:23.964086
"""
# From: http://alembic.zzzcomputing.com/en/latest/cookbook.html#conditional-migration-elements
from alembic import op
import sqlalchemy as sa
import commandment.dbtypes
from alembic import context
# revision identifiers, used by Alembic.
revision = 'd5b32b5cc74e'       # this migration's id
down_revision = '1005dc7dea01'  # parent revision in the chain
branch_labels = None
depends_on = None
def upgrade():
    """Apply the schema changes; optional data migrations stay disabled."""
    schema_upgrades()
    # Re-enable if data migrations are ever needed:
    # if context.get_x_argument(as_dictionary=True).get('data', None):
    #     data_upgrades()
def downgrade():
    """Revert the schema changes; optional data migrations stay disabled."""
    # Re-enable if data migrations are ever needed:
    # if context.get_x_argument(as_dictionary=True).get('data', None):
    #     data_downgrades()
    schema_downgrades()
def schema_upgrades():
    """Link each device row to its assigned DEP profile."""
    op.add_column('devices', sa.Column('dep_profile_id', sa.Integer(), nullable=True))
    # The matching FK constraint is intentionally omitted: SQLite3 cannot
    # ALTER TABLE ... ADD CONSTRAINT.
    # op.create_foreign_key(None, 'devices', 'dep_profiles', ['dep_profile_id'], ['id'])
def schema_downgrades():
    """Remove the DEP profile link from devices."""
    # No FK to drop -- schema_upgrades never created one (SQLite3 limitation).
    # op.drop_constraint(None, 'devices', type_='foreignkey')
    op.drop_column('devices', 'dep_profile_id')
def data_upgrades():
    """Optional data upgrade migrations would go here; none are needed."""
def data_downgrades():
    """Optional data downgrade migrations would go here; none are needed."""
| {
"repo_name": "jessepeterson/commandment",
"path": "commandment/alembic/versions/d5b32b5cc74e_add_dep_profile_id_to_device.py",
"copies": "1",
"size": "1413",
"license": "mit",
"hash": -269378283355281500,
"line_mean": 23.3620689655,
"line_max": 94,
"alpha_frac": 0.6928520878,
"autogenerated": false,
"ratio": 3.3642857142857143,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9554270860385781,
"avg_score": 0.0005733883399864944,
"num_lines": 58
} |
"""add dep profile relationships
Revision ID: 7cf5787a089e
Revises: b231394ab475
Create Date: 2018-11-06 21:11:54.606189
"""
# From: http://alembic.zzzcomputing.com/en/latest/cookbook.html#conditional-migration-elements
from alembic import op
import sqlalchemy as sa
import commandment.dbtypes
from alembic import context
# revision identifiers, used by Alembic.
revision = '7cf5787a089e'       # this migration's id
down_revision = 'b231394ab475'  # parent revision in the chain
branch_labels = None
depends_on = None
def upgrade():
    """Apply the schema changes; optional data migrations stay disabled."""
    schema_upgrades()
    # Re-enable if data migrations are ever needed:
    # if context.get_x_argument(as_dictionary=True).get('data', None):
    #     data_upgrades()
def downgrade():
    """Revert the schema changes; optional data migrations stay disabled."""
    # Re-enable if data migrations are ever needed:
    # if context.get_x_argument(as_dictionary=True).get('data', None):
    #     data_downgrades()
    schema_downgrades()
def schema_upgrades():
    """Create the two-way links between DEP accounts and DEP profiles."""
    # Each DEP account may designate a default profile.
    with op.batch_alter_table('dep_accounts', schema=None) as accounts:
        accounts.add_column(sa.Column('default_dep_profile_id', sa.Integer(), nullable=True))
        accounts.create_foreign_key('fk_dep_accounts_default_dep_profile_id',
                                    'dep_profiles', ['default_dep_profile_id'], ['id'])
    # Each profile records its owning account and the setup panes to skip.
    with op.batch_alter_table('dep_profiles', schema=None) as profiles:
        profiles.add_column(sa.Column('dep_account_id', sa.Integer(), nullable=True))
        profiles.add_column(sa.Column('skip_setup_items', commandment.dbtypes.JSONEncodedDict(), nullable=True))
        profiles.create_foreign_key('fk_dep_profiles_dep_account_id',
                                    'dep_accounts', ['dep_account_id'], ['id'])
def schema_downgrades():
    """Remove the DEP account/profile links added by schema_upgrades()."""
    with op.batch_alter_table('dep_profiles', schema=None) as batch_op:
        # BatchOperations.drop_constraint() takes no table-name argument --
        # the table is implied by the batch context.  The previous extra
        # positional argument collided with the type_ keyword (TypeError).
        batch_op.drop_constraint('fk_dep_profiles_dep_account_id', type_='foreignkey')
        batch_op.drop_column('skip_setup_items')
        batch_op.drop_column('dep_account_id')

    with op.batch_alter_table('dep_accounts', schema=None) as batch_op:
        batch_op.drop_constraint('fk_dep_accounts_default_dep_profile_id', type_='foreignkey')
        batch_op.drop_column('default_dep_profile_id')
| {
"repo_name": "jessepeterson/commandment",
"path": "commandment/alembic/versions/7cf5787a089e_add_dep_profile_relationships.py",
"copies": "1",
"size": "1988",
"license": "mit",
"hash": -6773479421242855000,
"line_mean": 34.5,
"line_max": 129,
"alpha_frac": 0.699195171,
"autogenerated": false,
"ratio": 3.2012882447665056,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44004834157665057,
"avg_score": null,
"num_lines": null
} |
"""Add descriptions to AwardProcurement
Revision ID: 668d9fa93acb
Revises: 224dac8f951c
Create Date: 2018-04-02 09:30:16.347112
"""
# revision identifiers, used by Alembic.
revision = '668d9fa93acb'       # this migration's id
down_revision = '224dac8f951c'  # parent revision in the chain
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Run the upgrade for the named engine (e.g. 'data_broker')."""
    migration = globals()["upgrade_" + engine_name]
    migration()
def downgrade(engine_name):
    """Run the downgrade for the named engine (e.g. 'data_broker')."""
    migration = globals()["downgrade_" + engine_name]
    migration()
def upgrade_data_broker():
    """Rename two award_procurement columns and add the description columns."""
    ### commands auto generated by Alembic - please adjust! ###
    # Two existing columns are repurposed and renamed first.
    op.execute("ALTER TABLE award_procurement RENAME COLUMN referenced_multi_or_single TO referenced_mult_or_single")
    op.execute("ALTER TABLE award_procurement RENAME COLUMN legal_entity_state_code TO legal_entity_state_descrip")

    # New nullable text columns on award_procurement, in Alembic's generated order.
    new_columns = (
        'a_76_fair_act_action_desc',
        'action_type_description',
        'clinger_cohen_act_pla_desc',
        'commercial_item_acqui_desc',
        'commercial_item_test_desc',
        'consolidated_contract_desc',
        'construction_wage_rat_desc',
        'contingency_humanitar_desc',
        'contract_award_type_desc',
        'contract_bundling_descrip',
        'contract_financing_descrip',
        'contracting_officers_desc',
        'cost_accounting_stand_desc',
        'cost_or_pricing_data_desc',
        'country_of_product_or_desc',
        'dod_claimant_prog_cod_desc',
        'domestic_or_foreign_e_desc',
        'epa_designated_produc_desc',
        'evaluated_preference_desc',
        'extent_compete_description',
        'fair_opportunity_limi_desc',
        'fed_biz_opps_description',
        'foreign_funding_desc',
        'government_furnished_desc',
        'idv_type_description',
        'information_technolog_desc',
        'inherently_government_desc',
        'interagency_contract_desc',
        'labor_standards_descrip',
        'last_modified',
        'legal_entity_state_code',
        'local_area_set_aside_desc',
        'materials_supplies_descrip',
        'multi_year_contract_desc',
        'multiple_or_single_aw_desc',
        'national_interest_desc',
        'other_than_full_and_o_desc',
        'performance_based_se_desc',
        'place_of_manufacture_desc',
        'place_of_performance_city',
        'product_or_service_co_desc',
        'program_system_or_equ_desc',
        'purchase_card_as_paym_desc',
        'recovered_materials_s_desc',
        'referenced_idv_type_desc',
        'referenced_mult_or_si_desc',
        'research_description',
        'sam_exception_description',
        'sea_transportation_desc',
        'solicitation_procedur_desc',
        'subcontracting_plan_desc',
        'type_of_contract_pric_desc',
        'type_of_idc_description',
        'type_set_aside_description',
        'undefinitized_action_desc',
    )
    for column_name in new_columns:
        op.add_column('award_procurement', sa.Column(column_name, sa.Text(), nullable=True))

    op.add_column('detached_award_procurement', sa.Column('inherently_government_desc', sa.Text(), nullable=True))
    op.drop_column('detached_award_procurement', 'referenced_multi_or_single')
    ### end Alembic commands ###
def downgrade_data_broker():
    """Drop the description columns and undo the award_procurement renames."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('detached_award_procurement', sa.Column('referenced_multi_or_single', sa.TEXT(), autoincrement=False, nullable=True))
    op.drop_column('detached_award_procurement', 'inherently_government_desc')

    # Drop the columns in the reverse of the order upgrade added them.
    added_columns = (
        'a_76_fair_act_action_desc',
        'action_type_description',
        'clinger_cohen_act_pla_desc',
        'commercial_item_acqui_desc',
        'commercial_item_test_desc',
        'consolidated_contract_desc',
        'construction_wage_rat_desc',
        'contingency_humanitar_desc',
        'contract_award_type_desc',
        'contract_bundling_descrip',
        'contract_financing_descrip',
        'contracting_officers_desc',
        'cost_accounting_stand_desc',
        'cost_or_pricing_data_desc',
        'country_of_product_or_desc',
        'dod_claimant_prog_cod_desc',
        'domestic_or_foreign_e_desc',
        'epa_designated_produc_desc',
        'evaluated_preference_desc',
        'extent_compete_description',
        'fair_opportunity_limi_desc',
        'fed_biz_opps_description',
        'foreign_funding_desc',
        'government_furnished_desc',
        'idv_type_description',
        'information_technolog_desc',
        'inherently_government_desc',
        'interagency_contract_desc',
        'labor_standards_descrip',
        'last_modified',
        'legal_entity_state_code',
        'local_area_set_aside_desc',
        'materials_supplies_descrip',
        'multi_year_contract_desc',
        'multiple_or_single_aw_desc',
        'national_interest_desc',
        'other_than_full_and_o_desc',
        'performance_based_se_desc',
        'place_of_manufacture_desc',
        'place_of_performance_city',
        'product_or_service_co_desc',
        'program_system_or_equ_desc',
        'purchase_card_as_paym_desc',
        'recovered_materials_s_desc',
        'referenced_idv_type_desc',
        'referenced_mult_or_si_desc',
        'research_description',
        'sam_exception_description',
        'sea_transportation_desc',
        'solicitation_procedur_desc',
        'subcontracting_plan_desc',
        'type_of_contract_pric_desc',
        'type_of_idc_description',
        'type_set_aside_description',
        'undefinitized_action_desc',
    )
    for column_name in reversed(added_columns):
        op.drop_column('award_procurement', column_name)

    # Undo the two column renames.
    op.execute("ALTER TABLE award_procurement RENAME COLUMN legal_entity_state_descrip TO legal_entity_state_code")
    op.execute("ALTER TABLE award_procurement RENAME COLUMN referenced_mult_or_single TO referenced_multi_or_single")
    ### end Alembic commands ###
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/668d9fa93acb_add_descriptions_to_awardprocurement.py",
"copies": "1",
"size": "11135",
"license": "cc0-1.0",
"hash": -7611987417882727000,
"line_mean": 70.3782051282,
"line_max": 135,
"alpha_frac": 0.7219577907,
"autogenerated": false,
"ratio": 3.1942053930005736,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4416163183700574,
"avg_score": null,
"num_lines": null
} |
"""Add descriptions to (Published)AwardFinancialAssistance
Revision ID: c4d42e86c655
Revises: 668d9fa93acb
Create Date: 2018-04-04 11:00:18.103961
"""
# revision identifiers, used by Alembic.
revision = 'c4d42e86c655'       # this migration's id
down_revision = '668d9fa93acb'  # parent revision in the chain
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Run the upgrade for the named engine (e.g. 'data_broker')."""
    migration = globals()["upgrade_" + engine_name]
    migration()
def downgrade(engine_name):
    """Run the downgrade for the named engine (e.g. 'data_broker')."""
    migration = globals()["downgrade_" + engine_name]
    migration()
def upgrade_data_broker():
    """Add the six FABS description columns to both assistance tables."""
    ### commands auto generated by Alembic - please adjust! ###
    description_columns = (
        'action_type_description',
        'assistance_type_desc',
        'business_funds_ind_desc',
        'business_types_desc',
        'correction_delete_ind_desc',
        'record_type_description',
    )
    # Same columns on the staging table first, then the published table.
    for table_name in ('award_financial_assistance', 'published_award_financial_assistance'):
        for column_name in description_columns:
            op.add_column(table_name, sa.Column(column_name, sa.Text(), nullable=True))
    ### end Alembic commands ###
def downgrade_data_broker():
    """Drop the six FABS description columns from both assistance tables."""
    ### commands auto generated by Alembic - please adjust! ###
    description_columns = (
        'action_type_description',
        'assistance_type_desc',
        'business_funds_ind_desc',
        'business_types_desc',
        'correction_delete_ind_desc',
        'record_type_description',
    )
    # Reverse of the order upgrade added them: published table first,
    # columns in reverse order.
    for table_name in ('published_award_financial_assistance', 'award_financial_assistance'):
        for column_name in reversed(description_columns):
            op.drop_column(table_name, column_name)
    ### end Alembic commands ###
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/c4d42e86c655_add_descriptions_to_published_.py",
"copies": "1",
"size": "3105",
"license": "cc0-1.0",
"hash": -8877380755550875000,
"line_mean": 49.0806451613,
"line_max": 124,
"alpha_frac": 0.7320450886,
"autogenerated": false,
"ratio": 3.25130890052356,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9371435920165199,
"avg_score": 0.02238361379167239,
"num_lines": 62
} |
"""Add description to App
Revision ID: 4211b3736e90
Revises: 542cdf68faa5
Create Date: 2014-05-19 19:13:17.086649
"""
# revision identifiers, used by Alembic.
revision = '4211b3736e90'       # this migration's id
down_revision = '542cdf68faa5'  # parent revision in the chain
import json
from alembic import op
import sqlalchemy as sa
import sqlalchemy.sql as sql
# Minimal, migration-local table definition: only the columns this script
# reads and writes (id, data, description) are declared.
metadata = sa.MetaData()
app = sa.Table(
    "Apps",
    metadata,
    sa.Column("id", sa.Integer()),
    sa.Column("data", sa.Text),
    sa.Column("description", sa.Unicode(length=1000)),
)
def upgrade():
    """Add the ``Apps.description`` column and backfill it.

    The description currently lives inside the JSON blob stored in
    ``Apps.data`` (key ``'description'``); rows whose blob parses and
    contains that key are copied into the new column.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('Apps', sa.Column('description', sa.Unicode(length=1000), nullable=True))
    ### end Alembic commands ###
    # First pass: collect {app_id: description} for every row whose data
    # column holds valid JSON containing a 'description' key.
    apps_changes = {}
    apps_data = sql.select([app.c.id, app.c.data])
    for row in op.get_bind().execute(apps_data):
        try:
            contents = json.loads(row[app.c.data])
        except (ValueError, TypeError):
            # Was a bare `except:`. Narrowed so that only malformed JSON
            # (ValueError/JSONDecodeError) or a NULL data column (TypeError)
            # is skipped — unrelated errors such as KeyboardInterrupt now
            # propagate instead of being silently swallowed.
            continue
        if 'description' in contents:
            apps_changes[row[app.c.id]] = contents['description']
    # Second pass: write the collected descriptions back, one UPDATE per row.
    for app_id in apps_changes:
        update_stmt = app.update().where(app.c.id == app_id).values(description=apps_changes[app_id])
        op.execute(update_stmt)
def downgrade():
    """Drop the ``Apps.description`` column (the backfilled data is lost)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('Apps', 'description')
    ### end Alembic commands ###
| {
"repo_name": "go-lab/appcomposer",
"path": "alembic/versions/4211b3736e90_add_description_to_a.py",
"copies": "3",
"size": "1353",
"license": "bsd-2-clause",
"hash": -2645543675809456000,
"line_mean": 26.06,
"line_max": 103,
"alpha_frac": 0.6481892092,
"autogenerated": false,
"ratio": 3.3656716417910446,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.01102188847619441,
"num_lines": 50
} |
"""add detached D2 submission models
Revision ID: 4a1988f74a78
Revises: 4bf29ae16467
Create Date: 2017-01-20 11:40:50.782401
"""
# revision identifiers, used by Alembic.
revision = '4a1988f74a78'
down_revision = '4bf29ae16467'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade (multi-database Alembic setup)."""
    globals()["upgrade_{}".format(engine_name)]()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade (multi-database Alembic setup)."""
    globals()["downgrade_{}".format(engine_name)]()
def upgrade_data_broker():
    """Create the detached-D2-submission tables and flag.

    Adds the ``detached_award_financial_assistance`` staging table (one row
    per submitted D2 record, with an ``is_valid`` validation flag), the
    ``submission_sub_tier_affiliation`` join table, and a ``d2_submission``
    boolean on ``submission``.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('detached_award_financial_assistance',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('detached_award_financial_assistance_id', sa.Integer(), nullable=False),
        sa.Column('submission_id', sa.Integer(), nullable=False),
        sa.Column('job_id', sa.Integer(), nullable=False),
        sa.Column('row_number', sa.Integer(), nullable=False),
        sa.Column('action_date', sa.Text(), nullable=True),
        sa.Column('action_type', sa.Text(), nullable=True),
        sa.Column('assistance_type', sa.Text(), nullable=True),
        sa.Column('award_description', sa.Text(), nullable=True),
        sa.Column('awardee_or_recipient_legal', sa.Text(), nullable=True),
        sa.Column('awardee_or_recipient_uniqu', sa.Text(), nullable=True),
        sa.Column('awarding_agency_code', sa.Text(), nullable=True),
        sa.Column('awarding_agency_name', sa.Text(), nullable=True),
        sa.Column('awarding_office_code', sa.Text(), nullable=True),
        sa.Column('awarding_office_name', sa.Text(), nullable=True),
        sa.Column('awarding_sub_tier_agency_c', sa.Text(), nullable=True),
        sa.Column('awarding_sub_tier_agency_n', sa.Text(), nullable=True),
        sa.Column('award_modification_amendme', sa.Text(), nullable=True),
        sa.Column('business_funds_indicator', sa.Text(), nullable=True),
        sa.Column('business_types', sa.Text(), nullable=True),
        sa.Column('cfda_number', sa.Text(), nullable=True),
        sa.Column('cfda_title', sa.Text(), nullable=True),
        sa.Column('correction_late_delete_ind', sa.Text(), nullable=True),
        sa.Column('face_value_loan_guarantee', sa.Numeric(), nullable=True),
        sa.Column('fain', sa.Text(), nullable=True),
        sa.Column('federal_action_obligation', sa.Numeric(), nullable=True),
        sa.Column('fiscal_year_and_quarter_co', sa.Text(), nullable=True),
        sa.Column('funding_agency_code', sa.Text(), nullable=True),
        sa.Column('funding_agency_name', sa.Text(), nullable=True),
        sa.Column('funding_office_name', sa.Text(), nullable=True),
        sa.Column('funding_office_code', sa.Text(), nullable=True),
        sa.Column('funding_sub_tier_agency_co', sa.Text(), nullable=True),
        sa.Column('funding_sub_tier_agency_na', sa.Text(), nullable=True),
        sa.Column('legal_entity_address_line1', sa.Text(), nullable=True),
        sa.Column('legal_entity_address_line2', sa.Text(), nullable=True),
        sa.Column('legal_entity_address_line3', sa.Text(), nullable=True),
        sa.Column('legal_entity_city_code', sa.Text(), nullable=True),
        sa.Column('legal_entity_city_name', sa.Text(), nullable=True),
        sa.Column('legal_entity_congressional', sa.Text(), nullable=True),
        sa.Column('legal_entity_country_code', sa.Text(), nullable=True),
        sa.Column('legal_entity_county_code', sa.Text(), nullable=True),
        sa.Column('legal_entity_county_name', sa.Text(), nullable=True),
        sa.Column('legal_entity_foreign_city', sa.Text(), nullable=True),
        sa.Column('legal_entity_foreign_posta', sa.Text(), nullable=True),
        sa.Column('legal_entity_foreign_provi', sa.Text(), nullable=True),
        sa.Column('legal_entity_state_code', sa.Text(), nullable=True),
        sa.Column('legal_entity_state_name', sa.Text(), nullable=True),
        sa.Column('legal_entity_zip5', sa.Text(), nullable=True),
        sa.Column('legal_entity_zip_last4', sa.Text(), nullable=True),
        sa.Column('non_federal_funding_amount', sa.Numeric(), nullable=True),
        sa.Column('original_loan_subsidy_cost', sa.Numeric(), nullable=True),
        sa.Column('period_of_performance_curr', sa.Text(), nullable=True),
        sa.Column('period_of_performance_star', sa.Text(), nullable=True),
        sa.Column('place_of_performance_city', sa.Text(), nullable=True),
        sa.Column('place_of_performance_code', sa.Text(), nullable=True),
        sa.Column('place_of_performance_congr', sa.Text(), nullable=True),
        sa.Column('place_of_perform_country_c', sa.Text(), nullable=True),
        sa.Column('place_of_perform_county_na', sa.Text(), nullable=True),
        sa.Column('place_of_performance_forei', sa.Text(), nullable=True),
        sa.Column('place_of_perform_state_nam', sa.Text(), nullable=True),
        sa.Column('place_of_performance_zip4a', sa.Text(), nullable=True),
        sa.Column('record_type', sa.Integer(), nullable=True),
        sa.Column('sai_number', sa.Text(), nullable=True),
        sa.Column('total_funding_amount', sa.Numeric(), nullable=True),
        sa.Column('uri', sa.Text(), nullable=True),
        # Rows default to invalid until validation marks them otherwise.
        sa.Column('is_valid', sa.Boolean(), server_default='False', nullable=False),
        sa.PrimaryKeyConstraint('detached_award_financial_assistance_id')
    )
    # Lookup indexes for the common query paths (by award id, job, submission).
    op.create_index(op.f('ix_detached_award_financial_assistance_fain'), 'detached_award_financial_assistance', ['fain'], unique=False)
    op.create_index(op.f('ix_detached_award_financial_assistance_job_id'), 'detached_award_financial_assistance', ['job_id'], unique=False)
    op.create_index(op.f('ix_detached_award_financial_assistance_submission_id'), 'detached_award_financial_assistance', ['submission_id'], unique=False)
    op.create_index(op.f('ix_detached_award_financial_assistance_uri'), 'detached_award_financial_assistance', ['uri'], unique=False)
    # Many-to-many link between submissions and sub-tier agencies.
    op.create_table('submission_sub_tier_affiliation',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('submission_sub_tier_affiliation_id', sa.Integer(), nullable=False),
        sa.Column('submission_id', sa.Integer(), nullable=True),
        sa.Column('sub_tier_agency_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['sub_tier_agency_id'], ['sub_tier_agency.sub_tier_agency_id'], name='fk_sub_tier_submission_affiliation_agency_id'),
        sa.ForeignKeyConstraint(['submission_id'], ['submission.submission_id'], name='fk_submission_sub_tier_affiliation_id'),
        sa.PrimaryKeyConstraint('submission_sub_tier_affiliation_id')
    )
    # Flag distinguishing detached D2 submissions from regular ones.
    op.add_column('submission', sa.Column('d2_submission', sa.Boolean(), server_default='False', nullable=False))
    ### end Alembic commands ###
def downgrade_data_broker():
    """Reverse the upgrade: drop the flag, join table, indexes and staging table."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('submission', 'd2_submission')
    op.drop_table('submission_sub_tier_affiliation')
    # Indexes are dropped explicitly before the table for clarity/portability.
    op.drop_index(op.f('ix_detached_award_financial_assistance_uri'), table_name='detached_award_financial_assistance')
    op.drop_index(op.f('ix_detached_award_financial_assistance_submission_id'), table_name='detached_award_financial_assistance')
    op.drop_index(op.f('ix_detached_award_financial_assistance_job_id'), table_name='detached_award_financial_assistance')
    op.drop_index(op.f('ix_detached_award_financial_assistance_fain'), table_name='detached_award_financial_assistance')
    op.drop_table('detached_award_financial_assistance')
    ### end Alembic commands ###
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/4a1988f74a78_add_detached_d2_submission_models.py",
"copies": "1",
"size": "7389",
"license": "cc0-1.0",
"hash": -441980450908023040,
"line_mean": 55.8384615385,
"line_max": 153,
"alpha_frac": 0.6967113276,
"autogenerated": false,
"ratio": 3.1685248713550602,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9270188286005848,
"avg_score": 0.01900958258984238,
"num_lines": 130
} |
"""Add deudores al Estado to index"""
from tqdm import tqdm
from django.core.management.base import BaseCommand, CommandError
from cazador.models import Cazador
from cazador import shrink_url_in_string
# Accepted source databases; the -db command argument must be one of these.
DBS = ['deudores', 'candidato_2014', 'narcoindultos', 'redam']
class Command(BaseCommand):
    """Import tab-separated records from a file into the Cazador index.

    Usage: manage.py import_data -i <file> -db <database>, where <database>
    is one of the names listed in ``DBS``. Each input line is split on tabs
    and mapped to a ``Cazador`` row; the field layout depends on the source
    database.
    """

    def add_arguments(self, parser):
        parser.add_argument(
            '-i',
            action='store',
            dest='input_file',
            help='Enter filename with data to import.',
        )
        parser.add_argument(
            '-db',
            action='store',
            dest='database',
            help='Enter database, options: {}'.format(', '.join(DBS)),
        )

    def handle(self, *args, **options):
        # Guard clauses: both arguments are mandatory and -db must be known.
        if not options['input_file']:
            raise CommandError("Input Filename does not exist.")
        if options['database'] not in DBS:
            raise CommandError(
                "Name of database is incorrect. Use one of the "
                "following options: {}".format(', '.join(DBS)))
        self.process_file(options['input_file'], options['database'])

    def process_file(self, input_file, database):
        print(input_file, database)
        with open(input_file, "r") as fh:
            rows = fh.readlines()

        pending = []
        for raw in tqdm(rows):
            fields = raw.strip().split("\t")
            if database == 'candidato_2014':
                # Reorder selected fields into a single searchable string.
                text = " ".join([fields[2], fields[3], fields[4], fields[1], fields[7]])
                record = Cazador(
                    raw_data=text,
                    raw_data_with_short_links=shrink_url_in_string(text),
                    source=database,
                )
            elif database == 'redam':
                record = Cazador(
                    raw_data=", ".join(fields),
                    source=database,
                )
            elif database == 'narcoindultos':
                record = Cazador(
                    raw_data=" ".join(fields),
                    source=database,
                )
            else:
                # 'deudores' layout: name-ish fields at indexes 2, 10 and 1.
                text = " ".join([fields[2], fields[10], fields[1]])
                record = Cazador(
                    raw_data=text,
                    raw_data_with_short_links=shrink_url_in_string(text),
                    source=database,
                )
            pending.append(record)
        # Single bulk INSERT instead of one query per record.
        Cazador.objects.bulk_create(pending)
| {
"repo_name": "aniversarioperu/django-manolo",
"path": "cazador/management/commands/import_data.py",
"copies": "2",
"size": "2708",
"license": "bsd-3-clause",
"hash": -421372323790162940,
"line_mean": 36.095890411,
"line_max": 92,
"alpha_frac": 0.4715657312,
"autogenerated": false,
"ratio": 4.490878938640133,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5962444669840133,
"avg_score": null,
"num_lines": null
} |
"""Add device ADR tracking, creation, update and fcnterror columns
Revision ID: ad38a9fad16b
Revises: e7ff8a1b22fd
Create Date: 2016-12-15 22:25:48.605782
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ad38a9fad16b'
down_revision = 'e7ff8a1b22fd'
branch_labels = None
depends_on = None
def upgrade():
    """Add ADR tracking columns to ``devices``.

    Adds the negotiated data rate (``adr_datr``), an 11-slot SNR history
    (``snr1``..``snr11`` with ``snr_pointer``/``snr_average``), a frame
    counter error flag (``fcnterror``) and ``created``/``updated``
    timestamps.
    """
    op.add_column('devices',
                  sa.Column('adr_datr', sa.String(), nullable=True))
    # Circular-buffer write index into the snr1..snr11 history slots.
    op.add_column('devices',
                  sa.Column('snr_pointer', sa.Integer(), nullable=True))
    op.add_column('devices',
                  sa.Column('snr_average', sa.Float(), nullable=True))
    op.add_column('devices',
                  sa.Column('snr1', sa.Float(), nullable=True))
    op.add_column('devices',
                  sa.Column('snr2', sa.Float(), nullable=True))
    op.add_column('devices',
                  sa.Column('snr3', sa.Float(), nullable=True))
    op.add_column('devices',
                  sa.Column('snr4', sa.Float(), nullable=True))
    op.add_column('devices',
                  sa.Column('snr5', sa.Float(), nullable=True))
    op.add_column('devices',
                  sa.Column('snr6', sa.Float(), nullable=True))
    op.add_column('devices',
                  sa.Column('snr7', sa.Float(), nullable=True))
    op.add_column('devices',
                  sa.Column('snr8', sa.Float(), nullable=True))
    op.add_column('devices',
                  sa.Column('snr9', sa.Float(), nullable=True))
    op.add_column('devices',
                  sa.Column('snr10', sa.Float(), nullable=True))
    op.add_column('devices',
                  sa.Column('snr11', sa.Float(), nullable=True))
    # Bug fix: adding a NOT NULL column to a table with existing rows needs a
    # server-side default. SQLAlchemy's `default=` is Python-side only and is
    # ignored by ALTER TABLE, so the original add_column failed on non-empty
    # tables; `server_default='False'` supplies the value in the DDL itself.
    op.add_column('devices',
                  sa.Column('fcnterror', sa.Boolean(), nullable=False,
                            default=False, server_default='False'))
    op.add_column('devices',
                  sa.Column('created', sa.DateTime(timezone=True)))
    op.add_column('devices',
                  sa.Column('updated', sa.DateTime(timezone=True)))
def downgrade():
    """Drop every column added by this revision, in the order it was added."""
    for column in ('adr_datr', 'snr_pointer', 'snr_average',
                   'snr1', 'snr2', 'snr3', 'snr4', 'snr5', 'snr6',
                   'snr7', 'snr8', 'snr9', 'snr10', 'snr11',
                   'fcnterror', 'created', 'updated'):
        op.drop_column('devices', column)
| {
"repo_name": "Fluent-networks/floranet",
"path": "floranet/data/alembic/versions/ad38a9fad16b_add_device_adr_creation_and_update_columns.py",
"copies": "1",
"size": "2528",
"license": "mit",
"hash": -6004258472920730000,
"line_mean": 35.1142857143,
"line_max": 76,
"alpha_frac": 0.6305379747,
"autogenerated": false,
"ratio": 3.0754257907542577,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9172977773159738,
"avg_score": 0.006597198458903983,
"num_lines": 70
} |
"""Add device history in favor of logical deletion
Revision ID: 8d64bce23c6b
Revises: e4d4e95ae481
Create Date: 2017-03-16 12:52:57.852420
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import TSTZRANGE, JSONB
from sqlalchemy import false as false_just_for_sqlalchemy
import textwrap
# revision identifiers, used by Alembic.
revision = '8d64bce23c6b'
down_revision = 'e4d4e95ae481'
branch_labels = None
depends_on = None
def upgrade():
    """Replace logical device deletion with a row-history (temporal) scheme.

    Adds a ``sys_period`` tstzrange to ``device``, creates a
    ``device_history`` table filled by an UPDATE/DELETE trigger, exposes a
    ``device_with_history`` view, and rewrites the stored procedures:
    ``delete_device`` now deletes physically, ``get_device_details`` /
    ``get_profile_devices`` drop their ``deleted`` filters, and a new
    ``get_device_history`` function is added.
    """
    # TODO: Get this to use the op/sa functions better.
    # Versioning column, history table, and a trigger that snapshots the OLD
    # row (with its sys_period range closed at current_timestamp) on every
    # UPDATE or DELETE of a device row.
    op.execute("""
    ALTER TABLE device ADD COLUMN sys_period tstzrange not null default tstzrange(current_timestamp, null);
    UPDATE device set sys_period = tstzrange(current_timestamp, null);
    CREATE TABLE device_history(LIKE device INCLUDING DEFAULTS);
    CREATE OR REPLACE FUNCTION device_history_version() returns trigger as $$
    BEGIN
        if (TG_OP = 'UPDATE') then
            INSERT INTO public.device_history
            (device_id, device_type, properties, hostname, ip_address, mac_address, profile_name, sys_period)
            VALUES
            (OLD.device_id, OLD.device_type, OLD.properties, OLD.hostname, OLD.ip_address, OLD.mac_address, OLD.profile_name,
             tstzrange(lower(OLD.sys_period), current_timestamp));
            NEW.sys_period = tstzrange(current_timestamp, null);
            return new;
        elsif (TG_OP = 'DELETE') then
            INSERT INTO public.device_history
            (device_id, device_type, properties, hostname, ip_address, mac_address, profile_name, sys_period)
            VALUES
            (OLD.device_id, OLD.device_type, OLD.properties, OLD.hostname, OLD.ip_address, OLD.mac_address, OLD.profile_name,
             tstzrange(lower(OLD.sys_period), current_timestamp));
            return old;
        end if;
    end;
    $$ LANGUAGE plpgsql;
    CREATE TRIGGER device_history_upd
    before update or delete on device
    for each row execute procedure device_history_version();""")
    # Purge rows that were only logically deleted, then drop the old flag from
    # both tables (device_history inherited it via CREATE TABLE ... LIKE).
    op.execute("DELETE FROM public.device where deleted = True;")
    op.drop_column('device', 'deleted')
    op.drop_column('device_history', 'deleted')
    # Convenience view over live + historical rows.
    op.execute(
        "CREATE VIEW device_with_history as SELECT * FROM public.device UNION ALL SELECT * FROM public.device_history;")
    # Logical delete no longer exists; the former "fatal" delete becomes the
    # one and only delete_device. Note it no longer deletes log rows — history
    # keeps device rows around for them.
    op.execute("DROP FUNCTION public.delete_device_logical(character varying);")
    op.execute("ALTER FUNCTION public.delete_device_fatal(character varying) RENAME TO delete_device;")
    op.execute(textwrap.dedent("""
        CREATE OR REPLACE FUNCTION public.delete_device(p_device_name character varying)
          RETURNS SETOF change_result AS
        $BODY$
        DECLARE num_rows integer;
        DECLARE v_device_id integer;
        BEGIN
            IF (p_device_name is not null) THEN
                v_device_id := public.get_device_id(p_device_name);
                DELETE FROM public.device WHERE device_id=v_device_id;
                GET DIAGNOSTICS num_rows = ROW_COUNT;
                RETURN QUERY SELECT num_rows, v_device_id;
            END IF;
            RETURN QUERY SELECT 0, 0;
        END;
        $BODY$
          LANGUAGE plpgsql VOLATILE
          COST 100;
        """))
    # Logs may now reference devices that only exist in history, so drop the
    # FK to device (a replacement FK to device_history was considered, below).
    op.drop_constraint("log_process", "log")
    # op.create_foreign_key("fk_log_device_history", "log", "device_history", ["device_id"], ["device_id"])
    # Add sys_period to type
    op.execute(textwrap.dedent("""
        ALTER TYPE type_device_details ADD ATTRIBUTE sys_period tstzrange;
        """))
    # Remove 'where device.deleted' check
    op.execute(textwrap.dedent("""
        CREATE OR REPLACE FUNCTION public.get_device_details(p_device_name character varying)
          RETURNS SETOF type_device_details AS
        $BODY$
        DECLARE
            result_device_details type_device_details;
            v_device_id integer;
        BEGIN
            -- Get the device details
            IF (p_device_name is null) THEN
                -- return all of them...
                RETURN QUERY SELECT device_id, device_type, device.properties,
                    hostname, ip_address, mac_address, device.profile_name, profile.properties, sys_period
                    FROM device
                    LEFT JOIN profile ON device.profile_name = profile.profile_name
                    ORDER BY device.device_id;
            ELSE
                -- Get the device ID
                v_device_id := public.get_device_id(p_device_name);
                -- Just get the one
                RETURN QUERY SELECT device_id, device_type, device.properties,
                    hostname, ip_address, mac_address, device.profile_name, profile.properties, sys_period
                    FROM device
                    LEFT JOIN profile ON device.profile_name = profile.profile_name
                    WHERE device_id = v_device_id
                    ORDER BY device.device_id;
            END IF;
            RETURN;
        END
        $BODY$
          LANGUAGE plpgsql VOLATILE
          COST 100;
        """))
    # Remove device.deleted check
    op.execute(textwrap.dedent("""
        CREATE OR REPLACE FUNCTION public.get_profile_devices(p_profile_name character varying)
          RETURNS SETOF integer AS
        $BODY$
        DECLARE
            result_device_details type_device_details;
        BEGIN
            -- Get the device details
            RETURN QUERY SELECT device_id
                FROM public.device
                WHERE profile_name = p_profile_name
                ORDER BY device_id;
        END
        $BODY$
          LANGUAGE plpgsql VOLATILE
          COST 100;
        """))
    # Get history function: looks devices up by id, hostname or IP address in
    # the historical rows (or returns the whole combined view when no name).
    op.execute(textwrap.dedent("""
        CREATE OR REPLACE FUNCTION public.get_device_history(p_device_name character varying)
          RETURNS SETOF type_device_details AS
        $BODY$
        DECLARE
            result_device_details type_device_details;
            v_device_id integer;
        BEGIN
            -- Get the device details
            IF (p_device_name is null) THEN
                -- return all of them...
                RETURN QUERY SELECT device_id, device_type, properties,
                    hostname, ip_address, mac_address, profile_name, cast(null AS JSONB), sys_period
                    FROM device_with_history
                    ORDER BY sys_period;
            ELSE
                -- Just get things that match
                RETURN QUERY SELECT device_id, device_type, properties,
                    hostname, ip_address, mac_address, profile_name, cast(null AS JSONB), sys_period
                    FROM device_history
                    WHERE device_id = cast_to_int(p_device_name, 0)
                        or hostname = p_device_name
                        or ip_address = p_device_name
                    ORDER BY sys_period;
            END IF;
            RETURN;
        END
        $BODY$
          LANGUAGE plpgsql VOLATILE
          COST 100;
        """))
def downgrade():
    """Restore logical deletion: tear down the history machinery and bring
    back the ``deleted`` flag plus the original stored procedures."""
    # Remove trigger, trigger function, history accessor and the union view
    # before the history table itself can be dropped.
    op.execute("""DROP TRIGGER device_history_upd ON public.device;""")
    op.execute("""DROP FUNCTION public.device_history_version();""")
    op.execute("""DROP FUNCTION public.get_device_history(character varying);""")
    op.execute("DROP VIEW device_with_history;")
    op.drop_table('device_history')
    op.drop_column("device", "sys_period")
    # Re-add the logical-deletion flag, defaulting existing rows to False.
    op.add_column("device",
                  sa.Column('deleted', sa.BOOLEAN(), server_default=false_just_for_sqlalchemy(), nullable=False))
    op.execute(textwrap.dedent("""
        CREATE OR REPLACE FUNCTION public.delete_device_logical(p_device_name character varying)
          RETURNS SETOF change_result AS
        $BODY$
        DECLARE
            num_rows integer;
            v_device_id integer;
        BEGIN
            IF (p_device_name IS NULL) THEN
                RETURN QUERY SELECT 0,0;
            END IF;
            v_device_id := public.get_device_id(p_device_name);
            UPDATE public.device SET deleted = true
                WHERE device.device_id = v_device_id;
            GET DIAGNOSTICS num_rows = ROW_COUNT;
            RETURN QUERY SELECT num_rows, v_device_id;
        END;
        $BODY$
          LANGUAGE plpgsql VOLATILE
          COST 100;
        """))
    # Restore the delete_device/delete_device_fatal split; the fatal variant
    # also removes dependent log rows again.
    op.execute("ALTER FUNCTION public.delete_device(character varying) RENAME TO delete_device_fatal;")
    op.execute(textwrap.dedent("""
        CREATE OR REPLACE FUNCTION public.delete_device_fatal(p_device_name character varying)
          RETURNS SETOF change_result AS
        $BODY$
        DECLARE num_rows integer;
        DECLARE v_device_id integer;
        BEGIN
            IF (p_device_name is not null) THEN
                v_device_id := public.get_device_id(p_device_name);
                DELETE FROM public.log WHERE device_id=v_device_id;
                DELETE FROM public.device WHERE device_id=v_device_id;
                GET DIAGNOSTICS num_rows = ROW_COUNT;
                RETURN QUERY SELECT num_rows, v_device_id;
            END IF;
            RETURN QUERY SELECT 0, 0;
        END;
        $BODY$
          LANGUAGE plpgsql VOLATILE
          COST 100;
        """))
    # op.drop_constraint("fk_log_device_history", "log")
    # Re-establish the log -> device foreign key.
    op.create_foreign_key("log_process", "log", "device", ["device_id"], ["device_id"])
    # Reinstate the `deleted = false` filters in the lookup procedures.
    op.execute(textwrap.dedent("""
        CREATE OR REPLACE FUNCTION public.get_device_details(p_device_name character varying)
          RETURNS SETOF type_device_details AS
        $BODY$
        DECLARE
            result_device_details type_device_details;
            v_device_id integer;
        BEGIN
            -- Get the device details
            IF (p_device_name is null) THEN
                -- return all of them...
                RETURN QUERY SELECT device_id, device_type, device.properties,
                    hostname, ip_address, mac_address, device.profile_name, profile.properties
                    FROM device
                    LEFT JOIN profile ON device.profile_name = profile.profile_name
                    WHERE device.deleted = false
                    ORDER BY device.device_id;
            ELSE
                -- Get the device ID
                v_device_id := public.get_device_id(p_device_name);
                -- Just get the one
                RETURN QUERY SELECT device_id, device_type, device.properties,
                    hostname, ip_address, mac_address, device.profile_name, profile.properties
                    FROM device
                    LEFT JOIN profile ON device.profile_name = profile.profile_name
                    WHERE device_id = v_device_id AND device.deleted = false
                    ORDER BY device.device_id;
            END IF;
            RETURN;
        END
        $BODY$
          LANGUAGE plpgsql VOLATILE
          COST 100;
        """))
    op.execute(textwrap.dedent("""
        CREATE OR REPLACE FUNCTION public.get_profile_devices(p_profile_name character varying)
          RETURNS SETOF integer AS
        $BODY$
        DECLARE
            result_device_details type_device_details;
        BEGIN
            -- Get the device details
            RETURN QUERY SELECT device_id
                FROM public.device
                WHERE profile_name = p_profile_name and deleted is False
                ORDER BY device_id;
        END
        $BODY$
          LANGUAGE plpgsql VOLATILE
          COST 100;
        """))
    # Remove sys_period to type
    op.execute(textwrap.dedent("""
        ALTER TYPE type_device_details DROP ATTRIBUTE IF EXISTS sys_period;
        """))
| {
"repo_name": "intel-ctrlsys/actsys",
"path": "datastore/datastore/database_schema/schema_migration/versions/8d64bce23c6b_add_device_history_in_favor_of_logical_.py",
"copies": "1",
"size": "11717",
"license": "apache-2.0",
"hash": -5707603371565974000,
"line_mean": 35.7304075235,
"line_max": 120,
"alpha_frac": 0.580609371,
"autogenerated": false,
"ratio": 4.187634024303073,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5268243395303074,
"avg_score": null,
"num_lines": null
} |
"""Add device snr array column
Revision ID: 56e7e493cad7
Revises: 99f8aa50ac47
Create Date: 2017-03-01 20:00:55.501494
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '56e7e493cad7'
down_revision = '99f8aa50ac47'
branch_labels = None
depends_on = None
# Replace multiple snr columns with a snr array column
def upgrade():
    """Collapse the eleven snr<N> columns into one float-array column."""
    for slot in range(1, 12):
        op.drop_column('devices', 'snr{}'.format(slot))
    op.add_column('devices',
                  sa.Column('snr', sa.dialects.postgresql.ARRAY(sa.Float())))
def downgrade():
    """Recreate the individual snr<N> columns and drop the array column."""
    for slot in range(1, 12):
        op.add_column('devices',
                      sa.Column('snr{}'.format(slot), sa.Float(), nullable=True))
    op.drop_column('devices', 'snr')
| {
"repo_name": "Fluent-networks/floranet",
"path": "floranet/data/alembic/versions/56e7e493cad7_add_device_snr_array_column.py",
"copies": "1",
"size": "1868",
"license": "mit",
"hash": 6696242257118034000,
"line_mean": 32.3571428571,
"line_max": 67,
"alpha_frac": 0.6332976445,
"autogenerated": false,
"ratio": 3.0226537216828477,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.41559513661828473,
"avg_score": null,
"num_lines": null
} |
"""add d file metadata table
Revision ID: 6f5c2c66b328
Revises: 65ce5d505f12
Create Date: 2016-08-02 13:57:44.000353
"""
# revision identifiers, used by Alembic.
revision = '6f5c2c66b328'
down_revision = '65ce5d505f12'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade (multi-database Alembic setup)."""
    globals()["upgrade_{}".format(engine_name)]()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade (multi-database Alembic setup)."""
    globals()["downgrade_{}".format(engine_name)]()
def upgrade_data_broker():
    """Create the d_file_metadata table tracking generated D-file requests."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('d_file_metadata',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('d_file_id', sa.Integer(), nullable=False),
        sa.Column('type', sa.Text(), nullable=True),
        sa.Column('submission_id', sa.Integer(), nullable=True),
        sa.Column('start_date', sa.Date(), nullable=True),
        sa.Column('end_date', sa.Date(), nullable=True),
        sa.Column('status_id', sa.Integer(), nullable=True),
        sa.Column('url', sa.Text(), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['status_id'], ['job_status.job_status_id'], name='fk_status_id'),
        sa.ForeignKeyConstraint(['submission_id'], ['submission.submission_id'], name='fk_submission_id'),
        sa.PrimaryKeyConstraint('d_file_id'),
        # At most one D-file record per (submission, type) pair.
        sa.UniqueConstraint('submission_id', 'type', name='_submission_type_uc')
    )
    ### end Alembic commands ###
def downgrade_data_broker():
    """Drop the d_file_metadata table (all D-file request history is lost)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('d_file_metadata')
    ### end Alembic commands ###
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/6f5c2c66b328_add_d_file_metadata_table.py",
"copies": "2",
"size": "1671",
"license": "cc0-1.0",
"hash": -5517301307931495000,
"line_mean": 29.3818181818,
"line_max": 102,
"alpha_frac": 0.670855775,
"autogenerated": false,
"ratio": 3.342,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9879045386402796,
"avg_score": 0.026762077719440683,
"num_lines": 55
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.