text stringlengths 0 1.05M | meta dict |
|---|---|
"""Add diary invites
Revision ID: 3632e1c5e000
Revises:
Create Date: 2017-09-25 21:14:24.743599
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '3632e1c5e000'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the diary_invites table (plus lookup indexes) and add the
    optional life-story columns to ``users``."""
    op.create_table(
        'diary_invites',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('guest_id', sa.Integer(), nullable=False),
        sa.Column('guest_name', sa.String(length=128), nullable=False),
        sa.Column('guest_relation',
                  sa.Enum('Spouse', 'Child', 'Sibling', 'Parent', 'Other', name='guestrelation'),
                  nullable=False),
        sa.Column('status', sa.Enum('Sent', 'Accepted', 'Declined', name='invitestatus'), nullable=False),
        sa.PrimaryKeyConstraint('id'),
    )
    # Non-unique lookup indexes for both sides of an invite.
    for column_name in ('guest_id', 'user_id'):
        op.create_index(op.f('ix_diary_invites_%s' % column_name),
                        'diary_invites', [column_name], unique=False)
    # New optional biography fields on users.
    op.add_column('users', sa.Column('birth_year', sa.Integer(), nullable=True))
    op.add_column('users', sa.Column('death_year', sa.Integer(), nullable=True))
    op.add_column('users', sa.Column('life_story', sa.Text(), nullable=True))
    op.add_column('users', sa.Column('life_title', sa.String(length=256), nullable=True))
def downgrade():
    """Reverse upgrade(): drop the users columns, then the diary_invites
    indexes and table."""
    for column_name in ('life_title', 'life_story', 'death_year', 'birth_year'):
        op.drop_column('users', column_name)
    # Indexes must go before the table that owns them.
    op.drop_index(op.f('ix_diary_invites_user_id'), table_name='diary_invites')
    op.drop_index(op.f('ix_diary_invites_guest_id'), table_name='diary_invites')
    op.drop_table('diary_invites')
| {
"repo_name": "maxzheng/part-of-family",
"path": "migration/versions/3632e1c5e000_add_diary_invites.py",
"copies": "1",
"size": "2107",
"license": "mit",
"hash": 3054790232422002700,
"line_mean": 42,
"line_max": 127,
"alpha_frac": 0.616516374,
"autogenerated": false,
"ratio": 3.2666666666666666,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.43831830406666666,
"avg_score": null,
"num_lines": null
} |
# add dictionary schema
# dictionaries/dictionary_name endpoint
# ISSUES: datetime type is not serialized to JSON
obligor_schema = {
    # Cerberus-style schema definition; see the Cerberus project
    # (https://github.com/nicolaiarocci/cerberus) for the grammar.
    'id': {'type': 'integer', 'required': True, 'unique': True},
    # Credit-risk parameters for the obligor.
    'EAD': {'type': 'float'},
    'PD': {'type': 'float'},
    'LGD': {'type': 'float'},
}
# endpoints
obligors = {
    # 'title' tag used in item links.  Defaults to the resource title minus
    # the final plural 's' (works here: 'obligors' -> 'obligor').
    'item_title': 'obligor',
    # By default the standard item entry point is '/obligors/<ObligorId>';
    # this adds a secondary lookup on the 'id' field.  The pattern is a raw
    # string so '\w' is a literal regex token, not an (invalid) Python
    # escape sequence — non-raw '\w' raises SyntaxWarning on modern Python.
    'additional_lookup': {
        'url': r'regex("[\w]+")',
        'field': 'id'
    },
    # Override the global cache-control directives for this resource.
    'cache_control': 'max-age=10,must-revalidate',
    'cache_expires': 10,
    # Most global settings can be overridden at resource level.
    'resource_methods': ['GET', 'POST', 'DELETE'],
    'schema': obligor_schema
}
# Resource registry consumed by Eve: maps endpoint name -> definition.
DOMAIN = {
    'obligors': obligors,
}
# MongoDB connection settings (local development defaults).
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_DBNAME = 'eve'
# Collection-level methods: enable reads (GET), inserts (POST) and DELETE.
# (If you omit this line, the API defaults to ['GET'] and provides
# read-only access to the endpoint.)
RESOURCE_METHODS = ['GET', 'POST', 'DELETE']
# Item-level methods: enable reads (GET), edits (PATCH), replacements (PUT)
# and deletes of individual items (defaults to read-only item access).
ITEM_METHODS = ['GET', 'PATCH', 'PUT', 'DELETE']
DEBUG = 'True' | {
"repo_name": "open-risk/Open_Risk_API",
"path": "data_server/settings.py",
"copies": "1",
"size": "1727",
"license": "mpl-2.0",
"hash": 2849193319549809700,
"line_mean": 25.1818181818,
"line_max": 78,
"alpha_frac": 0.6218876665,
"autogenerated": false,
"ratio": 3.4609218436873745,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45828095101873745,
"avg_score": null,
"num_lines": null
} |
"""add digital scheduling
Revision ID: 0e0908363f40
Revises: 2bd074e60e19
Create Date: 2018-04-11 13:59:29.497293
"""
import model.utils
import sqlalchemy as sa
from alembic import op
from rdr_service.model.site_enums import DigitalSchedulingStatus
# revision identifiers, used by Alembic.
revision = "0e0908363f40"
down_revision = "2bd074e60e19"
branch_labels = None
depends_on = None
def upgrade(engine_name):
    # Multi-database migration: dispatch to upgrade_<engine_name>() below.
    globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
    # Dispatch to downgrade_<engine_name>() below.
    globals()["downgrade_%s" % engine_name]()
def upgrade_rdr():
    """Add the digital-scheduling columns to the ``site`` table (RDR db)."""
    status_column = sa.Column(
        "digital_scheduling_status", model.utils.Enum(DigitalSchedulingStatus), nullable=True
    )
    op.add_column("site", status_column)
    op.add_column("site", sa.Column("schedule_instructions", sa.String(length=2048), nullable=True))
def downgrade_rdr():
    """Remove the digital-scheduling columns from the ``site`` table."""
    for column_name in ("schedule_instructions", "digital_scheduling_status"):
        op.drop_column("site", column_name)
def upgrade_metrics():
    # No metrics-database changes in this revision.
    pass
def downgrade_metrics():
    # No metrics-database changes in this revision.
    pass
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/0e0908363f40_add_digital_scheduling.py",
"copies": "1",
"size": "1403",
"license": "bsd-3-clause",
"hash": -4220219554268525600,
"line_mean": 24.9814814815,
"line_max": 112,
"alpha_frac": 0.6821097648,
"autogenerated": false,
"ratio": 3.542929292929293,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4725039057729293,
"avg_score": null,
"num_lines": null
} |
"""add direct award tables
Revision ID: 950
Revises: 940
Create Date: 2017-08-22 12:01:28.054240
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '950'
down_revision = '940'
def upgrade():
    """Create the direct-award tables (projects, project users, searches,
    search result entries) and their lookup indexes."""
    op.create_table(
        'direct_award_projects',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('locked_at', sa.DateTime(), nullable=True),
        sa.Column('active', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('direct_award_projects_pkey')),
    )
    op.create_table(
        'direct_award_project_users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('project_id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['project_id'], ['direct_award_projects.id'],
                                name=op.f('direct_award_project_users_project_id_fkey')),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'],
                                name=op.f('direct_award_project_users_user_id_fkey')),
        sa.PrimaryKeyConstraint('id', name=op.f('direct_award_project_users_pkey')),
    )
    op.create_index(op.f('ix_direct_award_project_users_project_id'),
                    'direct_award_project_users', ['project_id'], unique=False)
    op.create_index(op.f('ix_direct_award_project_users_user_id'),
                    'direct_award_project_users', ['user_id'], unique=False)
    op.create_table(
        'direct_award_searches',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created_by', sa.Integer(), nullable=False),
        sa.Column('project_id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('searched_at', sa.DateTime(), nullable=True),
        sa.Column('search_url', sa.Text(), nullable=False),
        sa.Column('active', sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(['created_by'], ['users.id'],
                                name=op.f('direct_award_searches_created_by_fkey')),
        sa.ForeignKeyConstraint(['project_id'], ['direct_award_projects.id'],
                                name=op.f('direct_award_searches_project_id_fkey')),
        sa.PrimaryKeyConstraint('id', name=op.f('direct_award_searches_pkey')),
    )
    # Partial unique index: at most one *active* search per project
    # (postgresql_where restricts the index to rows where active is true).
    op.create_index('idx_project_id_active', 'direct_award_searches',
                    ['project_id', 'active'], unique=True, postgresql_where=sa.text('active'))
    op.create_index(op.f('ix_direct_award_searches_active'),
                    'direct_award_searches', ['active'], unique=False)
    op.create_index(op.f('ix_direct_award_searches_project_id'),
                    'direct_award_searches', ['project_id'], unique=False)
    op.create_table(
        'direct_award_search_result_entries',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('search_id', sa.Integer(), nullable=False),
        sa.Column('archived_service_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['archived_service_id'], ['archived_services.id'],
                                name=op.f('direct_award_search_result_entries_archived_service_id_fkey')),
        sa.ForeignKeyConstraint(['search_id'], ['direct_award_searches.id'],
                                name=op.f('direct_award_search_result_entries_search_id_fkey')),
        sa.PrimaryKeyConstraint('id', name=op.f('direct_award_search_result_entries_pkey')),
    )
    op.create_index(op.f('ix_direct_award_search_result_entries_archived_service_id'),
                    'direct_award_search_result_entries', ['archived_service_id'], unique=False)
    op.create_index(op.f('ix_direct_award_search_result_entries_search_id'),
                    'direct_award_search_result_entries', ['search_id'], unique=False)
def downgrade():
    """Reverse upgrade(): drop indexes, then tables, children before parents."""
    for index_name in (op.f('ix_direct_award_search_result_entries_search_id'),
                       op.f('ix_direct_award_search_result_entries_archived_service_id')):
        op.drop_index(index_name, table_name='direct_award_search_result_entries')
    op.drop_table('direct_award_search_result_entries')
    for index_name in (op.f('ix_direct_award_searches_project_id'),
                       op.f('ix_direct_award_searches_active'),
                       'idx_project_id_active'):
        op.drop_index(index_name, table_name='direct_award_searches')
    op.drop_table('direct_award_searches')
    for index_name in (op.f('ix_direct_award_project_users_user_id'),
                       op.f('ix_direct_award_project_users_project_id')):
        op.drop_index(index_name, table_name='direct_award_project_users')
    op.drop_table('direct_award_project_users')
    op.drop_table('direct_award_projects')
| {
"repo_name": "alphagov/digitalmarketplace-api",
"path": "migrations/versions/950_add_direct_award_tables.py",
"copies": "1",
"size": "4730",
"license": "mit",
"hash": -3595799738626608000,
"line_mean": 58.125,
"line_max": 163,
"alpha_frac": 0.6997885835,
"autogenerated": false,
"ratio": 3.1324503311258276,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4332238914625828,
"avg_score": null,
"num_lines": null
} |
"""add disabled column
Revision ID: 7ec6c2a6a1c8
Revises: 9e59e0b9d1cf
Create Date: 2020-02-28 16:12:35.548279
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '7ec6c2a6a1c8'
down_revision = '9e59e0b9d1cf'
branch_labels = None
depends_on = None
def upgrade():
    """Add a nullable ``disabled`` flag to flicket_users and backfill it."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('flicket_users', sa.Column('disabled', sa.Boolean(), nullable=True))
    # ### end Alembic commands ###
    # Backfill: the new column is NULL for all existing rows; normalise every
    # NULL to False so existing users start out enabled.
    # Imported inside the function so the application is only loaded when the
    # migration actually runs.
    from application import app, db
    from application.flicket.models.flicket_user import FlicketUser
    users = FlicketUser.query.all()
    for user in users:
        if user.disabled is None:
            user.disabled = False
    db.session.commit()
def downgrade():
    """Remove the ``disabled`` flag from flicket_users."""
    op.drop_column('flicket_users', 'disabled')
| {
"repo_name": "evereux/flicket",
"path": "migrations/versions/7ec6c2a6a1c8_add_disabled_column.py",
"copies": "1",
"size": "1062",
"license": "mit",
"hash": -17073768963712084,
"line_mean": 24.9024390244,
"line_max": 86,
"alpha_frac": 0.6902071563,
"autogenerated": false,
"ratio": 3.3821656050955413,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4572372761395541,
"avg_score": null,
"num_lines": null
} |
"""Add display_tas to certified tables
Revision ID: ce018801e0fa
Revises: 8b22879952cf
Create Date: 2020-02-20 09:26:23.017749
"""
# revision identifiers, used by Alembic.
revision = 'ce018801e0fa'
down_revision = '8b22879952cf'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    # Multi-database migration: dispatch to upgrade_<engine_name>() below.
    globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
    # Dispatch to downgrade_<engine_name>() below.
    globals()["downgrade_%s" % engine_name]()
def upgrade_data_broker():
    """Add a nullable ``display_tas`` text column to each certified table."""
    for table_name in ('certified_appropriation',
                       'certified_award_financial',
                       'certified_object_class_program_activity'):
        op.add_column(table_name, sa.Column('display_tas', sa.Text(), nullable=True))
def downgrade_data_broker():
    """Drop ``display_tas`` from each certified table (reverse of upgrade)."""
    for table_name in ('certified_object_class_program_activity',
                       'certified_award_financial',
                       'certified_appropriation'):
        op.drop_column(table_name, 'display_tas')
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/ce018801e0fa_add_display_tas_to_certified_tables.py",
"copies": "1",
"size": "1243",
"license": "cc0-1.0",
"hash": 7253716742243857000,
"line_mean": 27.25,
"line_max": 112,
"alpha_frac": 0.698310539,
"autogenerated": false,
"ratio": 3.2710526315789474,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4469363170578947,
"avg_score": null,
"num_lines": null
} |
"""add distinct visit counts
Revision ID: 3513057132ca
Revises: ed28b84f061e
Create Date: 2019-03-01 16:36:41.722682
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "3513057132ca"
down_revision = "ed28b84f061e"
branch_labels = None
depends_on = None
def upgrade(engine_name):
    # Multi-database migration: dispatch to upgrade_<engine_name>() below.
    globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
    # Dispatch to downgrade_<engine_name>() below.
    globals()["downgrade_%s" % engine_name]()
def upgrade_rdr():
    """Add a counter for a participant's distinct visits (RDR database)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("participant_summary", sa.Column("number_distinct_visits", sa.Integer(), nullable=True))
    # ### end Alembic commands ###
def downgrade_rdr():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("participant_summary", "number_distinct_visits")
    # ### end Alembic commands ###
def upgrade_metrics():
    # No metrics-database changes in this revision.
    pass
def downgrade_metrics():
    # No metrics-database changes in this revision.
    pass
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/3513057132ca_add_distinct_visit_counts.py",
"copies": "1",
"size": "1149",
"license": "bsd-3-clause",
"hash": 2685943979613428000,
"line_mean": 23.4468085106,
"line_max": 106,
"alpha_frac": 0.6710182768,
"autogenerated": false,
"ratio": 3.590625,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47616432768,
"avg_score": null,
"num_lines": null
} |
# Add documentation link
from __future__ import unicode_literals
from django.contrib.auth.models import User, Group
from categories.models import CategoryBase
from django.db import models
from datetime import datetime
class Skill(CategoryBase):
    # Category (django-categories CategoryBase) used to tag volunteer skills.
    class Meta:
        verbose_name = 'Skill'
        verbose_name_plural = 'Skills'
    def __str__(self):
        return self.name
    def __unicode__(self):
        return str(self.name)
class Interest(CategoryBase):
    # Category used to tag volunteer interests.
    class Meta:
        verbose_name = 'Interest'
        verbose_name_plural = 'Interests'
    def __str__(self):
        return self.name
    def __unicode__(self):
        return str(self.name)
class LMS(models.Model):
    # A learning-management system a school integrates with.
    name = models.CharField(max_length=128)
    url = models.CharField(max_length=128)
    class Meta:
        verbose_name = 'LMS'
        verbose_name_plural = 'LMS'
    def __str__(self):
        return self.name
    def __unicode__(self):
        return self.name
class LMS_Web_Service(models.Model):
    # A single web-service endpoint exposed by an LMS.
    web_service_name = models.CharField(max_length=128)
    # depending on the options we might be able to do a choicefield here
    web_service_method = models.CharField(max_length=128)
    web_service_url = models.CharField(max_length=128)
    class Meta:
        verbose_name = 'LMS Web Service'
        verbose_name_plural = 'LMS Web Services'
    def __str__(self):
        return self.web_service_name + " - " + self.web_service_method
    def __unicode__(self):
        return self.web_service_name
class School(models.Model):
    # A school and the LMS it uses.
    lms = models.ForeignKey(LMS, on_delete=models.CASCADE)
    name = models.CharField(max_length=128)
    url = models.CharField(max_length=128)
    def __str__(self):
        return self.name
    def __unicode__(self):
        return self.name
class User_Add_Ons(models.Model):
    # Extra per-user data that does not fit on django.contrib.auth User.
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    school = models.ForeignKey(School, on_delete=models.CASCADE)
    # The user's ID w/in their LMS
    lms = models.ForeignKey(LMS, on_delete=models.CASCADE)
    class Meta:
        verbose_name = 'User Add-ons'
        verbose_name_plural = 'User Add-ons'
    def __str__(self):
        return self.user.username + " - " + self.school.name
    def __unicode__(self):
        return str(self.user)
class Volunteer_User_Add_Ons(models.Model):
    """Profile data for volunteer users: contact info, work background,
    education, skills and interests.

    The model's name is historically inaccurate but is kept because renaming
    it would require updating many interfaces.
    """
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    phone = models.CharField(max_length=13, )
    canGetText = models.BooleanField(default=True)
    workTitle = models.CharField(max_length=25)
    isBusinessOwner = models.BooleanField(default=True)
    workIndustry = models.CharField(max_length=25)
    yearsInIndustry = models.IntegerField()
    linkedinProfile = models.CharField(max_length=128, null=True, blank=True, )
    hsGradChoices = (
        (1, '1-4'),
        (2, '5-10'),
        (3, '11 or more'),
        (4, 'Have not graduated'),)
    yearsSinceHSGraduation = models.IntegerField(choices=hsGradChoices)
    collegeLevelChoice = (
        (1, "Associate"),
        (2, "Bachelor's"),
        (3, "Master's"),
        (4, "Doctoral"),
        (5, "None"),)
    collegeLevel = models.IntegerField(choices=collegeLevelChoice)
    collegeMajor = models.CharField(max_length=128, null=True, blank=True)
    # Fix: ManyToManyField ignores null=True (Django check fields.W340) and
    # max_length has no meaning on a many-to-many relation, so both options
    # are dropped; blank=True keeps the fields optional in forms.  No schema
    # change results from removing them.
    skills = models.ManyToManyField(Skill, blank=True)
    interests = models.ManyToManyField(Interest, blank=True)
    class Meta:
        verbose_name = 'Volunteer add-ons'
        verbose_name_plural = 'Volunteer add-ons'
    def __str__(self):
        return self.user.username + " - " + self.workTitle
    def __unicode__(self):
        return "Volunteer: " + str(self.user)
class User_Group_Role_Map(models.Model):
    """Maps a user (via User_Add_Ons) to a role within an auth Group."""
    group = models.ForeignKey(Group)
    user = models.ForeignKey(User_Add_Ons, on_delete=models.CASCADE)
    role = models.CharField(max_length=128)
    class Meta:
        verbose_name = 'Role'
        verbose_name_plural = 'Roles'
    def __str__(self):
        # Fix: ``self.user`` is a User_Add_Ons, which has neither ``group``
        # nor ``username``; the old ``self.user.group.name`` /
        # ``self.user.username`` raised AttributeError.  Mirrors __unicode__.
        return self.group.name + ": " + self.user.user.username + "-" + self.role
    def __unicode__(self):
        return str(self.group) + ': ' + str(self.user) + '-' + str(self.role)
class Class(models.Model):
    # A class (course section) taught at a school through an LMS.
    school = models.ForeignKey(School, on_delete=models.CASCADE)
    lms = models.ForeignKey(LMS, on_delete=models.CASCADE)
    teacher = models.ForeignKey(User_Add_Ons, on_delete=models.CASCADE, )
    name = models.CharField(max_length=128)
    academic_year = models.IntegerField(default=None, null=True)
    semester = models.CharField(max_length=128, default=None, null=True)
    class Meta:
        verbose_name = 'FAL Class'
        verbose_name_plural = 'FAL Classes'
    def __str__(self):
        return self.name + " - " + self.teacher.user.first_name
    def __unicode__(self):
        return str(self.name) + ':' + str(self.teacher)
class Class_Group(models.Model):
    """Associates an auth Group with the class it belongs to."""
    group = models.OneToOneField(Group, on_delete=models.CASCADE)
    falClass = models.ForeignKey(Class, on_delete=models.CASCADE)
    class Meta:
        verbose_name = 'Class Groups'
        verbose_name_plural = 'Class Groups'
    def __str__(self):
        # Fix: this model has no ``user`` field, so the previous
        # ``self.user.group.name`` raised AttributeError.  Mirrors __unicode__.
        return self.group.name + " - " + self.falClass.name
    def __unicode__(self):
        return str(self.group.name) + ':' + str(self.falClass.name)
class Student_Class(models.Model):
    # Enrollment: which students belong to which class.
    student = models.ForeignKey(User, on_delete=models.CASCADE, related_name='User')
    falClass = models.ForeignKey(Class, on_delete=models.CASCADE)
    def __str__(self):
        return self.student.username + ": " + self.falClass.name
    def __unicode__(self):
        return str(self.student) + ':' + str(self.falClass)
class Assignment(models.Model):
    # An assignment given to a class, optionally with a document and due date.
    title = models.CharField(max_length=128)
    falClass = models.ForeignKey(Class, on_delete=models.CASCADE)
    document = models.CharField(max_length=128, blank=True, null=True)
    due_date = models.DateTimeField(blank=True, null=True)
    creation_date = models.DateTimeField(auto_now_add=True)
    description = models.CharField(max_length=256, blank=True, null=True)
    def __str__(self):
        return self.title + " - " + self.falClass.name
    def __unicode__(self):
        return str(self.title) + ' (' + str(self.falClass) + ')'
class Interview(models.Model):
    # An interview conducted for a group assignment; the interviewer is
    # stored as free text, the interviewee is a User.
    interviewer = models.CharField(max_length=256)
    interviewee = models.ForeignKey(User, on_delete=models.CASCADE, related_name='interviewee', )
    group = models.ForeignKey(Group)
    date = models.DateTimeField(default=datetime.now, blank=True)
    assignment = models.ForeignKey(Assignment, on_delete=models.CASCADE, related_name='assignment')
    def __str__(self):
        return self.assignment.title + " - " + self.group.name
    def __unicode__(self):
        return 'Interview of ' + str(self.interviewee) + ' by ' + str(self.assignment)
class Question(models.Model):
    # An interview question authored by a user.
    name = models.CharField(max_length=128)
    created_by = models.ForeignKey(User_Add_Ons, on_delete=models.CASCADE, )
    creation_date = models.DateTimeField()
    def __str__(self):
        return self.name + " - " + self.created_by.user.username
    def __unicode__(self):
        return str(self.created_by) + ':' + str(self.name)
class Interview_Question_Map(models.Model):
    # Associates a question with an interview it was asked in.
    interview = models.ForeignKey(Interview, on_delete=models.CASCADE, )
    question = models.ForeignKey(Question, on_delete=models.CASCADE, )
    class Meta:
        verbose_name = 'Interview Question'
        verbose_name_plural = 'Interview Questions'
    def __str__(self):
        return self.question.name + " - " + self.interview.interviewee.username
    def __unicode__(self):
        return str(self.question) + ' (' + str(self.interview) + ')'
class Answer(models.Model):
    # A recorded answer to a question.
    question = models.ForeignKey(Question, on_delete=models.CASCADE, )
    result = models.CharField(max_length=128)
    created_by = models.ForeignKey(User_Add_Ons, on_delete=models.CASCADE, )
    creation_date = models.DateTimeField()
    def __unicode__(self):
        return str(self.question)
    def __str__(self):
        return self.question.name + "(" + self.result + ")"
class Video(models.Model):
    # A recorded video stored by URL, with free-text tags and a status string.
    # interview = models.ForeignKey(Interview, on_delete=models.CASCADE, null=True, blank=True, )
    name = models.CharField(max_length=128)
    url = models.CharField(max_length=128)
    tags = models.CharField(max_length=128, null=True, blank=True, )
    created_by = models.ForeignKey(User_Add_Ons, on_delete=models.CASCADE, )
    creation_date = models.DateTimeField(default=datetime.now, blank=True)
    status = models.CharField(max_length=128)
    def __str__(self):
        return str(self.name) + ' (' + str(self.creation_date) + ')'
    def __unicode__(self):
        return str(self.name) + ' (' + str(self.creation_date) + ')'
class Question_Video_Map(models.Model):
    # Associates a video with a question.
    question = models.ForeignKey(Question, on_delete=models.CASCADE, )
    video = models.ForeignKey(Video, on_delete=models.CASCADE, )
    class Meta:
        verbose_name = 'Video Question'
        verbose_name_plural = 'Video Questions'
    def __str__(self):
        return str(self.question.name) + " - " + str(self.video.name)
    def __unicode__(self):
        return str(self.question) + ':' + str(self.video)
class Interview_Question_Video_Map(models.Model):
    # Links a video to a specific interview question.
    interview_question = models.ForeignKey(Interview_Question_Map, on_delete=models.CASCADE, )
    video = models.ForeignKey(Video, on_delete=models.CASCADE, )
    class Meta:
        verbose_name = 'Interview Question Video'
        verbose_name_plural = 'Interview Video Questions'
    def __str__(self):
        return str(self.interview_question.id) + " - " + str(self.video.name)
    def __unicode__(self):
        return str(self.interview_question) + '-' + str(self.video)
class Video_Comment(models.Model):
    # A user comment on a video.
    video = models.ForeignKey(Video, on_delete=models.CASCADE, )
    comment = models.CharField(max_length=128)
    created_by = models.ForeignKey(User_Add_Ons, on_delete=models.CASCADE, )
    creation_date = models.DateTimeField()
    class Meta:
        verbose_name = 'Video Comment'
        verbose_name_plural = 'Video Comments'
    def __str__(self):
        return self.video.name + ' (' + str(self.created_by) + ', ' + str(self.creation_date) + ')'
    def __unicode__(self):
        return str(self.video) + ' (' + str(self.created_by) + ', ' + str(self.creation_date) + ')'
class Assignment_Submission(models.Model):
    # A group's submission for an assignment.
    name = models.CharField(max_length=128)
    group = models.ForeignKey(Group)
    class Meta:
        verbose_name = 'Submission'
        verbose_name_plural = 'Submissions'
    def __str__(self):
        return str(self.group.name) + ':' + str(self.name)
    def __unicode__(self):
        return str(self.group) + ':' + str(self.name)
class Type(models.Model):
    # A user type/category label.
    name = models.CharField(max_length=128)
    def __str__(self):
        return self.name
    def __unicode__(self):
        return str(self.name)
class User_Type(models.Model):
    # Assigns exactly one Type to a User.
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    type = models.ForeignKey(Type)
    def __str__(self):
        return str(self.user.username) + ':' + str(self.type.name)
    def __unicode__(self):
        return str(self.user.username) + ':' + str(self.type.name)
class Submission_Interview_Map(models.Model):
    # Associates an interview with an assignment submission.
    submission = models.ForeignKey(Assignment_Submission, on_delete=models.CASCADE, )
    interview = models.ForeignKey(Interview, on_delete=models.CASCADE, )
    class Meta:
        verbose_name = 'Interview Submission'
        verbose_name_plural = 'Interview Submissions'
    def __str__(self):
        return str(self.submission.name) + ':' + str(self.interview.interviewee)
    def __unicode__(self):
        return str(self.submission) + ':' + str(self.interview)
| {
"repo_name": "foraliving/pilot",
"path": "foraliving/models.py",
"copies": "1",
"size": "12052",
"license": "mit",
"hash": -3424809594938986500,
"line_mean": 30.7994722955,
"line_max": 121,
"alpha_frac": 0.6486060405,
"autogenerated": false,
"ratio": 3.528103044496487,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4676709084996487,
"avg_score": null,
"num_lines": null
} |
# Add documentation link
from __future__ import unicode_literals
from django.contrib.auth.models import User, Group
from django.db import models
from datetime import datetime
# from mptt.models import MPTTModel, TreeForeignKey
class LMS(models.Model):
    # A learning-management system a school integrates with.
    name = models.CharField(max_length=128)
    url = models.CharField(max_length=128)
    class Meta:
        verbose_name = 'LMS'
        verbose_name_plural = 'LMS'
    def __unicode__(self):
        return self.name
class LMS_Web_Service(models.Model):
    # A single web-service endpoint exposed by an LMS.
    web_service_name = models.CharField(max_length=128)
    # depending on the options we might be able to do a choicefield here
    web_service_method = models.CharField(max_length=128)
    web_service_url = models.CharField(max_length=128)
    class Meta:
        verbose_name = 'LMS Web Service'
        verbose_name_plural = 'LMS Web Services'
    def __unicode__(self):
        return self.web_service_name
class School(models.Model):
    # A school and the LMS it uses.
    lms = models.ForeignKey(LMS, on_delete=models.CASCADE)
    name = models.CharField(max_length=128)
    url = models.CharField(max_length=128)
    def __unicode__(self):
        return self.name
class User_Add_Ons(models.Model):
    # Extra per-user data that does not fit on django.contrib.auth User.
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    school = models.ForeignKey(School, on_delete=models.CASCADE)
    # The user's ID w/in their LMS
    lms = models.ForeignKey(LMS, on_delete=models.CASCADE)
    class Meta:
        verbose_name = 'User Add-ons'
        verbose_name_plural = 'User Add-ons'
    def __unicode__(self):
        return str(self.user)
class Skill(models.Model):
    # A named skill a volunteer can have.
    name = models.CharField(max_length=25)
class Interest(models.Model):
    # A named interest a volunteer can have.
    name = models.CharField(max_length=25)
class Volunteer_User_Add_Ons(models.Model):
    """Profile data for volunteer users: contact info, work background,
    education, skills and interests."""
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    phone = models.CharField(max_length=12, )
    canGetText = models.BooleanField(default=True)
    workTitle = models.CharField(max_length=25)
    isBusinessOwner = models.BooleanField(default=True)
    workIndustry = models.CharField(max_length=25)
    yearsInIndustry = models.IntegerField()
    linkedinProfile = models.CharField(max_length=128, null=True, blank=True, )
    hsGradChoices = (
        (1, '1-4'),
        (2, '5-10'),
        (3, '11 or more'),
        (4, 'Have not graduated'),)
    yearsSinceHSGraduation = models.IntegerField(choices=hsGradChoices)
    collegeLevelChoice = (
        (1, "associate"),
        (2, "bachelor's"),
        (3, "master's"),
        (4, "doctoral"),
        (5, "none"),)
    # Fix: collegeLevel previously reused hsGradChoices, so the college-level
    # field offered high-school-graduation ranges instead of degree levels.
    collegeLevel = models.IntegerField(choices=collegeLevelChoice)
    collegeMajor = models.CharField(max_length=128, null=True, blank=True, )
    # skills = TreeForeignKey('self', null=True, blank=True, related_name='children', db_index=True)
    skills = models.ForeignKey(Skill, null=True, blank=True, )
    # interests = TreeForeignKey('interest-self', null=True, blank=True, related_name='interest-children', db_index=True)
    interests = models.ForeignKey(Interest, null=True, blank=True, )
    # User_Skill_Map
    # User_Interest_Map
    class Meta:
        verbose_name = 'Volunteer add-ons'
        verbose_name_plural = 'Volunteer add-ons'
    def __unicode__(self):
        return "Volunteer: " + str(self.user)
    # return "Volunteer: "
class User_Group_Role_Map(models.Model):
    # Maps a user (via User_Add_Ons) to a role within an auth Group.
    # NOTE(review): ForeignKey without on_delete relies on the pre-Django-2.0
    # implicit CASCADE default.
    group = models.ForeignKey(Group)
    user = models.ForeignKey(User_Add_Ons, on_delete=models.CASCADE)
    role = models.CharField(max_length=128)
    class Meta:
        verbose_name = 'Role'
        verbose_name_plural = 'Roles'
    def __unicode__(self):
        return str(self.group) + ': ' + str(self.user) + '-' + str(self.role)
class Class(models.Model):
    # A class (course section) taught at a school through an LMS.
    school = models.ForeignKey(School, on_delete=models.CASCADE)
    lms = models.ForeignKey(LMS, on_delete=models.CASCADE)
    teacher = models.ForeignKey(User_Add_Ons, on_delete=models.CASCADE, )
    name = models.CharField(max_length=128)
    academic_year = models.IntegerField()
    semester = models.CharField(max_length=128)
    class Meta:
        verbose_name = 'FAL Class'
        verbose_name_plural = 'FAL Classes'
    def __unicode__(self):
        return str(self.school) + ':' + str(self.teacher)
class Interview(models.Model):
    """An interview of one user by another, conducted within a group."""
    interviewer = models.ForeignKey(User_Add_Ons, on_delete=models.CASCADE, related_name='interviewer', )
    interviewee = models.ForeignKey(User_Add_Ons, on_delete=models.CASCADE, related_name='interviewee', )
    group = models.ForeignKey(Group)
    date = models.DateTimeField()
    def __unicode__(self):
        # Fix: the fields were referenced without ``self.`` and raised
        # NameError whenever the object was rendered.
        return 'Interview of ' + str(self.interviewee) + ' by ' + str(self.interviewer)
class Question(models.Model):
    # An interview question authored by a user.
    name = models.CharField(max_length=128)
    created_by = models.ForeignKey(User_Add_Ons, on_delete=models.CASCADE, )
    creation_date = models.DateTimeField()
    def __unicode__(self):
        return str(self.created_by) + ':' + str(self.name)
class Interview_Question_Map(models.Model):
    """Associates a question with an interview it was asked in."""
    interview = models.ForeignKey(Interview, on_delete=models.CASCADE, )
    question = models.ForeignKey(Question, on_delete=models.CASCADE, )
    class Meta:
        verbose_name = 'Interview Question'
        verbose_name_plural = 'Interview Questions'
    def __unicode__(self):
        # Fix: ``interview`` was referenced without ``self.`` (NameError).
        return str(self.question) + ' (' + str(self.interview) + ')'
class Answer(models.Model):
    # A recorded answer to a question.
    question = models.ForeignKey(Question, on_delete=models.CASCADE, )
    result = models.CharField(max_length=128)
    created_by = models.ForeignKey(User_Add_Ons, on_delete=models.CASCADE, )
    creation_date = models.DateTimeField()
    def __unicode__(self):
        return str(self.question)
class Video(models.Model):
    """A video identified by name and URL, with optional free-form tags."""
    # interview = models.ForeignKey(Interview, on_delete=models.CASCADE, null=True, blank=True, )
    name = models.CharField(max_length=128)
    url = models.CharField(max_length=128)
    # Optional free-form tag string (single CharField, not a relation).
    tags = models.CharField(max_length=128, null=True, blank=True, )
    created_by = models.ForeignKey(User_Add_Ons, on_delete=models.CASCADE, )
    # Defaults to the local time at row creation (naive datetime.now).
    creation_date = models.DateTimeField(default=datetime.now, blank=True)
    def __unicode__(self):
        """Display as "<name> (<creation date>)"."""
        return str(self.name) + ' (' + str(self.creation_date) + ')'
class Question_Video_Map(models.Model):
    """Join table associating a Question with a Video."""
    question = models.ForeignKey(Question, on_delete=models.CASCADE, )
    video = models.ForeignKey(Video, on_delete=models.CASCADE, )
    class Meta:
        verbose_name = 'Video Question'
        verbose_name_plural = 'Video Questions'
    def __unicode__(self):
        """Display as "<question>:<video>"."""
        return str(self.question) + ':' + str(self.video)
class Interview_Question_Video_Map(models.Model):
    """Join table linking a question/video pair to an additional video."""
    interview_question = models.ForeignKey(Question_Video_Map, on_delete=models.CASCADE, )
    video = models.ForeignKey(Video, on_delete=models.CASCADE, )
    class Meta:
        verbose_name = 'Interview Question Video'
        verbose_name_plural = 'Interview Video Questions'
    def __unicode__(self):
        """Display as "<interview question>-<video>"."""
        return str(self.interview_question) + '-' + str(self.video)
class Video_Comment(models.Model):
    """A user comment attached to a Video."""
    video = models.ForeignKey(Video, on_delete=models.CASCADE, )
    comment = models.CharField(max_length=128)
    created_by = models.ForeignKey(User_Add_Ons, on_delete=models.CASCADE, )
    creation_date = models.DateTimeField()
    class Meta:
        verbose_name = 'Video Comment'
        verbose_name_plural = 'Video Comments'
    def __unicode__(self):
        """Display as "<video> (<author>, <date>)"."""
        # Bug fix: ``created_by`` and ``creation_date`` were referenced without
        # ``self.`` (NameError at runtime).
        return str(self.video) + ' (' + str(self.created_by) + ', ' + str(self.creation_date) + ')'
class Assignment(models.Model):
    """An assignment for a class, with an attached document reference and due date."""
    title = models.CharField(max_length=128)
    falClass = models.ForeignKey(Class, on_delete=models.CASCADE)
    document = models.CharField(max_length=128)
    due_date = models.DateTimeField()
    creation_date = models.DateTimeField()
    def __unicode__(self):
        """Display as "<title> (<class>)"."""
        # Bug fix: ``falClass`` was referenced without ``self.`` (NameError).
        return str(self.title) + ' (' + str(self.falClass) + ')'
class Assignment_Submission(models.Model):
    """A group's named submission (not linked to an Assignment row here)."""
    name = models.CharField(max_length=128)
    group = models.ForeignKey(Group)
    class Meta:
        verbose_name = 'Submission'
        verbose_name_plural = 'Submissions'
    def __unicode__(self):
        """Display as "<group>:<name>"."""
        return str(self.group) + ':' + str(self.name)
class Submission_Interview_Map(models.Model):
submission = models.ForeignKey(Assignment_Submission, on_delete=models.CASCADE, )
interview = models.ForeignKey(Interview, on_delete=models.CASCADE, )
class Meta:
verbose_name = 'Interview Submission'
verbose_name_plural = 'Interview Submissions'
def __unicode__(self):
return str(self.submission) + ':' + str(self.interview) | {
"repo_name": "foraliving/foraliving",
"path": "foraliving/models.py",
"copies": "1",
"size": "7919",
"license": "mit",
"hash": 8638137085017295000,
"line_mean": 32.2773109244,
"line_max": 118,
"alpha_frac": 0.7258492234,
"autogenerated": false,
"ratio": 3.056348900038595,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9140391450012595,
"avg_score": 0.0283613346852001,
"num_lines": 238
} |
"""Add document_record fields for text and index.
Revision ID: 01e0b8a07445
Revises: 769b20d7a421
Create Date: 2017-05-15 18:27:26.769574
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '01e0b8a07445'
down_revision = '769b20d7a421'
def upgrade():
    """Add ``index``/``text`` columns to document_record and backfill them.

    Relaxes ``sheet``/``row_id`` to nullable, indexes ``document_id``, then
    runs a data migration: existing rows get ``index`` from ``row_id``, and
    every document_page row is copied into document_record.
    """
    op.add_column('document_record', sa.Column('index', sa.Integer(), nullable=True))
    op.add_column('document_record', sa.Column('text', sa.Unicode(), nullable=True))
    op.alter_column('document_record', 'sheet', existing_type=sa.INTEGER(), nullable=True)
    op.alter_column('document_record', 'row_id', existing_type=sa.INTEGER(), nullable=True)
    op.create_index(op.f('ix_document_record_document_id'), 'document_record', ['document_id'], unique=False)
    bind = op.get_bind()
    meta = sa.MetaData()
    meta.bind = bind
    # Reflect the live schema so the data migration sees the columns just added.
    meta.reflect()
    records = meta.tables['document_record']
    # Existing (sheet-based) rows: index mirrors the old row id.
    bind.execute(sa.update(records).values(index=records.c.row_id))
    pages = meta.tables['document_page']
    # Insert one record per page: page number -> index, page text -> text.
    q = sa.select([pages.c.number, pages.c.text, pages.c.document_id], from_obj=pages)
    q = sa.insert(records).from_select([records.c.index, records.c.text, records.c.document_id], q)
    bind.execute(q)
def downgrade():
    # Irreversible data migration: no downgrade path is provided.
    pass
| {
"repo_name": "OpenGazettes/aleph",
"path": "aleph/migrate/versions/01e0b8a07445_add_document_record_fields.py",
"copies": "3",
"size": "1251",
"license": "mit",
"hash": -7420718933525871000,
"line_mean": 33.75,
"line_max": 109,
"alpha_frac": 0.6978417266,
"autogenerated": false,
"ratio": 3.0737100737100738,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5271551800310074,
"avg_score": null,
"num_lines": null
} |
"""add_domain
Revision ID: 39f9df7b5774
Revises: c7267a60262e
Create Date: 2016-04-18 20:20:53.049686
"""
# revision identifiers, used by Alembic.
revision = '39f9df7b5774'
down_revision = 'c7267a60262e'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
    """Create the ``domain`` and ``last_domain`` tables (identical schemas)."""
    for table_name in ('domain', 'last_domain'):
        op.create_table(
            table_name,
            sa.Column('id', mysql.BIGINT(unsigned=True), nullable=False),
            sa.Column('created_time', sa.DateTime(), nullable=True),
            sa.Column('content', sa.VARCHAR(length=255), nullable=False),
            sa.PrimaryKeyConstraint('id'),
        )
def downgrade():
    """Drop the tables created by :func:`upgrade`."""
    for table_name in ('last_domain', 'domain'):
        op.drop_table(table_name)
| {
"repo_name": "f0x11/Merak",
"path": "migrations/versions/39f9df7b5774_add_domain.py",
"copies": "1",
"size": "1153",
"license": "mit",
"hash": -6714952306813972000,
"line_mean": 27.825,
"line_max": 65,
"alpha_frac": 0.6877710321,
"autogenerated": false,
"ratio": 3.361516034985423,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9496512077142127,
"avg_score": 0.010554997988659259,
"num_lines": 40
} |
"""add domain, page url unique keys
Revision ID: 178c6db7516
Revises: 25abb99fc57
Create Date: 2013-11-14 16:53:36.719799
"""
# revision identifiers, used by Alembic.
revision = '178c6db7516'
down_revision = '25abb99fc57'
import sqlalchemy as sa
from alembic import op
def upgrade():
    """Add ``url_hash`` columns, widen ``url`` to TEXT, and add unique keys."""
    # Both tables gain the hash column that backs the unique constraints below.
    for table in ('pages', 'domains'):
        op.add_column(table, sa.Column('url_hash', type_=sa.String(128)))
    for table in ('domains', 'pages'):
        op.alter_column(table, 'url', type_=sa.Text)
    op.create_unique_constraint('uk_domain_url', 'domains', ['url_hash'])
    op.create_unique_constraint('uk_page_url', 'pages', ['url_hash'])
def downgrade():
    """Reverse :func:`upgrade`: drop constraints, restore VARCHAR urls, drop hashes."""
    op.drop_constraint('uk_domain_url', 'domains', type_='unique')
    op.drop_constraint('uk_page_url', 'pages', type_='unique')
    for table in ('pages', 'domains'):
        op.alter_column(table, 'url', type_=sa.String(2000))
        op.drop_column(table, 'url_hash')
| {
"repo_name": "holmes-app/holmes-api",
"path": "holmes/migrations/versions/178c6db7516_add_domain_page_url_.py",
"copies": "2",
"size": "1081",
"license": "mit",
"hash": 1721110547829733000,
"line_mean": 26.025,
"line_max": 73,
"alpha_frac": 0.6401480111,
"autogenerated": false,
"ratio": 3.0195530726256985,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4659701083725699,
"avg_score": null,
"num_lines": null
} |
"""add draft fields
Revision ID: 3d429503a29a
Revises: 2a11dd14665
Create Date: 2014-08-30 13:26:03.698902
"""
# revision identifiers, used by Alembic.
revision = '3d429503a29a'
down_revision = '2a11dd14665'
import warnings
from alembic import op
import sqlalchemy as sa
from gertty.dbsupport import sqlite_alter_columns, sqlite_drop_columns
def upgrade():
    """Replace the ``pending`` flag with a ``draft`` flag on message/comment/approval.

    Adds a nullable ``draft`` column to each table, backfills it from
    ``pending``, tightens it to NOT NULL + indexed via the gertty sqlite
    helpers, then drops ``pending`` from comment and approval.
    """
    with warnings.catch_warnings():
        # Suppress any warnings emitted by the schema-change helpers below.
        warnings.simplefilter("ignore")
        op.add_column('message', sa.Column('draft', sa.Boolean()))
        op.add_column('comment', sa.Column('draft', sa.Boolean()))
        op.add_column('approval', sa.Column('draft', sa.Boolean()))
        conn = op.get_bind()
        # Backfill: every draft flag starts as the old pending value.
        conn.execute("update message set draft=pending")
        conn.execute("update comment set draft=pending")
        conn.execute("update approval set draft=pending")
        sqlite_alter_columns('message', [
            sa.Column('draft', sa.Boolean(), index=True, nullable=False),
        ])
        sqlite_alter_columns('comment', [
            sa.Column('draft', sa.Boolean(), index=True, nullable=False),
        ])
        sqlite_alter_columns('approval', [
            sa.Column('draft', sa.Boolean(), index=True, nullable=False),
        ])
        # NOTE(review): ``pending`` is dropped from comment and approval but not
        # from message, even though message was backfilled from it above —
        # confirm whether keeping message.pending is intentional.
        sqlite_drop_columns('comment', ['pending'])
        sqlite_drop_columns('approval', ['pending'])
def downgrade():
    # Irreversible: the pending columns and their data are gone after upgrade.
    pass
| {
"repo_name": "aspiers/gertty",
"path": "gertty/alembic/versions/3d429503a29a_add_draft_fields.py",
"copies": "1",
"size": "1315",
"license": "apache-2.0",
"hash": 5262240126648167000,
"line_mean": 25.8367346939,
"line_max": 70,
"alpha_frac": 0.6631178707,
"autogenerated": false,
"ratio": 3.5636856368563685,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47268035075563686,
"avg_score": null,
"num_lines": null
} |
"""add dtype to descriptors
Revision ID: 4856a6d821b
Revises: None
Create Date: 2018-04-11 18:20:42.412042
"""
# revision identifiers, used by Alembic.
revision = '4856a6d821b'
down_revision = None
from sqlalchemy.orm import sessionmaker, Session as BaseSession, relationship
from app.models import Descriptor
from alembic import op
import sqlalchemy as sa
Session = sessionmaker()
def upgrade():
    """Create ``hyperlink_associations`` and add a ``dtype`` column to descriptors.

    Backfills ``dtype`` for every existing descriptor: "option" when the
    descriptor has predefined values, otherwise "text".
    """
    bind = op.get_bind()
    session = Session(bind=bind)
    op.create_table('hyperlink_associations',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('resource_id', sa.Integer(), nullable=True),
        sa.Column('descriptor_id', sa.Integer(), nullable=True),
        sa.Column('url', sa.String(length=250), nullable=True),
        sa.ForeignKeyConstraint(['descriptor_id'], ['descriptors.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['resource_id'], ['resources.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    # Add the dtype column (nullable so existing rows can be backfilled below).
    op.add_column('descriptors', sa.Column('dtype', sa.String(length=15), nullable=True))
    for descriptor in session.query(Descriptor):
        if (descriptor.values):
            descriptor.dtype = "option"  # has predefined values
        else:
            descriptor.dtype = "text"  # free-form text descriptor
    session.commit()
    print("done; added hyperlink table and dtype column")
    ### end Alembic commands ###
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Reverse upgrade(): remove the association table and the dtype column.
    # (The dtype backfill data is lost.)
    op.drop_table('hyperlink_associations')
    op.drop_column('descriptors', 'dtype')
    ### end Alembic commands ###
| {
"repo_name": "hack4impact/maps4all",
"path": "migrations/versions/4856a6d821b_add_dtype_to_descriptors.py",
"copies": "1",
"size": "1587",
"license": "mit",
"hash": -6842781321868020000,
"line_mean": 29.5192307692,
"line_max": 91,
"alpha_frac": 0.6717076244,
"autogenerated": false,
"ratio": 3.824096385542169,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9947192994114296,
"avg_score": 0.009722203165574452,
"num_lines": 52
} |
"""add duns table
Revision ID: ff4728a82180
Revises: 18d9b114c1dc
Create Date: 2017-07-25 00:12:22.805037
"""
# revision identifiers, used by Alembic.
revision = 'ff4728a82180'
down_revision = '18d9b114c1dc'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Dispatch to the engine-specific ``upgrade_<engine_name>`` function."""
    globals()["upgrade_" + engine_name]()
def downgrade(engine_name):
    """Dispatch to the engine-specific ``downgrade_<engine_name>`` function."""
    globals()["downgrade_" + engine_name]()
def upgrade_data_broker():
    """Create the ``duns`` table and its DUNS-number lookup index."""
    duns_columns = [
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('duns_id', sa.Integer(), nullable=False),
        sa.Column('awardee_or_recipient_uniqu', sa.Text(), nullable=True),
        sa.Column('legal_business_name', sa.Text(), nullable=True),
        sa.Column('activation_date', sa.Date(), nullable=True),
        sa.Column('deactivation_date', sa.Date(), nullable=True),
        sa.Column('expiration_date', sa.Date(), nullable=True),
        sa.Column('last_sam_mod_date', sa.Date(), nullable=True),
    ]
    op.create_table('duns', *duns_columns, sa.PrimaryKeyConstraint('duns_id'))
    op.create_index('ix_duns_awardee_or_recipient_uniqu', 'duns',
                    ['awardee_or_recipient_uniqu'], unique=False)
def downgrade_data_broker():
    ### commands auto generated by Alembic - please adjust! ###
    # Dropping the table also removes the index created in upgrade_data_broker().
    op.drop_table('duns')
    ### end Alembic commands ###
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/ff4728a82180_add_duns_table.py",
"copies": "1",
"size": "1468",
"license": "cc0-1.0",
"hash": 8792742973223060000,
"line_mean": 27.2307692308,
"line_max": 111,
"alpha_frac": 0.6750681199,
"autogenerated": false,
"ratio": 3.1706263498920086,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.43456944697920086,
"avg_score": null,
"num_lines": null
} |
"""Add duration and type to talks.
Revision ID: b8dc37a1f5
Revises: 9d0181579c
Create Date: 2014-04-16 22:54:41.449111
"""
# revision identifiers, used by Alembic.
revision = 'b8dc37a1f5'
down_revision = '9d0181579c'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add NOT NULL ``duration`` and ``type`` enum columns to talks."""
    durations = ('30 minutes', '45 minutes', '60 minutes', '1/2 day', 'full day')
    talk_types = ('talk', 'tutorial')
    # PostgreSQL requires the ENUM types to exist before the columns use them.
    if op.get_context().bind.dialect.name == 'postgresql':
        values = ", ".join("'%s'" % v for v in durations)
        op.execute("CREATE TYPE duration AS ENUM (%s)" % values)
        values = ", ".join("'%s'" % v for v in talk_types)
        op.execute("CREATE TYPE type AS ENUM (%s)" % values)
    op.add_column('talks', sa.Column('duration', sa.Enum(*durations, name='duration'), nullable=False))
    op.add_column('talks', sa.Column('type', sa.Enum(*talk_types, name='type'), nullable=False))
def downgrade():
    """Drop the enum columns and, on PostgreSQL, the enum types themselves."""
    for column_name in ('type', 'duration'):
        op.drop_column('talks', column_name)
    context = op.get_context()
    if context.bind.dialect.name == 'postgresql':
        for type_name in ('duration', 'type'):
            op.execute('DROP TYPE ' + type_name)
| {
"repo_name": "PyGotham/pygotham",
"path": "migrations/versions/b8dc37a1f5_.py",
"copies": "3",
"size": "1354",
"license": "bsd-3-clause",
"hash": 4343955565948844000,
"line_mean": 32.85,
"line_max": 156,
"alpha_frac": 0.6270310192,
"autogenerated": false,
"ratio": 3.359801488833747,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5486832508033748,
"avg_score": null,
"num_lines": null
} |
"""add dvehr sharing
Revision ID: 0a4ccc37472a
Revises: ebaea6f9f6a9
Create Date: 2018-07-09 12:07:31.313006
"""
import model.utils
import sqlalchemy as sa
from alembic import op
from rdr_service.participant_enums import QuestionnaireStatus
# revision identifiers, used by Alembic.
revision = "0a4ccc37472a"
down_revision = "ebaea6f9f6a9"
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Invoke the engine-specific upgrade routine."""
    globals()["upgrade_" + engine_name]()
def downgrade(engine_name):
    """Invoke the engine-specific downgrade routine."""
    globals()["downgrade_" + engine_name]()
def upgrade_rdr():
    """Add DV EHR sharing consent status and timestamp columns to participant_summary."""
    new_columns = (
        sa.Column(
            "consent_for_dv_electronic_health_records_sharing",
            model.utils.Enum(QuestionnaireStatus),
            nullable=True,
        ),
        sa.Column(
            "consent_for_dv_electronic_health_records_sharing_time",
            model.utils.UTCDateTime(),
            nullable=True,
        ),
    )
    for column in new_columns:
        op.add_column("participant_summary", column)
def downgrade_rdr():
    """Drop the DV EHR sharing consent columns (timestamp first)."""
    for column_name in (
        "consent_for_dv_electronic_health_records_sharing_time",
        "consent_for_dv_electronic_health_records_sharing",
    ):
        op.drop_column("participant_summary", column_name)
def upgrade_metrics():
    # ### commands auto generated by Alembic - please adjust! ###
    # No metrics-database changes are needed for this revision.
    pass
    # ### end Alembic commands ###
def downgrade_metrics():
    # ### commands auto generated by Alembic - please adjust! ###
    # No metrics-database changes are needed for this revision.
    pass
    # ### end Alembic commands ###
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/0a4ccc37472a_add_dvehr_sharing.py",
"copies": "1",
"size": "1618",
"license": "bsd-3-clause",
"hash": 1877012135034824400,
"line_mean": 25.9666666667,
"line_max": 117,
"alpha_frac": 0.6767614339,
"autogenerated": false,
"ratio": 3.4870689655172415,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46638303994172414,
"avg_score": null,
"num_lines": null
} |
"""add ebooks_download tables
Revision ID: 6fb37bbda661
Revises: f74bfed7ecdb
Create Date: 2017-05-26 09:52:39.877076
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '6fb37bbda661'
down_revision = 'f74bfed7ecdb'
branch_labels = None
depends_on = None
def upgrade():
    """Create the ``ebooks_download`` log table and drop the obsolete ``test`` table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('ebooks_download',
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('ebooks_id', sa.Integer(), nullable=True),
        sa.Column('download_time', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['ebooks_id'], ['ebooks.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], )
    )
    op.drop_table('test')
    # ### end Alembic commands ###
def downgrade():
    """Recreate the empty ``test`` table and drop ``ebooks_download``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('test',
        sa.Column('id', mysql.INTEGER(display_width=11), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        mysql_collate=u'utf8mb4_unicode_ci',
        mysql_default_charset=u'utf8mb4',
        mysql_engine=u'InnoDB'
    )
    op.drop_table('ebooks_download')
    # ### end Alembic commands ###
| {
"repo_name": "zhangmingkai4315/flask-bms",
"path": "migrations/versions/6fb37bbda661_add_ebooks_download_tables.py",
"copies": "1",
"size": "1230",
"license": "mit",
"hash": 8682305177005080000,
"line_mean": 28.2857142857,
"line_max": 69,
"alpha_frac": 0.6756097561,
"autogenerated": false,
"ratio": 3.271276595744681,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.942112537260285,
"avg_score": 0.005152195848366244,
"num_lines": 42
} |
"""Added a bunch of default values to the User table
Revision ID: 354eb71928d
Revises: 2428871e01d
Create Date: 2015-11-05 11:30:36.533431
"""
# revision identifiers, used by Alembic.
revision = '354eb71928d'
down_revision = '2428871e01d'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
    """Make the tb_user flag and counter columns NOT NULL."""
    tinyint_1 = mysql.TINYINT(display_width=1)
    int_11 = mysql.INTEGER(display_width=11)
    not_null_columns = (
        ('banned', tinyint_1),
        ('ignored', tinyint_1),
        ('level', int_11),
        ('minutes_in_chat_offline', int_11),
        ('minutes_in_chat_online', int_11),
        ('num_lines', int_11),
        ('points', int_11),
        ('subscriber', tinyint_1),
    )
    for column_name, column_type in not_null_columns:
        op.alter_column('tb_user', column_name,
                        existing_type=column_type,
                        nullable=False)
def downgrade():
    """Restore nullability on the tb_user columns tightened by upgrade()."""
    tinyint_1 = mysql.TINYINT(display_width=1)
    int_11 = mysql.INTEGER(display_width=11)
    nullable_columns = (
        ('subscriber', tinyint_1),
        ('points', int_11),
        ('num_lines', int_11),
        ('minutes_in_chat_online', int_11),
        ('minutes_in_chat_offline', int_11),
        ('level', int_11),
        ('ignored', tinyint_1),
        ('banned', tinyint_1),
    )
    for column_name, column_type in nullable_columns:
        op.alter_column('tb_user', column_name,
                        existing_type=column_type,
                        nullable=True)
| {
"repo_name": "gigglearrows/anniesbot",
"path": "alembic/versions/354eb71928d_added_a_bunch_of_default_values_to_the_.py",
"copies": "1",
"size": "2806",
"license": "mit",
"hash": 3150156350735870500,
"line_mean": 36.9189189189,
"line_max": 63,
"alpha_frac": 0.6037063435,
"autogenerated": false,
"ratio": 3.731382978723404,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4835089322223404,
"avg_score": null,
"num_lines": null
} |
"""Added achievementcategories
Revision ID: 42ab7edc19e2
Revises:
Create Date: 2015-03-31 13:57:22.570668
"""
# revision identifiers, used by Alembic.
revision = '42ab7edc19e2'
down_revision = None
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create ``achievementcategories`` and link achievements to it."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('achievementcategories',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.add_column(u'achievements', sa.Column('achievementcategory_id', sa.Integer(), nullable=True))
    # NOTE(review): the foreign key is created without an explicit name; the
    # matching drop_constraint(None, ...) in downgrade() needs a real name —
    # confirm this works with the target backend's naming convention.
    op.create_foreign_key(None, 'achievements', 'achievementcategories', ['achievementcategory_id'], ['id'])
    ### end Alembic commands ###
def downgrade():
    """Reverse upgrade(): unlink achievements and drop the categories table."""
    ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): dropping a constraint by name ``None`` likely fails at
    # runtime — the autogenerated FK above was unnamed; confirm before use.
    op.drop_constraint(None, 'achievements', type_='foreignkey')
    op.drop_column(u'achievements', 'achievementcategory_id')
    op.drop_table('achievementcategories')
    ### end Alembic commands ###
| {
"repo_name": "ArneBab/gamification-engine",
"path": "gengine/alembic/versions/42ab7edc19e2_added_achievementcategories.py",
"copies": "1",
"size": "1097",
"license": "mit",
"hash": 3948123789930713600,
"line_mean": 29.4722222222,
"line_max": 108,
"alpha_frac": 0.7028258888,
"autogenerated": false,
"ratio": 3.3042168674698793,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4507042756269879,
"avg_score": null,
"num_lines": null
} |
"""Added aliases
Revision ID: 4effbf7f3fe7
Revises: 42918bb1dffe
Create Date: 2014-12-30 11:21:47.379369
"""
# revision identifiers, used by Alembic.
revision = '4effbf7f3fe7'
down_revision = '42918bb1dffe'
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Run the upgrade function for the given engine (e.g. ``engine1``)."""
    # Improvement: look the function up in module globals instead of eval()-ing
    # a constructed expression — same dispatch for valid engine names, without
    # evaluating arbitrary code (raises KeyError rather than NameError on an
    # unknown engine).
    globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
    """Run the downgrade function for the given engine (e.g. ``engine1``)."""
    # Improvement: globals() lookup instead of eval() — same dispatch, no
    # arbitrary-code evaluation (KeyError rather than NameError on unknowns).
    globals()["downgrade_%s" % engine_name]()
def upgrade_engine1():
    """Create the ``aliases`` table on the engine1 database."""
    op.create_table(
        'aliases',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('region_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['region_id'], ['regions.id']),
        sa.PrimaryKeyConstraint('id'),
    )
def downgrade_engine1():
    ### commands auto generated by Alembic - please adjust! ###
    # Remove the aliases table created by upgrade_engine1().
    op.drop_table('aliases')
    ### end Alembic commands ###
def upgrade_engine2():
    """Create the ``aliases`` table on the engine2 database."""
    op.create_table(
        'aliases',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('region_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['region_id'], ['regions.id']),
        sa.PrimaryKeyConstraint('id'),
    )
def downgrade_engine2():
    ### commands auto generated by Alembic - please adjust! ###
    # Remove the aliases table created by upgrade_engine2().
    op.drop_table('aliases')
    ### end Alembic commands ###
def upgrade_engine3():
    """Create the ``aliases`` table on the engine3 database."""
    op.create_table(
        'aliases',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('region_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['region_id'], ['regions.id']),
        sa.PrimaryKeyConstraint('id'),
    )
def downgrade_engine3():
    ### commands auto generated by Alembic - please adjust! ###
    # Remove the aliases table created by upgrade_engine3().
    op.drop_table('aliases')
    ### end Alembic commands ###
| {
"repo_name": "atiaxi/chromabot",
"path": "alembic/versions/4effbf7f3fe7_added_aliases.py",
"copies": "1",
"size": "2131",
"license": "mit",
"hash": -7485807018852001000,
"line_mean": 25.6375,
"line_max": 63,
"alpha_frac": 0.650868137,
"autogenerated": false,
"ratio": 3.593591905564924,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4744460042564924,
"avg_score": null,
"num_lines": null
} |
"""Added a new table for storing data about commands
Revision ID: 5393671cca7
Revises: 496dba8300a
Create Date: 2015-12-13 03:41:30.735949
"""
# revision identifiers, used by Alembic.
revision = '5393671cca7'
down_revision = '496dba8300a'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
Session = sessionmaker()
Base = declarative_base()
class CommandData(Base):
    """Per-command usage statistics, split out of tb_command (one row per command)."""
    __tablename__ = 'tb_command_data'
    command_id = sa.Column(sa.Integer, sa.ForeignKey('tb_command.id'), primary_key=True, autoincrement=False)
    num_uses = sa.Column(sa.Integer, nullable=False)
class Command(Base):
    """Local mapping of tb_command as it exists at this revision.

    Declared inside the migration so it does not depend on the application's
    (possibly newer) model definitions.
    """
    __tablename__ = 'tb_command'
    id = sa.Column(sa.Integer, primary_key=True)
    level = sa.Column(sa.Integer, nullable=False, default=100)
    action_json = sa.Column('action', sa.TEXT)
    extra_extra_args = sa.Column('extra_args', sa.TEXT)
    command = sa.Column(sa.TEXT, nullable=False)
    description = sa.Column(sa.TEXT, nullable=True)
    delay_all = sa.Column(sa.Integer, nullable=False, default=5)
    delay_user = sa.Column(sa.Integer, nullable=False, default=15)
    enabled = sa.Column(sa.Boolean, nullable=False, default=True)
    # num_uses is the counter being migrated into tb_command_data.
    num_uses = sa.Column(sa.Integer, nullable=False, default=0)
    cost = sa.Column(sa.Integer, nullable=False, default=0)
    can_execute_with_whisper = sa.Column(sa.Boolean)
    sub_only = sa.Column(sa.Boolean, nullable=False, default=False)
    mod_only = sa.Column(sa.Boolean, nullable=False, default=False)
def upgrade():
    """Create tb_command_data and seed it with each command's num_uses."""
    bind = op.get_bind()
    session = Session(bind=bind)
    op.create_table('tb_command_data',
        sa.Column('command_id', sa.Integer(), autoincrement=False, nullable=False),
        sa.Column('num_uses', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['command_id'], ['tb_command.id'], ),
        sa.PrimaryKeyConstraint('command_id'))
    # Copy the existing per-command counters into the new table.
    for command in session.query(Command):
        data = CommandData()
        data.command_id = command.id
        data.num_uses = command.num_uses
        session.add(data)
    session.commit()
def downgrade():
    """Copy num_uses back onto tb_command, then drop tb_command_data."""
    bind = op.get_bind()
    session = Session(bind=bind)
    for data in session.query(CommandData):
        command = session.query(Command).filter_by(id=data.command_id).one()
        command.num_uses = data.num_uses
        # NOTE(review): ``data`` (already persistent) is added here rather than
        # ``command``; the modified command is still flushed on commit because
        # the session tracks it — confirm this is intentional.
        session.add(data)
    session.commit()
    op.drop_table('tb_command_data')
| {
"repo_name": "gigglearrows/anniesbot",
"path": "alembic/versions/5393671cca7_added_a_new_table_for_storing_data_.py",
"copies": "1",
"size": "2508",
"license": "mit",
"hash": -1198257723287156500,
"line_mean": 30.35,
"line_max": 109,
"alpha_frac": 0.6834130781,
"autogenerated": false,
"ratio": 3.3846153846153846,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45680284627153844,
"avg_score": null,
"num_lines": null
} |
"""Added a table for storing data about pleblist songs.
Revision ID: 1daf721dac
Revises: 204b3e5a69e
Create Date: 2015-12-07 02:45:41.058891
"""
# revision identifiers, used by Alembic.
revision = '1daf721dac'
down_revision = '204b3e5a69e'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create tb_pleblist_song_info: cached metadata keyed by YouTube video ID."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('tb_pleblist_song_info',
        # Case-sensitive (utf8mb4_bin) so distinct YouTube IDs never collide.
        sa.Column('pleblist_song_youtube_id', sa.String(length=64, collation='utf8mb4_bin'), autoincrement=False, nullable=False),
        sa.Column('title', sa.String(length=128), nullable=False),
        sa.Column('duration', sa.Integer(), nullable=False),
        sa.Column('default_thumbnail', sa.String(length=256), nullable=False),
        sa.ForeignKeyConstraint(['pleblist_song_youtube_id'], ['tb_pleblist_song.youtube_id'], ),
        sa.PrimaryKeyConstraint('pleblist_song_youtube_id')
    )
    ### end Alembic commands ###
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Remove the song-info cache table created by upgrade().
    op.drop_table('tb_pleblist_song_info')
    ### end Alembic commands ###
| {
"repo_name": "gigglearrows/anniesbot",
"path": "alembic/versions/1daf721dac_added_a_table_for_storing_data_about_.py",
"copies": "1",
"size": "1126",
"license": "mit",
"hash": 5428867519057270000,
"line_mean": 31.1714285714,
"line_max": 126,
"alpha_frac": 0.7042628774,
"autogenerated": false,
"ratio": 3.244956772334294,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4449219649734294,
"avg_score": null,
"num_lines": null
} |
"""Added a table for timed commands
Revision ID: 4db5dc4bc98
Revises: 514f4b9bc74
Create Date: 2015-12-23 00:00:59.156496
"""
# revision identifiers, used by Alembic.
revision = '4db5dc4bc98'
down_revision = '514f4b9bc74'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
    """Create tb_timer for interval-triggered commands."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('tb_timer',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=256), nullable=False),
        sa.Column('action', mysql.TEXT(), nullable=False),
        # Separate trigger intervals for the online and offline states.
        sa.Column('interval_online', sa.Integer(), nullable=False),
        sa.Column('interval_offline', sa.Integer(), nullable=False),
        sa.Column('enabled', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Remove the timer table created by upgrade().
    op.drop_table('tb_timer')
    ### end Alembic commands ###
| {
"repo_name": "gigglearrows/anniesbot",
"path": "alembic/versions/4db5dc4bc98_added_a_table_for_timed_commands.py",
"copies": "1",
"size": "1034",
"license": "mit",
"hash": 2927679593078413300,
"line_mean": 27.7222222222,
"line_max": 64,
"alpha_frac": 0.6876208897,
"autogenerated": false,
"ratio": 3.4125412541254128,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4600162143825413,
"avg_score": null,
"num_lines": null
} |
"""added authored fields
Revision ID: 076625bffafe
Revises: 19f6a5e27c4f
Create Date: 2019-04-26 10:59:47.626266
"""
import model.utils
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "076625bffafe"
down_revision = "19f6a5e27c4f"
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Invoke the engine-specific upgrade routine."""
    globals()["upgrade_" + engine_name]()
def downgrade(engine_name):
    """Invoke the engine-specific downgrade routine."""
    globals()["downgrade_" + engine_name]()
def upgrade_rdr():
    """Add a nullable ``*_authored`` UTC timestamp column for each consent and
    questionnaire tracked on participant_summary."""
    authored_columns = (
        "consent_for_cabor_authored",
        "consent_for_dv_electronic_health_records_sharing_authored",
        "consent_for_electronic_health_records_authored",
        "consent_for_study_enrollment_authored",
        "questionnaire_on_family_health_authored",
        "questionnaire_on_healthcare_access_authored",
        "questionnaire_on_lifestyle_authored",
        "questionnaire_on_medical_history_authored",
        "questionnaire_on_medications_authored",
        "questionnaire_on_overall_health_authored",
        "questionnaire_on_the_basics_authored",
    )
    # All columns share the same type and nullability, so add them in a loop.
    for column_name in authored_columns:
        op.add_column(
            "participant_summary",
            sa.Column(column_name, model.utils.UTCDateTime(), nullable=True),
        )
def downgrade_rdr():
    """Drop the ``*_authored`` columns added by this revision.

    Columns are dropped in the reverse of the order they were added,
    matching the autogenerated original.
    """
    authored_columns = (
        "consent_for_cabor_authored",
        "consent_for_dv_electronic_health_records_sharing_authored",
        "consent_for_electronic_health_records_authored",
        "consent_for_study_enrollment_authored",
        "questionnaire_on_family_health_authored",
        "questionnaire_on_healthcare_access_authored",
        "questionnaire_on_lifestyle_authored",
        "questionnaire_on_medical_history_authored",
        "questionnaire_on_medications_authored",
        "questionnaire_on_overall_health_authored",
        "questionnaire_on_the_basics_authored",
    )
    for column_name in reversed(authored_columns):
        op.drop_column("participant_summary", column_name)
def upgrade_metrics():
    """No schema changes for the metrics database in this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
def downgrade_metrics():
    """No schema changes for the metrics database in this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
| {
"repo_name": "all-of-us/raw-data-repository",
"path": "rdr_service/alembic/versions/076625bffafe_added_authored_fields.py",
"copies": "1",
"size": "3713",
"license": "bsd-3-clause",
"hash": 2323911118403517400,
"line_mean": 35.4019607843,
"line_max": 113,
"alpha_frac": 0.6840829518,
"autogenerated": false,
"ratio": 3.4995287464655984,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46836116982655984,
"avg_score": null,
"num_lines": null
} |
"""Added auth-related fields to User
Revision ID: 452902fab185
Revises: 501404b36cef
Create Date: 2013-09-24 12:42:04.461000
"""
# revision identifiers, used by Alembic.
revision = '452902fab185'
down_revision = '501404b36cef'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add auth_system/auth_data columns to Users and seed two test accounts."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('Users', sa.Column('auth_data', sa.Unicode(length=255), nullable=True))
    op.add_column('Users', sa.Column('auth_system', sa.Unicode(length=20), nullable=True))
    ### end Alembic commands ###
    # NOTE(review): these INSERTs create fixture users with plaintext
    # 'password' values inside a schema migration; confirm this migration is
    # never applied to a production database.
    op.execute("INSERT INTO Users (login, name, auth_system, auth_data, password) VALUES ('testuser', 'Test User', 'userpass', 'password', 'password')")
    op.execute("INSERT INTO Users (login, name, auth_system, auth_data, password) VALUES ('testuser2', 'Second Test User', 'userpass', 'password', 'password')")
def downgrade():
    """Intentional no-op: the auth columns (and the seeded test rows) are kept.

    The drop statements below were deliberately left commented out by the
    original author, so downgrading does not remove the columns.
    """
    pass
    ### commands auto generated by Alembic - please adjust! ###
    #op.drop_column('Users', 'auth_system')
    #op.drop_column('Users', 'auth_data')
    ### end Alembic commands ###
| {
"repo_name": "go-lab/appcomposer",
"path": "alembic/versions/452902fab185_added_auth_related_f.py",
"copies": "3",
"size": "1130",
"license": "bsd-2-clause",
"hash": 4240878748139738600,
"line_mean": 21.1568627451,
"line_max": 160,
"alpha_frac": 0.6672566372,
"autogenerated": false,
"ratio": 3.1920903954802258,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.01415929203539823,
"num_lines": 1
} |
"""Added autoapprove column for oauth client.
Revision ID: 776b0d9121b7
Revises: c008583e8f8d
Create Date: 2018-05-07 21:17:21.013289
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship
# revision identifiers, used by Alembic.
revision = '776b0d9121b7'
down_revision = 'c008583e8f8d'
# Minimal stand-in for the app's Flask-SQLAlchemy-style "db" facade so model
# snippets can be used inside this migration unchanged: reuse the sqlalchemy
# module object and graft Model/relationship onto it.
Base = declarative_base()
db = sa
db.Model = Base
db.relationship = relationship
def create_session():
    """Bind a fresh ORM session to Alembic's connection and expose it as db.session."""
    bind = op.get_bind()
    db.session = sa.orm.sessionmaker()(bind=bind)
def upgrade():
    """Add the NOT NULL ``auto_approve`` flag to oauth_client.

    A ``server_default`` of false is supplied so the migration also succeeds
    when oauth_client already contains rows — adding a NOT NULL column with
    no default fails on any non-empty table in most backends.
    """
    create_session()
    op.add_column(
        'oauth_client',
        sa.Column('auto_approve', sa.Boolean(), nullable=False,
                  server_default=sa.false()))
def downgrade():
    """Revert 776b0d9121b7: drop oauth_client.auto_approve."""
    create_session()
    op.drop_column('oauth_client', 'auto_approve')
# vim: ft=python
| {
"repo_name": "viaict/viaduct",
"path": "migrations/versions/2018_05_07_776b0d9121b7_added_autoapprove_column_for_oauth_.py",
"copies": "1",
"size": "1105",
"license": "mit",
"hash": 7833732676537782000,
"line_mean": 23.0217391304,
"line_max": 74,
"alpha_frac": 0.6941176471,
"autogenerated": false,
"ratio": 3.5191082802547773,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4713225927354777,
"avg_score": null,
"num_lines": null
} |
"""Added bank account details.
Revision ID: 0c0c2110ae8b
Revises: bc1d6194ff58
Create Date: 2018-08-21 19:22:52.113441
"""
# revision identifiers, used by Alembic.
revision = '0c0c2110ae8b'
down_revision = 'bc1d6194ff58'
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
def upgrade():
    """Create the bank_accounts table holding encrypted account details."""
    # Alias the (deeply nested) encrypted column type once for readability.
    encrypted = sqlalchemy_utils.types.encrypted.encrypted_type.EncryptedType
    op.create_table(
        'bank_accounts',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('bank', encrypted(), nullable=True),
        sa.Column('clearing', encrypted(), nullable=True),
        sa.Column('_number', encrypted(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id']),
        sa.PrimaryKeyConstraint('id'),
    )
def downgrade():
    """Drop the bank_accounts table again."""
    op.drop_table('bank_accounts')
| {
"repo_name": "Limpan/bytardag",
"path": "web/migrations/versions/0c0c2110ae8b_added_bank_account_details.py",
"copies": "1",
"size": "1126",
"license": "mit",
"hash": 1617150958898217700,
"line_mean": 31.1714285714,
"line_max": 106,
"alpha_frac": 0.7007104796,
"autogenerated": false,
"ratio": 3.4753086419753085,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4676019121575309,
"avg_score": null,
"num_lines": null
} |
"""Added BaseSchedule, Deviation, Schedule, Workday, WorkPeriod.
Revision ID: 196516f3284c
Revises: 7cf47cd2a016
Create Date: 2016-04-27 10:48:10.946882
"""
# revision identifiers, used by Alembic.
revision = '196516f3284c'
down_revision = '7cf47cd2a016'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the scheduling tables: work_periods, workdays, schedules,
    base_schedules and deviations."""
    ### commands auto generated by Alembic - please adjust! ###
    # A work period is just a date range.
    op.create_table('work_periods',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('start', sa.Date(), nullable=True),
    sa.Column('end', sa.Date(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # Template day: working hours plus a lunch break.
    op.create_table('workdays',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('index', sa.Integer(), nullable=True),
    sa.Column('start', sa.Time(), nullable=True),
    sa.Column('lunch_start', sa.Time(), nullable=True),
    sa.Column('lunch_end', sa.Time(), nullable=True),
    sa.Column('end', sa.Time(), nullable=True),
    sa.Column('base_schedule_id', sa.Integer(), nullable=True),
    # NOTE(review): base_schedule_id references roles.id rather than
    # base_schedules.id — this looks like a copy-paste slip in the
    # autogenerated migration; confirm against the models.
    sa.ForeignKeyConstraint(['base_schedule_id'], ['roles.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # A user's (possibly approved) schedule for one work period.
    op.create_table('schedules',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('approved', sa.Boolean(), nullable=True),
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.Column('work_period_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
    sa.ForeignKeyConstraint(['work_period_id'], ['work_periods.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('base_schedules',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('schedule_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['schedule_id'], ['schedules.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # A deviation overrides the template times for a single date.
    op.create_table('deviations',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('date', sa.Date(), nullable=True),
    sa.Column('start', sa.Time(), nullable=True),
    sa.Column('lunch_start', sa.Time(), nullable=True),
    sa.Column('lunch_end', sa.Time(), nullable=True),
    sa.Column('end', sa.Time(), nullable=True),
    sa.Column('schedule_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['schedule_id'], ['schedules.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###
def downgrade():
    """Drop the scheduling tables in reverse dependency order."""
    for table_name in ('deviations', 'base_schedules', 'schedules',
                       'workdays', 'work_periods'):
        op.drop_table(table_name)
| {
"repo_name": "teknik-eksjo/chronos",
"path": "web/migrations/versions/196516f3284c_added_baseschedule_deviation_schedule_.py",
"copies": "1",
"size": "2627",
"license": "mit",
"hash": 7212505049324033000,
"line_mean": 35.4861111111,
"line_max": 71,
"alpha_frac": 0.6509326228,
"autogenerated": false,
"ratio": 3.380952380952381,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9473921364313405,
"avg_score": 0.011592727887795461,
"num_lines": 72
} |
"""Added Buffs
Revision ID: 2de0d2488523
Revises: 5274feda1fa6
Create Date: 2013-08-06 20:25:42.194935
"""
# revision identifiers, used by Alembic.
revision = '2de0d2488523'
down_revision = '5274feda1fa6'
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Dispatch to the per-engine upgrade function (``upgrade_<engine_name>``).

    Replaces the original ``eval("upgrade_%s" % engine_name)`` with an
    explicit globals() lookup: no arbitrary-code evaluation, and an unknown
    engine name fails with a clear KeyError instead of a NameError.
    """
    globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
    """Dispatch to the per-engine downgrade function (``downgrade_<engine_name>``).

    Uses a globals() lookup instead of eval() — see upgrade() for rationale.
    """
    globals()["downgrade_%s" % engine_name]()
def _create_buffs_table():
    """Create the buffs table — one shared definition instead of three
    identical autogenerated copies (one per engine)."""
    op.create_table('buffs',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(), nullable=True),
    sa.Column('internal', sa.String(), nullable=True),
    sa.Column('value', sa.Float(), nullable=True),
    sa.Column('expires', sa.Integer(), nullable=True),
    sa.Column('skirmish_id', sa.Integer(), nullable=True),
    sa.Column('region_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['region_id'], ['regions.id'], ),
    sa.ForeignKeyConstraint(['skirmish_id'], ['skirmish_actions.id'], ),
    sa.PrimaryKeyConstraint('id')
    )

def _drop_buffs_table():
    """Drop the buffs table (shared by all engines)."""
    op.drop_table('buffs')

def upgrade_engine1():
    """Create buffs on engine1."""
    _create_buffs_table()

def downgrade_engine1():
    """Drop buffs on engine1."""
    _drop_buffs_table()

def upgrade_engine2():
    """Create buffs on engine2."""
    _create_buffs_table()

def downgrade_engine2():
    """Drop buffs on engine2."""
    _drop_buffs_table()

def upgrade_engine3():
    """Create buffs on engine3."""
    _create_buffs_table()

def downgrade_engine3():
    """Drop buffs on engine3."""
    _drop_buffs_table()
| {
"repo_name": "atiaxi/chromabot",
"path": "alembic/versions/2de0d2488523_added_buffs.py",
"copies": "1",
"size": "2966",
"license": "mit",
"hash": 2597343590540914700,
"line_mean": 30.2210526316,
"line_max": 72,
"alpha_frac": 0.6480107889,
"autogenerated": false,
"ratio": 3.4609101516919485,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46089209405919485,
"avg_score": null,
"num_lines": null
} |
"""added buildmessage and bazeltargetmessage tables
Revision ID: 1164433ae5c9
Revises: 181adec926e2
Create Date: 2016-09-27 18:49:14.886600
"""
# revision identifiers, used by Alembic.
revision = '1164433ae5c9'
down_revision = '181adec926e2'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the per-build and per-bazel-target message tables.

    Both tables have the same shape except for the owning foreign key, so
    they are created from one parameterized loop (same creation order).
    """
    table_specs = (
        ('buildmessage', 'build_id', 'build.id'),
        ('bazeltargetmessage', 'target_id', 'bazeltarget.id'),
    )
    for table_name, fk_column, fk_target in table_specs:
        op.create_table(
            table_name,
            sa.Column('id', sa.GUID(), nullable=False),
            sa.Column(fk_column, sa.GUID(), nullable=False),
            sa.Column('text', sa.Text(), nullable=False),
            sa.Column('date_created', sa.DateTime(), nullable=False),
            sa.ForeignKeyConstraint([fk_column], [fk_target], ondelete='CASCADE'),
            sa.PrimaryKeyConstraint('id'),
        )
def downgrade():
    """Drop the message tables in reverse creation order."""
    for table_name in ('bazeltargetmessage', 'buildmessage'):
        op.drop_table(table_name)
| {
"repo_name": "dropbox/changes",
"path": "migrations/versions/1164433ae5c9_added_buildmessage_and_.py",
"copies": "1",
"size": "1199",
"license": "apache-2.0",
"hash": -2994721776671812,
"line_mean": 30.5526315789,
"line_max": 87,
"alpha_frac": 0.658882402,
"autogenerated": false,
"ratio": 3.47536231884058,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9614861065396665,
"avg_score": 0.003876731088782958,
"num_lines": 38
} |
"""Added Bundles and Messages tables as well as new relationships
Revision ID: 2d3d6675101a
Revises: 182eb89ec642
Create Date: 2014-11-19 16:54:49.694432
"""
# revision identifiers, used by Alembic.
import json
import traceback
from sqlalchemy import text
revision = '2d3d6675101a'
down_revision = '182eb89ec642'
from alembic import op
import sqlalchemy as sa
def upgrade():
# ## commands auto generated by Alembic - please adjust! ###
op.create_table('Bundles',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('lang', sa.Unicode(length=15), nullable=True),
sa.Column('target', sa.Unicode(length=30), nullable=True),
sa.Column('app_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['app_id'], ['Apps.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('Messages',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('key', sa.Unicode(length=250), index = True),
sa.Column('value', sa.Text(), nullable=True),
sa.Column('bundle_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['bundle_id'], ['Bundles.id'], ),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###
metadata = sa.MetaData()
messages_table = sa.Table('Messages', metadata,
sa.Column('key', sa.Unicode(250)),
sa.Column('value', sa.Text()),
sa.Column('bundle_id', sa.Integer()),
)
# To migrate we need to extract the data from the bundles of translator apps and to
# create the Message and Bundle objects.
connection = op.get_bind()
approws = connection.execute("SELECT * FROM Apps WHERE composer='translate'")
for pos, approw in enumerate(approws):
if pos % 10 == 0 and pos > 0:
print " %s..." % pos
continue
try:
data = json.loads(approw["data"])
app_id = approw["id"]
bundles = data["bundles"]
for bundle in data["bundles"]:
l, terr, group = bundle.split("_")
messages = data["bundles"][bundle]["messages"]
# Create the new Bundle object.
lang = "%s_%s" % (l, terr)
result = connection.execute(text("INSERT INTO Bundles (lang, target, app_id) VALUES (:lang, :target, :app_id)"), lang=lang,
target=group, app_id=app_id)
bundle_id = result.lastrowid
# For each message in the bundle, create a Message object and link it to the bundle we just created.
for key, value in messages.items():
insertion = messages_table.insert().values(key = key, value = value, bundle_id = bundle_id)
op.execute(insertion)
# result = connection.execute(text(u"INSERT INTO Messages (`key`, `value`, bundle_id) VALUES (:key, :value, :bundle_id)"),
# key=key, value=value, bundle_id=bundle_id)
# Delete the bundles.
del data["bundles"]
# Save the changes.
datastr = json.dumps(data)
result = connection.execute(text(u"UPDATE Apps SET `data` = :data WHERE id = :app_id"), data=datastr, app_id=app_id)
except:
traceback.print_exc()
print "Exception on an app: %r" % approw["id"]
raise
# TODO: Remove dependencies on data urls.
print "[done]"
def downgrade():
    """Drop the translation tables (Messages first — it references Bundles)."""
    for table_name in ('Messages', 'Bundles'):
        op.drop_table(table_name)
| {
"repo_name": "go-lab/appcomposer",
"path": "alembic/versions/2d3d6675101a_added_bundles_and_me.py",
"copies": "3",
"size": "3824",
"license": "bsd-2-clause",
"hash": -6821609438736941000,
"line_mean": 37.24,
"line_max": 142,
"alpha_frac": 0.5557008368,
"autogenerated": false,
"ratio": 4.076759061833688,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00738312826967711,
"num_lines": 100
} |
"""added business categories field
Revision ID: 7ac94529d32e
Revises: f6a9c7e6694b
Create Date: 2018-04-09 16:02:50.367204
"""
# revision identifiers, used by Alembic.
revision = '7ac94529d32e'
down_revision = 'f6a9c7e6694b'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Run the upgrade routine registered for *engine_name*."""
    handler = globals()["upgrade_%s" % engine_name]
    handler()
def downgrade(engine_name):
    """Run the downgrade routine registered for *engine_name*."""
    handler = globals()["downgrade_%s" % engine_name]
    handler()
def upgrade_data_broker():
    """Add the business_categories text-array column to both award tables."""
    for table_name in ('detached_award_procurement',
                       'published_award_financial_assistance'):
        op.add_column(table_name,
                      sa.Column('business_categories', sa.ARRAY(sa.Text()), nullable=True))
def downgrade_data_broker():
    """Remove business_categories from both award tables (reverse order)."""
    for table_name in ('published_award_financial_assistance',
                       'detached_award_procurement'):
        op.drop_column(table_name, 'business_categories')
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/7ac94529d32e_added_business_categories_field.py",
"copies": "1",
"size": "1121",
"license": "cc0-1.0",
"hash": -4442248530101685000,
"line_mean": 25.6904761905,
"line_max": 127,
"alpha_frac": 0.7109723461,
"autogenerated": false,
"ratio": 3.3264094955489614,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9389462767434469,
"avg_score": 0.029583814842898463,
"num_lines": 42
} |
#### Added by Ranth for High Level Expansion
from toee import *
from utilities import *
from co8 import *
from combat_standard_routines import *
# Per-squad kill accounting: NPC proto name -> (global_var/global_flag index,
# kills required before that squad counts as eliminated). Derived from the
# twelve copy-pasted branches of the original script.
_SQUAD_GOALS = {
    8737: (511, 24),
    8738: (512, 24),
    8739: (513, 24),
    8740: (514, 24),
    8741: (515, 24),
    8742: (516, 12),
    8743: (517, 12),
    8744: (518, 12),
    8745: (519, 12),
    8746: (520, 5),
    8747: (521, 6),
    8748: (522, 6),
}

def _all_squads_eliminated():
    # Quest 97 completes only once every squad flag 511..522 is set.
    for flag in range(511, 523):
        if game.global_flags[flag] != 1:
            return 0
    return 1

def san_dying( attachee, triggerer ):
    """Dying hook for the invading soldiers.

    Adjusts CR, strips the dying NPC's gear, bumps the kill counter for its
    squad and, once a squad's threshold is reached, marks it eliminated.
    When all twelve squads are eliminated, quest 97 completes and reputation
    52 is granted; otherwise a 'squad down' sound (4132) plays.

    NOTE(review): the flattened original made the nesting of the completion
    check ambiguous; this assumes it ran only when the squad threshold was
    met — confirm against the upstream Co8 script.
    """
    if should_modify_CR( attachee ):
        modify_CR( attachee, get_av_level() )
    goal = _SQUAD_GOALS.get(attachee.name)
    if goal != None:
        index, required = goal
        destroy_gear( attachee, triggerer )
        game.global_vars[index] = game.global_vars[index] + 1
        threshold_met = game.global_vars[index] >= required
        if attachee.name == 8737:
            # The first squad additionally requires global_flags[501] to be set.
            threshold_met = threshold_met and game.global_flags[501] == 1
        if threshold_met:
            game.global_flags[index] = 1
            if _all_squads_eliminated():
                game.quests[97].state = qs_completed
                game.party[0].reputation_add( 52 )
                game.global_vars[501] = 7
            else:
                game.sound( 4132, 2 )
    return RUN_DEFAULT
def san_enter_combat( attachee, triggerer ):
    """Start the two-hour battle timer on first combat, and turn Wakefield
    hostile if he is travelling with the attacking party."""
    if game.global_vars[505] == 0:
        # 7,200,000 ms = 2 hours of real time before out_of_time() fires.
        game.timevent_add( out_of_time, ( attachee, triggerer ), 7200000 )
        game.global_vars[505] = 1
    if triggerer.type == obj_t_pc and anyone( triggerer.group_list(), "has_follower", 8736 ):
        wakefield = find_npc_near( triggerer, 8736 )
        if wakefield != OBJ_HANDLE_NULL:
            triggerer.follower_remove(wakefield)
            wakefield.float_line( 20000,triggerer )
            wakefield.attack(triggerer)
    return RUN_DEFAULT
def san_start_combat( attachee, triggerer ):
    """Assign each district commander its combat strategy by proto name."""
    strategy_by_name = { 8738: 436, 8739: 437, 8740: 438, 8741: 439 }
    strategy = strategy_by_name.get(attachee.name)
    if strategy != None:
        attachee.obj_set_int(obj_f_critter_strategy, strategy)
    return RUN_DEFAULT
def san_heartbeat( attachee, triggerer ):
    """Heartbeat for the district commanders.

    Hides the NPC once quest 97 is botched, schedules each district's opening
    bombardment exactly once (guarded by a per-district flag), then floats a
    random bark if the NPC has a dialog script attached.
    """
    if game.quests[97].state == qs_botched:
        attachee.object_flag_set(OF_OFF)
    bombardments = {
        8738: (509, tower_attack),
        8739: (510, church_attack),
        8740: (523, grove_attack),
        8741: (524, wench_attack),
    }
    entry = bombardments.get(attachee.name)
    if entry != None:
        guard_flag, launcher = entry
        if game.global_flags[guard_flag] == 0:
            game.timevent_add( launcher, ( attachee, triggerer ), 6000 )
            game.global_flags[guard_flag] = 1
    float_select = game.random_range(1,6)
    if attachee.scripts[san_dialog]:
        attachee.float_line(float_select,triggerer)
    return RUN_DEFAULT
def san_will_kos( attachee, triggerer ):
    """Suppress kill-on-sight while truce flag 525 is raised."""
    if game.global_flags[525] == 1:
        return SKIP_DEFAULT
    return RUN_DEFAULT
# Proto numbers of the standard-issue gear stripped from dying soldiers so
# the party cannot loot it (weapons, shields, and armor variants).
_GEAR_PROTOS = (4132, 6078, 6068, 4178, 4087, 6477, 6476, 6454,
                6019, 6475, 6096, 6341, 6120, 6158)

def destroy_gear( attachee, triggerer ):
    """Destroy every piece of standard-issue gear the NPC is carrying.

    The original called .destroy() unconditionally on each item_find()
    result; item_find returns OBJ_HANDLE_NULL when the NPC lacks the item,
    so each handle is now checked before destroy() is invoked.
    """
    for proto in _GEAR_PROTOS:
        item = attachee.item_find(proto)
        if item != OBJ_HANDLE_NULL:
            item.destroy()
    return
def out_of_time( attachee, triggerer ):
    """Timer callback: mark the two-hour battle window as expired."""
    game.global_vars[505] = 3
    return
def tower_attack( attachee, triggerer ):
game.particles( "sp-Fireball-Hit", location_from_axis( 455, 609 ) )
game.particles( "ef-fireburning", location_from_axis( 455, 609 ) )
game.particles( "ef-FirePit", location_from_axis( 455, 609 ) )
game.particles( "sp-Fireball-Hit", location_from_axis( 439, 610 ) )
game.particles( "ef-fireburning", location_from_axis( 439, 610 ) )
game.particles( "ef-FirePit", location_from_axis( 439, 610 ) )
game.sound( 4134, 1 )
game.shake(75,3200)
game.timevent_add( tower_attack_followup, (), 12000 )
return RUN_DEFAULT
def church_attack( attachee, triggerer ):
game.particles( "sp-Fireball-Hit", location_from_axis( 490, 224 ) )
game.particles( "ef-fireburning", location_from_axis( 490, 224 ) )
game.particles( "ef-FirePit", location_from_axis( 490, 224 ) )
game.particles( "sp-Fireball-Hit", location_from_axis( 506, 217 ) )
game.particles( "ef-fireburning", location_from_axis( 506, 217 ) )
game.particles( "ef-FirePit", location_from_axis( 506, 217 ) )
game.sound( 4135, 1 )
game.shake(75,3200)
game.timevent_add( church_attack_followup, (), 12000 )
return RUN_DEFAULT
def grove_attack( attachee, triggerer ):
game.particles( "sp-Fireball-Hit", location_from_axis( 617, 523 ) )
game.particles( "ef-fireburning", location_from_axis( 617, 523 ) )
game.particles( "ef-FirePit", location_from_axis( 617, 523 ) )
game.particles( "sp-Fireball-Hit", location_from_axis( 616, 515 ) )
game.particles( "ef-fireburning", location_from_axis( 616, 515 ) )
game.particles( "ef-FirePit", location_from_axis( 616, 515 ) )
game.sound( 4136, 1 )
game.shake(75,3200)
game.timevent_add( grove_attack_followup, (), 12000 )
return RUN_DEFAULT
def wench_attack(attachee, triggerer):
    """Open the fiery assault on the wench area: spawn burn effects at
    two spots, play the blast sound, shake the screen, and schedule the
    recurring follow-up strikes."""
    effect_names = ("sp-Fireball-Hit", "ef-fireburning", "ef-FirePit")
    for axis_x, axis_y in ((621, 397), (609, 399)):
        for effect_name in effect_names:
            game.particles(effect_name, location_from_axis(axis_x, axis_y))
    game.sound(4136, 1)
    game.shake(75, 3200)
    # First aftershock fires in 12 seconds.
    game.timevent_add(wench_attack_followup, (), 12000)
    return RUN_DEFAULT
def tower_attack_followup():
    """Recurring aftershock for the tower area; skipped while combat is active."""
    if not game.combat_is_active():
        strike_x = game.random_range(428, 465)
        strike_y = game.random_range(597, 617)
        for effect_name in ("sp-Fireball-Hit", "ef-fireburning", "ef-FirePit"):
            game.particles(effect_name, location_from_axis(strike_x, strike_y))
        game.sound(4135, 1)
        game.shake(50, 1600)
        # Re-arm itself so the bombardment keeps going every 12 seconds.
        game.timevent_add(tower_attack_followup, (), 12000)
    return RUN_DEFAULT
def church_attack_followup():
    """Recurring aftershock for the church area; skipped while combat is active."""
    if not game.combat_is_active():
        strike_x = game.random_range(478, 509)
        strike_y = game.random_range(207, 235)
        for effect_name in ("sp-Fireball-Hit", "ef-fireburning", "ef-FirePit"):
            game.particles(effect_name, location_from_axis(strike_x, strike_y))
        game.sound(4135, 1)
        game.shake(50, 1600)
        # Re-arm itself so the bombardment keeps going every 12 seconds.
        game.timevent_add(church_attack_followup, (), 12000)
    return RUN_DEFAULT
def grove_attack_followup():
    """Recurring aftershock for the grove area; skipped while combat is active."""
    if not game.combat_is_active():
        strike_x = game.random_range(593, 621)
        strike_y = game.random_range(508, 538)
        for effect_name in ("sp-Fireball-Hit", "ef-fireburning", "ef-FirePit"):
            game.particles(effect_name, location_from_axis(strike_x, strike_y))
        game.sound(4135, 1)
        game.shake(50, 1600)
        # Re-arm itself so the bombardment keeps going every 12 seconds.
        game.timevent_add(grove_attack_followup, (), 12000)
    return RUN_DEFAULT
def wench_attack_followup():
    """Recurring aftershock for the wench area; skipped while combat is active."""
    if not game.combat_is_active():
        strike_x = game.random_range(590, 641)
        strike_y = game.random_range(370, 404)
        for effect_name in ("sp-Fireball-Hit", "ef-fireburning", "ef-FirePit"):
            game.particles(effect_name, location_from_axis(strike_x, strike_y))
        game.sound(4135, 1)
        game.shake(50, 1600)
        # Re-arm itself so the bombardment keeps going every 12 seconds.
        game.timevent_add(wench_attack_followup, (), 12000)
    return RUN_DEFAULT
"repo_name": "GrognardsFromHell/TemplePlus",
"path": "tpdatasrc/co8fixes/scr/py00485hextor_invader.py",
"copies": "1",
"size": "16264",
"license": "mit",
"hash": 3839968362583043600,
"line_mean": 46.5584795322,
"line_max": 388,
"alpha_frac": 0.6807058534,
"autogenerated": false,
"ratio": 2.595181107387905,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3775886960787905,
"avg_score": null,
"num_lines": null
} |
"""Added CA related tables
Revision ID: 2d21598e7e70
Revises: 3d36a26b88af
Create Date: 2015-03-11 15:47:32.292944
"""
# revision identifiers, used by Alembic.
revision = '2d21598e7e70'
down_revision = '3d36a26b88af'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the four CA-related tables.

    Each CREATE is guarded with a dialect-level has_table() check, so the
    migration is safe to run against a database where some of the tables
    already exist.
    """
    ctx = op.get_context()
    con = op.get_bind()
    table_exists = ctx.dialect.has_table(con.engine, 'certificate_authorities')
    if not table_exists:
        op.create_table(
            'certificate_authorities',
            sa.Column('id', sa.String(length=36), nullable=False),
            sa.Column('created_at', sa.DateTime(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), nullable=False),
            sa.Column('deleted_at', sa.DateTime(), nullable=True),
            sa.Column('deleted', sa.Boolean(), nullable=False),
            sa.Column('status', sa.String(length=20), nullable=False),
            sa.Column('plugin_name', sa.String(length=255), nullable=False),
            sa.Column('plugin_ca_id', sa.Text(), nullable=False),
            sa.Column('expiration', sa.DateTime(), nullable=True),
            sa.PrimaryKeyConstraint('id')
        )
    # Per-project CA association table.
    table_exists = ctx.dialect.has_table(
        con.engine,
        'project_certificate_authorities')
    if not table_exists:
        op.create_table(
            'project_certificate_authorities',
            sa.Column('id', sa.String(length=36), nullable=False),
            sa.Column('created_at', sa.DateTime(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), nullable=False),
            sa.Column('deleted_at', sa.DateTime(), nullable=True),
            sa.Column('deleted', sa.Boolean(), nullable=False),
            sa.Column('status', sa.String(length=20), nullable=False),
            sa.Column('project_id', sa.String(length=36), nullable=False),
            sa.Column('ca_id', sa.String(length=36), nullable=False),
            sa.ForeignKeyConstraint(['ca_id'], ['certificate_authorities.id'],),
            sa.ForeignKeyConstraint(['project_id'], ['projects.id'],),
            sa.PrimaryKeyConstraint('id', 'project_id', 'ca_id'),
            sa.UniqueConstraint('project_id',
                                'ca_id',
                                name='_project_certificate_authority_uc')
        )
    # Key/value metadata attached to a CA.
    table_exists = ctx.dialect.has_table(
        con.engine,
        'certificate_authority_metadata')
    if not table_exists:
        op.create_table(
            'certificate_authority_metadata',
            sa.Column('id', sa.String(length=36), nullable=False),
            sa.Column('created_at', sa.DateTime(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), nullable=False),
            sa.Column('deleted_at', sa.DateTime(), nullable=True),
            sa.Column('deleted', sa.Boolean(), nullable=False),
            sa.Column('status', sa.String(length=20), nullable=False),
            sa.Column('key', sa.String(length=255), nullable=False),
            sa.Column('value', sa.String(length=255), nullable=False),
            sa.Column('ca_id', sa.String(length=36), nullable=False),
            sa.ForeignKeyConstraint(['ca_id'], ['certificate_authorities.id'],),
            sa.PrimaryKeyConstraint('id', 'key', 'ca_id'),
            sa.UniqueConstraint('ca_id',
                                'key',
                                name='_certificate_authority_metadatum_uc')
        )
    # Preferred CA per project; ca_id is nullable (one row per project).
    table_exists = ctx.dialect.has_table(
        con.engine,
        'preferred_certificate_authorities')
    if not table_exists:
        op.create_table(
            'preferred_certificate_authorities',
            sa.Column('id', sa.String(length=36), nullable=False),
            sa.Column('created_at', sa.DateTime(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), nullable=False),
            sa.Column('deleted_at', sa.DateTime(), nullable=True),
            sa.Column('deleted', sa.Boolean(), nullable=False),
            sa.Column('status', sa.String(length=20), nullable=False),
            sa.Column('project_id', sa.String(length=36), nullable=False),
            sa.Column('ca_id', sa.String(length=36), nullable=True),
            sa.ForeignKeyConstraint(['ca_id'], ['certificate_authorities.id'],),
            sa.ForeignKeyConstraint(['project_id'], ['projects.id'],),
            sa.PrimaryKeyConstraint('id', 'project_id'),
            sa.UniqueConstraint('project_id')
        )
def downgrade():
    """Drop the CA-related tables, dependents first to satisfy FKs."""
    op.drop_table('preferred_certificate_authorities')
    op.drop_table('certificate_authority_metadata')
    op.drop_table('project_certificate_authorities')
    op.drop_table('certificate_authorities')
| {
"repo_name": "MCDong/barbican",
"path": "barbican/model/migration/alembic_migrations/versions/2d21598e7e70_added_ca_related_tables.py",
"copies": "2",
"size": "4683",
"license": "apache-2.0",
"hash": -2776237949852889600,
"line_mean": 42.7663551402,
"line_max": 80,
"alpha_frac": 0.5936365578,
"autogenerated": false,
"ratio": 3.87986743993372,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5473503997733721,
"avg_score": null,
"num_lines": null
} |
"""Added Cascades
Revision ID: 2351a64b05ef
Revises: 160e3f4be10a
Create Date: 2015-04-01 09:15:47.490122
"""
# revision identifiers, used by Alembic.
revision = '2351a64b05ef'
down_revision = '3fd502c152c9'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Recreate every foreign key with explicit ON DELETE behavior.

    First drops all existing FK constraints found by reflection, then
    re-adds them with CASCADE/SET NULL/RESTRICT delete rules.
    """
    ### commands auto generated by Alembic - please adjust! ###
    from sqlalchemy.engine.reflection import Inspector
    insp = Inspector.from_engine(op.get_bind())
    tables = insp.get_table_names()
    for table in tables:
        fks = insp.get_foreign_keys(table)
        for fk in fks:
            # Identifiers come from the inspector, not user input.
            op.execute("ALTER TABLE "+table+" DROP CONSTRAINT "+fk["name"])
    op.create_foreign_key(None, 'achievements', 'achievementcategories', ['achievementcategory_id'], ['id'], ondelete="SET NULL")
    op.create_foreign_key(None, 'achievements_properties', 'translationvariables', ['value_translation_id'], ['id'], ondelete="RESTRICT")
    op.create_foreign_key(None, 'achievements_properties', 'properties', ['property_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'achievements_properties', 'achievements', ['achievement_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'achievements_rewards', 'achievements', ['achievement_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'achievements_rewards', 'translationvariables', ['value_translation_id'], ['id'], ondelete="RESTRICT")
    op.create_foreign_key(None, 'achievements_rewards', 'rewards', ['reward_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'achievements_users', 'users', ['user_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'achievements_users', 'achievements', ['achievement_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'denials', 'achievements', ['from_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'denials', 'achievements', ['to_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'goal_evaluation_cache', 'goals', ['goal_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'goal_evaluation_cache', 'users', ['user_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'goals', 'translationvariables', ['name_translation_id'], ['id'], ondelete="RESTRICT")
    op.create_foreign_key(None, 'goals', 'achievements', ['achievement_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'requirements', 'achievements', ['to_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'requirements', 'achievements', ['from_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'translations', 'languages', ['language_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'translations', 'translationvariables', ['translationvariable_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'users_groups', 'users', ['user_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'users_groups', 'groups', ['group_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'users_users', 'users', ['to_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'users_users', 'users', ['from_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'values', 'users', ['user_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'values', 'variables', ['variable_id'], ['id'], ondelete="CASCADE")
    ### end Alembic commands ###
def downgrade():
    """Recreate every foreign key without ON DELETE rules.

    Drops all FK constraints found by reflection and re-adds them with
    the default (NO ACTION) delete behavior, mirroring the pre-upgrade
    schema.
    """
    ### commands auto generated by Alembic - please adjust! ###
    from sqlalchemy.engine.reflection import Inspector
    insp = Inspector.from_engine(op.get_bind())
    tables = insp.get_table_names()
    for table in tables:
        fks = insp.get_foreign_keys(table)
        for fk in fks:
            # Identifiers come from the inspector, not user input.
            # (A stray Python-2-only debug ``print`` statement was removed
            # here; it broke the module under Python 3 and merely echoed
            # the statement executed below.)
            op.execute("ALTER TABLE "+table+" DROP CONSTRAINT "+fk["name"])
    op.create_foreign_key(None, 'achievements', 'achievementcategories', ['achievementcategory_id'], ['id'])
    op.create_foreign_key(None, 'achievements_properties', 'translationvariables', ['value_translation_id'], ['id'])
    op.create_foreign_key(None, 'achievements_properties', 'properties', ['property_id'], ['id'])
    op.create_foreign_key(None, 'achievements_properties', 'achievements', ['achievement_id'], ['id'])
    op.create_foreign_key(None, 'achievements_rewards', 'achievements', ['achievement_id'], ['id'])
    op.create_foreign_key(None, 'achievements_rewards', 'translationvariables', ['value_translation_id'], ['id'])
    op.create_foreign_key(None, 'achievements_rewards', 'rewards', ['reward_id'], ['id'])
    op.create_foreign_key(None, 'achievements_users', 'users', ['user_id'], ['id'])
    op.create_foreign_key(None, 'achievements_users', 'achievements', ['achievement_id'], ['id'])
    op.create_foreign_key(None, 'denials', 'achievements', ['from_id'], ['id'])
    op.create_foreign_key(None, 'denials', 'achievements', ['to_id'], ['id'])
    op.create_foreign_key(None, 'goal_evaluation_cache', 'goals', ['goal_id'], ['id'])
    op.create_foreign_key(None, 'goal_evaluation_cache', 'users', ['user_id'], ['id'])
    op.create_foreign_key(None, 'goals', 'translationvariables', ['name_translation_id'], ['id'])
    op.create_foreign_key(None, 'goals', 'achievements', ['achievement_id'], ['id'])
    op.create_foreign_key(None, 'requirements', 'achievements', ['to_id'], ['id'])
    op.create_foreign_key(None, 'requirements', 'achievements', ['from_id'], ['id'])
    op.create_foreign_key(None, 'translations', 'languages', ['language_id'], ['id'])
    op.create_foreign_key(None, 'translations', 'translationvariables', ['translationvariable_id'], ['id'])
    op.create_foreign_key(None, 'users_groups', 'users', ['user_id'], ['id'])
    op.create_foreign_key(None, 'users_groups', 'groups', ['group_id'], ['id'])
    op.create_foreign_key(None, 'users_users', 'users', ['to_id'], ['id'])
    op.create_foreign_key(None, 'users_users', 'users', ['from_id'], ['id'])
    op.create_foreign_key(None, 'values', 'users', ['user_id'], ['id'])
    op.create_foreign_key(None, 'values', 'variables', ['variable_id'], ['id'])
    ### end Alembic commands ###
| {
"repo_name": "ArneBab/gamification-engine",
"path": "gengine/alembic/versions/2351a64b05ef_added_cascades.py",
"copies": "1",
"size": "6281",
"license": "mit",
"hash": 3028148728785569000,
"line_mean": 61.81,
"line_max": 137,
"alpha_frac": 0.6521254577,
"autogenerated": false,
"ratio": 3.2177254098360657,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4369850867536066,
"avg_score": null,
"num_lines": null
} |
"""Added cascading for deleting cgac
Revision ID: 40f1074309d4
Revises: 26cfc98728c8
Create Date: 2017-02-16 11:53:48.764917
"""
# revision identifiers, used by Alembic.
revision = '40f1074309d4'
down_revision = '26cfc98728c8'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade routine.

    Raises KeyError if no ``upgrade_<engine_name>`` function is defined.
    """
    handler = globals()["upgrade_{0}".format(engine_name)]
    handler()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade routine.

    Raises KeyError if no ``downgrade_<engine_name>`` function is defined.
    """
    handler = globals()["downgrade_{0}".format(engine_name)]
    handler()
def upgrade_data_broker():
    """Recreate the sub_tier_agency -> cgac FK with ON DELETE CASCADE."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('fk_sub_tier_agency_cgac', 'sub_tier_agency', type_='foreignkey')
    # Same FK, but deleting a cgac row now cascades to sub_tier_agency.
    op.create_foreign_key('fk_sub_tier_agency_cgac', 'sub_tier_agency', 'cgac', ['cgac_id'], ['cgac_id'], ondelete='CASCADE')
    ### end Alembic commands ###
def downgrade_data_broker():
    """Restore the sub_tier_agency -> cgac FK without cascade."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('fk_sub_tier_agency_cgac', 'sub_tier_agency', type_='foreignkey')
    op.create_foreign_key('fk_sub_tier_agency_cgac', 'sub_tier_agency', 'cgac', ['cgac_id'], ['cgac_id'])
    ### end Alembic commands ###
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/40f1074309d4_added_cascading_for_deleting_cgac.py",
"copies": "1",
"size": "1133",
"license": "cc0-1.0",
"hash": 2256183859860811300,
"line_mean": 25.9761904762,
"line_max": 125,
"alpha_frac": 0.680494263,
"autogenerated": false,
"ratio": 3.037533512064343,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9068756298220015,
"avg_score": 0.029854295368865424,
"num_lines": 42
} |
"""Added cascading for deleting submissions
Revision ID: 9889ac822e9c
Revises: 88e9b634ca1a
Create Date: 2017-02-21 09:18:18.259911
"""
# revision identifiers, used by Alembic.
revision = '9889ac822e9c'
down_revision = '88e9b634ca1a'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade routine.

    Raises KeyError if no ``upgrade_<engine_name>`` function is defined.
    """
    handler = globals()["upgrade_{0}".format(engine_name)]
    handler()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade routine.

    Raises KeyError if no ``downgrade_<engine_name>`` function is defined.
    """
    handler = globals()["downgrade_{0}".format(engine_name)]
    handler()
def upgrade_data_broker():
    """Add ON DELETE CASCADE to submission/job child-table foreign keys.

    New FKs are created for tables that previously lacked named
    constraints; pre-existing constraints are dropped and recreated with
    the cascade rule so deleting a submission or job removes its rows.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_foreign_key('fk_appropriation_submission_id', 'appropriation', 'submission', ['submission_id'], ['submission_id'], ondelete='CASCADE')
    op.create_foreign_key('fk_award_financial_submission_id', 'award_financial', 'submission', ['submission_id'], ['submission_id'], ondelete='CASCADE')
    op.create_foreign_key('fk_award_financial_assistance_submission_id', 'award_financial_assistance', 'submission', ['submission_id'], ['submission_id'], ondelete='CASCADE')
    op.create_foreign_key('fk_award_procurement_submission_id', 'award_procurement', 'submission', ['submission_id'], ['submission_id'], ondelete='CASCADE')
    op.create_foreign_key('fk_detached_award_financial_assistance_submission_id', 'detached_award_financial_assistance', 'submission', ['submission_id'], ['submission_id'], ondelete='CASCADE')
    op.create_foreign_key('fk_error_metadata_job', 'error_metadata', 'job', ['job_id'], ['job_id'], ondelete='CASCADE')
    op.create_foreign_key('fk_file_job', 'file', 'job', ['job_id'], ['job_id'], ondelete='CASCADE')
    # These constraints already existed: drop, then re-add with CASCADE.
    op.drop_constraint('fk_generation_job', 'file_generation_task', type_='foreignkey')
    op.create_foreign_key('fk_generation_job', 'file_generation_task', 'job', ['job_id'], ['job_id'], ondelete='CASCADE')
    op.create_foreign_key('fk_flex_field_submission_id', 'flex_field', 'submission', ['submission_id'], ['submission_id'], ondelete='CASCADE')
    op.drop_constraint('fk_prereq_job_id', 'job_dependency', type_='foreignkey')
    op.drop_constraint('fk_dep_job_id', 'job_dependency', type_='foreignkey')
    op.create_foreign_key('fk_dep_job_id', 'job_dependency', 'job', ['job_id'], ['job_id'], ondelete='CASCADE')
    op.create_foreign_key('fk_prereq_job_id', 'job_dependency', 'job', ['prerequisite_id'], ['job_id'], ondelete='CASCADE')
    op.create_foreign_key('fk_object_class_program_activity_submission_id', 'object_class_program_activity', 'submission', ['submission_id'], ['submission_id'], ondelete='CASCADE')
    op.drop_constraint('fk_submission', 'submission_narrative', type_='foreignkey')
    op.create_foreign_key('fk_submission', 'submission_narrative', 'submission', ['submission_id'], ['submission_id'], ondelete='CASCADE')
    ### end Alembic commands ###
def downgrade_data_broker():
    """Reverse the cascade changes.

    FKs added by the upgrade are dropped; constraints that existed before
    the upgrade are recreated without an ON DELETE rule.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('fk_submission', 'submission_narrative', type_='foreignkey')
    op.create_foreign_key('fk_submission', 'submission_narrative', 'submission', ['submission_id'], ['submission_id'])
    op.drop_constraint('fk_object_class_program_activity_submission_id', 'object_class_program_activity', type_='foreignkey')
    op.drop_constraint('fk_prereq_job_id', 'job_dependency', type_='foreignkey')
    op.drop_constraint('fk_dep_job_id', 'job_dependency', type_='foreignkey')
    op.create_foreign_key('fk_dep_job_id', 'job_dependency', 'job', ['job_id'], ['job_id'])
    op.create_foreign_key('fk_prereq_job_id', 'job_dependency', 'job', ['prerequisite_id'], ['job_id'])
    op.drop_constraint('fk_flex_field_submission_id', 'flex_field', type_='foreignkey')
    op.drop_constraint('fk_generation_job', 'file_generation_task', type_='foreignkey')
    op.create_foreign_key('fk_generation_job', 'file_generation_task', 'job', ['job_id'], ['job_id'])
    op.drop_constraint('fk_file_job', 'file', type_='foreignkey')
    op.drop_constraint('fk_error_metadata_job', 'error_metadata', type_='foreignkey')
    op.drop_constraint('fk_detached_award_financial_assistance_submission_id', 'detached_award_financial_assistance', type_='foreignkey')
    op.drop_constraint('fk_award_procurement_submission_id', 'award_procurement', type_='foreignkey')
    op.drop_constraint('fk_award_financial_assistance_submission_id', 'award_financial_assistance', type_='foreignkey')
    op.drop_constraint('fk_award_financial_submission_id', 'award_financial', type_='foreignkey')
    op.drop_constraint('fk_appropriation_submission_id', 'appropriation', type_='foreignkey')
    ### end Alembic commands ###
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/9889ac822e9c_added_cascading_for_deleting_submissions.py",
"copies": "1",
"size": "4595",
"license": "cc0-1.0",
"hash": 3097669394774096000,
"line_mean": 62.8194444444,
"line_max": 192,
"alpha_frac": 0.7003264418,
"autogenerated": false,
"ratio": 3.351568198395332,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4551894640195332,
"avg_score": null,
"num_lines": null
} |
"""added categories and items models
Revision ID: 480a467cb5dc
Revises: None
Create Date: 2015-02-20 17:53:43.447362
"""
# revision identifiers, used by Alembic.
revision = '480a467cb5dc'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the categories and items tables with their indexes."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('categories',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(u'ix_categories_name', 'categories', ['name'], unique=True)
    op.create_table('items',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=80), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        # Monetary value: 10 total digits, 2 after the decimal point.
        sa.Column('price', sa.Numeric(precision=10, scale=2), nullable=True),
        sa.Column('timestamp', sa.DateTime(), nullable=True),
        sa.Column('category_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['category_id'], ['categories.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(u'ix_items_timestamp', 'items', ['timestamp'], unique=False)
    ### end Alembic commands ###
def downgrade():
    """Drop the items and categories tables (indexes first)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(u'ix_items_timestamp', table_name='items')
    op.drop_table('items')
    op.drop_index(u'ix_categories_name', table_name='categories')
    op.drop_table('categories')
    ### end Alembic commands ###
| {
"repo_name": "rosariomgomez/tradyfit",
"path": "vagrant/tradyfit/migrations/versions/480a467cb5dc_added_categories_and_items_models.py",
"copies": "1",
"size": "1508",
"license": "mit",
"hash": -8610772032035612000,
"line_mean": 32.5111111111,
"line_max": 80,
"alpha_frac": 0.6710875332,
"autogenerated": false,
"ratio": 3.490740740740741,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4661828273940741,
"avg_score": null,
"num_lines": null
} |
"""added ChatRoom and Message
Revision ID: 2cbd62b3f1fb
Revises: 11d26e86341a
Create Date: 2013-02-07 20:45:27.419947
"""
# revision identifiers, used by Alembic.
revision = '2cbd62b3f1fb'
down_revision = '11d26e86341a'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create chat_rooms, messages, and the users_in_chat_room join table."""
    op.create_table('chat_rooms',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('creator_id', sa.Integer(), nullable=False),
        # DateTime(True) == timezone-aware timestamps.
        sa.Column('created', sa.DateTime(True), nullable=False),
        sa.Column('updated', sa.DateTime(True), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('messages',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created', sa.DateTime(True), nullable=False),
        sa.Column('updated', sa.DateTime(True), nullable=False),
        sa.Column('color', sa.Integer(), nullable=False),
        sa.Column('message', sa.String(length=280), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False, index=True),
        sa.Column('chat_room_id', sa.Integer(), nullable=False, index=True),
        sa.ForeignKeyConstraint(['chat_room_id'], ['chat_rooms.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # Many-to-many membership table; composite primary key.
    op.create_table('users_in_chat_room',
        sa.Column('chat_room_id', sa.Integer(), nullable=False, index=True),
        sa.Column('user_id', sa.Integer(), nullable=False, index=True),
        sa.Column('created', sa.DateTime(True), nullable=False, index=True),
        sa.ForeignKeyConstraint(['chat_room_id'], ['chat_rooms.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('chat_room_id', 'user_id')
    )
def downgrade():
    """Drop the chat tables, children before chat_rooms to satisfy FKs."""
    op.drop_table('users_in_chat_room')
    op.drop_table('messages')
    op.drop_table('chat_rooms')
| {
"repo_name": "funnyplan/Pager",
"path": "migration/versions/2cbd62b3f1fb_added_chatroom_and_m.py",
"copies": "1",
"size": "1789",
"license": "cc0-1.0",
"hash": -2577694660019055600,
"line_mean": 34.78,
"line_max": 72,
"alpha_frac": 0.6702068195,
"autogenerated": false,
"ratio": 3.2234234234234234,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9348410823244918,
"avg_score": 0.009043883935700959,
"num_lines": 50
} |
"""Added chat tables
Revision ID: 94b5cb01d6a5
Revises: 40f60062ddd5
Create Date: 2017-10-12 22:48:04.053372
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy.ext.declarative
# revision identifiers, used by Alembic.
revision = '94b5cb01d6a5'
down_revision = '40f60062ddd5'
branch_labels = None
depends_on = None
Base = sa.ext.declarative.declarative_base()
Session = sa.orm.sessionmaker()
class Story(Base):
    """Minimal mapped model of the stories table for this migration's backfill."""
    __tablename__ = 'stories'
    id = sa.Column(sa.Integer, primary_key=True)
    # Column added by this migration; the relationship is used to create
    # and attach a Channel to every existing story.
    channel_id = sa.Column(sa.Integer, sa.ForeignKey('channels.id'), nullable=False)
    channel = sa.orm.relationship('Channel', backref='story')
class Channel(Base):
    """Minimal mapped model of the channels table for this migration's backfill."""
    __tablename__ = 'channels'
    id = sa.Column(sa.Integer, primary_key=True)
    private = sa.Column(sa.Boolean, default=False, nullable=False)
def upgrade():
    """Create chat tables and backfill a Channel for every existing story.

    The stories.channel_id column is added as nullable, populated via an
    ORM session, and only then made NOT NULL.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()
    s = Session(bind=conn)
    op.create_table('channels',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('private', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_channels'))
    )
    op.create_table('chat_messages',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('channel_id', sa.Integer(), nullable=False),
        # Either user_id (registered) or anon_id (anonymous) identifies
        # the sender; both are nullable.
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('anon_id', sa.String(), nullable=True),
        sa.Column('date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('text', sa.String(), nullable=True),
        sa.Column('special', sa.Boolean(), nullable=False),
        sa.Column('image', sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(['channel_id'], ['channels.id'], name=op.f('fk_chat_messages_channel_id_channels')),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], name=op.f('fk_chat_messages_user_id_users')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_chat_messages'))
    )
    # Added nullable first so existing rows don't violate NOT NULL.
    op.add_column('stories', sa.Column('channel_id', sa.Integer()))
    op.create_foreign_key(op.f('fk_stories_channel_id_channels'), 'stories', 'channels', ['channel_id'], ['id'])
    # Backfill: give each existing story its own channel.
    for i in s.query(Story):
        c = Channel()
        s.add(c)
        i.channel = c
    s.commit()
    op.alter_column('stories', 'channel_id', nullable=False)
    # ### end Alembic commands ###
def downgrade():
    """Remove the chat tables and the stories.channel_id column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(op.f('fk_stories_channel_id_channels'), 'stories', type_='foreignkey')
    op.drop_column('stories', 'channel_id')
    op.drop_table('chat_messages')
    op.drop_table('channels')
    # ### end Alembic commands ###
| {
"repo_name": "alethiophile/openakun",
"path": "alembic/versions/94b5cb01d6a5_added_chat_tables.py",
"copies": "1",
"size": "2642",
"license": "mit",
"hash": -4019863105558739000,
"line_mean": 35.1917808219,
"line_max": 112,
"alpha_frac": 0.6627554883,
"autogenerated": false,
"ratio": 3.265760197775031,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4428515686075031,
"avg_score": null,
"num_lines": null
} |
"""Added clients.
Revision ID: b1f1f87a08d8
Revises: 005f84c40e24
Create Date: 2018-10-11 11:06:51.823681
"""
# revision identifiers, used by Alembic.
revision = 'b1f1f87a08d8'
down_revision = '005f84c40e24'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
    """Create the clients table (MySQL dialect) with unique name/identifier indexes."""
    op.create_table('clients',
        sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=True, nullable=False),
        sa.Column('name', mysql.VARCHAR(length=255), nullable=False),
        sa.Column('orga', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
        sa.Column('identifier', mysql.VARCHAR(length=128), nullable=False),
        sa.Column('email', mysql.VARCHAR(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        mysql_collate=u'utf8mb4_0900_ai_ci',
        mysql_default_charset=u'utf8mb4',
        mysql_engine=u'InnoDB'
    )
    op.create_index('clients_name_uindex', 'clients', ['name'], unique=True)
    op.create_index('clients_identifier_uindex', 'clients', ['identifier'], unique=True)
def downgrade():
    """Drop the clients table and its indexes."""
    op.drop_index('clients_identifier_uindex', table_name='clients')
    op.drop_index('clients_name_uindex', table_name='clients')
    op.drop_table('clients')
| {
"repo_name": "ebu/radiodns-plugit",
"path": "RadioDns-PlugIt/alembic/versions/b1f1f87a08d8_added_client_table.py",
"copies": "1",
"size": "1386",
"license": "bsd-3-clause",
"hash": 7006712231828233000,
"line_mean": 36.4594594595,
"line_max": 108,
"alpha_frac": 0.632034632,
"autogenerated": false,
"ratio": 3.5267175572519083,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4658752189251908,
"avg_score": null,
"num_lines": null
} |
"""Added code and data to publications
Revision ID: 4b239be7667e
Revises:
Create Date: 2018-01-07 01:06:47.762287
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '4b239be7667e'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Add code/data columns and indexes to papers; add users.cv.

    Also removes the legacy sqlalchemy-migrate bookkeeping table.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('migrate_version')
    op.add_column('papers', sa.Column('code', sa.String(length=300), nullable=True))
    op.add_column('papers', sa.Column('data', sa.String(length=300), nullable=True))
    op.create_index(op.f('ix_papers_abstract'), 'papers', ['abstract'], unique=False)
    op.create_index(op.f('ix_papers_citation'), 'papers', ['citation'], unique=True)
    op.create_index(op.f('ix_papers_code'), 'papers', ['code'], unique=True)
    op.create_index(op.f('ix_papers_data'), 'papers', ['data'], unique=True)
    op.create_index(op.f('ix_papers_link'), 'papers', ['link'], unique=True)
    op.create_index(op.f('ix_papers_nickname'), 'papers', ['nickname'], unique=True)
    op.add_column('users', sa.Column('cv', sa.String(length=120), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Remove the new columns/indexes and restore the migrate_version table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('users', 'cv')
    op.drop_index(op.f('ix_papers_nickname'), table_name='papers')
    op.drop_index(op.f('ix_papers_link'), table_name='papers')
    op.drop_index(op.f('ix_papers_data'), table_name='papers')
    op.drop_index(op.f('ix_papers_code'), table_name='papers')
    op.drop_index(op.f('ix_papers_citation'), table_name='papers')
    op.drop_index(op.f('ix_papers_abstract'), table_name='papers')
    op.drop_column('papers', 'data')
    op.drop_column('papers', 'code')
    # Legacy sqlalchemy-migrate bookkeeping table, recreated as it was.
    op.create_table('migrate_version',
        sa.Column('repository_id', mysql.VARCHAR(length=250), nullable=False),
        sa.Column('repository_path', mysql.TEXT(), nullable=True),
        sa.Column('version', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('repository_id'),
        mysql_default_charset=u'utf8',
        mysql_engine=u'MyISAM'
    )
    # ### end Alembic commands ###
| {
"repo_name": "cosanlab/cosanlab_website",
"path": "migrations/versions/4b239be7667e_added_code_and_data_to_publications.py",
"copies": "1",
"size": "2231",
"license": "mit",
"hash": -6756533330638500000,
"line_mean": 41.0943396226,
"line_max": 94,
"alpha_frac": 0.6727924697,
"autogenerated": false,
"ratio": 3.0730027548209367,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4245795224520936,
"avg_score": null,
"num_lines": null
} |
"""Added columns for duel win/lose streaks in the tb_user_duel_stats table
Revision ID: 514f4b9bc74
Revises: 1d6dbeb93c9
Create Date: 2015-12-22 00:17:51.509756
"""
# revision identifiers, used by Alembic.
revision = '514f4b9bc74'
down_revision = '1d6dbeb93c9'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add duel win/lose streak counters to tb_user_duel_stats."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('tb_user_duel_stats', sa.Column('current_streak', sa.Integer(), nullable=False))
    op.add_column('tb_user_duel_stats', sa.Column('longest_losestreak', sa.Integer(), nullable=False))
    op.add_column('tb_user_duel_stats', sa.Column('longest_winstreak', sa.Integer(), nullable=False))
    ### end Alembic commands ###
def downgrade():
    """Drop the streak counters added by this revision (reverse order)."""
    for column_name in ('longest_winstreak', 'longest_losestreak',
                        'current_streak'):
        op.drop_column('tb_user_duel_stats', column_name)
| {
"repo_name": "gigglearrows/anniesbot",
"path": "alembic/versions/514f4b9bc74_added_columns_for_duel_win_lose_streaks_.py",
"copies": "1",
"size": "1069",
"license": "mit",
"hash": 5059715069040605000,
"line_mean": 32.40625,
"line_max": 102,
"alpha_frac": 0.7034611787,
"autogenerated": false,
"ratio": 3.0369318181818183,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4240392996881818,
"avg_score": null,
"num_lines": null
} |
"""Added comment archive table, to split production from archive
Revision ID: aae7a3158d13
Revises: 2959fa8d7ad8
Create Date: 2017-07-26 21:49:44.733366
"""
# revision identifiers, used by Alembic.
revision = 'aae7a3158d13'
down_revision = '2959fa8d7ad8'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade(engine_name):
    """Dispatch to the per-environment upgrade function for *engine_name*."""
    globals()["upgrade_" + engine_name]()
def downgrade(engine_name):
    """Dispatch to the per-environment downgrade function for *engine_name*."""
    globals()["downgrade_" + engine_name]()
def upgrade_development():
    """Create the archived_comments table and its lookup indexes (development)."""
    op.create_table(
        'archived_comments',
        sa.Column('id', sa.String(length=256), autoincrement=False, nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('created_utc', sa.DateTime(), nullable=True),
        sa.Column('subreddit_id', sa.String(length=32), nullable=True),
        sa.Column('post_id', sa.String(length=32), nullable=True),
        sa.Column('user_id', sa.String(length=64), nullable=True),
        sa.Column('comment_data', mysql.MEDIUMTEXT(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('id'),
    )
    # Secondary indexes on the columns used for archive lookups.
    for field in ('created_at', 'post_id', 'subreddit_id', 'user_id'):
        op.create_index(op.f('ix_archived_comments_' + field),
                        'archived_comments', [field], unique=False)
def downgrade_development():
    """Drop the archived_comments indexes and table (development)."""
    for field in ('user_id', 'subreddit_id', 'post_id', 'created_at'):
        op.drop_index(op.f('ix_archived_comments_' + field),
                      table_name='archived_comments')
    op.drop_table('archived_comments')
def upgrade_test():
    """Create the archived_comments table and its lookup indexes (test)."""
    op.create_table(
        'archived_comments',
        sa.Column('id', sa.String(length=256), autoincrement=False, nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('created_utc', sa.DateTime(), nullable=True),
        sa.Column('subreddit_id', sa.String(length=32), nullable=True),
        sa.Column('post_id', sa.String(length=32), nullable=True),
        sa.Column('user_id', sa.String(length=64), nullable=True),
        sa.Column('comment_data', mysql.MEDIUMTEXT(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('id'),
    )
    # Secondary indexes on the columns used for archive lookups.
    for field in ('created_at', 'post_id', 'subreddit_id', 'user_id'):
        op.create_index(op.f('ix_archived_comments_' + field),
                        'archived_comments', [field], unique=False)
def downgrade_test():
    """Drop the archived_comments indexes and table (test)."""
    for field in ('user_id', 'subreddit_id', 'post_id', 'created_at'):
        op.drop_index(op.f('ix_archived_comments_' + field),
                      table_name='archived_comments')
    op.drop_table('archived_comments')
def upgrade_production():
    """Create the archived_comments table and its lookup indexes (production)."""
    op.create_table(
        'archived_comments',
        sa.Column('id', sa.String(length=256), autoincrement=False, nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('created_utc', sa.DateTime(), nullable=True),
        sa.Column('subreddit_id', sa.String(length=32), nullable=True),
        sa.Column('post_id', sa.String(length=32), nullable=True),
        sa.Column('user_id', sa.String(length=64), nullable=True),
        sa.Column('comment_data', mysql.MEDIUMTEXT(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('id'),
    )
    # Secondary indexes on the columns used for archive lookups.
    for field in ('created_at', 'post_id', 'subreddit_id', 'user_id'):
        op.create_index(op.f('ix_archived_comments_' + field),
                        'archived_comments', [field], unique=False)
def downgrade_production():
    """Drop the archived_comments indexes and table (production)."""
    for field in ('user_id', 'subreddit_id', 'post_id', 'created_at'):
        op.drop_index(op.f('ix_archived_comments_' + field),
                      table_name='archived_comments')
    op.drop_table('archived_comments')
| {
"repo_name": "c4fcm/CivilServant",
"path": "alembic/versions/aae7a3158d13_added_comment_archive_table_to_split_.py",
"copies": "1",
"size": "5513",
"license": "mit",
"hash": 5937951581705005000,
"line_mean": 45.7203389831,
"line_max": 115,
"alpha_frac": 0.692182115,
"autogenerated": false,
"ratio": 3.2334310850439882,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44256132000439885,
"avg_score": null,
"num_lines": null
} |
"""added comment model
Revision ID: e06fa0a86214
Revises: 4430bb0ac79d
Create Date: 2016-07-06 08:49:48.693801
"""
# revision identifiers, used by Alembic.
revision = 'e06fa0a86214'
down_revision = '4430bb0ac79d'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade(engine_name):
    """Dispatch to the per-environment upgrade function for *engine_name*."""
    globals()["upgrade_" + engine_name]()
def downgrade(engine_name):
    """Dispatch to the per-environment downgrade function for *engine_name*."""
    globals()["downgrade_" + engine_name]()
def upgrade_development():
    """Create the comments table and its lookup indexes (development)."""
    op.create_table(
        'comments',
        sa.Column('id', sa.String(length=256), autoincrement=False, nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('created_utc', sa.DateTime(), nullable=True),
        sa.Column('subreddit_id', sa.String(length=32), nullable=True),
        sa.Column('post_id', sa.String(length=32), nullable=True),
        sa.Column('user_id', sa.String(length=64), nullable=True),
        sa.Column('comment_data', mysql.MEDIUMTEXT(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('id'),
    )
    # Secondary indexes on the common lookup columns.
    for field in ('post_id', 'subreddit_id', 'user_id'):
        op.create_index(op.f('ix_comments_' + field), 'comments',
                        [field], unique=False)
def downgrade_development():
    """Drop the comments indexes and table (development)."""
    for field in ('user_id', 'subreddit_id', 'post_id'):
        op.drop_index(op.f('ix_comments_' + field), table_name='comments')
    op.drop_table('comments')
def upgrade_test():
    """Create the comments table and its lookup indexes (test)."""
    op.create_table(
        'comments',
        sa.Column('id', sa.String(length=256), autoincrement=False, nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('created_utc', sa.DateTime(), nullable=True),
        sa.Column('subreddit_id', sa.String(length=32), nullable=True),
        sa.Column('post_id', sa.String(length=32), nullable=True),
        sa.Column('user_id', sa.String(length=64), nullable=True),
        sa.Column('comment_data', mysql.MEDIUMTEXT(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('id'),
    )
    # Secondary indexes on the common lookup columns.
    for field in ('post_id', 'subreddit_id', 'user_id'):
        op.create_index(op.f('ix_comments_' + field), 'comments',
                        [field], unique=False)
def downgrade_test():
    """Drop the comments indexes and table (test)."""
    for field in ('user_id', 'subreddit_id', 'post_id'):
        op.drop_index(op.f('ix_comments_' + field), table_name='comments')
    op.drop_table('comments')
def upgrade_production():
    """Create the comments table and its lookup indexes (production)."""
    op.create_table(
        'comments',
        sa.Column('id', sa.String(length=256), autoincrement=False, nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('created_utc', sa.DateTime(), nullable=True),
        sa.Column('subreddit_id', sa.String(length=32), nullable=True),
        sa.Column('post_id', sa.String(length=32), nullable=True),
        sa.Column('user_id', sa.String(length=64), nullable=True),
        sa.Column('comment_data', mysql.MEDIUMTEXT(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('id'),
    )
    # Secondary indexes on the common lookup columns.
    for field in ('post_id', 'subreddit_id', 'user_id'):
        op.create_index(op.f('ix_comments_' + field), 'comments',
                        [field], unique=False)
def downgrade_production():
    """Drop the comments indexes and table (production)."""
    for field in ('user_id', 'subreddit_id', 'post_id'):
        op.drop_index(op.f('ix_comments_' + field), table_name='comments')
    op.drop_table('comments')
| {
"repo_name": "c4fcm/CivilServant",
"path": "alembic/versions/e06fa0a86214_added_comment_model.py",
"copies": "1",
"size": "4484",
"license": "mit",
"hash": -3658110406395293000,
"line_mean": 39.0357142857,
"line_max": 97,
"alpha_frac": 0.6712756467,
"autogenerated": false,
"ratio": 3.284981684981685,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44562573316816845,
"avg_score": null,
"num_lines": null
} |
"""Added Companies
Revision ID: 2e16030bde45
Revises: 14cb7744656e
Create Date: 2016-03-10 21:33:14.544867
"""
# from alembic import op
# import sqlalchemy as sa
from api.models.company import CompanyModel
revision = '2e16030bde45'
down_revision = '17d4c5d64a7b'
def upgrade():
    """Seed the companies table with the fixed demo company list."""
    # Each tuple holds the positional CompanyModel constructor arguments:
    # name, address, city, country, email and (optionally) phone.
    seed_companies = [
        ("Microsoft", "Random Address", "Redmond", "USA", "contact@mns.com", "1-800-642-7676"),
        ("Oracle", "Random Oracle Address", "Redwood City", "USA", "info@oracle.com", "00 1 650-506-7000"),
        ("SAP", "Address 123 - Floor 4th", "Walldorf", "Germany", "info@sap.com", "+1-800-872-1727"),
        ("Amazon", "No Address", "Seattle", "USA", "contact@amazon.com", "00 1 206-266-2992"),
        ("Google", "Try Google Maps 143", "Mountain View", "USA", "hello@google.com", "1-866-246-6453"),
        ("SkyNet", "N/A", "California", "USA", "bot@skynet.com"),
        ("Dharma Initiative", "N/A", "Michigan", "USA", "research@dharma-initiative.org"),
        ("Wayne Enterprises", "Wayne Tower", "Gotham City", "USA", "bruce@wayne.com"),
        ("U.S. Robotics and Mechanical Men", "N/A", "New York", "USA", "susan.calvin@robotics.com", "555 5555 555"),
        ("Tyrell Corporation", "N/A", "Los Angeles", "USA", "douglastrumbull@tyrell.com", "N/A"),
        ("Omni Consumer Products", "N/A", "Detroit", "USA", "ed209@ocp.net"),
        ("INGen", "Palo Alto", "California", "USA", "wemakefuture@ingen.com"),
    ]
    # Persist each company in the original insertion order.
    for fields in seed_companies:
        CompanyModel(*fields).save()
def downgrade():
    # NOTE(review): the upgrade inserts seed rows via CompanyModel.save(),
    # but this downgrade removes nothing, so downgrading leaves the seeded
    # companies behind — confirm whether that is intentional.
    pass
| {
"repo_name": "xeBuz/CompanyDK",
"path": "migrations/versions/9c995cbcc2cf_.py",
"copies": "1",
"size": "1846",
"license": "mpl-2.0",
"hash": 3595624728949394000,
"line_mean": 31.3859649123,
"line_max": 134,
"alpha_frac": 0.6565547129,
"autogenerated": false,
"ratio": 2.7147058823529413,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3871260595252941,
"avg_score": null,
"num_lines": null
} |
"""Added contest table
Revision ID: dc15c595d08
Revises: 4503a2e36a01
Create Date: 2015-06-02 10:52:14.233433
"""
# revision identifiers, used by Alembic.
revision = 'dc15c595d08'
down_revision = '4503a2e36a01'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the contests table."""
    # All timestamp columns share the same nullable DateTime shape.
    timestamps = [
        sa.Column(name, sa.DateTime(), nullable=True)
        for name in ('start_at', 'end_at', 'result_announced_at',
                     'created_at', 'updated_at')
    ]
    op.create_table(
        'contests',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('description', sa.String(length=255), nullable=True),
        *timestamps,
        sa.PrimaryKeyConstraint('id')
    )
def downgrade():
    """Remove the contests table created by this revision."""
    op.drop_table('contests')
| {
"repo_name": "vigov5/oshougatsu2015",
"path": "alembic/versions/dc15c595d08_added_contest_table.py",
"copies": "1",
"size": "1111",
"license": "mit",
"hash": 7407775593391427000,
"line_mean": 28.2368421053,
"line_max": 67,
"alpha_frac": 0.6786678668,
"autogenerated": false,
"ratio": 3.3263473053892216,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4505015172189222,
"avg_score": null,
"num_lines": null
} |
"""Added country code table
Revision ID: 2c1baff71070
Revises: b4009d2ab0ba
Create Date: 2019-01-16 16:18:17.001528
"""
# revision identifiers, used by Alembic.
revision = '2c1baff71070'
down_revision = 'b4009d2ab0ba'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
    """Create the country_code lookup table and seed the initial rows."""
    table = op.create_table(
        'country_code',
        sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=True, nullable=False),
        sa.Column('name', mysql.VARCHAR(length=255), nullable=False),
        sa.Column('iso', mysql.VARCHAR(length=2), nullable=False),
        sa.Column('cc', mysql.VARCHAR(length=3), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        mysql_collate=u'utf8mb4_0900_ai_ci',
        mysql_default_charset=u'utf8mb4',
        mysql_engine=u'InnoDB',
    )
    # Seed rows: (id, country name, ISO code, country code).
    seed_rows = [
        (1, "United States", "US", "292"),
        (2, "Canada", "CA", "040"),
        (3, "Brazil", "BR", "031"),
        (4, "Mexico", "MX", "197"),
        (5, "Phillipines", "PH", "1e7"),
    ]
    op.bulk_insert(
        table,
        [{"id": row_id, "name": name, "iso": iso, "cc": cc}
         for row_id, name, iso, cc in seed_rows],
    )
def downgrade():
    """Remove the country_code table (seed rows go with it)."""
    op.drop_table('country_code')
| {
"repo_name": "ebu/radiodns-plugit",
"path": "RadioDns-PlugIt/alembic/versions/2c1baff71070_added_country_code_table.py",
"copies": "1",
"size": "1417",
"license": "bsd-3-clause",
"hash": -8849956463774481000,
"line_mean": 31.9534883721,
"line_max": 105,
"alpha_frac": 0.5377558222,
"autogenerated": false,
"ratio": 3.2800925925925926,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9301275884154285,
"avg_score": 0.003314506127661337,
"num_lines": 43
} |
"""Added course table and relationships
Revision ID: e43177bfe90b
Revises: 282d30a0218e
Create Date: 2019-05-30 09:39:31.881296
"""
from alembic import op
import sqlalchemy as sa
from nbgrader.api import Course
# revision identifiers, used by Alembic.
revision = 'e43177bfe90b'
down_revision = '282d30a0218e'
branch_labels = None
depends_on = None
def _get_or_create_table(*args):
    """Return the table described by *args*, creating it only when absent.

    *args* are forwarded unchanged: the table name first, then the column
    definitions, matching both op.create_table and sa.sql.table.
    """
    ctx = op.get_context()
    con = op.get_bind()
    if ctx.dialect.has_table(con, args[0]):
        # Table already exists: hand back a lightweight table construct.
        return sa.sql.table(*args)
    return op.create_table(*args)
def upgrade():
    """
    This migrations adds a course column to the assignment table
    and a matching foreign key into the course table
    """
    new_course_table = _get_or_create_table(
        'course',
        sa.Column("id", sa.String(128), unique=True,
                  primary_key=True, nullable=False),
    )
    # If the course table is empty, create a default course
    connection = op.get_bind()
    res = connection.execute("select id from course")
    results = res.fetchall()
    default_course = "default_course"
    if len(results) == 0:
        connection.execute(
            "INSERT INTO course (id) VALUES ('{}')".format(default_course))
    # batch_alter_table recreates the table under the hood, which is the
    # only way to add a foreign key on SQLite.
    with op.batch_alter_table("assignment") as batch_op:
        # NOTE(review): ``default`` is a client-side default, not a
        # server_default — confirm existing assignment rows still satisfy
        # the NOT NULL constraint when the batch copy runs.
        batch_op.add_column(sa.Column(
            'course_id', sa.VARCHAR(128), nullable=False, default=default_course))
        batch_op.create_foreign_key(
            'fk_course_assignment', 'course', ['course_id'], ['id'])
def downgrade():
    """Drop the assignment.course_id FK/column, then the course table."""
    with op.batch_alter_table("assignment") as batch_op:
        batch_op.drop_constraint('fk_course_assignment', type_='foreignkey')
        # BatchOperations.drop_column takes only the column name (the batch
        # context already fixes the table). The original passed the table
        # name as an extra positional argument, which raises a TypeError
        # whenever this downgrade actually runs.
        batch_op.drop_column('course_id')
    op.drop_table('course')
| {
"repo_name": "jupyter/nbgrader",
"path": "nbgrader/alembic/versions/e43177bfe90b_added_course_table_and_relationships.py",
"copies": "2",
"size": "1817",
"license": "bsd-3-clause",
"hash": -8604667171843190000,
"line_mean": 26.1194029851,
"line_max": 82,
"alpha_frac": 0.6472206935,
"autogenerated": false,
"ratio": 3.5009633911368017,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5148184084636802,
"avg_score": null,
"num_lines": null
} |
"""added currency
Revision ID: 3fcad09e63ed
Revises: d59736dd8faa
Create Date: 2017-04-24 23:42:19.881397
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '3fcad09e63ed'
down_revision = 'd59736dd8faa'
branch_labels = None
depends_on = None
def upgrade():
    """Create the currencies table describing formatting/display rules."""
    # (name, type, nullable) in table column order.
    column_spec = [
        ('id', sa.Integer(), False),
        ('iso', sa.String(), False),
        ('name', sa.String(), False),
        ('symbol', sa.String(), False),
        ('left_symbol', sa.Boolean(), False),
        ('space_between', sa.Boolean(), False),
        ('decimals', sa.Integer(), False),
        ('decimal_separator', sa.String(), False),
        ('decimal_short', sa.String(), True),
        ('group_by', sa.Integer(), False),
        ('grouping_separator', sa.String(), False),
    ]
    op.create_table(
        'currencies',
        *[sa.Column(name, type_, nullable=nullable)
          for name, type_, nullable in column_spec],
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('iso'),
        sa.UniqueConstraint('name')
    )
def downgrade():
    """Remove the currencies table created by this revision."""
    op.drop_table('currencies')
| {
"repo_name": "csdt/Pawi",
"path": "migrations/versions/3fcad09e63ed_.py",
"copies": "1",
"size": "1342",
"license": "mit",
"hash": -1515429533041801500,
"line_mean": 30.2093023256,
"line_max": 65,
"alpha_frac": 0.6713859911,
"autogenerated": false,
"ratio": 3.44987146529563,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9588336221742025,
"avg_score": 0.006584246930720934,
"num_lines": 43
} |
"""Added currency to account
Revision ID: 28bf02666f59
Revises: 3fcad09e63ed
Create Date: 2017-04-27 22:21:13.799017
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '28bf02666f59'
down_revision = '3fcad09e63ed'
branch_labels = None
depends_on = None
def upgrade():
    """Attach a NOT NULL currency_id foreign key to accounts."""
    # The temporary server default ('0') lets the ADD COLUMN succeed on
    # tables that already contain rows; it is cleared right afterwards.
    currency_column = sa.Column('currency_id', sa.Integer(),
                                nullable=False, server_default='0')
    op.add_column('accounts', currency_column)
    # batch_alter_table keeps the constraint work SQLite-compatible.
    with op.batch_alter_table('accounts') as batch_op:
        batch_op.alter_column('currency_id', server_default=None)
        batch_op.create_foreign_key('accounts', 'currencies',
                                    ['currency_id'], ['id'])
def downgrade():
    """Detach accounts from currencies: drop the FK, then the column."""
    with op.batch_alter_table('accounts') as batch_op:
        batch_op.drop_constraint('accounts', type_='foreignkey')
        batch_op.drop_column('currency_id')
| {
"repo_name": "csdt/Pawi",
"path": "migrations/versions/28bf02666f59_.py",
"copies": "1",
"size": "1105",
"license": "mit",
"hash": 649347669374876400,
"line_mean": 31.5,
"line_max": 105,
"alpha_frac": 0.6678733032,
"autogenerated": false,
"ratio": 3.4858044164037856,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46536777196037854,
"avg_score": null,
"num_lines": null
} |
"""added customer model
Revision ID: c4a9e4ab3bce
Revises: a15fe29fe566
Create Date: 2016-04-17 19:01:58.191577
"""
# revision identifiers, used by Alembic.
revision = 'c4a9e4ab3bce'
down_revision = 'a15fe29fe566'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the customers table, one row per customer owned by a user."""
    # (name, type, nullable) in table column order.
    column_spec = [
        ('id', sa.Integer(), False),
        ('user_id', sa.Integer(), False),
        ('name', sa.String(length=255), False),
        ('tax_id', sa.String(length=63), True),
        ('contact_person', sa.String(length=127), True),
        ('email', sa.String(length=127), True),
        ('invoicing_address', sa.Text(), False),
        ('shipping_address', sa.Text(), False),
    ]
    op.create_table(
        'customers',
        *[sa.Column(name, type_, nullable=nullable)
          for name, type_, nullable in column_spec],
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name'),
        sa.UniqueConstraint('tax_id')
    )
def downgrade():
    """Remove the customers table created by this revision."""
    op.drop_table('customers')
| {
"repo_name": "skazi0/yaia",
"path": "migrations/versions/c4a9e4ab3bce_added_customer_model.py",
"copies": "1",
"size": "1214",
"license": "mit",
"hash": -2607706957570837000,
"line_mean": 30.1282051282,
"line_max": 70,
"alpha_frac": 0.6754530478,
"autogenerated": false,
"ratio": 3.326027397260274,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9448343620832722,
"avg_score": 0.010627364845510353,
"num_lines": 39
} |
"""Added custom_request_forms table
Revision ID: 7e0434686370
Revises: 445e50628f6b
Create Date: 2018-04-30 20:07:13.242319
"""
# revision identifiers, used by Alembic.
revision = "7e0434686370"
down_revision = "445e50628f6b"
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
    """Create custom_request_forms and add requests.custom_metadata."""
    op.create_table(
        "custom_request_forms",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("agency_ein", sa.String(length=4), nullable=False),
        sa.Column("form_name", sa.String(), nullable=False),
        sa.Column("form_description", sa.String(), nullable=False),
        # Form fields are stored as a JSONB document.
        sa.Column("field_definitions",
                  postgresql.JSONB(astext_type=sa.Text()), nullable=False),
        sa.Column("repeatable", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(["agency_ein"], ["agencies.ein"]),
        sa.PrimaryKeyConstraint("id"),
    )
    metadata_column = sa.Column(
        "custom_metadata", postgresql.JSONB(astext_type=sa.Text()),
        nullable=True)
    op.add_column("requests", metadata_column)
def downgrade():
    """Reverse this revision: drop the metadata column, then the table."""
    op.drop_column("requests", "custom_metadata")
    op.drop_table("custom_request_forms")
| {
"repo_name": "CityOfNewYork/NYCOpenRecords",
"path": "migrations/versions/7e0434686370_added_custom_request_forms_table.py",
"copies": "1",
"size": "1404",
"license": "apache-2.0",
"hash": 1333123939158107100,
"line_mean": 29.5217391304,
"line_max": 88,
"alpha_frac": 0.6438746439,
"autogenerated": false,
"ratio": 3.6,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47438746439,
"avg_score": null,
"num_lines": null
} |
"""Added display_ends to skirmishes
Revision ID: 42918bb1dffe
Revises: 4638ed8c54d9
Create Date: 2013-12-05 16:35:02.513937
"""
# revision identifiers, used by Alembic.
revision = '42918bb1dffe'
down_revision = '4638ed8c54d9'
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Dispatch to the per-engine upgrade function for *engine_name*.

    Looks the function up in module globals instead of building a string
    for eval(): identical behavior for valid engine names, but no
    arbitrary-code evaluation path.
    """
    globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
    """Dispatch to the per-engine downgrade function for *engine_name*.

    Uses a module-globals lookup rather than eval() — same result for
    valid engine names, without evaluating a constructed string.
    """
    globals()["downgrade_%s" % engine_name]()
def upgrade_engine1():
    """Add the nullable display_ends column to skirmish_actions (engine1)."""
    display_ends = sa.Column('display_ends', sa.Integer(), nullable=True)
    op.add_column('skirmish_actions', display_ends)
def downgrade_engine1():
    """Remove the display_ends column from skirmish_actions (engine1)."""
    op.drop_column('skirmish_actions', 'display_ends')
def upgrade_engine2():
    """Add the nullable display_ends column to skirmish_actions (engine2)."""
    display_ends = sa.Column('display_ends', sa.Integer(), nullable=True)
    op.add_column('skirmish_actions', display_ends)
def downgrade_engine2():
    """Remove the display_ends column from skirmish_actions (engine2)."""
    op.drop_column('skirmish_actions', 'display_ends')
def upgrade_engine3():
    """Add the nullable display_ends column to skirmish_actions (engine3)."""
    display_ends = sa.Column('display_ends', sa.Integer(), nullable=True)
    op.add_column('skirmish_actions', display_ends)
def downgrade_engine3():
    """Remove the display_ends column from skirmish_actions (engine3)."""
    op.drop_column('skirmish_actions', 'display_ends')
| {
"repo_name": "atiaxi/chromabot",
"path": "alembic/versions/42918bb1dffe_added_display_ends_to_skirmishes.py",
"copies": "1",
"size": "1604",
"license": "mit",
"hash": -1570606331378202400,
"line_mean": 24.8709677419,
"line_max": 93,
"alpha_frac": 0.674563591,
"autogenerated": false,
"ratio": 3.456896551724138,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46314601427241375,
"avg_score": null,
"num_lines": null
} |
"""Added display_end to Battle
Revision ID: 41fe909190d3
Revises: 2e37930ecb54
Create Date: 2013-09-16 18:47:44.229667
"""
# revision identifiers, used by Alembic.
revision = '41fe909190d3'
down_revision = '2e37930ecb54'
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Dispatch to the per-engine upgrade function for *engine_name*.

    Looks the function up in module globals instead of eval()-ing a
    constructed string — same behavior for valid names, no code-eval path.
    """
    globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
    """Dispatch to the per-engine downgrade function for *engine_name*.

    Module-globals lookup replaces the original eval() call — identical
    behavior for valid names, without evaluating arbitrary strings.
    """
    globals()["downgrade_%s" % engine_name]()
def upgrade_engine1():
    """Add the nullable display_ends column to battles (engine1)."""
    display_ends = sa.Column('display_ends', sa.Integer(), nullable=True)
    op.add_column('battles', display_ends)
def downgrade_engine1():
    """Remove the display_ends column from battles (engine1)."""
    op.drop_column('battles', 'display_ends')
def upgrade_engine2():
    """Add the nullable display_ends column to battles (engine2)."""
    display_ends = sa.Column('display_ends', sa.Integer(), nullable=True)
    op.add_column('battles', display_ends)
def downgrade_engine2():
    """Remove the display_ends column from battles (engine2)."""
    op.drop_column('battles', 'display_ends')
def upgrade_engine3():
    """Add the nullable display_ends column to battles (engine3)."""
    display_ends = sa.Column('display_ends', sa.Integer(), nullable=True)
    op.add_column('battles', display_ends)
def downgrade_engine3():
    """Remove the display_ends column from battles (engine3)."""
    op.drop_column('battles', 'display_ends')
| {
"repo_name": "atiaxi/chromabot",
"path": "alembic/versions/41fe909190d3_added_display_end_to.py",
"copies": "1",
"size": "1545",
"license": "mit",
"hash": 8067153866570662000,
"line_mean": 23.9193548387,
"line_max": 84,
"alpha_frac": 0.6660194175,
"autogenerated": false,
"ratio": 3.576388888888889,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4742408306388889,
"avg_score": null,
"num_lines": null
} |
"""Added ECC
Revision ID: 37826dbd06d5
Revises: 79cf16aef0b
Create Date: 2013-08-05 17:35:03.326000
"""
# revision identifiers, used by Alembic.
revision = '37826dbd06d5'
down_revision = '79cf16aef0b'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the ``ecc`` lookup table and seed it with one row per
    country/region: display name, two-letter code, PI prefix (a hex
    digit) and ECC value (a hex byte such as "E0").

    NOTE(review): ids are not contiguous (gaps at 178-190 and 258-270)
    and several entries appear twice with identical values — presumably
    kept as-is to mirror an upstream reference table; verify before
    "cleaning up".
    """
    ### commands auto generated by Alembic - please adjust! ###
    ecc = op.create_table('ecc',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('iso', sa.String(length=2), nullable=True),  # two-letter code, e.g. "AL"
        sa.Column('pi', sa.String(length=2), nullable=True),   # PI prefix, a single hex digit
        sa.Column('ecc', sa.String(length=3), nullable=True),  # ECC value, a hex byte, e.g. "E0"
        sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###
    # Seed data (one dict per row; ids are explicit so gaps survive).
    op.bulk_insert(
        ecc,
        [
            {"id": 1, "name": "Albania", "iso": "AL", "pi": "9", "ecc": "E0"},
            {"id": 2, "name": "Algeria", "iso": "DZ", "pi": "2", "ecc": "E0"},
            {"id": 3, "name": "Algeria", "iso": "DZ", "pi": "2", "ecc": "E0"},
            {"id": 4, "name": "Andorra", "iso": "AD", "pi": "3", "ecc": "E0"},
            {"id": 5, "name": "Angola", "iso": "AO", "pi": "6", "ecc": "D0"},
            {"id": 6, "name": "Anguilla", "iso": "AI", "pi": "1", "ecc": "A2"},
            {"id": 7, "name": "Antigua and Barbuda", "iso": "AG", "pi": "2", "ecc": "A2"},
            {"id": 8, "name": "Argentina", "iso": "AR", "pi": "A", "ecc": "A2"},
            {"id": 9, "name": "Armenia", "iso": "AM", "pi": "A", "ecc": "E4"},
            {"id": 10, "name": "Aruba", "iso": "AW", "pi": "3", "ecc": "A4"},
            {"id": 11, "name": "Australia - Australian Capital Territory", "iso": "AU", "pi": "1", "ecc": "F0"},
            {"id": 12, "name": "Australia - New South Wales", "iso": "AU", "pi": "2", "ecc": "F0"},
            {"id": 13, "name": "Australia - Northern Territory", "iso": "AU", "pi": "8", "ecc": "F0"},
            {"id": 14, "name": "Australia - Queensland", "iso": "AU", "pi": "4", "ecc": "F0"},
            {"id": 15, "name": "Australia - South Australia", "iso": "AU", "pi": "5", "ecc": "F0"},
            {"id": 16, "name": "Australia - Tasmania", "iso": "AU", "pi": "7", "ecc": "F0"},
            {"id": 17, "name": "Australia - Victoria", "iso": "AU", "pi": "3", "ecc": "F0"},
            {"id": 18, "name": "Australia - Western Australia", "iso": "AU", "pi": "6", "ecc": "F0"},
            {"id": 19, "name": "Austria", "iso": "AT", "pi": "A", "ecc": "E0"},
            {"id": 20, "name": "Azerbaijan", "iso": "AZ", "pi": "B", "ecc": "E3"},
            {"id": 21, "name": "Azores [Portugal]", "iso": "PT", "pi": "8", "ecc": "E4"},
            {"id": 22, "name": "Bahamas", "iso": "BS", "pi": "F", "ecc": "A2"},
            {"id": 23, "name": "Bahrain", "iso": "BH", "pi": "E", "ecc": "F0"},
            {"id": 24, "name": "Bangladesh", "iso": "BD", "pi": "3", "ecc": "F1"},
            {"id": 25, "name": "Barbados", "iso": "BB", "pi": "5", "ecc": "A2"},
            {"id": 26, "name": "Belarus", "iso": "BY", "pi": "F", "ecc": "E3"},
            {"id": 27, "name": "Belarus", "iso": "BY", "pi": "F", "ecc": "E3"},
            {"id": 28, "name": "Belgium", "iso": "BE", "pi": "6", "ecc": "E0"},
            {"id": 29, "name": "Belize", "iso": "BZ", "pi": "6", "ecc": "A2"},
            {"id": 30, "name": "Benin", "iso": "BJ", "pi": "E", "ecc": "D0"},
            {"id": 31, "name": "Bermuda", "iso": "BM", "pi": "C", "ecc": "A2"},
            {"id": 32, "name": "Bhutan", "iso": "BT", "pi": "2", "ecc": "F1"},
            {"id": 33, "name": "Bolivia", "iso": "BO", "pi": "1", "ecc": "A3"},
            {"id": 34, "name": "Bosnia-Herzegovina", "iso": "BA", "pi": "F", "ecc": "E4"},
            {"id": 35, "name": "Botswana", "iso": "BW", "pi": "B", "ecc": "D1"},
            {"id": 36, "name": "Brazil", "iso": "BR", "pi": "B", "ecc": "A2"},
            {"id": 37, "name": "Brunei Darussalam", "iso": "BN", "pi": "B", "ecc": "F1"},
            {"id": 38, "name": "Bulgaria", "iso": "BG", "pi": "8", "ecc": "E1"},
            {"id": 39, "name": "Burkina Faso", "iso": "BF", "pi": "B", "ecc": "D0"},
            {"id": 40, "name": "Burundi", "iso": "BI", "pi": "9", "ecc": "D1"},
            {"id": 41, "name": "Cambodia", "iso": "KH", "pi": "3", "ecc": "F2"},
            {"id": 42, "name": "Cameroon", "iso": "CM", "pi": "1", "ecc": "D0"},
            {"id": 43, "name": "Canada B", "iso": "CA", "pi": "B", "ecc": "A1"},
            {"id": 44, "name": "Canada C", "iso": "CA", "pi": "C", "ecc": "A1"},
            {"id": 45, "name": "Canada D", "iso": "CA", "pi": "D", "ecc": "A1"},
            {"id": 46, "name": "Canada E", "iso": "CA", "pi": "E", "ecc": "A1"},
            {"id": 47, "name": "Canaries [Spain]", "iso": "ES", "pi": "E", "ecc": "E2"},
            {"id": 48, "name": "Canary Islands [Spain]", "iso": "ES", "pi": "E", "ecc": "E0"},
            {"id": 49, "name": "Cape Verde", "iso": "CV", "pi": "6", "ecc": "D1"},
            {"id": 50, "name": "Cayman Islands", "iso": "KY", "pi": "7", "ecc": "A2"},
            {"id": 51, "name": "Central African Republic", "iso": "CF", "pi": "2", "ecc": "D0"},
            {"id": 52, "name": "Chad", "iso": "TD", "pi": "9", "ecc": "D2"},
            {"id": 53, "name": "Chile", "iso": "CL", "pi": "C", "ecc": "A3"},
            {"id": 54, "name": "China", "iso": "CN", "pi": "C", "ecc": "F0"},
            {"id": 55, "name": "Colombia", "iso": "CO", "pi": "2", "ecc": "A3"},
            {"id": 56, "name": "Comoros", "iso": "KM", "pi": "C", "ecc": "D1"},
            {"id": 57, "name": "Congo", "iso": "CG", "pi": "C", "ecc": "D0"},
            {"id": 58, "name": "Costa Rica", "iso": "CR", "pi": "8", "ecc": "A2"},
            {"id": 59, "name": "Cote d.Ivoire", "iso": "CI", "pi": "C", "ecc": "D2"},
            {"id": 60, "name": "Croatia", "iso": "HR", "pi": "C", "ecc": "E3"},
            {"id": 61, "name": "Cuba", "iso": "CU", "pi": "9", "ecc": "A2"},
            {"id": 62, "name": "Cyprus", "iso": "CY", "pi": "2", "ecc": "E1"},
            {"id": 63, "name": "Czech Republic", "iso": "CZ", "pi": "2", "ecc": "E2"},
            {"id": 64, "name": "Democratic Rep. of Congo", "iso": "ZR", "pi": "B", "ecc": "D2"},
            {"id": 65, "name": "Denmark", "iso": "DK", "pi": "9", "ecc": "E1"},
            {"id": 66, "name": "Djibouti", "iso": "DJ", "pi": "3", "ecc": "D0"},
            {"id": 67, "name": "Dominica", "iso": "DM", "pi": "A", "ecc": "A3"},
            {"id": 68, "name": "Dominican Republic", "iso": "DO", "pi": "B", "ecc": "A3"},
            {"id": 69, "name": "Ecuador", "iso": "EC", "pi": "3", "ecc": "A2"},
            {"id": 70, "name": "Egypt", "iso": "EG", "pi": "F", "ecc": "E0"},
            {"id": 71, "name": "Egypt", "iso": "EG", "pi": "F", "ecc": "E0"},
            {"id": 72, "name": "El Salvador", "iso": "SV", "pi": "C", "ecc": "A4"},
            {"id": 73, "name": "Equatorial Guinea", "iso": "GQ", "pi": "7", "ecc": "D0"},
            {"id": 74, "name": "Estonia", "iso": "EE", "pi": "2", "ecc": "E4"},
            {"id": 75, "name": "Estonia", "iso": "EE", "pi": "2", "ecc": "E4"},
            {"id": 76, "name": "Ethiopia", "iso": "ET", "pi": "E", "ecc": "D1"},
            {"id": 77, "name": "Falkland Islands", "iso": "FK", "pi": "4", "ecc": "A2"},
            {"id": 78, "name": "Faroe Islands [Denmark]", "iso": "DK", "pi": "9", "ecc": "E1"},
            {"id": 79, "name": "Fiji", "iso": "FJ", "pi": "5", "ecc": "F1"},
            {"id": 80, "name": "Finland", "iso": "FI", "pi": "6", "ecc": "E1"},
            {"id": 81, "name": "France", "iso": "FR", "pi": "F", "ecc": "E1"},
            {"id": 82, "name": "Gabon", "iso": "GA", "pi": "8", "ecc": "D0"},
            {"id": 83, "name": "Gambia", "iso": "GM", "pi": "8", "ecc": "D1"},
            {"id": 84, "name": "Georgia", "iso": "GE", "pi": "C", "ecc": "E4"},
            {"id": 85, "name": "Germany 1", "iso": "DE", "pi": "1", "ecc": "E0"},
            {"id": 86, "name": "Germany D", "iso": "DE", "pi": "D", "ecc": "E0"},
            {"id": 87, "name": "Ghana", "iso": "GH", "pi": "3", "ecc": "D1"},
            {"id": 88, "name": "Gibraltar [United Kingdom]", "iso": "GI", "pi": "A", "ecc": "E1"},
            {"id": 89, "name": "Greece", "iso": "GR", "pi": "1", "ecc": "E1"},
            {"id": 90, "name": "Greenland", "iso": "GL", "pi": "F", "ecc": "A1"},
            {"id": 91, "name": "Grenada", "iso": "GD", "pi": "D", "ecc": "A3"},
            {"id": 92, "name": "Guadeloupe", "iso": "GP", "pi": "E", "ecc": "A2"},
            {"id": 93, "name": "Guatemala", "iso": "GT", "pi": "1", "ecc": "A4"},
            {"id": 94, "name": "Guiana", "iso": "GF", "pi": "5", "ecc": "A3"},
            {"id": 95, "name": "Guinea-Bissau", "iso": "GW", "pi": "A", "ecc": "D2"},
            {"id": 96, "name": "Guyana", "iso": "GY", "pi": "F", "ecc": "A3"},
            {"id": 97, "name": "Haiti", "iso": "HT", "pi": "D", "ecc": "A4"},
            {"id": 98, "name": "Honduras", "iso": "HN", "pi": "2", "ecc": "A4"},
            {"id": 99, "name": "Hong Kong", "iso": "HK", "pi": "F", "ecc": "F1"},
            {"id": 100, "name": "Hungary", "iso": "HU", "pi": "B", "ecc": "E0"},
            {"id": 101, "name": "Iceland", "iso": "IS", "pi": "A", "ecc": "E2"},
            {"id": 102, "name": "India", "iso": "IN", "pi": "5", "ecc": "F2"},
            {"id": 103, "name": "Indonesia", "iso": "ID", "pi": "C", "ecc": "F2"},
            {"id": 104, "name": "Iran", "iso": "IR", "pi": "8", "ecc": "F0"},
            {"id": 105, "name": "Iraq", "iso": "IQ", "pi": "B", "ecc": "E1"},
            {"id": 106, "name": "Iraq", "iso": "IQ", "pi": "B", "ecc": "E1"},
            {"id": 107, "name": "Ireland", "iso": "IE", "pi": "2", "ecc": "E3"},
            {"id": 108, "name": "Israel", "iso": "IL", "pi": "4", "ecc": "E0"},
            {"id": 109, "name": "Italy", "iso": "IT", "pi": "5", "ecc": "E0"},
            {"id": 110, "name": "Jamaica", "iso": "JM", "pi": "3", "ecc": "A3"},
            {"id": 111, "name": "Japan", "iso": "JP", "pi": "9", "ecc": "F2"},
            {"id": 112, "name": "Jordan", "iso": "JO", "pi": "5", "ecc": "E1"},
            {"id": 113, "name": "Kazakhstan", "iso": "KZ", "pi": "D", "ecc": "E3"},
            {"id": 114, "name": "Kenya", "iso": "KE", "pi": "6", "ecc": "D2"},
            {"id": 115, "name": "Kiribati", "iso": "KI", "pi": "1", "ecc": "F1"},
            {"id": 116, "name": "Korea [North]", "iso": "KP", "pi": "D", "ecc": "F0"},
            {"id": 117, "name": "Korea [South]", "iso": "KR", "pi": "E", "ecc": "F1"},
            {"id": 118, "name": "Kuwait", "iso": "KW", "pi": "1", "ecc": "F2"},
            {"id": 119, "name": "Kyrgyzstan", "iso": "KG", "pi": "3", "ecc": "E4"},
            {"id": 120, "name": "Laos", "iso": "LA", "pi": "1", "ecc": "F3"},
            {"id": 121, "name": "Latvia", "iso": "LV", "pi": "9", "ecc": "E3"},
            {"id": 122, "name": "Latvia", "iso": "LV", "pi": "9", "ecc": "E3"},
            {"id": 123, "name": "Lebanon", "iso": "LB", "pi": "A", "ecc": "E3"},
            {"id": 124, "name": "Lesotho", "iso": "LS", "pi": "6", "ecc": "D3"},
            {"id": 125, "name": "Liberia", "iso": "LR", "pi": "2", "ecc": "D1"},
            {"id": 126, "name": "Libya", "iso": "LY", "pi": "D", "ecc": "E1"},
            {"id": 127, "name": "Libya", "iso": "LY", "pi": "D", "ecc": "E1"},
            {"id": 128, "name": "Liechtenstein", "iso": "LI", "pi": "9", "ecc": "E2"},
            {"id": 129, "name": "Lithuania", "iso": "LT", "pi": "C", "ecc": "E2"},
            {"id": 130, "name": "Lithuania", "iso": "LT", "pi": "C", "ecc": "E2"},
            {"id": 131, "name": "Luxembourg", "iso": "LU", "pi": "7", "ecc": "E1"},
            {"id": 132, "name": "Macau", "iso": "MO", "pi": "6", "ecc": "F2"},
            {"id": 133, "name": "Macedonia", "iso": "MK", "pi": "4", "ecc": "E3"},
            {"id": 134, "name": "Madagascar", "iso": "MG", "pi": "4", "ecc": "D0"},
            {"id": 135, "name": "Madeira [Portugal]", "iso": "PT", "pi": "8", "ecc": "E4"},
            {"id": 136, "name": "Malawi", "iso": "MW", "pi": "F", "ecc": "D0"},
            {"id": 137, "name": "Malaysia", "iso": "MY", "pi": "F", "ecc": "F0"},
            {"id": 138, "name": "Maldives", "iso": "MV", "pi": "B", "ecc": "F2"},
            {"id": 139, "name": "Mali", "iso": "ML", "pi": "5", "ecc": "D0"},
            {"id": 140, "name": "Malta", "iso": "MT", "pi": "C", "ecc": "E0"},
            {"id": 141, "name": "Martinique", "iso": "MQ", "pi": "4", "ecc": "A3"},
            {"id": 142, "name": "Mauritania", "iso": "MR", "pi": "4", "ecc": "D1"},
            {"id": 143, "name": "Mauritius", "iso": "MU", "pi": "A", "ecc": "D3"},
            {"id": 144, "name": "Mexico B", "iso": "MX", "pi": "B", "ecc": "A5"},
            {"id": 145, "name": "Mexico D", "iso": "MX", "pi": "D", "ecc": "A5"},
            {"id": 146, "name": "Mexico E", "iso": "MX", "pi": "E", "ecc": "A5"},
            {"id": 147, "name": "Mexico F", "iso": "MX", "pi": "F", "ecc": "A5"},
            {"id": 148, "name": "Micronesia", "iso": "FM", "pi": "E", "ecc": "F3"},
            {"id": 149, "name": "Moldova", "iso": "MD", "pi": "1", "ecc": "E4"},
            {"id": 150, "name": "Moldova", "iso": "MD", "pi": "1", "ecc": "E4"},
            {"id": 151, "name": "Monaco", "iso": "MC", "pi": "B", "ecc": "E2"},
            {"id": 152, "name": "Mongolia", "iso": "MN", "pi": "F", "ecc": "F3"},
            {"id": 153, "name": "Montserrat", "iso": "MS", "pi": "5", "ecc": "A4"},
            {"id": 154, "name": "Morocco", "iso": "MA", "pi": "1", "ecc": "E2"},
            {"id": 155, "name": "Morocco", "iso": "MA", "pi": "1", "ecc": "E2"},
            {"id": 156, "name": "Mozambique", "iso": "MZ", "pi": "3", "ecc": "D2"},
            {"id": 157, "name": "Myanmar [Burma]", "iso": "MM", "pi": "B", "ecc": "F0"},
            {"id": 158, "name": "Namibia", "iso": "NA", "pi": "1", "ecc": "D1"},
            {"id": 159, "name": "Nauru", "iso": "NR", "pi": "7", "ecc": "F1"},
            {"id": 160, "name": "Nepal", "iso": "NP", "pi": "E", "ecc": "F2"},
            {"id": 161, "name": "Netherlands", "iso": "NL", "pi": "8", "ecc": "E3"},
            {"id": 162, "name": "Netherlands Antilles", "iso": "AN", "pi": "D", "ecc": "A2"},
            {"id": 163, "name": "New Zealand", "iso": "NZ", "pi": "9", "ecc": "F1"},
            {"id": 164, "name": "Nicaragua", "iso": "NI", "pi": "7", "ecc": "A3"},
            {"id": 165, "name": "Niger", "iso": "NE", "pi": "8", "ecc": "D2"},
            {"id": 166, "name": "Nigeria", "iso": "NG", "pi": "F", "ecc": "D1"},
            {"id": 167, "name": "Norway", "iso": "NO", "pi": "F", "ecc": "E2"},
            {"id": 168, "name": "Oman", "iso": "OM", "pi": "6", "ecc": "F1"},
            {"id": 169, "name": "Pakistan", "iso": "PK", "pi": "4", "ecc": "F1"},
            {"id": 170, "name": "Palestine", "iso": "PS", "pi": "8", "ecc": "E0"},
            # NOTE(review): iso "A" below looks like a typo for "PA"; left as-is.
            {"id": 171, "name": "Panama", "iso": "A", "pi": "9", "ecc": "A3"},
            {"id": 172, "name": "Papua New Guinea", "iso": "PG", "pi": "9", "ecc": "F3"},
            {"id": 173, "name": "Paraguay", "iso": "PY", "pi": "6", "ecc": "A3"},
            {"id": 174, "name": "Peru", "iso": "PE", "pi": "7", "ecc": "A4"},
            {"id": 175, "name": "Philippines", "iso": "PH", "pi": "8", "ecc": "F2"},
            {"id": 176, "name": "Poland", "iso": "PL", "pi": "3", "ecc": "E2"},
            {"id": 177, "name": "Portugal", "iso": "PT", "pi": "8", "ecc": "E4"},
            {"id": 191, "name": "Qatar", "iso": "QA", "pi": "2", "ecc": "F2"},
            {"id": 192, "name": "Republic of Guinea", "iso": "GN", "pi": "9", "ecc": "D0"},
            {"id": 193, "name": "Romania", "iso": "RO", "pi": "E", "ecc": "E1"},
            {"id": 194, "name": "Russian Federation", "iso": "RU", "pi": "7", "ecc": "E0"},
            {"id": 195, "name": "Russian Federation", "iso": "RU", "pi": "7", "ecc": "E0"},
            {"id": 196, "name": "Rwanda", "iso": "RW", "pi": "5", "ecc": "D3"},
            {"id": 197, "name": "Saint Kitts", "iso": "KN", "pi": "A", "ecc": "A4"},
            {"id": 198, "name": "Saint Lucia", "iso": "LC", "pi": "B", "ecc": "A4"},
            {"id": 199, "name": "Saint Vincent", "iso": "VC", "pi": "C", "ecc": "A5"},
            {"id": 200, "name": "San Marino", "iso": "SM", "pi": "3", "ecc": "E1"},
            {"id": 201, "name": "Sao Tome & Principe", "iso": "ST", "pi": "5", "ecc": "D1"},
            {"id": 202, "name": "Saudi Arabia", "iso": "SA", "pi": "9", "ecc": "F0"},
            {"id": 203, "name": "Senegal", "iso": "SN", "pi": "7", "ecc": "D1"},
            {"id": 204, "name": "Seychelles", "iso": "SC", "pi": "8", "ecc": "D3"},
            {"id": 205, "name": "Sierra Leone", "iso": "SL", "pi": "1", "ecc": "D2"},
            {"id": 206, "name": "Singapore", "iso": "SG", "pi": "A", "ecc": "F2"},
            {"id": 207, "name": "Slovakia", "iso": "SK", "pi": "5", "ecc": "E2"},
            {"id": 208, "name": "Slovenia", "iso": "SI", "pi": "9", "ecc": "E4"},
            {"id": 209, "name": "Solomon Islands", "iso": "SB", "pi": "A", "ecc": "F1"},
            {"id": 210, "name": "Somalia", "iso": "SO", "pi": "7", "ecc": "D2"},
            {"id": 211, "name": "South Africa", "iso": "ZA", "pi": "A", "ecc": "D0"},
            {"id": 212, "name": "Spain", "iso": "ES", "pi": "E", "ecc": "E2"},
            {"id": 213, "name": "Sri Lanka", "iso": "LK", "pi": "C", "ecc": "F1"},
            {"id": 214, "name": "St Pierre and Miquelon", "iso": "PM", "pi": "F", "ecc": "A6"},
            {"id": 215, "name": "Sudan", "iso": "SD", "pi": "C", "ecc": "D3"},
            {"id": 216, "name": "Suriname", "iso": "SR", "pi": "8", "ecc": "A4"},
            {"id": 217, "name": "Swaziland", "iso": "SZ", "pi": "5", "ecc": "D2"},
            {"id": 218, "name": "Sweden", "iso": "SE", "pi": "E", "ecc": "E3"},
            {"id": 219, "name": "Switzerland", "iso": "CH", "pi": "4", "ecc": "E1"},
            {"id": 220, "name": "Syrian Arab Republic", "iso": "SY", "pi": "6", "ecc": "E2"},
            {"id": 221, "name": "Taiwan", "iso": "TW", "pi": "D", "ecc": "F1"},
            {"id": 222, "name": "Tajikistan", "iso": "TJ", "pi": "5", "ecc": "E3"},
            {"id": 223, "name": "Tanzania", "iso": "TZ", "pi": "D", "ecc": "D1"},
            {"id": 224, "name": "Thailand", "iso": "TH", "pi": "2", "ecc": "F3"},
            {"id": 225, "name": "Togo", "iso": "TG", "pi": "D", "ecc": "D0"},
            {"id": 226, "name": "Tonga", "iso": "TO", "pi": "3", "ecc": "F3"},
            {"id": 227, "name": "Trinidad and Tobago", "iso": "TT", "pi": "6", "ecc": "A4"},
            {"id": 228, "name": "Tunisia", "iso": "TN", "pi": "7", "ecc": "E2"},
            {"id": 229, "name": "Tunisia", "iso": "TN", "pi": "7", "ecc": "E2"},
            {"id": 230, "name": "Turkey", "iso": "TR", "pi": "3", "ecc": "E3"},
            {"id": 231, "name": "Turkmenistan", "iso": "TM", "pi": "E", "ecc": "E4"},
            {"id": 232, "name": "Turks and Caicos Islands", "iso": "TC", "pi": "E", "ecc": "A3"},
            {"id": 233, "name": "UAE", "iso": "AE", "pi": "D", "ecc": "F2"},
            {"id": 234, "name": "Uganda", "iso": "UG", "pi": "4", "ecc": "D2"},
            {"id": 235, "name": "Ukraine", "iso": "UA", "pi": "6", "ecc": "E4"},
            {"id": 236, "name": "Ukraine", "iso": "UA", "pi": "6", "ecc": "E4"},
            {"id": 237, "name": "United Kingdom", "iso": "GB", "pi": "C", "ecc": "E1"},
            {"id": 238, "name": "United States of America 1", "iso": "US", "pi": "1", "ecc": "A0"},
            {"id": 239, "name": "United States of America 2", "iso": "US", "pi": "2", "ecc": "A0"},
            {"id": 240, "name": "United States of America 3", "iso": "US", "pi": "3", "ecc": "A0"},
            {"id": 241, "name": "United States of America 4", "iso": "US", "pi": "4", "ecc": "A0"},
            {"id": 242, "name": "United States of America 5", "iso": "US", "pi": "5", "ecc": "A0"},
            {"id": 243, "name": "United States of America 6", "iso": "US", "pi": "6", "ecc": "A0"},
            {"id": 244, "name": "United States of America 7", "iso": "US", "pi": "7", "ecc": "A0"},
            {"id": 245, "name": "United States of America 8", "iso": "US", "pi": "8", "ecc": "A0"},
            {"id": 246, "name": "United States of America 9", "iso": "US", "pi": "9", "ecc": "A0"},
            {"id": 247, "name": "United States of America A", "iso": "US", "pi": "A", "ecc": "A0"},
            {"id": 248, "name": "United States of America B", "iso": "US", "pi": "B", "ecc": "A0"},
            {"id": 249, "name": "United States of America D", "iso": "US", "pi": "D", "ecc": "A0"},
            {"id": 250, "name": "United States of America E", "iso": "US", "pi": "E", "ecc": "A0"},
            {"id": 251, "name": "Uruguay", "iso": "UY", "pi": "9", "ecc": "A4"},
            {"id": 252, "name": "Uzbekistan", "iso": "UZ", "pi": "B", "ecc": "E4"},
            {"id": 253, "name": "Vanuatu", "iso": "VU", "pi": "F", "ecc": "F2"},
            {"id": 254, "name": "Vatican City State", "iso": "VA", "pi": "4", "ecc": "E2"},
            {"id": 255, "name": "Venezuela", "iso": "VE", "pi": "E", "ecc": "A4"},
            {"id": 256, "name": "Vietnam", "iso": "VN", "pi": "7", "ecc": "F2"},
            {"id": 257, "name": "Virgin Islands [British]", "iso": "VG", "pi": "F", "ecc": "A5"},
            {"id": 271, "name": "Western Sahara", "iso": "EH", "pi": "3", "ecc": "D3"},
            {"id": 272, "name": "Western Samoa", "iso": "WS", "pi": "4", "ecc": "F2"},
            {"id": 273, "name": "Yemen", "iso": "YE", "pi": "B", "ecc": "F3"},
            {"id": 274, "name": "Yugoslavia", "iso": "YU", "pi": "D", "ecc": "E2"},
            {"id": 275, "name": "Zambia", "iso": "ZM", "pi": "E", "ecc": "D2"},
            {"id": 276, "name": "Zimbabwe", "iso": "ZW", "pi": "2", "ecc": "D2"}
        ]
    )
def downgrade():
    """Reverse :func:`upgrade` by dropping the ``ecc`` table.

    Dropping the table removes the seed rows with it, so no separate
    delete step is needed.
    """
    op.drop_table('ecc')
| {
"repo_name": "ebu/radiodns-plugit",
"path": "RadioDns-PlugIt/alembic/versions/37826dbd06d5_added_ecc.py",
"copies": "1",
"size": "25312",
"license": "bsd-3-clause",
"hash": 3739691244353518600,
"line_mean": 46.223880597,
"line_max": 82,
"alpha_frac": 0.3330041087,
"autogenerated": false,
"ratio": 2.6933390082996382,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8524464433604808,
"avg_score": 0.0003757366789659002,
"num_lines": 536
} |
"""Added EPG fields to Station
Revision ID: 2d5b0c8383e0
Revises: 30a24ab759f9
Create Date: 2014-12-05 12:22:16.897709
"""
# revision identifiers, used by Alembic.
revision = '2d5b0c8383e0'
down_revision = '30a24ab759f9'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add the EPG text fields (descriptions, names, default URL) to ``station``."""
    epg_columns = (
        sa.Column('long_description', sa.String(length=1200), nullable=True),
        sa.Column('long_name', sa.String(length=128), nullable=True),
        sa.Column('medium_name', sa.String(length=16), nullable=True),
        sa.Column('url_default', sa.String(length=255), nullable=True),
    )
    for column in epg_columns:
        op.add_column('station', column)
def downgrade():
    """Drop the EPG fields added by :func:`upgrade` (reverse order)."""
    for column_name in ('url_default', 'medium_name', 'long_name', 'long_description'):
        op.drop_column('station', column_name)
| {
"repo_name": "ebu/radiodns-plugit",
"path": "RadioDns-PlugIt/alembic/versions/2d5b0c8383e0_added_epg_fields_to_station.py",
"copies": "1",
"size": "1060",
"license": "bsd-3-clause",
"hash": 8304563664877293000,
"line_mean": 32.125,
"line_max": 98,
"alpha_frac": 0.691509434,
"autogenerated": false,
"ratio": 3.202416918429003,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4393926352429003,
"avg_score": null,
"num_lines": null
} |
"""Added Event table with foreign key in Facts.
Revision ID: 96489d0aa37
Revises: 3bbac6e7d8ee
Create Date: 2013-06-06 11:36:35.557406
"""
# revision identifiers, used by Alembic.
revision = '96489d0aa37'
down_revision = '3bbac6e7d8ee'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the ``event`` table, seed it with a default row, and add a
    ``facts.event_id`` column defaulting to that row's id.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('event',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=80), nullable=False),
        sa.Column('description', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    # Lightweight table object used only for the seed insert.
    # NOTE(review): it declares no 'id' column, yet the insert below
    # passes 'id' (and as the string '1', not an int) — verify the id
    # actually lands in the row, since facts.event_id's server default
    # of '1' relies on a row with id 1 existing.
    Event = sa.sql.table('event',
        sa.sql.column('name', sa.String(80)),
        sa.sql.column('description', sa.String(255))
    )
    op.bulk_insert(Event,
        [
            {'id':'1', 'name':'First Event',
             'description':'The first event.'},
        ])
    # server_default='1' lets the NOT NULL column be added to a table
    # that already contains rows.
    op.add_column(u'facts', sa.Column('event_id', sa.Integer(), nullable=False,
        server_default='1'))
    ### end Alembic commands ###
def downgrade():
    """Undo :func:`upgrade`: remove ``facts.event_id`` first, then drop
    the ``event`` table itself.
    """
    # Drop the referencing column before the table it points at.
    op.drop_column(u'facts', 'event_id')
    op.drop_table('event')
| {
"repo_name": "msscully/datamart",
"path": "alembic/versions/96489d0aa37_added_event_table_wi.py",
"copies": "1",
"size": "1302",
"license": "mit",
"hash": -5819142440353471000,
"line_mean": 28.5909090909,
"line_max": 79,
"alpha_frac": 0.60906298,
"autogenerated": false,
"ratio": 3.472,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9501999267846537,
"avg_score": 0.015812742430692528,
"num_lines": 44
} |
"""Added Executive Compensation Table
Revision ID: 41350e71ae8c
Revises: 20b5109967bf
Create Date: 2017-04-14 08:06:01.597691
"""
# revision identifiers, used by Alembic.
revision = '41350e71ae8c'
down_revision = '20b5109967bf'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Run the upgrade function matching *engine_name* (e.g. ``data_broker``)."""
    handler = globals()["upgrade_%s" % engine_name]
    handler()
def downgrade(engine_name):
    """Run the downgrade function matching *engine_name*."""
    handler = globals()["downgrade_%s" % engine_name]
    handler()
def upgrade_data_broker():
    """Create the ``executive_compensation`` table.

    The five highest-compensated officers each get a pair of nullable
    text columns (``high_comp_officerN_full_na`` and
    ``high_comp_officerN_amount``), generated here instead of spelled
    out ten times.
    """
    officer_columns = [
        sa.Column('high_comp_officer%d_%s' % (number, suffix), sa.Text(), nullable=True)
        for number in range(1, 6)
        for suffix in ('full_na', 'amount')
    ]
    table_args = [
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('executive_compensation_id', sa.Integer(), nullable=False),
        sa.Column('awardee_or_recipient_uniqu', sa.Text(), nullable=True),
        sa.Column('ultimate_parent_unique_ide', sa.Text(), nullable=True),
        sa.Column('ultimate_parent_legal_enti', sa.Text(), nullable=True),
    ] + officer_columns + [
        sa.PrimaryKeyConstraint('executive_compensation_id'),
    ]
    op.create_table('executive_compensation', *table_args)
def downgrade_data_broker():
    """Drop the ``executive_compensation`` table created by the upgrade."""
    table_name = 'executive_compensation'
    op.drop_table(table_name)
### end Alembic commands ###
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/41350e71ae8c_added_executive_compensation_table.py",
"copies": "1",
"size": "1987",
"license": "cc0-1.0",
"hash": -7759910261634433000,
"line_mean": 33.2586206897,
"line_max": 73,
"alpha_frac": 0.6899849019,
"autogenerated": false,
"ratio": 3.134069400630915,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9197925136048732,
"avg_score": 0.025225833296436396,
"num_lines": 58
} |
"""Added existing tables
Revision ID: f19fc04ba856
Revises:
Create Date: 2017-09-24 03:10:27.208231
"""
from alembic import op
import sqlalchemy as sa
import sys
from pathlib import Path
# Make the project root (two directories above migrations/versions/)
# importable so ``from monocle import db`` works when Alembic executes
# this migration as a standalone script.
monocle_dir = str(Path(__file__).resolve().parents[2])
if monocle_dir not in sys.path:
    sys.path.append(monocle_dir)
from monocle import db as db
# revision identifiers, used by Alembic.
revision = 'f19fc04ba856'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the initial Monocle schema: forts (gyms), pokestops,
    pokemon sightings (confirmed and "mystery"), spawnpoints, raids,
    gym defenders and the per-user sighting attribution table.

    Column types such as db.FLOAT_TYPE / db.TINY_TYPE /
    db.UNSIGNED_HUGE_TYPE come from monocle.db — presumably
    dialect-dependent aliases; see that module for the mapping.
    """
    # Original hand-added guard, left disabled: skip creation when the
    # schema already exists.
    #if db._engine.dialect.has_table(db._engine, 'sightings'):
    #    return
    # ### commands auto generated by Alembic - please adjust! ###
    # Gyms/forts; external_id is the upstream identifier.
    op.create_table('forts',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('external_id', sa.String(length=35), nullable=True),
        sa.Column('lat', db.FLOAT_TYPE, nullable=True),
        sa.Column('lon', db.FLOAT_TYPE, nullable=True),
        sa.Column('name', sa.String(length=128), nullable=True),
        sa.Column('url', sa.String(length=200), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('external_id')
    )
    op.create_index('ix_coords', 'forts', ['lat', 'lon'], unique=False)
    # Sightings whose exact spawn/despawn timing is not yet confirmed.
    op.create_table('mystery_sightings',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('pokemon_id', db.TINY_TYPE, nullable=True),
        sa.Column('spawn_id', sa.BigInteger(), nullable=True),
        sa.Column('encounter_id', db.UNSIGNED_HUGE_TYPE, nullable=True),
        sa.Column('lat', db.FLOAT_TYPE, nullable=True),
        sa.Column('lon', db.FLOAT_TYPE, nullable=True),
        sa.Column('first_seen', sa.Integer(), nullable=True),
        sa.Column('first_seconds', sa.SmallInteger(), nullable=True),
        sa.Column('last_seconds', sa.SmallInteger(), nullable=True),
        sa.Column('seen_range', sa.SmallInteger(), nullable=True),
        sa.Column('atk_iv', db.TINY_TYPE, nullable=True),
        sa.Column('def_iv', db.TINY_TYPE, nullable=True),
        sa.Column('sta_iv', db.TINY_TYPE, nullable=True),
        sa.Column('move_1', sa.SmallInteger(), nullable=True),
        sa.Column('move_2', sa.SmallInteger(), nullable=True),
        sa.Column('gender', sa.SmallInteger(), nullable=True),
        sa.Column('form', sa.SmallInteger(), nullable=True),
        sa.Column('cp', sa.SmallInteger(), nullable=True),
        sa.Column('level', sa.SmallInteger(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('encounter_id', 'spawn_id', name='unique_encounter')
    )
    op.create_index(op.f('ix_mystery_sightings_encounter_id'), 'mystery_sightings', ['encounter_id'], unique=False)
    op.create_index(op.f('ix_mystery_sightings_first_seen'), 'mystery_sightings', ['first_seen'], unique=False)
    op.create_index(op.f('ix_mystery_sightings_spawn_id'), 'mystery_sightings', ['spawn_id'], unique=False)
    op.create_table('pokestops',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('external_id', sa.String(length=35), nullable=True),
        sa.Column('lat', db.FLOAT_TYPE, nullable=True),
        sa.Column('lon', db.FLOAT_TYPE, nullable=True),
        sa.Column('name', sa.String(length=128), nullable=True),
        sa.Column('url', sa.String(length=200), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('external_id')
    )
    op.create_index(op.f('ix_pokestops_lat'), 'pokestops', ['lat'], unique=False)
    op.create_index(op.f('ix_pokestops_lon'), 'pokestops', ['lon'], unique=False)
    # Confirmed pokemon sightings.
    op.create_table('sightings',
        sa.Column('id', db.PRIMARY_HUGE_TYPE, nullable=False),
        sa.Column('pokemon_id', db.TINY_TYPE, nullable=True),
        sa.Column('spawn_id', sa.BigInteger(), nullable=True),
        sa.Column('expire_timestamp', sa.Integer(), nullable=True),
        sa.Column('encounter_id', db.UNSIGNED_HUGE_TYPE, nullable=True),
        sa.Column('lat', db.FLOAT_TYPE, nullable=True),
        sa.Column('lon', db.FLOAT_TYPE, nullable=True),
        sa.Column('atk_iv', db.TINY_TYPE, nullable=True),
        sa.Column('def_iv', db.TINY_TYPE, nullable=True),
        sa.Column('sta_iv', db.TINY_TYPE, nullable=True),
        sa.Column('move_1', sa.SmallInteger(), nullable=True),
        sa.Column('move_2', sa.SmallInteger(), nullable=True),
        sa.Column('gender', sa.SmallInteger(), nullable=True),
        sa.Column('form', sa.SmallInteger(), nullable=True),
        sa.Column('cp', sa.SmallInteger(), nullable=True),
        sa.Column('level', sa.SmallInteger(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('encounter_id', 'expire_timestamp', name='timestamp_encounter_id_unique')
    )
    op.create_index(op.f('ix_sightings_encounter_id'), 'sightings', ['encounter_id'], unique=False)
    op.create_index(op.f('ix_sightings_expire_timestamp'), 'sightings', ['expire_timestamp'], unique=False)
    op.create_table('spawnpoints',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('spawn_id', sa.BigInteger(), nullable=True),
        sa.Column('despawn_time', sa.SmallInteger(), nullable=True),
        sa.Column('lat', db.FLOAT_TYPE, nullable=True),
        sa.Column('lon', db.FLOAT_TYPE, nullable=True),
        sa.Column('updated', sa.Integer(), nullable=True),
        sa.Column('duration', db.TINY_TYPE, nullable=True),
        sa.Column('failures', db.TINY_TYPE, nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_spawnpoints_despawn_time'), 'spawnpoints', ['despawn_time'], unique=False)
    op.create_index(op.f('ix_spawnpoints_spawn_id'), 'spawnpoints', ['spawn_id'], unique=True)
    op.create_index(op.f('ix_spawnpoints_updated'), 'spawnpoints', ['updated'], unique=False)
    # Time-stamped fort state (team, guard, free slots); FK to forts.
    op.create_table('fort_sightings',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('fort_id', sa.Integer(), nullable=True),
        sa.Column('last_modified', sa.Integer(), nullable=True),
        sa.Column('team', db.TINY_TYPE, nullable=True),
        sa.Column('guard_pokemon_id', db.TINY_TYPE, nullable=True),
        sa.Column('slots_available', sa.SmallInteger(), nullable=True),
        sa.Column('is_in_battle', sa.Boolean(), nullable=True),
        sa.ForeignKeyConstraint(['fort_id'], ['forts.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('fort_id', 'last_modified', name='fort_id_last_modified_unique')
    )
    op.create_index(op.f('ix_fort_sightings_last_modified'), 'fort_sightings', ['last_modified'], unique=False)
    # Pokemon defending a gym; rows follow their fort on update/delete.
    op.create_table('gym_defenders',
        sa.Column('id', db.PRIMARY_HUGE_TYPE, nullable=False),
        sa.Column('fort_id', sa.Integer(), nullable=False),
        sa.Column('external_id', db.UNSIGNED_HUGE_TYPE, nullable=False),
        sa.Column('pokemon_id', sa.Integer(), nullable=True),
        sa.Column('owner_name', sa.String(length=128), nullable=True),
        sa.Column('nickname', sa.String(length=128), nullable=True),
        sa.Column('cp', sa.Integer(), nullable=True),
        sa.Column('stamina', sa.Integer(), nullable=True),
        sa.Column('stamina_max', sa.Integer(), nullable=True),
        sa.Column('atk_iv', sa.SmallInteger(), nullable=True),
        sa.Column('def_iv', sa.SmallInteger(), nullable=True),
        sa.Column('sta_iv', sa.SmallInteger(), nullable=True),
        sa.Column('move_1', sa.SmallInteger(), nullable=True),
        sa.Column('move_2', sa.SmallInteger(), nullable=True),
        sa.Column('battles_attacked', sa.Integer(), nullable=True),
        sa.Column('battles_defended', sa.Integer(), nullable=True),
        sa.Column('num_upgrades', sa.SmallInteger(), nullable=True),
        sa.Column('created', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['fort_id'], ['forts.id'], onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_gym_defenders_created'), 'gym_defenders', ['created'], unique=False)
    op.create_index(op.f('ix_gym_defenders_fort_id'), 'gym_defenders', ['fort_id'], unique=False)
    op.create_table('raids',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('external_id', sa.BigInteger(), nullable=True),
        sa.Column('fort_id', sa.Integer(), nullable=True),
        sa.Column('level', db.TINY_TYPE, nullable=True),
        sa.Column('pokemon_id', db.TINY_TYPE, nullable=True),
        sa.Column('move_1', sa.SmallInteger(), nullable=True),
        sa.Column('move_2', sa.SmallInteger(), nullable=True),
        sa.Column('time_spawn', sa.Integer(), nullable=True),
        sa.Column('time_battle', sa.Integer(), nullable=True),
        sa.Column('time_end', sa.Integer(), nullable=True),
        sa.Column('cp', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['fort_id'], ['forts.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('external_id')
    )
    op.create_index(op.f('ix_raids_time_spawn'), 'raids', ['time_spawn'], unique=False)
    # Which user reported which sighting; rows follow their sighting.
    op.create_table('sighting_users',
        sa.Column('id', db.PRIMARY_HUGE_TYPE, nullable=False),
        sa.Column('username', sa.String(length=32), nullable=True),
        sa.Column('sighting_id', db.HUGE_TYPE, nullable=False),
        sa.ForeignKeyConstraint(['sighting_id'], ['sightings.id'], onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('username', 'sighting_id', name='ix_username_sighting_id')
    )
    op.create_index(op.f('ix_sighting_users_sighting_id'), 'sighting_users', ['sighting_id'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Drop every table created by this revision.

    Tables are removed in reverse creation order so foreign-key references
    disappear before the tables they point at.  The indexes are not dropped
    explicitly: dropping a table removes its indexes with it.
    """
    for table_name in (
        'sighting_users',
        'raids',
        'gym_defenders',
        'fort_sightings',
        'spawnpoints',
        'sightings',
        'pokestops',
        'mystery_sightings',
        'forts',
    ):
        op.drop_table(table_name)
| {
"repo_name": "DavisPoGo/Monocle",
"path": "migrations/versions/f19fc04ba856_added_existing_tables.py",
"copies": "1",
"size": "10836",
"license": "mit",
"hash": 3584454675175706600,
"line_mean": 51.6019417476,
"line_max": 115,
"alpha_frac": 0.6724806202,
"autogenerated": false,
"ratio": 3.11200459506031,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.921810989425429,
"avg_score": 0.013275064201203996,
"num_lines": 206
} |
"""added external data load date
Revision ID: 9199891101c5
Revises: 321af67fae11
Create Date: 2018-06-05 10:35:23.538191
"""
# revision identifiers, used by Alembic.
revision = '9199891101c5'
down_revision = '321af67fae11'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Run the upgrade for one configured engine by dispatching to the
    module-level ``upgrade_<engine_name>`` function."""
    handler = globals()["upgrade_%s" % engine_name]
    handler()
def downgrade(engine_name):
    """Run the downgrade for one configured engine by dispatching to the
    module-level ``downgrade_<engine_name>`` function."""
    handler = globals()["downgrade_%s" % engine_name]
    handler()
def upgrade_data_broker():
    """Create the external_data_type lookup table and the
    external_data_load_date table that records, per data type, the date
    external data was last loaded."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Lookup table naming each kind of external data source.
    op.create_table('external_data_type',
                    sa.Column('created_at', sa.DateTime(), nullable=True),
                    sa.Column('updated_at', sa.DateTime(), nullable=True),
                    sa.Column('external_data_type_id', sa.Integer(), nullable=False),
                    sa.Column('name', sa.Text(), nullable=True),
                    sa.Column('description', sa.Text(), nullable=True),
                    sa.PrimaryKeyConstraint('external_data_type_id')
                    )
    # One row per data type (enforced by the unique constraint below)
    # holding the date that type was last loaded.
    op.create_table('external_data_load_date',
                    sa.Column('created_at', sa.DateTime(), nullable=True),
                    sa.Column('updated_at', sa.DateTime(), nullable=True),
                    sa.Column('external_data_load_date_id', sa.Integer(), nullable=False),
                    sa.Column('last_load_date', sa.Date(), nullable=True),
                    sa.Column('external_data_type_id', sa.Integer(), nullable=True),
                    sa.ForeignKeyConstraint(['external_data_type_id'], ['external_data_type.external_data_type_id'], name='fk_external_data_type_id'),
                    sa.PrimaryKeyConstraint('external_data_load_date_id'),
                    sa.UniqueConstraint('external_data_type_id')
                    )
    # ### end Alembic commands ###
def downgrade_data_broker():
    """Drop the external-data tables; the child table goes first so the
    foreign key to external_data_type is removed before its target."""
    for table_name in ('external_data_load_date', 'external_data_type'):
        op.drop_table(table_name)
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/9199891101c5_added_external_data_load_date.py",
"copies": "1",
"size": "1823",
"license": "cc0-1.0",
"hash": -5636486437184854000,
"line_mean": 30.4310344828,
"line_max": 134,
"alpha_frac": 0.6812945694,
"autogenerated": false,
"ratio": 3.3696857670979665,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9433881639101361,
"avg_score": 0.023419739479320932,
"num_lines": 58
} |
"""Added favorites column to notes
Revision ID: 10c695123c4d
Revises: 4c1fb76895a6
Create Date: 2015-12-09 04:40:23.995528
"""
# revision identifiers, used by Alembic.
revision = '10c695123c4d'
down_revision = '4c1fb76895a6'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add the is_favorite flag to notes and index notes.created_date."""
    ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): the auto-generated drop of the 'roles' table was
    # deliberately disabled; this upgrade leaves 'roles' in place.
    #op.drop_table('roles')
    op.add_column('notes', sa.Column('is_favorite', sa.Boolean(), nullable=True))
    op.create_index(op.f('ix_notes_created_date'), 'notes', ['created_date'], unique=False)
    ### end Alembic commands ###
def downgrade():
    """Remove the is_favorite column and the created_date index from notes."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_notes_created_date'), table_name='notes')
    op.drop_column('notes', 'is_favorite')
    # Bug fix: the auto-generated re-creation of the 'roles' table was
    # removed.  The corresponding drop in upgrade() is commented out, so
    # 'roles' still exists when this revision is rolled back and re-creating
    # it would fail with "relation 'roles' already exists".
    ### end Alembic commands ###
| {
"repo_name": "iamgroot42/braindump",
"path": "migrations/versions/10c695123c4d_added_favorites_column_to_notes.py",
"copies": "1",
"size": "1099",
"license": "mit",
"hash": -7657587804047609000,
"line_mean": 30.4,
"line_max": 91,
"alpha_frac": 0.6787989081,
"autogenerated": false,
"ratio": 3.2134502923976607,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.43922492004976604,
"avg_score": null,
"num_lines": null
} |
"""Added fields for local & remote sockets to network table
Revision ID: 140c0dd3605
Revises: 20c114cc162
Create Date: 2015-11-29 14:40:04.862179
"""
# revision identifiers, used by Alembic.
revision = '140c0dd3605'
down_revision = '20c114cc162'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add local (lhost/lport) and remote (rhost/rport) socket columns to
    the networks table."""
    new_columns = (
        sa.Column('lhost', sa.String(length=100), nullable=True),
        sa.Column('lport', sa.Integer(), nullable=True),
        sa.Column('rhost', sa.String(length=100), nullable=True),
        sa.Column('rport', sa.Integer(), nullable=True),
    )
    for column in new_columns:
        op.add_column('networks', column)
def downgrade():
    """Remove the socket columns in the reverse order they were added."""
    for column_name in ('rport', 'rhost', 'lport', 'lhost'):
        op.drop_column('networks', column_name)
| {
"repo_name": "waartaa/ircb",
"path": "alembic/versions/140c0dd3605_added_fields_for_local_remote_sockets_.py",
"copies": "1",
"size": "1060",
"license": "mit",
"hash": 4105893711000645600,
"line_mean": 30.1764705882,
"line_max": 87,
"alpha_frac": 0.6858490566,
"autogenerated": false,
"ratio": 3.3544303797468356,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9510907633277782,
"avg_score": 0.005874360613810741,
"num_lines": 34
} |
"""Added Floating IP to Fixed IP mapping table
Revision ID: 33e9e23ba761
Revises: 356d6c0623c8
Create Date: 2015-05-11 14:14:23.619952
"""
# revision identifiers, used by Alembic.
revision = '33e9e23ba761'
down_revision = '356d6c0623c8'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the association table mapping floating IP addresses to the
    fixed IP addresses they forward to; both sides reference
    quark_ip_addresses and together form the primary key."""
    op.create_table('quark_floating_to_fixed_ip_address_associations',
                    sa.Column('floating_ip_address_id', sa.String(length=36),
                              nullable=False),
                    sa.Column('fixed_ip_address_id', sa.String(length=36),
                              nullable=False),
                    # enabled defaults to true ('1') at the server level
                    sa.Column('enabled', sa.Boolean(), server_default='1',
                              nullable=False),
                    sa.ForeignKeyConstraint(['fixed_ip_address_id'],
                                            ['quark_ip_addresses.id'], ),
                    sa.ForeignKeyConstraint(['floating_ip_address_id'],
                                            ['quark_ip_addresses.id'], ),
                    sa.PrimaryKeyConstraint('floating_ip_address_id',
                                            'fixed_ip_address_id'),
                    mysql_engine='InnoDB')
def downgrade():
    """Remove the floating-to-fixed IP association table."""
    table_name = 'quark_floating_to_fixed_ip_address_associations'
    op.drop_table(table_name)
| {
"repo_name": "rackerlabs/quark",
"path": "quark/db/migration/alembic/versions/33e9e23ba761_add_floating_to_fixed_ip_mapping_table.py",
"copies": "7",
"size": "1303",
"license": "apache-2.0",
"hash": -2930885444913436700,
"line_mean": 36.2285714286,
"line_max": 77,
"alpha_frac": 0.5364543361,
"autogenerated": false,
"ratio": 4.071875,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.81083293361,
"avg_score": null,
"num_lines": null
} |
# Added Fortran compiler support to config. Currently useful only for
# try_compile call. try_run works but is untested for most of Fortran
# compilers (they must define linker_exe first).
# Pearu Peterson
from __future__ import division, absolute_import, print_function
import os, signal
import warnings
import sys
import subprocess
import textwrap
from distutils.command.config import config as old_config
from distutils.command.config import LANG_EXT
from distutils import log
from distutils.file_util import copy_file
from distutils.ccompiler import CompileError, LinkError
import distutils
from numpy.distutils.exec_command import filepath_from_subprocess_output
from numpy.distutils.mingw32ccompiler import generate_manifest
from numpy.distutils.command.autodist import (check_gcc_function_attribute,
check_gcc_function_attribute_with_intrinsics,
check_gcc_variable_attribute,
check_inline,
check_restrict,
check_compiler_gcc4)
from numpy.distutils.compat import get_exception
# Register Fortran source-file extensions with distutils' language map so
# the config command can generate .f/.f90 snippets for Fortran checks.
LANG_EXT['f77'] = '.f'
LANG_EXT['f90'] = '.f90'
class config(old_config):
    """distutils ``config`` command extended with Fortran compiler support.

    Adds an ``--fcompiler`` option and wraps the compile/link steps so that
    ``try_compile``/``try_link``-style checks can be run with a Fortran
    compiler (f77/f90) as well as the C compiler.
    """
    old_config.user_options += [
        ('fcompiler=', None, "specify the Fortran compiler type"),
        ]
    def initialize_options(self):
        self.fcompiler = None
        old_config.initialize_options(self)
    def _check_compiler (self):
        # Set up the C compiler first, then a matching Fortran compiler.
        old_config._check_compiler(self)
        from numpy.distutils.fcompiler import FCompiler, new_fcompiler
        if sys.platform == 'win32' and (self.compiler.compiler_type in
                                        ('msvc', 'intelw', 'intelemw')):
            # XXX: hack to circumvent a python 2.6 bug with msvc9compiler:
            # initialize call query_vcvarsall, which throws an IOError, and
            # causes an error along the way without much information. We try to
            # catch it here, hoping it is early enough, and print an helpful
            # message instead of Error: None.
            if not self.compiler.initialized:
                try:
                    self.compiler.initialize()
                except IOError:
                    e = get_exception()
                    msg = textwrap.dedent("""\
                        Could not initialize compiler instance: do you have Visual Studio
                        installed? If you are trying to build with MinGW, please use "python setup.py
                        build -c mingw32" instead. If you have Visual Studio installed, check it is
                        correctly installed, and the right version (VS 2008 for python 2.6, 2.7 and 3.2,
                        VS 2010 for >= 3.3).
                        Original exception was: %s, and the Compiler class was %s
                        ============================================================================""") \
                        % (e, self.compiler.__class__.__name__)
                    print(textwrap.dedent("""\
                        ============================================================================"""))
                    raise distutils.errors.DistutilsPlatformError(msg)
            # After MSVC is initialized, add an explicit /MANIFEST to linker
            # flags. See issues gh-4245 and gh-4101 for details. Also
            # relevant are issues 4431 and 16296 on the Python bug tracker.
            from distutils import msvc9compiler
            if msvc9compiler.get_build_version() >= 10:
                for ldflags in [self.compiler.ldflags_shared,
                                self.compiler.ldflags_shared_debug]:
                    if '/MANIFEST' not in ldflags:
                        ldflags.append('/MANIFEST')
        if not isinstance(self.fcompiler, FCompiler):
            self.fcompiler = new_fcompiler(compiler=self.fcompiler,
                                           dry_run=self.dry_run, force=1,
                                           c_compiler=self.compiler)
            if self.fcompiler is not None:
                self.fcompiler.customize(self.distribution)
                if self.fcompiler.get_version():
                    self.fcompiler.customize_cmd(self)
                    self.fcompiler.show_customization()
    def _wrap_method(self, mth, lang, args):
        # Temporarily swap self.compiler for the Fortran compiler when the
        # check is for a Fortran dialect; the original compiler is restored
        # on both the success and the failure path.
        from distutils.ccompiler import CompileError
        from distutils.errors import DistutilsExecError
        save_compiler = self.compiler
        if lang in ['f77', 'f90']:
            self.compiler = self.fcompiler
        try:
            ret = mth(*((self,)+args))
        except (DistutilsExecError, CompileError):
            str(get_exception())
            self.compiler = save_compiler
            raise CompileError
        self.compiler = save_compiler
        return ret
    def _compile (self, body, headers, include_dirs, lang):
        src, obj = self._wrap_method(old_config._compile, lang,
                                     (body, headers, include_dirs, lang))
        # _compile in unixcompiler.py sometimes creates .d dependency files.
        # Clean them up.
        self.temp_files.append(obj + '.d')
        return src, obj
    def _link (self, body,
               headers, include_dirs,
               libraries, library_dirs, lang):
        if self.compiler.compiler_type=='msvc':
            # Copy the lists so the caller's arguments are not mutated.
            libraries = (libraries or [])[:]
            library_dirs = (library_dirs or [])[:]
            if lang in ['f77', 'f90']:
                lang = 'c' # always use system linker when using MSVC compiler
                if self.fcompiler:
                    for d in self.fcompiler.library_dirs or []:
                        # correct path when compiling in Cygwin but with
                        # normal Win Python
                        if d.startswith('/usr/lib'):
                            try:
                                d = subprocess.check_output(['cygpath',
                                                             '-w', d])
                            except (OSError, subprocess.CalledProcessError):
                                pass
                            else:
                                d = filepath_from_subprocess_output(d)
                        library_dirs.append(d)
                    for libname in self.fcompiler.libraries or []:
                        if libname not in libraries:
                            libraries.append(libname)
            for libname in libraries:
                if libname.startswith('msvc'): continue
                fileexists = False
                for libdir in library_dirs or []:
                    libfile = os.path.join(libdir, '%s.lib' % (libname))
                    if os.path.isfile(libfile):
                        fileexists = True
                        break
                if fileexists: continue
                # make g77-compiled static libs available to MSVC
                fileexists = False
                for libdir in library_dirs:
                    libfile = os.path.join(libdir, 'lib%s.a' % (libname))
                    if os.path.isfile(libfile):
                        # copy libname.a file to name.lib so that MSVC linker
                        # can find it
                        libfile2 = os.path.join(libdir, '%s.lib' % (libname))
                        copy_file(libfile, libfile2)
                        self.temp_files.append(libfile2)
                        fileexists = True
                        break
                if fileexists: continue
                log.warn('could not find library %r in directories %s' \
                         % (libname, library_dirs))
        elif self.compiler.compiler_type == 'mingw32':
            generate_manifest(self)
        return self._wrap_method(old_config._link, lang,
                                 (body, headers, include_dirs,
                                  libraries, library_dirs, lang))
    def check_header(self, header, include_dirs=None, library_dirs=None, lang='c'):
        # library_dirs and lang are accepted but unused here: a header
        # check only needs the source to compile, not to link.
        self._check_compiler()
        return self.try_compile(
                "/* we need a dummy line to make distutils happy */",
                [header], include_dirs)
    def check_decl(self, symbol,
                   headers=None, include_dirs=None):
        """Return True if *symbol* is declared by the given headers."""
        self._check_compiler()
        body = textwrap.dedent("""
            int main(void)
            {
            #ifndef %s
                (void) %s;
            #endif
                ;
                return 0;
            }""") % (symbol, symbol)
        return self.try_compile(body, headers, include_dirs)
    def check_macro_true(self, symbol,
                         headers=None, include_dirs=None):
        """Return True if the preprocessor macro *symbol* is defined and truthy."""
        self._check_compiler()
        body = textwrap.dedent("""
            int main(void)
            {
            #if %s
            #else
            #error false or undefined macro
            #endif
                ;
                return 0;
            }""") % (symbol,)
        return self.try_compile(body, headers, include_dirs)
    def check_type(self, type_name, headers=None, include_dirs=None,
                   library_dirs=None):
        """Check type availability. Return True if the type can be compiled,
        False otherwise"""
        self._check_compiler()
        # First check the type can be compiled
        body = textwrap.dedent(r"""
            int main(void) {
                if ((%(name)s *) 0)
                    return 0;
                if (sizeof (%(name)s))
                    return 0;
            }
            """) % {'name': type_name}
        st = False
        try:
            try:
                # 'body' was fully %-substituted above, so this second
                # substitution is a no-op (kept in the style of
                # check_type_size below).
                self._compile(body % {'type': type_name},
                              headers, include_dirs, 'c')
                st = True
            except distutils.errors.CompileError:
                st = False
        finally:
            self._clean()
        return st
    def check_type_size(self, type_name, headers=None, include_dirs=None, library_dirs=None, expected=None):
        """Check size of a given type."""
        self._check_compiler()
        # First check the type can be compiled; the negative-size-array
        # trick makes compilation fail when the asserted condition is false.
        body = textwrap.dedent(r"""
            typedef %(type)s npy_check_sizeof_type;
            int main (void)
            {
                static int test_array [1 - 2 * !(((long) (sizeof (npy_check_sizeof_type))) >= 0)];
                test_array [0] = 0
                ;
                return 0;
            }
            """)
        self._compile(body % {'type': type_name},
                      headers, include_dirs, 'c')
        self._clean()
        if expected:
            # Try each candidate size directly before falling back to the
            # search below.
            body = textwrap.dedent(r"""
                typedef %(type)s npy_check_sizeof_type;
                int main (void)
                {
                    static int test_array [1 - 2 * !(((long) (sizeof (npy_check_sizeof_type))) == %(size)s)];
                    test_array [0] = 0
                    ;
                    return 0;
                }
                """)
            for size in expected:
                try:
                    self._compile(body % {'type': type_name, 'size': size},
                                  headers, include_dirs, 'c')
                    self._clean()
                    return size
                except CompileError:
                    pass
        # this fails to *compile* if size > sizeof(type)
        body = textwrap.dedent(r"""
            typedef %(type)s npy_check_sizeof_type;
            int main (void)
            {
                static int test_array [1 - 2 * !(((long) (sizeof (npy_check_sizeof_type))) <= %(size)s)];
                test_array [0] = 0
                ;
                return 0;
            }
            """)
        # The principle is simple: we first find low and high bounds of size
        # for the type, where low/high are looked up on a log scale. Then, we
        # do a binary search to find the exact size between low and high
        low = 0
        mid = 0
        while True:
            try:
                self._compile(body % {'type': type_name, 'size': mid},
                              headers, include_dirs, 'c')
                self._clean()
                break
            except CompileError:
                #log.info("failure to test for bound %d" % mid)
                low = mid + 1
                mid = 2 * mid + 1
        high = mid
        # Binary search:
        while low != high:
            mid = (high - low) // 2 + low
            try:
                self._compile(body % {'type': type_name, 'size': mid},
                              headers, include_dirs, 'c')
                self._clean()
                high = mid
            except CompileError:
                low = mid + 1
        return low
    def check_func(self, func,
                   headers=None, include_dirs=None,
                   libraries=None, library_dirs=None,
                   decl=False, call=False, call_args=None):
        """Return True if *func* can be linked, optionally declaring and
        calling it (with *call_args*) in the generated snippet."""
        # clean up distutils's config a bit: add void to main(), and
        # return a value.
        self._check_compiler()
        body = []
        if decl:
            # decl may be an explicit declaration string or just truthy.
            if type(decl) == str:
                body.append(decl)
            else:
                body.append("int %s (void);" % func)
        # Handle MSVC intrinsics: force MS compiler to make a function call.
        # Useful to test for some functions when built with optimization on, to
        # avoid build error because the intrinsic and our 'fake' test
        # declaration do not match.
        body.append("#ifdef _MSC_VER")
        body.append("#pragma function(%s)" % func)
        body.append("#endif")
        body.append("int main (void) {")
        if call:
            if call_args is None:
                call_args = ''
            body.append("  %s(%s);" % (func, call_args))
        else:
            body.append("  %s;" % func)
        body.append("  return 0;")
        body.append("}")
        body = '\n'.join(body) + "\n"
        return self.try_link(body, headers, include_dirs,
                             libraries, library_dirs)
    def check_funcs_once(self, funcs,
                         headers=None, include_dirs=None,
                         libraries=None, library_dirs=None,
                         decl=False, call=False, call_args=None):
        """Check a list of functions at once.

        This is useful to speed up things, since all the functions in the funcs
        list will be put in one compilation unit.

        Arguments
        ---------
        funcs : seq
            list of functions to test
        include_dirs : seq
            list of header paths
        libraries : seq
            list of libraries to link the code snippet to
        library_dirs : seq
            list of library paths
        decl : dict
            for every (key, value), the declaration in the value will be
            used for function in key. If a function is not in the
            dictionary, no declaration will be used.
        call : dict
            for every item (f, value), if the value is True, a call will be
            done to the function f.
        """
        self._check_compiler()
        body = []
        if decl:
            for f, v in decl.items():
                if v:
                    body.append("int %s (void);" % f)
        # Handle MS intrinsics. See check_func for more info.
        body.append("#ifdef _MSC_VER")
        for func in funcs:
            body.append("#pragma function(%s)" % func)
        body.append("#endif")
        body.append("int main (void) {")
        if call:
            for f in funcs:
                if f in call and call[f]:
                    if not (call_args and f in call_args and call_args[f]):
                        args = ''
                    else:
                        args = call_args[f]
                    body.append("  %s(%s);" % (f, args))
                else:
                    body.append("  %s;" % f)
        else:
            for f in funcs:
                body.append("  %s;" % f)
        body.append("  return 0;")
        body.append("}")
        body = '\n'.join(body) + "\n"
        return self.try_link(body, headers, include_dirs,
                             libraries, library_dirs)
    def check_inline(self):
        """Return the inline keyword recognized by the compiler, empty string
        otherwise."""
        return check_inline(self)
    def check_restrict(self):
        """Return the restrict keyword recognized by the compiler, empty string
        otherwise."""
        return check_restrict(self)
    def check_compiler_gcc4(self):
        """Return True if the C compiler is gcc >= 4."""
        return check_compiler_gcc4(self)
    def check_gcc_function_attribute(self, attribute, name):
        return check_gcc_function_attribute(self, attribute, name)
    def check_gcc_function_attribute_with_intrinsics(self, attribute, name,
                                                     code, include):
        return check_gcc_function_attribute_with_intrinsics(self, attribute,
                                                            name, code, include)
    def check_gcc_variable_attribute(self, attribute):
        return check_gcc_variable_attribute(self, attribute)
    def get_output(self, body, headers=None, include_dirs=None,
                   libraries=None, library_dirs=None,
                   lang="c", use_tee=None):
        """Try to compile, link to an executable, and run a program
        built from 'body' and 'headers'. Returns the exit status code
        of the program and its output.
        """
        # 2008-11-16, RemoveMe
        warnings.warn("\n+++++++++++++++++++++++++++++++++++++++++++++++++\n"
                      "Usage of get_output is deprecated: please do not \n"
                      "use it anymore, and avoid configuration checks \n"
                      "involving running executable on the target machine.\n"
                      "+++++++++++++++++++++++++++++++++++++++++++++++++\n",
                      DeprecationWarning, stacklevel=2)
        self._check_compiler()
        exitcode, output = 255, ''
        try:
            # Capture anything the compiler writes to stdout so it can be
            # reported if the link step fails.
            grabber = GrabStdout()
            try:
                src, obj, exe = self._link(body, headers, include_dirs,
                                           libraries, library_dirs, lang)
                grabber.restore()
            except Exception:
                output = grabber.data
                grabber.restore()
                raise
            exe = os.path.join('.', exe)
            try:
                # specify cwd arg for consistency with
                # historic usage pattern of exec_command()
                # also, note that exe appears to be a string,
                # which exec_command() handled, but we now
                # use a list for check_output() -- this assumes
                # that exe is always a single command
                output = subprocess.check_output([exe], cwd='.')
            except subprocess.CalledProcessError as exc:
                exitstatus = exc.returncode
                output = ''
            except OSError:
                # preserve the EnvironmentError exit status
                # used historically in exec_command()
                exitstatus = 127
                output = ''
            else:
                output = filepath_from_subprocess_output(output)
            if hasattr(os, 'WEXITSTATUS'):
                exitcode = os.WEXITSTATUS(exitstatus)
                if os.WIFSIGNALED(exitstatus):
                    sig = os.WTERMSIG(exitstatus)
                    log.error('subprocess exited with signal %d' % (sig,))
                    if sig == signal.SIGINT:
                        # control-C
                        raise KeyboardInterrupt
            else:
                exitcode = exitstatus
            log.info("success!")
        except (CompileError, LinkError):
            log.info("failure.")
        self._clean()
        return exitcode, output
class GrabStdout(object):
    """Tee replacement for ``sys.stdout``.

    From construction until ``restore()`` is called, everything written to
    ``sys.stdout`` is forwarded to the real stream and also accumulated in
    the ``data`` attribute.
    """

    def __init__(self):
        self.data = ''
        self.sys_stdout = sys.stdout
        sys.stdout = self

    def write(self, data):
        # Forward to the real stream, then record a copy.
        self.sys_stdout.write(data)
        self.data = self.data + data

    def flush(self):
        self.sys_stdout.flush()

    def restore(self):
        # Put the original stream back; captured text stays in self.data.
        sys.stdout = self.sys_stdout
| {
"repo_name": "pizzathief/numpy",
"path": "numpy/distutils/command/config.py",
"copies": "4",
"size": "20473",
"license": "bsd-3-clause",
"hash": -251013584580283100,
"line_mean": 38.9083820663,
"line_max": 109,
"alpha_frac": 0.4947491818,
"autogenerated": false,
"ratio": 4.7534246575342465,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0014763273020321351,
"num_lines": 513
} |
"""added geoalchemy point to search items nearby
Revision ID: 49b6d8f6d4c8
Revises: e0673de42c3
Create Date: 2015-04-17 21:19:36.464638
"""
# revision identifiers, used by Alembic.
revision = '49b6d8f6d4c8'
down_revision = 'e0673de42c3'
from alembic import op
import sqlalchemy as sa
import geoalchemy2
def upgrade():
    """Replace items.latitude/longitude with a single PostGIS POINT column
    (SRID 4326) so items can be searched by proximity."""
    ### commands auto generated by Alembic - please adjust! ###
    # The auto-generated drop of spatial_ref_sys was disabled on purpose:
    # it is a PostGIS system table, not part of this application's schema.
    # op.drop_table('spatial_ref_sys')
    op.add_column('items', sa.Column('location', geoalchemy2.types.Geometry(geometry_type='POINT', srid=4326), nullable=True))
    op.drop_column('items', 'latitude')
    op.drop_column('items', 'longitude')
    ### end Alembic commands ###
def downgrade():
    """Restore the numeric latitude/longitude columns and drop the PostGIS
    location column added by upgrade()."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('items', sa.Column('longitude', sa.NUMERIC(precision=10, scale=6), autoincrement=False, nullable=True))
    op.add_column('items', sa.Column('latitude', sa.NUMERIC(precision=10, scale=6), autoincrement=False, nullable=True))
    op.drop_column('items', 'location')
    # Bug fix: do not re-create 'spatial_ref_sys' here.  It is a PostGIS
    # system table that upgrade() never dropped (its auto-generated drop is
    # commented out), so re-creating it on downgrade would fail with
    # "relation 'spatial_ref_sys' already exists".
    ### end Alembic commands ###
| {
"repo_name": "rosariomgomez/tradyfit",
"path": "vagrant/tradyfit/migrations/versions/49b6d8f6d4c8_added_geoalchemy_point_to_search_items_.py",
"copies": "1",
"size": "1593",
"license": "mit",
"hash": 5989775841331796000,
"line_mean": 38.825,
"line_max": 126,
"alpha_frac": 0.7068424357,
"autogenerated": false,
"ratio": 3.3607594936708862,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9522113391216168,
"avg_score": 0.009097707630943683,
"num_lines": 40
} |
"""Added groups
Revision ID: 3fd502c152c9
Revises: 42ab7edc19e2
Create Date: 2015-03-31 14:48:03.675985
"""
# revision identifiers, used by Alembic.
revision = '3fd502c152c9'
down_revision = '42ab7edc19e2'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the groups table and the users<->groups association table,
    and add an index on achievements.achievementcategory_id."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('groups',
                    sa.Column('id', sa.BigInteger(), nullable=False),
                    sa.PrimaryKeyConstraint('id')
                    )
    # Many-to-many join table; the composite primary key prevents duplicate
    # memberships.
    op.create_table('users_groups',
                    sa.Column('user_id', sa.BigInteger(), nullable=False),
                    sa.Column('group_id', sa.BigInteger(), nullable=False),
                    sa.ForeignKeyConstraint(['group_id'], ['groups.id'], ),
                    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
                    sa.PrimaryKeyConstraint('user_id', 'group_id')
                    )
    op.create_index(op.f('ix_achievements_achievementcategory_id'), 'achievements', ['achievementcategory_id'], unique=False)
    ### end Alembic commands ###
def downgrade():
    """Undo this revision: remove the achievements index, then drop the
    association table before the groups table it references."""
    op.drop_index(op.f('ix_achievements_achievementcategory_id'), table_name='achievements')
    for table_name in ('users_groups', 'groups'):
        op.drop_table(table_name)
| {
"repo_name": "ArneBab/gamification-engine",
"path": "gengine/alembic/versions/3fd502c152c9_added_groups.py",
"copies": "1",
"size": "1261",
"license": "mit",
"hash": 7322183246404327000,
"line_mean": 29.756097561,
"line_max": 125,
"alpha_frac": 0.681998414,
"autogenerated": false,
"ratio": 3.208651399491094,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.43906498134910943,
"avg_score": null,
"num_lines": null
} |
"""Added group to custom form.
Revision ID: c0687b086ee3
Revises: b364a538f0db
Create Date: 2018-07-09 20:09:05.368186
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship
# revision identifiers, used by Alembic.
revision = 'c0687b086ee3'
down_revision = 'b364a538f0db'
Base = declarative_base()
# Alias the sqlalchemy module as `db` and graft Model/relationship onto it —
# presumably so model code written against a flask-sqlalchemy-style `db`
# object can be reused inside this migration (TODO confirm).  Note this
# mutates the imported sqlalchemy module object itself.
db = sa
db.Model = Base
db.relationship = relationship
def create_session():
    """Bind a fresh ORM session to Alembic's active connection and expose
    it as ``db.session`` for use inside this migration."""
    bind = op.get_bind()
    db.session = sa.orm.sessionmaker()(bind=bind)
def upgrade():
    """Re-own custom forms: replace the per-user owner_id column, index and
    foreign key with a group_id column referencing the group table."""
    create_session()
    # NOTE(review): the session created above is never used in this
    # function; presumably kept for symmetry with other migrations — confirm.
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('custom_form', sa.Column('group_id', sa.Integer(), nullable=True))
    op.drop_index('idx_36097_owner_id', table_name='custom_form')
    op.drop_constraint('custom_form_ibfk_1', 'custom_form', type_='foreignkey')
    op.create_foreign_key(op.f('fk_custom_form_group_id_group'), 'custom_form', 'group', ['group_id'], ['id'])
    op.drop_column('custom_form', 'owner_id')
    # ### end Alembic commands ###
def downgrade():
    """Restore the per-user owner_id column, foreign key and index on
    custom_form, and drop the group_id column added by upgrade()."""
    create_session()
    # NOTE(review): the session created above is never used in this
    # function; presumably kept for symmetry with other migrations — confirm.
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('custom_form', sa.Column('owner_id', sa.BIGINT(), autoincrement=False, nullable=True))
    op.drop_constraint(op.f('fk_custom_form_group_id_group'), 'custom_form', type_='foreignkey')
    op.create_foreign_key('custom_form_ibfk_1', 'custom_form', 'user', ['owner_id'], ['id'], onupdate='RESTRICT', ondelete='RESTRICT')
    op.create_index('idx_36097_owner_id', 'custom_form', ['owner_id'], unique=False)
    op.drop_column('custom_form', 'group_id')
    # ### end Alembic commands ###
# vim: ft=python
| {
"repo_name": "viaict/viaduct",
"path": "migrations/versions/2018_07_09_c0687b086ee3_added_group_to_custom_form.py",
"copies": "1",
"size": "1789",
"license": "mit",
"hash": 984853868239011700,
"line_mean": 30.9464285714,
"line_max": 134,
"alpha_frac": 0.6847400783,
"autogenerated": false,
"ratio": 3.229241877256318,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4413981955556318,
"avg_score": null,
"num_lines": null
} |
"""Added image order field
Revision ID: 52b34ac18e7
Revises: 4f3f55688877
Create Date: 2015-07-19 21:14:27.551788
"""
# revision identifiers, used by Alembic.
revision = '52b34ac18e7'
down_revision = '4f3f55688877'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.orm import sessionmaker, Session as BaseSession, relationship
# Import the model
import os
import sys
# Make the project root importable so the app's ORM models can be loaded
# from inside the alembic/versions directory.
sys.path.insert(1, os.path.join(sys.path[0], '..', '..'))
from app import models
# Session factory; a session is bound to Alembic's connection in upgrade().
Session = sessionmaker()
def upgrade():
    """Add the imageOrder column to image and backfill it with sequential
    per-album positions starting at 1."""
    bind = op.get_bind()
    session = Session(bind=bind)
    # server_default="0" lets the NOT NULL column be added to existing rows;
    # real values are assigned in the backfill below.
    op.add_column('image', sa.Column('imageOrder', sa.Integer(), nullable=False, server_default="0"))
    # Ensure the new imageOrder column is initialized for all images.
    # Per-album counter: the next order value to hand out.
    image_order_map = {}
    for album in session.query(models.Album):
        image_order_map[album.id] = 1
    for image in session.query(models.Image):
        # NOTE(review): this query has no order_by, so positions are assigned
        # in whatever order the database returns rows — confirm that this
        # (presumably insertion/id order) is acceptable.
        image.imageOrder = image_order_map[image.albumId]
        image_order_map[image.albumId] += 1
    session.commit()
def downgrade():
    """Drop the imageOrder column added by this revision; the backfilled
    ordering data is discarded."""
    op.drop_column('image', 'imageOrder')
| {
"repo_name": "jsaxton/riGallery",
"path": "alembic/versions/52b34ac18e7_added_image_order_field.py",
"copies": "1",
"size": "1227",
"license": "mit",
"hash": 5514644814475565000,
"line_mean": 25.6739130435,
"line_max": 101,
"alpha_frac": 0.6984515077,
"autogenerated": false,
"ratio": 3.389502762430939,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45879542701309395,
"avg_score": null,
"num_lines": null
} |
"""added images table
Revision ID: 3511e3ab31d9
Revises: 499185bcd720
Create Date: 2014-09-11 09:01:44.696226
"""
# revision identifiers, used by Alembic.
revision = '3511e3ab31d9'
down_revision = '499185bcd720'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the images table; each image optionally belongs to a product
    and carries its POS product id, display position and source URL."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('images',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('pos_product_id', sa.Integer(), nullable=True),
                    sa.Column('position', sa.Integer(), nullable=True),
                    sa.Column('product_id', sa.Integer(), nullable=True),
                    sa.Column('created_at', sa.DateTime(), nullable=True),
                    sa.Column('updated_at', sa.DateTime(), nullable=True),
                    sa.Column('src', sa.Text(), nullable=True),
                    sa.ForeignKeyConstraint(['product_id'], ['products.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
    ### end Alembic commands ###
def downgrade():
    """Remove the images table created by this revision."""
    table_name = 'images'
    op.drop_table(table_name)
| {
"repo_name": "nkenealy/shopify-product-load",
"path": "migrations/versions/3511e3ab31d9_added_images_table.py",
"copies": "1",
"size": "1048",
"license": "mit",
"hash": 2610605839356227600,
"line_mean": 28.1111111111,
"line_max": 63,
"alpha_frac": 0.6679389313,
"autogenerated": false,
"ratio": 3.4473684210526314,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4615307352352631,
"avg_score": null,
"num_lines": null
} |
"""added indexes
Revision ID: 4adfaec7c3ed
Revises: 9444e4934401
Create Date: 2017-06-04 17:07:10.880138
"""
# revision identifiers, used by Alembic.
revision = '4adfaec7c3ed'
down_revision = '9444e4934401'
branch_labels = None
depends_on = None
import datetime
import websauna.system.model.columns
from sqlalchemy.types import Text # Needed from proper creation of JSON fields as Alembic inserts astext_type=Text() row
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
    """Convert timestamp columns to UTCDateTime and add two posts indexes.

    The original autogenerated command order is preserved: one batch of
    column conversions, then the index creations, then the rest.
    """
    # (table, column, existing_nullable) triples, in original order.
    first_batch = [
        ('group', 'created_at', True),
        ('group', 'updated_at', True),
        ('media', 'created_at', False),
        ('media', 'published_at', True),
        ('media', 'updated_at', True),
        ('posts', 'created_at', False),
        ('posts', 'published_at', True),
        ('posts', 'updated_at', True),
    ]
    second_batch = [
        ('user_activation', 'created_at', True),
        ('user_activation', 'expires_at', False),
        ('user_activation', 'updated_at', True),
        ('users', 'activated_at', True),
        ('users', 'created_at', True),
        ('users', 'last_auth_sensitive_operation_at', True),
        ('users', 'last_login_at', True),
        ('users', 'updated_at', True),
    ]
    for table, column, nullable in first_batch:
        op.alter_column(table, column,
                        existing_type=postgresql.TIMESTAMP(timezone=True),
                        type_=websauna.system.model.columns.UTCDateTime(),
                        existing_nullable=nullable)
    op.create_index(op.f('ix_posts_published_at'), 'posts', ['published_at'], unique=False)
    op.create_index(op.f('ix_posts_state'), 'posts', ['state'], unique=False)
    for table, column, nullable in second_batch:
        op.alter_column(table, column,
                        existing_type=postgresql.TIMESTAMP(timezone=True),
                        type_=websauna.system.model.columns.UTCDateTime(),
                        existing_nullable=nullable)
def downgrade():
    """Revert UTCDateTime columns to plain timestamptz and drop the indexes.

    Commands run in the exact reverse order of upgrade().
    """
    # (table, column, existing_nullable) triples, in original order.
    first_batch = [
        ('users', 'updated_at', True),
        ('users', 'last_login_at', True),
        ('users', 'last_auth_sensitive_operation_at', True),
        ('users', 'created_at', True),
        ('users', 'activated_at', True),
        ('user_activation', 'updated_at', True),
        ('user_activation', 'expires_at', False),
        ('user_activation', 'created_at', True),
    ]
    second_batch = [
        ('posts', 'updated_at', True),
        ('posts', 'published_at', True),
        ('posts', 'created_at', False),
        ('media', 'updated_at', True),
        ('media', 'published_at', True),
        ('media', 'created_at', False),
        ('group', 'updated_at', True),
        ('group', 'created_at', True),
    ]
    for table, column, nullable in first_batch:
        op.alter_column(table, column,
                        existing_type=websauna.system.model.columns.UTCDateTime(),
                        type_=postgresql.TIMESTAMP(timezone=True),
                        existing_nullable=nullable)
    op.drop_index(op.f('ix_posts_state'), table_name='posts')
    op.drop_index(op.f('ix_posts_published_at'), table_name='posts')
    for table, column, nullable in second_batch:
        op.alter_column(table, column,
                        existing_type=websauna.system.model.columns.UTCDateTime(),
                        type_=postgresql.TIMESTAMP(timezone=True),
                        existing_nullable=nullable)
| {
"repo_name": "enkidulan/enkiblog",
"path": "src/enkiblog/alembic/versions/4adfaec7c3ed_added_indexes.py",
"copies": "1",
"size": "8030",
"license": "apache-2.0",
"hash": 7852104816715408000,
"line_mean": 48.5679012346,
"line_max": 121,
"alpha_frac": 0.6292652553,
"autogenerated": false,
"ratio": 4.05146316851665,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.518072842381665,
"avg_score": null,
"num_lines": null
} |
"""Added indices on created_at
Revision ID: 05a831a5db7b
Revises: a571e57d884a
Create Date: 2017-07-24 23:44:23.301874
"""
# revision identifiers, used by Alembic.
revision = '05a831a5db7b'  # Alembic id of this migration
down_revision = 'a571e57d884a'  # parent revision
branch_labels = None  # not the head of a named branch
depends_on = None  # no cross-branch dependencies
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade (multi-database setup)."""
    handler_name = "upgrade_%s" % engine_name
    globals()[handler_name]()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade (multi-database setup)."""
    handler_name = "downgrade_%s" % engine_name
    globals()[handler_name]()
# Every table that gains a non-unique index on created_at in this revision.
_CREATED_AT_INDEXED_TABLES = [
    'comments',
    'event_hooks',
    'experiment_actions',
    'experiment_thing_snapshots',
    'experiment_things',
    'experiments',
    'front_pages',
    'mod_actions',
    'posts',
    'praw_keys',
    'subreddit_pages',
    'subreddits',
]


def _create_created_at_indexes():
    """Create ix_<table>_created_at on every tracked table."""
    for table in _CREATED_AT_INDEXED_TABLES:
        op.create_index(op.f('ix_%s_created_at' % table), table,
                        ['created_at'], unique=False)


def _drop_created_at_indexes():
    """Drop the created_at indexes in reverse creation order."""
    for table in reversed(_CREATED_AT_INDEXED_TABLES):
        op.drop_index(op.f('ix_%s_created_at' % table), table_name=table)


# The development/test/production schemas receive identical changes, so
# each engine-specific hook delegates to the shared helpers above
# (previously the same twelve commands were pasted three times).

def upgrade_development():
    _create_created_at_indexes()


def downgrade_development():
    _drop_created_at_indexes()


def upgrade_test():
    _create_created_at_indexes()


def downgrade_test():
    _drop_created_at_indexes()


def upgrade_production():
    _create_created_at_indexes()


def downgrade_production():
    _drop_created_at_indexes()
| {
"repo_name": "c4fcm/CivilServant",
"path": "alembic/versions/05a831a5db7b_added_indices_on_created_at.py",
"copies": "1",
"size": "7893",
"license": "mit",
"hash": -6976711200432442000,
"line_mean": 59.7153846154,
"line_max": 129,
"alpha_frac": 0.6892182947,
"autogenerated": false,
"ratio": 3.0195103289977046,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4208728623697704,
"avg_score": null,
"num_lines": null
} |
"""Added in_use column to Variables.
Revision ID: 6da215bb5f1
Revises: 23dcdff2dad2
Create Date: 2013-05-27 15:33:18.800755
"""
# revision identifiers, used by Alembic.
revision = '6da215bb5f1'  # Alembic id of this migration
down_revision = '23dcdff2dad2'  # revision this migration is applied on top of
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Tighten NOT NULL constraints and add the variable.in_use flag."""
    # (table, column, VARCHAR length) for columns promoted to NOT NULL.
    not_null_strings = [
        ('role', u'description', 255),
        ('role', u'name', 80),
        ('user', u'username', 80),
        ('user', u'email', 255),
        ('user', u'password', 255),
    ]
    for table, column, size in not_null_strings:
        op.alter_column(table, column,
                        existing_type=sa.VARCHAR(length=size),
                        nullable=False)
    # server_default='1' lets existing rows satisfy the NOT NULL constraint.
    op.add_column('variable', sa.Column('in_use', sa.Boolean(), nullable=False,
                                        server_default='1'))
    op.alter_column('variable', u'description',
                    existing_type=sa.VARCHAR(length=300),
                    nullable=False)
    op.alter_column('variable', u'dimension_id',
                    existing_type=sa.INTEGER(),
                    nullable=False)
def downgrade():
    """Relax the nullability changes and drop variable.in_use."""
    op.alter_column('variable', u'dimension_id',
                    existing_type=sa.INTEGER(),
                    nullable=True)
    op.alter_column('variable', u'description',
                    existing_type=sa.VARCHAR(length=300),
                    nullable=True)
    op.drop_column('variable', 'in_use')
    # (table, column, VARCHAR length) reverted to nullable, reverse order
    # of upgrade().
    relaxed_strings = [
        ('user', u'password', 255),
        ('user', u'email', 255),
        ('user', u'username', 80),
        ('role', u'name', 80),
        ('role', u'description', 255),
    ]
    for table, column, size in relaxed_strings:
        op.alter_column(table, column,
                        existing_type=sa.VARCHAR(length=size),
                        nullable=True)
| {
"repo_name": "msscully/datamart",
"path": "alembic/versions/6da215bb5f1_added_in_use_column_.py",
"copies": "1",
"size": "2406",
"license": "mit",
"hash": 3429002757887900000,
"line_mean": 33.8695652174,
"line_max": 79,
"alpha_frac": 0.5831255195,
"autogenerated": false,
"ratio": 3.788976377952756,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9804215327625316,
"avg_score": 0.013577313965488145,
"num_lines": 69
} |
"""Add editing file_types table
Revision ID: 39a25a873063
Revises: bb522e9f9066
Create Date: 2019-11-08 14:02:33.351292
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
from sqlalchemy.sql.ddl import CreateSchema, DropSchema
# revision identifiers, used by Alembic.
revision = '39a25a873063'  # Alembic id of this migration
down_revision = 'bb522e9f9066'  # parent revision
branch_labels = None  # not the head of a named branch
depends_on = None  # no cross-branch dependencies
def upgrade():
    """Create the event_editing schema and its file_types table."""
    op.execute(CreateSchema('event_editing'))
    table_items = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('event_id', sa.Integer(), nullable=False, index=True),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('extensions', postgresql.ARRAY(sa.String()), nullable=False),
        sa.Column('allow_multiple_files', sa.Boolean(), nullable=False),
        sa.Column('required', sa.Boolean(), nullable=False),
        sa.Column('publishable', sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('file_types', *table_items, schema='event_editing')
    # Functional index enforcing case-insensitive uniqueness of file type
    # names within an event.
    op.create_index(
        'ix_uq_file_types_event_id_name_lower',
        'file_types',
        ['event_id', sa.text('lower(name)')],
        unique=True,
        schema='event_editing',
    )
def downgrade():
    """Drop the file_types table, then remove the event_editing schema."""
    schema = 'event_editing'
    op.drop_table('file_types', schema=schema)
    op.execute(DropSchema(schema))
| {
"repo_name": "indico/indico",
"path": "indico/migrations/versions/20191108_1402_39a25a873063_add_editing_file_types_table.py",
"copies": "5",
"size": "1412",
"license": "mit",
"hash": -8095365537780695000,
"line_mean": 32.619047619,
"line_max": 111,
"alpha_frac": 0.6720963173,
"autogenerated": false,
"ratio": 3.477832512315271,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00021258503401360543,
"num_lines": 42
} |
"""Added ix_comments_subreddit_id_created_at index
Revision ID: 879b6b345bd4
Revises: aae7a3158d13
Create Date: 2017-07-27 13:15:32.403889
"""
# revision identifiers, used by Alembic.
revision = '879b6b345bd4'  # Alembic id of this migration
down_revision = 'aae7a3158d13'  # parent revision
branch_labels = None  # not the head of a named branch
depends_on = None  # no cross-branch dependencies
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade (multi-database setup)."""
    handler_name = "upgrade_%s" % engine_name
    globals()[handler_name]()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade (multi-database setup)."""
    handler_name = "downgrade_%s" % engine_name
    globals()[handler_name]()
# Name of the composite index this revision manages on comments.
_COMMENTS_INDEX = 'ix_comments_subreddit_id_created_at'


def _create_comments_index():
    """Create the composite (subreddit_id, created_at) index on comments."""
    op.create_index(_COMMENTS_INDEX, 'comments',
                    ['subreddit_id', 'created_at'], unique=False)


def _drop_comments_index():
    """Drop the composite comments index."""
    op.drop_index(_COMMENTS_INDEX, table_name='comments')


# development/test/production apply the same change, so the hooks share
# the helpers above instead of repeating the command per environment.

def upgrade_development():
    _create_comments_index()


def downgrade_development():
    _drop_comments_index()


def upgrade_test():
    _create_comments_index()


def downgrade_test():
    _drop_comments_index()


def upgrade_production():
    # COMMENTED OUT BECAUSE THIS WAS ADDED MANUALLY ON PRODUCTION.
    # YOU WILL NEED TO UNCOMMENT THIS IF YOU ARE SETTING UP A NEW
    # PRODUCTION ENVIRONMENT.
    # _create_comments_index()
    pass


def downgrade_production():
    _drop_comments_index()
| {
"repo_name": "c4fcm/CivilServant",
"path": "alembic/versions/879b6b345bd4_added_ix_comments_subreddit_id_created_.py",
"copies": "1",
"size": "1988",
"license": "mit",
"hash": 1149687031941865100,
"line_mean": 28.671641791,
"line_max": 117,
"alpha_frac": 0.6881287726,
"autogenerated": false,
"ratio": 3.4634146341463414,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9541937330581938,
"avg_score": 0.021921215232880657,
"num_lines": 67
} |
"""Added Key to auth
Revision ID: 00000003
Revises: 00000002
Create Date: 2012-09-29 20:54:46.332465
"""
# revision identifiers, used by Alembic.
revision = '00000003'  # Alembic id of this migration
down_revision = '00000002'  # revision this migration is applied on top of
from alembic import op
import sqlalchemy as db
from datetime import datetime
def upgrade():
    """Create the auth_key table holding per-service OAuth credentials."""
    table_items = [
        db.Column('id', db.Integer, primary_key=True),
        db.Column('created_at', db.DateTime,
                  default=datetime.utcnow, nullable=False),
        db.Column('updated_at', db.DateTime,
                  onupdate=datetime.utcnow, default=datetime.utcnow),
        db.Column('service_alias', db.String),
        db.Column('service_id', db.String),
        db.Column('access_token', db.String),
        db.Column('secret', db.String),
        db.Column('expires', db.DateTime),
        db.Column('refresh_token', db.String),
        db.Column('user_id', db.Integer, db.ForeignKey('auth_user.id')),
        # At most one key per (service, remote account) pair.
        db.UniqueConstraint('service_alias', 'service_id'),
    ]
    op.create_table('auth_key', *table_items)
def downgrade():
    """Undo this revision by removing the auth_key table."""
    op.drop_table('auth_key')
| {
"repo_name": "klen/Flask-Foundation",
"path": "migrate/versions/00000003_added_key_to_auth.py",
"copies": "2",
"size": "1067",
"license": "bsd-3-clause",
"hash": -8004719473153730000,
"line_mean": 25.0243902439,
"line_max": 72,
"alpha_frac": 0.6307403936,
"autogenerated": false,
"ratio": 3.5448504983388704,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.517559089193887,
"avg_score": null,
"num_lines": null
} |
"""Added Key to auth
Revision ID: 00000004
Revises: 00000003
Create Date: 2012-10-01 20:54:46.332465
"""
# revision identifiers, used by Alembic.
revision = '00000004'  # Alembic id of this migration
down_revision = '00000003'  # revision this migration is applied on top of
from alembic import op
import sqlalchemy as db
from datetime import datetime
def upgrade():
    """Create the initial fquest game schema.

    Seven tables: guilds, characters, monsters, stuff (items), events,
    achievements, and the character/stuff inventory join table.
    """
    # Guilds that characters can join (see fquest_character.guild_id).
    op.create_table(
        'fquest_guild',
        db.Column('id', db.Integer, primary_key=True),
        db.Column('created_at', db.DateTime,
                  default=datetime.utcnow, nullable=False),
        db.Column('updated_at', db.DateTime,
                  onupdate=datetime.utcnow, default=datetime.utcnow),
        db.Column('name', db.String, nullable=False),
    )
    # Player characters; each belongs to an auth_user (user_id NOT NULL)
    # and stores its Facebook id/token plus the last sync timestamp.
    op.create_table(
        'fquest_character',
        db.Column('id', db.Integer, primary_key=True),
        db.Column('created_at', db.DateTime,
                  default=datetime.utcnow, nullable=False),
        db.Column('updated_at', db.DateTime,
                  onupdate=datetime.utcnow, default=datetime.utcnow),
        db.Column('name', db.String, nullable=False),
        db.Column('cls', db.SmallInteger, default=0, nullable=False),
        db.Column('race', db.SmallInteger, default=0, nullable=False),
        db.Column('sex', db.Boolean, default=True, nullable=False),
        db.Column('moto', db.String),
        db.Column('level', db.Integer, default=1, nullable=False),
        db.Column('health', db.Integer, default=20, nullable=False),
        # NOTE(review): 'strenght' is a typo in the original schema name;
        # kept as-is since renaming a column is a schema change.
        db.Column('strenght', db.Integer, default=15, nullable=False),
        db.Column('dexterity', db.Integer, default=15, nullable=False),
        db.Column('intellect', db.Integer, default=15, nullable=False),
        db.Column('luck', db.Integer, default=15, nullable=False),
        db.Column('current_health', db.Integer, default=20, nullable=False),
        db.Column('alignment', db.SmallInteger, default=1, nullable=False),
        db.Column('gold', db.Integer, default=0, nullable=False),
        db.Column('death', db.Integer, default=0, nullable=False),
        db.Column('win', db.Integer, default=0, nullable=False),
        db.Column('lose', db.Integer, default=0, nullable=False),
        db.Column('exp', db.Integer, default=0, nullable=False),
        db.Column('guild_id', db.Integer, db.ForeignKey('fquest_guild.id')),
        db.Column('user_id', db.Integer, db.ForeignKey('auth_user.id'), nullable=False),
        db.Column('facebook_id', db.String, nullable=False),
        db.Column('facebook_token', db.String, nullable=False),
        db.Column('facebook_synced', db.DateTime, default=datetime.utcnow, nullable=False),
    )
    # Monster catalogue; 'race' default 7 — semantics defined in app code.
    op.create_table(
        'fquest_monster',
        db.Column('id', db.Integer, primary_key=True),
        db.Column('created_at', db.DateTime,
                  default=datetime.utcnow, nullable=False),
        db.Column('updated_at', db.DateTime,
                  onupdate=datetime.utcnow, default=datetime.utcnow),
        db.Column('name', db.String, nullable=False, unique=True),
        db.Column('level', db.Integer, default=0, nullable=False),
        db.Column('race', db.Integer, default=7, nullable=False),
    )
    # Item ("stuff") catalogue; 'mode' semantics live in app code.
    op.create_table(
        'fquest_stuff',
        db.Column('id', db.Integer, primary_key=True),
        db.Column('created_at', db.DateTime,
                  default=datetime.utcnow, nullable=False),
        db.Column('updated_at', db.DateTime,
                  onupdate=datetime.utcnow, default=datetime.utcnow),
        db.Column('name', db.String, nullable=False),
        db.Column('level', db.Integer, default=0, nullable=False),
        db.Column('mode', db.SmallInteger, default=0, nullable=False),
    )
    # Per-character event log; facebook_id is unique, so a given remote
    # event can appear only once (presumably imported from Facebook —
    # see facebook_created_time).
    op.create_table(
        'fquest_event',
        db.Column('id', db.Integer, primary_key=True),
        db.Column('created_at', db.DateTime,
                  default=datetime.utcnow, nullable=False),
        db.Column('updated_at', db.DateTime,
                  onupdate=datetime.utcnow, default=datetime.utcnow),
        db.Column('message', db.String, nullable=False),
        db.Column('gold', db.Integer, default=0, nullable=False),
        db.Column('exp', db.Integer, default=0, nullable=False),
        db.Column('level', db.Integer),
        db.Column('character_id', db.Integer, db.ForeignKey('fquest_character.id'), nullable=False),
        db.Column('facebook_id', db.String, unique=True),
        db.Column('facebook_created_time', db.String),
    )
    # Achievements earned by a character.
    op.create_table(
        'fquest_achievement',
        db.Column('id', db.Integer, primary_key=True),
        db.Column('created_at', db.DateTime,
                  default=datetime.utcnow, nullable=False),
        db.Column('updated_at', db.DateTime,
                  onupdate=datetime.utcnow, default=datetime.utcnow),
        db.Column('type', db.SmallInteger, nullable=False),
        db.Column('name', db.String(50), nullable=False),
        db.Column('message', db.String(100), nullable=False),
        db.Column('character_id', db.Integer, db.ForeignKey('fquest_character.id'), nullable=False),
    )
    # Join table: which stuff a character owns and whether it is worn.
    op.create_table(
        'fquest_inventory',
        db.Column('id', db.Integer, primary_key=True),
        db.Column('stuff_id', db.Integer, db.ForeignKey('fquest_stuff.id'), nullable=False),
        db.Column('character_id', db.Integer, db.ForeignKey('fquest_character.id'), nullable=False),
        db.Column('wearing', db.Boolean, default=False, nullable=False),
    )
def downgrade():
    """Drop the fquest tables in reverse dependency order.

    Fix: the original dropped fquest_character/fquest_stuff/fquest_guild
    before fquest_event/fquest_achievement/fquest_inventory, which hold
    foreign keys to them — that fails on backends that enforce foreign
    keys on DROP TABLE (e.g. PostgreSQL). Referencing tables now go
    first.
    """
    op.drop_table('fquest_inventory')    # FKs: fquest_stuff, fquest_character
    op.drop_table('fquest_achievement')  # FK: fquest_character
    op.drop_table('fquest_event')        # FK: fquest_character
    op.drop_table('fquest_character')    # FKs: fquest_guild, auth_user
    op.drop_table('fquest_monster')
    op.drop_table('fquest_stuff')
    op.drop_table('fquest_guild')
| {
"repo_name": "klen/fquest",
"path": "migrate/versions/00000004_fquest_models.py",
"copies": "1",
"size": "5690",
"license": "bsd-3-clause",
"hash": -4667234792463190000,
"line_mean": 36.9333333333,
"line_max": 100,
"alpha_frac": 0.6263620387,
"autogenerated": false,
"ratio": 3.6012658227848102,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9721593213497737,
"avg_score": 0.0012069295974147264,
"num_lines": 150
} |
"""Added Key to auth
Revision ID: 00000003
Revises: 00000002
Create Date: 2012-09-29 20:54:46.332465
"""
# revision identifiers, used by Alembic.
revision = '00000003'  # Alembic id of this migration
down_revision = '00000002'  # revision this migration is applied on top of
from alembic import op
import sqlalchemy as db
from datetime import datetime
def upgrade():
    """Create auth_key: one row per external-service credential."""
    auth_key_items = [
        db.Column('id', db.Integer, primary_key=True),
        db.Column('created_at', db.DateTime,
                  default=datetime.utcnow, nullable=False),
        db.Column('updated_at', db.DateTime,
                  default=datetime.utcnow, onupdate=datetime.utcnow),
        db.Column('service_alias', db.String),
        db.Column('service_id', db.String),
        db.Column('access_token', db.String),
        db.Column('secret', db.String),
        db.Column('expires', db.DateTime),
        db.Column('refresh_token', db.String),
        db.Column('user_id', db.Integer, db.ForeignKey('auth_user.id')),
        # One credential row per (service, remote account) pair.
        db.UniqueConstraint('service_alias', 'service_id'),
    ]
    op.create_table('auth_key', *auth_key_items)
def downgrade():
    """Remove the auth_key table added by this revision."""
    op.drop_table('auth_key')
| {
"repo_name": "klen/tweetchi",
"path": "migrate/versions/00000003_added_key_to_auth.py",
"copies": "1",
"size": "1071",
"license": "bsd-3-clause",
"hash": -2358433985236295000,
"line_mean": 25.1219512195,
"line_max": 72,
"alpha_frac": 0.6321195145,
"autogenerated": false,
"ratio": 3.5114754098360654,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9629146355937339,
"avg_score": 0.002889713679745493,
"num_lines": 41
} |
"""added key to comments.subreddit_id
Revision ID: 2959fa8d7ad8
Revises: 05a831a5db7b
Create Date: 2017-07-26 19:53:29.818965
"""
# revision identifiers, used by Alembic.
revision = '2959fa8d7ad8'  # Alembic id of this migration
down_revision = '05a831a5db7b'  # parent revision
branch_labels = None  # not the head of a named branch
depends_on = None  # no cross-branch dependencies
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade (multi-database setup)."""
    handler_name = "upgrade_%s" % engine_name
    globals()[handler_name]()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade (multi-database setup)."""
    handler_name = "downgrade_%s" % engine_name
    globals()[handler_name]()
def _create_posts_subreddit_index():
    """Create the posts.subreddit_id index (shared by all engines)."""
    op.create_index(op.f('ix_posts_subreddit_id'), 'posts',
                    ['subreddit_id'], unique=False)


def _drop_posts_subreddit_index():
    """Drop the posts.subreddit_id index (shared by all engines)."""
    op.drop_index(op.f('ix_posts_subreddit_id'), table_name='posts')


# development/test/production apply the identical change, so the hooks
# delegate to the helpers above instead of repeating the command.

def upgrade_development():
    _create_posts_subreddit_index()


def downgrade_development():
    _drop_posts_subreddit_index()


def upgrade_test():
    _create_posts_subreddit_index()


def downgrade_test():
    _drop_posts_subreddit_index()


def upgrade_production():
    _create_posts_subreddit_index()


def downgrade_production():
    _drop_posts_subreddit_index()
| {
"repo_name": "c4fcm/CivilServant",
"path": "alembic/versions/2959fa8d7ad8_added_key_to_comments_subreddit_id.py",
"copies": "1",
"size": "1699",
"license": "mit",
"hash": 6393123956724782000,
"line_mean": 25.546875,
"line_max": 91,
"alpha_frac": 0.6680400235,
"autogenerated": false,
"ratio": 3.3643564356435642,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4532396459143564,
"avg_score": null,
"num_lines": null
} |
"""Added line item table
Revision ID: 20e5ee0480f6
Revises: 252e31376860
Create Date: 2015-05-12 13:48:39.460366
"""
# revision identifiers, used by Alembic.
revision = '20e5ee0480f6'  # Alembic id of this migration
down_revision = '252e31376860'  # revision this migration is applied on top of
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create line_item: one row per line item of a contract."""
    table_items = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('contract_id', sa.Integer(), nullable=False),
        sa.Column('description', sa.Text(), nullable=False),
        sa.Column('manufacturer', sa.Text(), nullable=True),
        sa.Column('model_number', sa.Text(), nullable=True),
        sa.Column('quantity', sa.Integer(), nullable=True),
        sa.Column('unit_of_measure', sa.String(length=255), nullable=True),
        sa.Column('unit_cost', sa.Float(), nullable=True),
        sa.Column('total_cost', sa.Float(), nullable=True),
        # Line items are deleted together with their parent contract.
        sa.ForeignKeyConstraint(['contract_id'], ['contract.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('line_item', *table_items)
def downgrade():
    """Revert this migration: drop the ``line_item`` table (all line-item data is lost)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('line_item')
    ### end Alembic commands ###
| {
"repo_name": "ajb/pittsburgh-purchasing-suite",
"path": "migrations/versions/20e5ee0480f6_.py",
"copies": "3",
"size": "1204",
"license": "bsd-3-clause",
"hash": -6820812087446150000,
"line_mean": 30.6842105263,
"line_max": 82,
"alpha_frac": 0.6727574751,
"autogenerated": false,
"ratio": 3.44,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.56127574751,
"avg_score": null,
"num_lines": null
} |
"""Added loan parameters to book and user models
Revision ID: 51af81b7c713
Revises: 0e923b42f8b0
Create Date: 2016-01-08 18:02:10.576204
"""
# revision identifiers, used by Alembic.
revision = '51af81b7c713'
down_revision = '0e923b42f8b0'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
    """Move loan tracking onto ``books`` and drop the old per-user counters.

    Adds loan columns/FKs to books; removes users.books_loaned and
    users.books_checked_out together with their FK constraint.
    """
    op.add_column('books', sa.Column('conditionOfLoan', sa.Text(), nullable=True))
    op.add_column('books', sa.Column('loaned_id', sa.Integer(), nullable=True))
    op.add_column('books', sa.Column('possession_id', sa.Integer(), nullable=True))
    op.add_column('books', sa.Column('termOfLoan', sa.Date(), nullable=True))
    # MySQL-style constraint names; both new columns reference users.id
    op.create_foreign_key(u'books_ibfk_2', 'books', 'users', ['loaned_id'], ['id'])
    op.create_foreign_key(u'books_ibfk_3', 'books', 'users', ['possession_id'], ['id'])
    op.drop_constraint(u'users_ibfk_2', 'users', type_='foreignkey')
    op.drop_column('users', 'books_loaned')
    op.drop_column('users', 'books_checked_out')
def downgrade():
    """Restore the per-user counters and remove the loan columns from ``books``.

    NOTE(review): books_loaned is recreated with nullable=False and no
    default — this will fail on a populated users table; confirm intended.
    """
    op.add_column('users', sa.Column('books_checked_out', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
    op.add_column('users', sa.Column('books_loaned', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False))
    op.create_foreign_key(u'users_ibfk_2', 'users', 'books', ['books_checked_out'], ['id'])
    op.drop_constraint(u'books_ibfk_2', 'books', type_='foreignkey')
    op.drop_constraint(u'books_ibfk_3', 'books', type_='foreignkey')
    op.drop_column('books', 'termOfLoan')
    op.drop_column('books', 'possession_id')
    op.drop_column('books', 'loaned_id')
    op.drop_column('books', 'conditionOfLoan')
| {
"repo_name": "brotherjack/Rood-Kamer",
"path": "migrations/versions/51af81b7c713_added_loan_parameters_to_book_and_user_.py",
"copies": "1",
"size": "1679",
"license": "bsd-3-clause",
"hash": 2295285573601419300,
"line_mean": 43.1842105263,
"line_max": 127,
"alpha_frac": 0.6873138773,
"autogenerated": false,
"ratio": 2.9301919720767886,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4117505849376789,
"avg_score": null,
"num_lines": null
} |
"""Added local passwords
Revision ID: 92a86812ebac
Revises: 04e921e19a65
Create Date: 2018-11-07 21:18:14.679902
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '92a86812ebac'
down_revision = '04e921e19a65'
branch_labels = None
depends_on = None
def upgrade():
    """Add local password support: a password history table plus password/expiry columns on users."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('history',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('timestamp', sa.DateTime(), nullable=True),
        # stores a (presumably hashed) past password — TODO confirm hashing at the app layer
        sa.Column('password', sa.String(length=256), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.add_column('users', sa.Column('expiration_date', sa.DateTime(), nullable=True))
    op.add_column('users', sa.Column('password', sa.String(length=256), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Revert local password support: drop the users columns, then the history table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('users', 'password')
    op.drop_column('users', 'expiration_date')
    op.drop_table('history')
    # ### end Alembic commands ###
| {
"repo_name": "nycrecords/intranet",
"path": "migrations/versions/92a86812ebac_added_local_passwords.py",
"copies": "1",
"size": "1192",
"license": "mit",
"hash": -7339797556849324000,
"line_mean": 29.5641025641,
"line_max": 87,
"alpha_frac": 0.6719798658,
"autogenerated": false,
"ratio": 3.3863636363636362,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45583435021636365,
"avg_score": null,
"num_lines": null
} |
"""Added locks tables
Revision ID: 341d2e702dc4
Revises: e249ebc4f51
Create Date: 2015-09-03 09:24:29.941684
"""
# revision identifiers, used by Alembic.
revision = '341d2e702dc4'
down_revision = 'e249ebc4f51'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create quark_locks / quark_lock_holders and attach a lock_id to IP addresses."""
    op.create_table('quark_locks',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        # NOTE(review): Enum has no explicit name= — relies on dialect defaults
        sa.Column('type', sa.Enum('ip_address'), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        mysql_engine='InnoDB')
    op.create_table('quark_lock_holders',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('lock_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.ForeignKeyConstraint(['lock_id'], ['quark_locks.id'], ),
        sa.PrimaryKeyConstraint('id'),
        mysql_engine='InnoDB')
    # nullable: most IP addresses are not locked
    op.add_column(u'quark_ip_addresses', sa.Column('lock_id', sa.Integer(),
                                                   nullable=True))
def downgrade():
    """Revert in dependency order: column first, then holders (FK), then locks."""
    op.drop_column(u'quark_ip_addresses', 'lock_id')
    op.drop_table('quark_lock_holders')
    op.drop_table('quark_locks')
| {
"repo_name": "rackerlabs/quark",
"path": "quark/db/migration/alembic/versions/341d2e702dc4_added_locks_tables.py",
"copies": "6",
"size": "1422",
"license": "apache-2.0",
"hash": 1508498922961291300,
"line_mean": 35.4615384615,
"line_max": 79,
"alpha_frac": 0.5632911392,
"autogenerated": false,
"ratio": 3.6649484536082473,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.7228239592808247,
"avg_score": null,
"num_lines": null
} |
"""Added Market table
Revision ID: 2eb561cab06a
Revises: 2a240390d131
Create Date: 2016-05-10 12:40:08.397000
"""
# revision identifiers, used by Alembic.
revision = '2eb561cab06a'
down_revision = '2a240390d131'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the ``market`` table: per-day prices of a player for an owner.

    Composite primary key (owner_id, player_id, date) allows one price row
    per owner/player/day.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('market',
        sa.Column('owner_id', sa.Integer(), nullable=False),
        sa.Column('player_id', sa.Integer(), nullable=False),
        sa.Column('date', sa.Date(), nullable=False),
        sa.Column('mkt_price', sa.Integer(), nullable=True),
        sa.Column('min_price', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['owner_id'], ['users.id'], ),
        sa.ForeignKeyConstraint(['player_id'], ['players.id'], ),
        sa.PrimaryKeyConstraint('owner_id', 'player_id', 'date')
    )
    ### end Alembic commands ###
def downgrade():
    """Revert this migration: drop the ``market`` table (all price history is lost)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('market')
    ### end Alembic commands ###
| {
"repo_name": "jotacor/tradunio-web",
"path": "migrations/versions/2eb561cab06a_added_market_table.py",
"copies": "1",
"size": "1097",
"license": "mit",
"hash": 5091971583039263000,
"line_mean": 17.2456140351,
"line_max": 63,
"alpha_frac": 0.6162260711,
"autogenerated": false,
"ratio": 3.516025641025641,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9545256885696884,
"avg_score": 0.017398965285751185,
"num_lines": 57
} |
"""added message log
Revision ID: a0cca001e8a7
Revises: df498825ec28
Create Date: 2018-04-21 12:04:18.535978
"""
# revision identifiers, used by Alembic.
revision = 'a0cca001e8a7'
down_revision = 'df498825ec28'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade(engine_name):
    """Dispatch to the engine-specific routine, e.g. ``upgrade_development``."""
    handler = globals()["upgrade_%s" % engine_name]
    handler()
def downgrade(engine_name):
    """Dispatch to the engine-specific routine, e.g. ``downgrade_development``."""
    handler = globals()["downgrade_%s" % engine_name]
    handler()
def upgrade_development():
    """Create ``message_logs`` (with its indexes) on the development database.

    Also drops ix_mod_actions_created_utc_index — reason not stated in this
    autogenerated script; NOTE(review): confirm the index was intentionally
    retired.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('message_logs',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('message_sent', sa.Boolean(), nullable=True),
        sa.Column('message_failure_reason', sa.String(length=63), nullable=True),
        sa.Column('platform', sa.String(length=64), nullable=True),
        sa.Column('username', sa.String(length=256), nullable=True),
        sa.Column('subject', sa.String(length=256), nullable=True),
        sa.Column('message_task_id', sa.String(length=256), nullable=True),
        sa.Column('body', mysql.MEDIUMTEXT(), nullable=True),
        sa.Column('metadata_json', mysql.MEDIUMTEXT(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_message_logs_created_at'), 'message_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_message_logs_message_task_id'), 'message_logs', ['message_task_id'], unique=False)
    op.create_index(op.f('ix_message_logs_platform'), 'message_logs', ['platform'], unique=False)
    op.create_index(op.f('ix_message_logs_username'), 'message_logs', ['username'], unique=False)
    op.drop_index('ix_mod_actions_created_utc_index', table_name='mod_actions')
    # ### end Alembic commands ###
def downgrade_development():
    """Revert on the development database: restore the mod_actions index, drop message_logs."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_index('ix_mod_actions_created_utc_index', 'mod_actions', ['created_utc'], unique=False)
    op.drop_index(op.f('ix_message_logs_username'), table_name='message_logs')
    op.drop_index(op.f('ix_message_logs_platform'), table_name='message_logs')
    op.drop_index(op.f('ix_message_logs_message_task_id'), table_name='message_logs')
    op.drop_index(op.f('ix_message_logs_created_at'), table_name='message_logs')
    op.drop_table('message_logs')
    # ### end Alembic commands ###
def upgrade_test():
    """Apply the message_logs migration to the test database.

    The auto-generated body was statement-for-statement identical to
    ``upgrade_development``; delegate to it so the DDL lives in one place.
    """
    upgrade_development()
def downgrade_test():
    """Revert the message_logs migration on the test database.

    The auto-generated body was statement-for-statement identical to
    ``downgrade_development``; delegate to it so the DDL lives in one place.
    """
    downgrade_development()
def upgrade_production():
    """Apply the message_logs migration to the production database.

    The auto-generated body was statement-for-statement identical to
    ``upgrade_development``; delegate to it so the DDL lives in one place.
    """
    upgrade_development()
def downgrade_production():
    """Revert the message_logs migration on the production database.

    The auto-generated body was statement-for-statement identical to
    ``downgrade_development``; delegate to it so the DDL lives in one place.
    """
    downgrade_development()
| {
"repo_name": "c4fcm/CivilServant",
"path": "alembic/versions/a0cca001e8a7_added_message_log.py",
"copies": "1",
"size": "6255",
"license": "mit",
"hash": -3186444305722514400,
"line_mean": 47.1153846154,
"line_max": 111,
"alpha_frac": 0.6799360512,
"autogenerated": false,
"ratio": 3.2493506493506494,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9353511609252889,
"avg_score": 0.015155018259551997,
"num_lines": 130
} |
"""added message table and relations in user and item
Revision ID: 4815c5261520
Revises: 1e68ebff1102
Create Date: 2015-04-15 17:58:27.189227
"""
# revision identifiers, used by Alembic.
revision = '4815c5261520'
down_revision = '1e68ebff1102'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the ``messages`` table (user-to-user messages about an item).

    Also adds users.has_notifications, a flag the app presumably sets when a
    user has unread messages — TODO confirm against the model code.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('messages',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('subject', sa.String(length=120), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('unread', sa.Boolean(), nullable=True),
        sa.Column('sender_id', sa.Integer(), nullable=True),
        sa.Column('receiver_id', sa.Integer(), nullable=True),
        sa.Column('item_id', sa.Integer(), nullable=True),
        sa.Column('timestamp', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['item_id'], ['items.id'], ),
        sa.ForeignKeyConstraint(['receiver_id'], ['users.id'], ),
        sa.ForeignKeyConstraint(['sender_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # timestamp is indexed, consistent with listing messages chronologically
    op.create_index(op.f('ix_messages_timestamp'), 'messages', ['timestamp'], unique=False)
    op.add_column(u'users', sa.Column('has_notifications', sa.Boolean(), nullable=True))
    ### end Alembic commands ###
def downgrade():
    """Revert this migration: remove the notifications flag, the index, then the table."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column(u'users', 'has_notifications')
    op.drop_index(op.f('ix_messages_timestamp'), table_name='messages')
    op.drop_table('messages')
    ### end Alembic commands ###
| {
"repo_name": "rosariomgomez/tradyfit",
"path": "vagrant/tradyfit/migrations/versions/4815c5261520_added_message_table_and_relations_in_.py",
"copies": "1",
"size": "1565",
"license": "mit",
"hash": -3980212069805111000,
"line_mean": 35.3953488372,
"line_max": 91,
"alpha_frac": 0.6728434505,
"autogenerated": false,
"ratio": 3.470066518847007,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9594091401796075,
"avg_score": 0.009763713510186333,
"num_lines": 43
} |
"""added more report types
Revision ID: 13401065bd29
Revises: aaef03da98
Create Date: 2014-06-14 22:22:12.048324
"""
# revision identifiers, used by Alembic.
revision = '13401065bd29'
down_revision = 'aaef03da98'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column
def upgrade():
    """Bulk-insert four new report-type rows (ids 2-5) into ``report_types``.

    Ids start at 2 — presumably id 1 already exists from an earlier
    migration; verify before reusing these ids.
    """
    # lightweight table stub: just the columns bulk_insert needs
    report_types = table('report_types',
        column('id', sa.Integer),
        column('name', sa.String(60)),
        column('description', sa.Text),
        column('symbol', sa.String(40))
    )
    op.bulk_insert(report_types, [
        dict(
            id=2,
            name='1 week',
            description='Generates a report containing your gains/losses for the latest week.',
            symbol='1-week'
        ),
        dict(
            id=3,
            name='3 months',
            description='Generates a report containing your gains/losses for the latest 3 months.',
            symbol='3-months'
        ),
        dict(
            id=4,
            name='6 months',
            description='Generates a report containing your gains/losses for the latest 6 months.',
            symbol='6-months'
        ),
        dict(
            id=5,
            name='1 year',
            description='Generates a report containing your gains/losses for the latest year.',
            symbol='1-year'
        ),
    ])
def downgrade():
    """Downgrade is not supported for this data migration.

    Raises:
        NotImplementedError: always; the bulk-inserted report types are not
            removed automatically.
    """
    # Bug fix: the original raised ``NotImplemented()``.  ``NotImplemented``
    # is a non-callable singleton (not an exception class), so the call
    # itself failed with ``TypeError`` instead of signalling "unsupported".
    raise NotImplementedError()
| {
"repo_name": "thomaserlang/stocksum",
"path": "src/stocksum/web/alembic/versions/13401065bd29_added_more_report_types.py",
"copies": "1",
"size": "1401",
"license": "mit",
"hash": 342654340634928700,
"line_mean": 25.4339622642,
"line_max": 99,
"alpha_frac": 0.5731620271,
"autogenerated": false,
"ratio": 4.084548104956268,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5157710132056268,
"avg_score": null,
"num_lines": null
} |
"""Added multiple indexes
Revision ID: 4e2dde2d15e6
Revises: 3e4640ec7bef
Create Date: 2015-06-01 20:55:32.714080
"""
# revision identifiers, used by Alembic.
revision = '4e2dde2d15e6'
down_revision = '3e4640ec7bef'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add lookup indexes on channel, ecc, service_provider and station columns."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_index(op.f('ix_channel_type_id'), 'channel', ['type_id'], unique=False)
    op.create_index(op.f('ix_ecc_iso'), 'ecc', ['iso'], unique=False)
    op.create_index(op.f('ix_service_provider_codops'), 'service_provider', ['codops'], unique=False)
    op.create_index(op.f('ix_station_orga'), 'station', ['orga'], unique=False)
    op.create_index(op.f('ix_station_radioepg_enabled'), 'station', ['radioepg_enabled'], unique=False)
    op.create_index(op.f('ix_station_radiotag_enabled'), 'station', ['radiotag_enabled'], unique=False)
    op.create_index(op.f('ix_station_radiovis_enabled'), 'station', ['radiovis_enabled'], unique=False)
    ### end Alembic commands ###
def downgrade():
    """Drop the indexes added by upgrade(), in reverse order of creation."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_station_radiovis_enabled'), table_name='station')
    op.drop_index(op.f('ix_station_radiotag_enabled'), table_name='station')
    op.drop_index(op.f('ix_station_radioepg_enabled'), table_name='station')
    op.drop_index(op.f('ix_station_orga'), table_name='station')
    op.drop_index(op.f('ix_service_provider_codops'), table_name='service_provider')
    op.drop_index(op.f('ix_ecc_iso'), table_name='ecc')
    op.drop_index(op.f('ix_channel_type_id'), table_name='channel')
    ### end Alembic commands ###
| {
"repo_name": "ebu/radiodns-plugit",
"path": "RadioDns-PlugIt/alembic/versions/4e2dde2d15e6_added_multiple_indexes.py",
"copies": "1",
"size": "1652",
"license": "bsd-3-clause",
"hash": 2621091394848311000,
"line_mean": 42.4736842105,
"line_max": 103,
"alpha_frac": 0.6840193705,
"autogenerated": false,
"ratio": 2.9658886894075405,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.414990805990754,
"avg_score": null,
"num_lines": null
} |
"""added_music_table
Revision ID: 5718a1d1744
Revises: 46a6cae8de5
Create Date: 2014-05-02 12:08:18.693503
"""
# revision identifiers, used by Alembic.
revision = '5718a1d1744'
down_revision = '46a6cae8de5'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the ``musics`` table (one row per track, tag-style metadata columns)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('musics',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.Unicode(length=512), nullable=True),
        sa.Column('album', sa.Unicode(length=512), nullable=True),
        sa.Column('artist', sa.Unicode(length=512), nullable=True),
        sa.Column('date', sa.Unicode(length=128), nullable=True),
        sa.Column('tracknumber', sa.Unicode(length=64), nullable=True),
        sa.Column('organization', sa.Unicode(length=512), nullable=True),
        sa.Column('genre', sa.Unicode(length=128), nullable=True),
        sa.Column('performer', sa.Unicode(length=512), nullable=True),
        sa.Column('location', sa.Unicode(length=512), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # the three columns users search/browse by
    op.create_index(op.f('ix_musics_album'), 'musics', ['album'], unique=False)
    op.create_index(op.f('ix_musics_artist'), 'musics', ['artist'], unique=False)
    op.create_index(op.f('ix_musics_title'), 'musics', ['title'], unique=False)
    ### end Alembic commands ###
def downgrade():
    """Revert this migration: drop the indexes, then the ``musics`` table."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_musics_title'), table_name='musics')
    op.drop_index(op.f('ix_musics_artist'), table_name='musics')
    op.drop_index(op.f('ix_musics_album'), table_name='musics')
    op.drop_table('musics')
    ### end Alembic commands ###
| {
"repo_name": "kironono/renchon",
"path": "db_versions/versions/5718a1d1744_added_music_table.py",
"copies": "1",
"size": "1658",
"license": "mit",
"hash": 6605638227275990000,
"line_mean": 36.6818181818,
"line_max": 81,
"alpha_frac": 0.6749095296,
"autogenerated": false,
"ratio": 3.1106941838649154,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.42856037134649155,
"avg_score": null,
"num_lines": null
} |
"""added_named_job_dep_fks
Revision ID: 361fbffcf08b
Revises: 9058e0136aba
Create Date: 2016-08-26 10:33:18.803848
"""
# revision identifiers, used by Alembic.
revision = '361fbffcf08b'
down_revision = '9058e0136aba'
branch_labels = None
depends_on = None
from alembic import op
def upgrade(engine_name):
    """Look up and run the upgrade routine for *engine_name* (e.g. upgrade_data_broker)."""
    target = globals()["upgrade_%s" % engine_name]
    target()
def downgrade(engine_name):
    """Look up and run the downgrade routine for *engine_name* (e.g. downgrade_data_broker)."""
    target = globals()["downgrade_%s" % engine_name]
    target()
def upgrade_data_broker():
    """Add named FKs from job_dependency (job_id, prerequisite_id) to job.job_id."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_foreign_key('fk_dep_job_id', 'job_dependency', 'job', ['job_id'], ['job_id'])
    op.create_foreign_key('fk_prereq_job_id', 'job_dependency', 'job', ['prerequisite_id'], ['job_id'])
    ### end Alembic commands ###
def downgrade_data_broker():
    """Drop the two named job_dependency foreign keys added by the upgrade."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('fk_prereq_job_id', 'job_dependency', type_='foreignkey')
    op.drop_constraint('fk_dep_job_id', 'job_dependency', type_='foreignkey')
    ### end Alembic commands ###
| {
"repo_name": "fedspendingtransparency/data-act-broker-backend",
"path": "dataactcore/migrations/versions/361fbffcf08b_added_named_job_dep_fks.py",
"copies": "2",
"size": "1044",
"license": "cc0-1.0",
"hash": 6527093103113482000,
"line_mean": 24.4634146341,
"line_max": 103,
"alpha_frac": 0.6695402299,
"autogenerated": false,
"ratio": 3.144578313253012,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9667142691036209,
"avg_score": 0.029395170423360595,
"num_lines": 41
} |
"""Added new column for ranked searching of terms and definitions
Revision ID: 201bae6698f6
Revises: 4614666a279f
Create Date: 2015-10-21 00:31:05.525848
"""
# revision identifiers, used by Alembic.
revision = '201bae6698f6'
down_revision = '4614666a279f'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Enable ranked full-text search on ``definitions`` (PostgreSQL-specific).

    Two changes, executed as raw SQL against the migration connection:
    1. rebuild ix_definitions_term with varchar_pattern_ops (prefix LIKE);
    2. add a tsvector column maintained by a trigger, with terms weighted
       'A' and definitions 'B', a GIN index, and a backfill of existing rows.
    """
    db_bind = op.get_bind()
    #
    # Replace the index for terms with one including varchar_pattern_ops
    #
    # drop the old index
    db_bind.execute(sa.sql.text('''
        DROP INDEX IF EXISTS ix_definitions_term;
    '''))
    # create the new index
    db_bind.execute(sa.sql.text('''
        CREATE INDEX ix_definitions_term ON definitions (term varchar_pattern_ops);
    '''))
    #
    # Support full-text search on terms and definitions
    #
    # add the tsv_search column to the definitions table
    db_bind.execute(sa.sql.text('''
        ALTER TABLE definitions ADD COLUMN tsv_search tsvector;
    '''))
    # set up a trigger to populate tsv_search when records are created or altered
    db_bind.execute(sa.sql.text('''
        DROP FUNCTION IF EXISTS definitions_search_trigger();
    '''))
    db_bind.execute(sa.sql.text('''
        CREATE FUNCTION definitions_search_trigger() RETURNS trigger AS $$
        begin
        new.tsv_search :=
        setweight(to_tsvector('pg_catalog.english', COALESCE(new.term,'')), 'A') ||
        setweight(to_tsvector('pg_catalog.english', COALESCE(new.definition,'')), 'B');
        return new;
        end
        $$ LANGUAGE plpgsql;
    '''))
    db_bind.execute(sa.sql.text('''
        DROP TRIGGER IF EXISTS tsvupdate_definitions_trigger ON definitions;
    '''))
    db_bind.execute(sa.sql.text('''
        CREATE TRIGGER tsvupdate_definitions_trigger BEFORE INSERT OR UPDATE ON definitions FOR EACH ROW EXECUTE PROCEDURE definitions_search_trigger();
    '''))
    # create an index for tsv_search
    db_bind.execute(sa.sql.text('''
        DROP INDEX IF EXISTS ix_definitions_tsv_search;
    '''))
    db_bind.execute(sa.sql.text('''
        CREATE INDEX ix_definitions_tsv_search ON definitions USING gin(tsv_search);
    '''))
    # populate tsv_search for existing records (the trigger only covers future writes)
    db_bind.execute(sa.sql.text('''
        UPDATE definitions SET tsv_search = setweight(to_tsvector('pg_catalog.english', COALESCE(term,'')), 'A') || setweight(to_tsvector('pg_catalog.english', COALESCE(definition,'')), 'B');
    '''))
def downgrade():
    """Remove full-text search support and restore the plain ``term`` index."""
    db_bind = op.get_bind()
    #
    # Revert the terms index to the original style
    #
    # drop the varchar_pattern_ops index
    db_bind.execute(sa.sql.text('''
        DROP INDEX IF EXISTS ix_definitions_term;
    '''))
    # re-create the standard index
    db_bind.execute(sa.sql.text('''
        CREATE INDEX ix_definitions_term ON definitions(term);
    '''))
    #
    # Remove support for full-text search on terms and definitions
    #
    # drop the tsv_search column from the definitions table
    db_bind.execute(sa.sql.text('''
        ALTER TABLE definitions DROP COLUMN IF EXISTS tsv_search;
    '''))
    # drop the search trigger and function (trigger first; the function is referenced by it)
    db_bind.execute(sa.sql.text('''
        DROP TRIGGER IF EXISTS tsvupdate_definitions_trigger ON definitions;
    '''))
    db_bind.execute(sa.sql.text('''
        DROP FUNCTION IF EXISTS definitions_search_trigger();
    '''))
    # drop the index
    db_bind.execute(sa.sql.text('''
        DROP INDEX IF EXISTS ix_definitions_tsv_search;
    '''))
| {
"repo_name": "codeforamerica/glossary-bot",
"path": "migrations/versions/201bae6698f6_added_search.py",
"copies": "1",
"size": "3474",
"license": "mit",
"hash": 5272534375500872000,
"line_mean": 30.5818181818,
"line_max": 191,
"alpha_frac": 0.6456534254,
"autogenerated": false,
"ratio": 3.70362473347548,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9836540311710618,
"avg_score": 0.002547569432972473,
"num_lines": 110
} |
"""Added Owner and Community classes
Revision ID: 549e55d55f1
Revises: 2eb561cab06a
Create Date: 2016-05-26 14:50:49.133000
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from app import db
# revision identifiers, used by Alembic.
revision = '549e55d55f1'
down_revision = '2eb561cab06a'
Base = declarative_base()
class Owner(Base):
    """Association row linking a player to the user who owns it."""
    __tablename__ = 'owners'
    # composite primary key: one row per (player, owner) pair
    player_id = sa.Column(sa.Integer, sa.ForeignKey('players.id'), primary_key=True)
    owner_id = sa.Column(sa.Integer, sa.ForeignKey('users.id'), primary_key=True)
class Player(Base):
    """Minimal ``players`` mapping; ``owner`` is the legacy column this migration retires."""
    __tablename__ = 'players'
    id = sa.Column(sa.Integer, primary_key=True)
    # legacy single-owner FK, replaced by the many-to-many ``owners`` table
    owner = sa.Column(sa.Integer, sa.ForeignKey('users.id'), nullable=True)
class User(Base):
    """Minimal ``users`` mapping, just enough for this migration's queries."""
    __tablename__ = 'users'
    # Consistency fix: use ``sa.Column``/``sa.Integer`` like the sibling
    # Owner/Player mappings instead of the Flask-SQLAlchemy ``db`` object —
    # a migration script should not depend on application state.
    # ``db.Column`` is the same SQLAlchemy Column, so behaviour is unchanged.
    id = sa.Column(sa.Integer, primary_key=True)
def upgrade():
    """Create communities/owners tables and migrate players.owner into ``owners``.

    Copies each distinct non-null (player, owner) pair into the new
    association table, then removes the legacy players.owner column via the
    SQLite-safe table rebuild helper.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('communities',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=64), nullable=False),
        sa.PrimaryKeyConstraint('id', 'name')
    )
    op.create_table('owners',
        sa.Column('player_id', sa.Integer(), nullable=False),
        sa.Column('owner_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['owner_id'], ['users.id'], ),
        sa.ForeignKeyConstraint(['player_id'], ['players.id'], ),
        sa.PrimaryKeyConstraint('player_id', 'owner_id')
    )
    op.add_column(u'users', sa.Column('community_id', sa.Integer(), nullable=True))
    # data migration: copy legacy player->owner links into the new table
    for player_id, owner_id in db.session.query(Player.id, Player.owner).distinct():
        if not player_id or not owner_id:
            continue
        db.session.add(Owner(player_id=player_id, owner_id=owner_id))
    db.session.commit()
    drop_column_sqlite('players', 'owner')
    # op.drop_column(u'players', 'owner')  # it fails in sqlite because it is not allowed
    ### end Alembic commands ###
def downgrade():
    """Revert the schema changes.

    NOTE(review): players.owner is re-added empty — the ownership data moved
    into ``owners`` is dropped with the table, not copied back.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column(u'users', 'community_id')
    op.add_column(u'players', sa.Column('owner', sa.INTEGER(), nullable=True))
    op.drop_table('owners')
    op.drop_table('communities')
    ### end Alembic commands ###
def drop_column_sqlite(tablename, columns):
    """Drop column(s) from a table on SQLite, which lacks ALTER TABLE DROP COLUMN.

    Rebuilds the table: creates a copy without the dropped column(s), copies
    the remaining rows across, drops the old table and renames the copy into
    place.

    :param tablename: name of the table to rebuild.
    :param columns: a single column name or an iterable of column names.
    """
    from copy import copy
    # Bug fix: this helper is called with a plain string ('owner'), and the
    # original tested ``c.name not in columns`` directly — on a string that
    # is a *substring* test, so dropping 'owner' would also drop a column
    # named e.g. 'own'.  Normalising to a set restores exact-name matching
    # while keeping both str and iterable arguments working.
    if isinstance(columns, str):
        columns = {columns}
    else:
        columns = set(columns)
    # get the db engine and reflect database tables
    engine = op.get_bind()
    meta = sa.MetaData(bind=engine)
    meta.reflect()
    # create a select statement over the surviving columns of the old table
    old_table = meta.tables[tablename]
    select = sa.sql.select([c for c in old_table.c if c.name not in columns])
    # detached copies of the surviving columns (table attribute cleared so
    # they can be attached to the replacement table)
    remaining_columns = [copy(c) for c in old_table.columns
                         if c.name not in columns]
    for column in remaining_columns:
        column.table = None
    # create the replacement table under a temporary name
    new_tablename = '{0}_new'.format(tablename)
    op.create_table(new_tablename, *remaining_columns)
    meta.reflect()
    new_table = meta.tables[new_tablename]
    # copy data from the old table
    insert = sa.sql.insert(new_table).from_select(
        [c.name for c in remaining_columns], select)
    engine.execute(insert)
    # drop the old table and rename the new table to take the old table's
    # position
    op.drop_table(tablename)
    op.rename_table(new_tablename, tablename)
| {
"repo_name": "jotacor/tradunio-web",
"path": "migrations/versions/549e55d55f1_added_owner_and_community_classes.py",
"copies": "1",
"size": "3778",
"license": "mit",
"hash": -5119862447328477000,
"line_mean": 32.036036036,
"line_max": 88,
"alpha_frac": 0.6228163049,
"autogenerated": false,
"ratio": 3.7931726907630523,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49159889956630526,
"avg_score": null,
"num_lines": null
} |
"""Added PageBanner mapped class.
Revision ID: 2c0bfc379e01
Revises: 18da1d3c685a
Create Date: 2012-04-02 14:24:16.271417
"""
# revision identifiers, used by Alembic.
revision = '2c0bfc379e01'
down_revision = '18da1d3c685a'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Rebuild ``nodes_banners__files`` with composite PK and a ``weight`` column.

    MySQL table rebuild: create a temporary table with the new shape, copy
    the existing (nodes_id, files_id) pairs, drop the old table and rename
    the temporary one into place.
    """
    ### commands auto generated by Alembic - please adjust! ###
    # create the replacement secondary table (composite PK + weight)
    op.create_table(u'_pagebanner_secondary_tmp',
        sa.Column('files_id',
                  sa.Integer,
                  sa.ForeignKey('files.id',
                                onupdate='cascade',
                                ondelete='cascade'),
                  primary_key=True),
        sa.Column('nodes_id',
                  sa.Integer,
                  sa.ForeignKey('nodes.id',
                                onupdate='cascade',
                                ondelete='cascade'),
                  primary_key=True),
        sa.Column(u'weight',
                  sa.Integer,
                  default=0),
        mysql_engine=u'InnoDB')
    # copy existing associations into the replacement table
    connection = op.get_bind()
    connection.execute(
        'INSERT INTO _pagebanner_secondary_tmp (nodes_id, files_id) '
        'SELECT nodes_id, files_id '
        'FROM nodes_banners__files'
    )
    op.drop_table(u'nodes_banners__files')
    op.rename_table(u'_pagebanner_secondary_tmp', u'nodes_banners__files')
    ### end Alembic commands ###
def downgrade():
    """Rebuild ``nodes_banners__files`` in its pre-migration shape.

    Same copy-and-swap procedure as upgrade(); the recreated table has no
    primary key and no ``weight`` column (weights are lost on downgrade).
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table(u'_pagebanner_secondary_tmp',
        sa.Column('files_id',
                  sa.Integer,
                  sa.ForeignKey('files.id',
                                onupdate='cascade',
                                ondelete='cascade')),
        sa.Column('nodes_id',
                  sa.Integer,
                  sa.ForeignKey('nodes.id',
                                onupdate='cascade',
                                ondelete='cascade')),
        mysql_engine=u'InnoDB')
    # copy existing associations into the replacement table
    connection = op.get_bind()
    connection.execute(
        'INSERT INTO _pagebanner_secondary_tmp (nodes_id, files_id) '
        'SELECT nodes_id, files_id '
        'FROM nodes_banners__files'
    )
    op.drop_table(u'nodes_banners__files')
    op.rename_table(u'_pagebanner_secondary_tmp', u'nodes_banners__files')
    ### end Alembic commands ###
| {
"repo_name": "asidev/aybu-core",
"path": "aybu/core/models/migrations/versions/2c0bfc379e01_added_pagebanner_map.py",
"copies": "1",
"size": "2746",
"license": "apache-2.0",
"hash": 7783652111655238000,
"line_mean": 37.1388888889,
"line_max": 74,
"alpha_frac": 0.4752367079,
"autogenerated": false,
"ratio": 4.599664991624791,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5574901699524791,
"avg_score": null,
"num_lines": null
} |
"""Added parameters to channels
Revision ID: 1461ccb47bf0
Revises: 37826dbd06d5
Create Date: 2013-08-05 17:40:16.244000
"""
# revision identifiers, used by Alembic.
revision = '1461ccb47bf0'
down_revision = '37826dbd06d5'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add the new broadcast-parameter columns to the channel table."""
    ### commands auto generated by Alembic - please adjust! ###
    # All columns are nullable so existing rows stay valid.
    new_columns = [
        sa.Column('appty_uatype', sa.String(length=6), nullable=True),
        sa.Column('cc', sa.String(length=3), nullable=True),
        sa.Column('ecc_id', sa.Integer(), nullable=True),
        sa.Column('eid', sa.String(length=4), nullable=True),
        sa.Column('fqdn', sa.String(length=255), nullable=True),
        sa.Column('frequency', sa.String(length=5), nullable=True),
        sa.Column('pa', sa.Integer(), nullable=True),
        sa.Column('pi', sa.String(length=4), nullable=True),
        sa.Column('scids', sa.String(length=3), nullable=True),
        sa.Column('serviceIdentifier', sa.String(length=16), nullable=True),
        sa.Column('sid', sa.String(length=8), nullable=True),
        sa.Column('tx', sa.String(length=5), nullable=True),
    ]
    for column in new_columns:
        op.add_column('channel', column)
    ### end Alembic commands ###
def downgrade():
    """Drop the broadcast-parameter columns added by upgrade(), newest first."""
    ### commands auto generated by Alembic - please adjust! ###
    for name in ('tx', 'sid', 'serviceIdentifier', 'scids', 'pi', 'pa',
                 'frequency', 'fqdn', 'eid', 'ecc_id', 'cc', 'appty_uatype'):
        op.drop_column('channel', name)
    ### end Alembic commands ###
| {
"repo_name": "ebu/radiodns-plugit",
"path": "RadioDns-PlugIt/alembic/versions/1461ccb47bf0_added_parameters_to_.py",
"copies": "1",
"size": "1994",
"license": "bsd-3-clause",
"hash": -6997912288694819000,
"line_mean": 40.5416666667,
"line_max": 97,
"alpha_frac": 0.6710130391,
"autogenerated": false,
"ratio": 3.1853035143769968,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4356316553476997,
"avg_score": null,
"num_lines": null
} |
"""Added PI enable option
Revision ID: 10249d62fb55
Revises: 5783809706d2
Create Date: 2015-07-15 18:43:49.238335
"""
# revision identifiers, used by Alembic.
revision = '10249d62fb55'
down_revision = '5783809706d2'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add the radioepgpi_enabled flag plus assorted FKs and indexes."""
    ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): FKs created with a ``None`` name get an auto-generated,
    # backend-dependent name; the paired downgrade() cannot reference them
    # reliably (see note there). Consider giving them explicit names.
    op.create_foreign_key(None, 'channel', 'picture', ['default_picture_id'], ['id'])
    op.create_foreign_key(None, 'channel', 'ecc', ['ecc_id'], ['id'])
    # Indexes on log_entry's timestamp and topic columns.
    op.create_index(op.f('ix_log_entry_reception_timestamp'), 'log_entry', ['reception_timestamp'], unique=False)
    op.create_index(op.f('ix_log_entry_topic'), 'log_entry', ['topic'], unique=False)
    op.create_foreign_key('fk_default_logo_id', 'service_provider', 'logo_image', ['default_logo_image_id'], ['id'])
    op.create_foreign_key(None, 'show', 'station', ['station_id'], ['id'])
    # New nullable flag (tri-state: True/False/unset) plus an index on it.
    op.add_column('station', sa.Column('radioepgpi_enabled', sa.Boolean(), nullable=True))
    op.create_index(op.f('ix_station_radioepgpi_enabled'), 'station', ['radioepgpi_enabled'], unique=False)
    op.create_foreign_key(None, 'station', 'service_provider', ['service_provider_id'], ['id'])
    op.create_foreign_key('fk_epg_default_logo_id', 'station', 'logo_image', ['default_logo_image_id'], ['id'])
    ### end Alembic commands ###
def downgrade():
    """Reverse upgrade(): drop the station flag, indexes and foreign keys."""
    ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): Alembic's op.drop_constraint() requires an explicit
    # constraint name -- the ``None`` names left below by autogenerate will
    # fail at runtime. TODO: fill in the real FK names (they were created
    # unnamed in upgrade(), so the DB assigned backend-specific names).
    op.drop_constraint('fk_epg_default_logo_id', 'station', type_='foreignkey')
    op.drop_constraint(None, 'station', type_='foreignkey')
    op.drop_index(op.f('ix_station_radioepgpi_enabled'), table_name='station')
    op.drop_column('station', 'radioepgpi_enabled')
    op.drop_constraint(None, 'show', type_='foreignkey')
    op.drop_constraint('fk_default_logo_id', 'service_provider', type_='foreignkey')
    op.drop_index(op.f('ix_log_entry_topic'), table_name='log_entry')
    op.drop_index(op.f('ix_log_entry_reception_timestamp'), table_name='log_entry')
    op.drop_constraint(None, 'channel', type_='foreignkey')
    op.drop_constraint(None, 'channel', type_='foreignkey')
    ### end Alembic commands ###
| {
"repo_name": "ebu/radiodns-plugit",
"path": "RadioDns-PlugIt/alembic/versions/10249d62fb55_added_pi_enable_option.py",
"copies": "1",
"size": "2139",
"license": "bsd-3-clause",
"hash": -587128645813764000,
"line_mean": 47.6136363636,
"line_max": 116,
"alpha_frac": 0.6760168303,
"autogenerated": false,
"ratio": 3.168888888888889,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4344905719188889,
"avg_score": null,
"num_lines": null
} |
"""Added pkeys to m2m rel
Revision ID: 18da1d3c685a
Revises: 2637e08f3c15
Create Date: 2012-03-15 22:47:08.646132
"""
# downgrade revision identifier, used by Alembic.
revision = '18da1d3c685a'
down_revision = '2637e08f3c15'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Drop the keywords tables and give the m2m tables composite primary keys."""
    op.drop_table(u'node_infos__keywords')
    op.drop_table(u'keywords')
    # Each association table and the pair of columns forming its new PK.
    # (Listing order is fixed here; the original dict iteration order was
    # unspecified, but each statement targets a different table.)
    m2m_tables = (
        ('node_infos_files__files', (u'files_id', u'node_infos_id')),
        ('node_infos_images__files', (u'files_id', u'node_infos_id')),
        ('node_infos_links__node_infos', (u'links_id', u'inverse_id')),
        ('nodes_banners__files', (u'nodes_id', u'files_id')),
    )
    # PK columns must be NOT NULL before the constraint can be added.
    for table, columns in m2m_tables:
        for column in columns:
            op.alter_column(table, column,
                            existing_type=sa.INTEGER(),
                            nullable=False)
    bind = op.get_bind()
    for table, columns in m2m_tables:
        bind.execute('ALTER TABLE {0} ADD CONSTRAINT {0}_pkey '
                     'PRIMARY KEY ({1}, {2});'.format(table, columns[0],
                                                      columns[1]))
def downgrade():
    """Recreate the keywords tables and make the m2m columns nullable again."""
    op.create_table(u'keywords',
    sa.Column(u'name', sa.VARCHAR(length=64), nullable=False),
    sa.PrimaryKeyConstraint(u'name', name=u'keywords_pkey')
    )
    op.create_table(u'node_infos__keywords',
    sa.Column(u'node_infos_id', sa.INTEGER(), nullable=True),
    sa.Column(u'keyword_name', sa.VARCHAR(length=64), nullable=True),
    sa.ForeignKeyConstraint(['keyword_name'], [u'keywords.name'],
                            name=u'node_infos__keywords_keyword_name_fkey'),
    sa.ForeignKeyConstraint(['node_infos_id'], [u'node_infos.id'],
                            name=u'node_infos__keywords_node_infos_id_fkey'),
    sa.PrimaryKeyConstraint()
    )
    # Relax every former PK column back to nullable, in the original order.
    for table, column in (
            ('node_infos_files__files', u'files_id'),
            ('node_infos_files__files', u'node_infos_id'),
            ('node_infos_images__files', u'files_id'),
            ('node_infos_images__files', u'node_infos_id'),
            ('node_infos_links__node_infos', u'links_id'),
            ('node_infos_links__node_infos', u'inverse_id'),
            ('nodes_banners__files', u'nodes_id'),
            ('nodes_banners__files', u'files_id')):
        op.alter_column(table, column,
                        existing_type=sa.INTEGER(),
                        nullable=True)
| {
"repo_name": "asidev/aybu-core",
"path": "aybu/core/models/migrations/versions/18da1d3c685a_added_pkeys_to_m2m_r.py",
"copies": "1",
"size": "3768",
"license": "apache-2.0",
"hash": 8404451996600571000,
"line_mean": 39.085106383,
"line_max": 77,
"alpha_frac": 0.5774946921,
"autogenerated": false,
"ratio": 3.5681818181818183,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4645676510281818,
"avg_score": null,
"num_lines": null
} |
""" Added --prefilter_percent_id to pick_open_reference_otus.py on August 28, 2014"""
import os
import commands
import sys
import re
import subprocess
import logging
from datetime import datetime
def input_file_help():
    """Print user-facing instructions describing the run/mapping input file.

    The expected file is tab-delimited .txt: column 1 is the MiSeq run folder
    name, column 2 the run's mapping-file name. The text below is printed
    verbatim to the terminal.
    """
    print """
Help me please!!
The input file should be tab-delimited file with .txt extension. The first column should be
folder name of the MiSeq run. The second column should be the name of the mapping file of the run along
with its .txt extension. There should be no trailing white spaces or empty last lines.
Following is how a correct file should be:
140401_M01869_0071_000000000-A7YEF mapping_file_run1.txt
140407_M01869_0073_000000000-A7WVG mapping_file_run2.txt
"""
def input_check(infile):
""" Checks if input file name is entered correctly """
if infile == "":
print "Error: File name not provided!"
mapfile = raw_input("1) Please provide the full name of the input-file (Type help for input-file format): ")
return input_check(mapfile)
elif infile.lower() == "help":
input_file_help()
mapfile = raw_input("1) Please provide the full name of the input-file (Type help for input-file format): ")
return input_check(mapfile)
else:
working_folder = commands.getstatusoutput('pwd')[1]
filelist = os.listdir(working_folder)
if infile not in filelist:
print "Error: File doesn't exist!"
mapfile = raw_input("1) Please provide the full name of the input-file (Type help for input-file format): ")
return input_check(mapfile)
else:
maplist = []
infl = open(infile, 'rU')
for line in infl:
spline = line.strip().split("\t")
if len(spline) != 2:
print "Error: File is not in proper format. There's missing data, no tab-seperation and/or extra empty line(s)."
mapfile = raw_input("1) Please provide the full name of the input-file (Type help for input-file format): ")
return input_check(mapfile)
else:
maplist.append(spline[1])
return maplist, infile # Returns list of mapping files along with name of input file
def mapping_check(maplist):
    """Checks if mapping file name is correct and runs validate_mapping_file.py script """
    # First pass: verify each mapping file exists in the cwd via ``ls``'s exit
    # status; on the first missing file, re-prompt for the whole input file and
    # start over recursively.
    for mapfile in maplist:
        with open(os.devnull, "w") as fnull:
            result = subprocess.call(["ls", mapfile], stdout = fnull, stderr = fnull)
        if result != 0:  # ls returns 0 when the file exists
            print "Error: One or more of your mapfiles is not present in your current working directory"
            mapfile2 = raw_input("1) Please provide the full name of the input-file (Type help for input-file format): ")
            inp_check = input_check(mapfile2)
            return mapping_check(inp_check[0])
    # Second pass: run QIIME's validate_mapping_file.py on each file and move
    # the corrected copy ("<name>_corrected.txt") into the cwd.
    for mapfile in maplist:
        filename = mapfile.strip().split(".txt")[0]
        os.system("validate_mapping_file.py -m %s -o corrected_%s" % (mapfile.strip(),filename))
        os.system("mv $PWD/corrected_%s/%s_corrected.txt ." % (filename,filename))
    # Names of the corrected mapping files now living in the cwd.
    corrected_files = [mapfile.strip().split(".txt")[0]+"_corrected.txt" for mapfile in maplist]
    return corrected_files
def check_value(expression, question, arg):
    """Validate a numeric parameter string; re-prompt with *question* on bad input.

    *arg* selects the expected kind ("integer" or "float"); the validated value
    is returned as a string. An empty answer means "use the default" and is
    passed through untouched.
    """
    try:
        if arg == "integer":
            return str(int(expression))
        if arg == "float":
            return str(float(expression))
    except ValueError:
        if expression == "":
            return expression
        # Bad input: tell the user and ask the same question again.
        print("Invalid value. Please enter a number or just hit enter for default value.")
        retry = raw_input(question)
        return check_value(retry, question, arg)
def log_output(statement):
    """Record *statement* in logging_module_output.txt and echo it to stdout."""
    # basicConfig only takes effect on its first call, so repeating it here is
    # harmless; it keeps the function self-contained.
    logging.basicConfig(filename='logging_module_output.txt', level=logging.INFO)
    logging.info(statement)
    print(statement)
def log_parse(outfile, inputfile):
    """Strip the "INFO:root:" prefix from a log file into a clean text file.

    Lines carrying the prefix are written without it (empty records are
    dropped); all other lines are copied stripped. Returns the (closed)
    output file object, matching the original interface.
    """
    # BUG FIX: the input handle used to leak; both files are now managed by
    # ``with``. The 'rU' mode was replaced by 'r' -- the 'U' flag was removed
    # in Python 3.11 and universal newlines are the default there anyway.
    with open(inputfile, 'r') as infile, open(outfile, "w") as output:
        for line in infile:
            if line.startswith("INFO:root:"):
                payload = line.strip().split("INFO:root:")
                if payload[1] != '':
                    output.write(payload[1] + "\n")
            else:
                output.write(line.strip() + "\n")
    return output
def order_index(flashread, indexfile):
    """Write index (barcode) reads reordered to match assembled-read order.

    Shoko's script as a function. *flashread* contains one FASTQ header per
    line, in FLASh output order; *indexfile* is the FASTQ of index reads.
    The matching 4-line records are written, in *flashread* order, to
    ``Index_filtered_ordered.fastq`` in the current directory.

    Raises KeyError if a header from *flashread* has no record in *indexfile*.
    Returns the (closed) output file object, matching the original interface.
    """
    # BUG FIX: both input handles used to leak; ``with`` closes them. 'rU'
    # was replaced by 'r' (the 'U' flag was removed in Python 3.11).
    with open(flashread, "r") as header_file:
        headers = header_file.read().strip().split("\n")
    index_seqs = {}
    with open(indexfile, "r") as index_data:
        while True:
            header_line = index_data.readline().split("\n")[0]
            if header_line == '':
                break  # EOF
            # A FASTQ record is the header plus three more lines (seq, +, qual).
            record = ''
            for _ in range(3):
                record += index_data.readline().split("\n")[0] + "\n"
            index_seqs[header_line] = record
    # Assemble the output with join instead of a quadratic string +=.
    out_parts = []
    for header in headers:
        out_parts.append(header + "\n" + index_seqs[header])
    of = open("Index_filtered_ordered.fastq", "w")
    of.write(''.join(out_parts))
    of.close()
    return of
def preprocess_steps(seq_data,m_min,read_len,runmap_file):
    """ Unzipping, flashing, pre-processing (pipeline steps 1-5).

    For every MiSeq run listed in *runmap_file* (column 1 = run folder name):
    gunzip the raw reads, assemble R1/R2 with FLASh, extract+reorder the index
    reads to match the assembled reads, then gzip the raw reads back.
    Leaves Index_filtered_ordered_run_<run>.fastq and Output_folder_<run>/ in
    the working directory. NOTE: this chdir()s back and forth between the
    sequencing folder and the working directory -- statement order matters.
    """
    # Empty strings mean "use the default"; += fills the empty string in.
    if m_min == "":
        m_min += "225"
    if read_len == "":
        read_len += "251"
    if seq_data == "":
        seq_data += "/data/MiSeq_16S_data/MiSeqAnalysis"
    log_output("\nRead length for flash: %s" % read_len)
    log_output("Min. overlap for flash: %s" % m_min)
    # Column 1 of the run-map file holds the MiSeq run folder names.
    folders = []
    infile = open(runmap_file, 'rU')
    for line in infile:
        spline = line.strip().split("\t")
        folders.append(spline[0])
    for seqs_id in folders:
        working_folder = commands.getstatusoutput('pwd')[1]
        seq_path = "%s/%s/Data/Intensities/BaseCalls/" % (seq_data,seqs_id)
        os.chdir(seq_path)
        log_output("\n#Step 1: Gunzipping sequence reads files in MiSeqAnalysis folder...")
        os.system("gunzip Undetermined_*")
        log_output("Gunzipping complete!")
        log_output("\n#Step 2: Assembling R1 and R2 using flash...")
        os.system("flash -r %s -f 300 -s 30 -m %s -d $PWD/Output_folder_%s/ -q Undetermined_S0_L001_R1_001.fastq Undetermined_S0_L001_R2_001.fastq" % (read_len, m_min,seqs_id))
        log_output("flash complete!")
        os.system("mv -f Output_folder_%s/ %s" % (seqs_id,working_folder))
        os.chdir(working_folder)
        log_output("\n#Step 3: Removing barcode reads from index file that are not in assembled file...")
        os.system("sed -n '1~4'p $PWD/Output_folder_%s/out.extendedFrags.fastq >FLAShReads.txt" % seqs_id) # Select the headers of all sequences generated. -n flag is for quiet output. '1~4'p means starting from 1, select every 4 lines after it.
        log_output("Barcode removal complete!")
        log_output("\n#Step 4: Extracting those reads from index file and order them the same as flash reads")
        order_index("FLAShReads.txt","%s/Undetermined_S0_L001_I1_001.fastq" % seq_path)
        log_output("Extraction complete!")
        os.chdir(seq_path)
        log_output("\n#Step 5: Gzipping back the sequence files in MiSeqAnalysis folder...")
        os.system("gzip Undetermined_S0_L001_*")
        os.chdir(working_folder)
        # Tag the reordered index file with its run id before the next run
        # overwrites Index_filtered_ordered.fastq.
        os.system("mv Index_filtered_ordered.fastq Index_filtered_ordered_run_%s.fastq" % seqs_id)
        log_output("Gzip complete!")
    return
def split_library(runmap_file,phred,max_bad_run,min_rl_frac,n_chars,barcode,start_seq):
    """ Function to split libraries (pipeline step 6).

    Runs QIIME's split_libraries_fastq.py on each run's assembled reads using
    the run's corrected mapping file, then collects the per-run seqs_<run>.fna
    files under fna_files/. All numeric parameters arrive as strings; an empty
    string selects the documented default.
    """
    if phred == "":
        phred += "30"
    if max_bad_run == "":
        max_bad_run += "3"
    if min_rl_frac == "":
        min_rl_frac += "0.75"
    if n_chars == "":
        n_chars += "0"
    if barcode == "":
        barcode += "12"
    if start_seq == "":
        start_seq += "0"
    # Record the effective parameters in the run log.
    log_output("Phred score: %s" % phred)
    log_output("Max number of consecutive low quality base calls allowed before truncating a read: %s" % max_bad_run)
    log_output("Min number of consecutive high quality base calls to include a \
read (per single end read) as a fraction of the input read length: %s" % min_rl_frac)
    log_output("Max number of N characters allowed in a sequence to retain it: %s" % n_chars)
    log_output("The type of barcode used: %s" % barcode)
    log_output("The start seq_ids as ascending integers beginning with start_seq_id: %s" % start_seq)
    os.system("mkdir fna_files/")
    run_map_dict = {}
    infile = open(runmap_file, 'rU')
    for line in infile:
        spline = line.strip().split("\t")
        run_map_dict[spline[0]] = spline[1].strip().split(".txt")[0]+"_corrected"+".txt" #Run IDs as keys and mapping filenames as values
    for fold_id in run_map_dict:
        folder = "Output_folder_"+fold_id
        mapfile = run_map_dict[fold_id]
        log_output("\n#Step 6: Splitting libraries using 'split_libraries_fastq.py'...")
        os.system('split_libraries_fastq.py -i %s/out.extendedFrags.fastq -m %s \
                   -o split_lib_output_%s/ -q %s -r %s -p %s -n %s \
                   --rev_comp_barcode -b Index_filtered_ordered_run_%s.fastq \
                   --barcode_type %s -s %s' % (folder,mapfile,fold_id,phred,max_bad_run,min_rl_frac,n_chars,fold_id,barcode,start_seq))
        log_output("split_libraries_fastq.py complete!")
        os.system("mv $PWD/split_lib_output_%s/seqs.fna seqs_%s.fna" % (fold_id,fold_id))
        os.system("mv seqs_%s.fna fna_files/" % fold_id)
    return
def open_otus_till_biom(parallel,ref_db,prefilt_id):
    """ Open OTU picking and other steps (pipeline steps 7-13).

    Concatenates the per-run .fna files, picks open-reference OTUs, aligns
    representatives, builds and chimera-filters the OTU table, rebuilds the
    phylogenetic tree and writes a biom summary. Each step shells out to a
    QIIME command; timing comments record observed wall-clock durations.
    """
    if parallel == "":
        parallel += "4"
    if ref_db == "":
        ref_db += "/data/Greengenes_Database_May_2013/gg_13_5_otus/rep_set/97_otus.fasta"
    if prefilt_id == "":
        prefilt_id = "0.6"
    # Pool the per-run reads into one input file for OTU picking.
    os.system("cat fna_files/*.fna > fna_files/seqs_cat.fna")
    log_output("\n#Step 7: Picking open-references OTUs using 'pick_open_reference_otus.py'...")
    os.system("pick_open_reference_otus.py -i fna_files/seqs_cat.fna -o open_otus_picked/ -aO %s -r %s --prefilter_percent_id %s" % (parallel,ref_db,prefilt_id)) #4:57:19.015901
    # NOTE(review): this message is printed unconditionally, errors or not.
    log_output("OTU picking caused errors, but we'll be able to proceed!")
    os.system('cp /opt/qiime_software/core_set_aligned.fasta.imputed $PWD')
    os.system('mv core_set_aligned.fasta.imputed core_set_aligned_imputed.fasta')
    log_output("\n#Step 8.0: Aligning sequences to template using 'parallel_align_seqs_pynast.py'...")
    os.system('parallel_align_seqs_pynast.py -i open_otus_picked/rep_set.fna -o open_otus_picked/pynast_aligned_seqs \
               -t $PWD/core_set_aligned_imputed.fasta --jobs_to_start %s' % parallel) #0:10:03.814598
    log_output("parallel_align_seqs_pynast.py complete!")
    log_output("\n#Step 8.1: Making OTU table by filtering alignment to remove sequences that did not align using 'make_otu_table.py'...")
    os.system('make_otu_table.py -i $PWD/open_otus_picked/final_otu_map_mc2.txt -o $PWD/open_otus_picked/otu_table_mc2_no_pynast_failures_w_tax.biom \
               -e $PWD/open_otus_picked/pynast_aligned_seqs/rep_set_failures.fasta -t $PWD/open_otus_picked/uclust_assigned_taxonomy/rep_set_tax_assignments.txt') #0:00:17.171876
    log_output("make_otu_table.py complete!")
    log_output("\n#Step 8.2: Identifying chimeric sequences using 'parallel_identify_chimeric_seqs.py'...")
    os.system('parallel_identify_chimeric_seqs.py -i $PWD/open_otus_picked/pynast_aligned_seqs/rep_set_aligned.fasta -a $PWD/core_set_aligned_imputed.fasta \
               -m ChimeraSlayer -o $PWD/chimeraslayer_chimeric_seqs.txt -O %s' % parallel) # 5:23:27.544627 # -m flag does not have uchiime
    log_output("parallel_identify_chimeric_seqs.py complete!")
    log_output("\n#Step 9: Filtering chimeric sequences out of the alignment file using 'filter_fasta.py'...")
    os.system('filter_fasta.py -f $PWD/open_otus_picked/pynast_aligned_seqs/rep_set_aligned.fasta -o $PWD/non_chimeric_rep_set_aligned.fasta \
               -s $PWD/chimeraslayer_chimeric_seqs.txt -n') #0:00:01.763882
    log_output("filter_fasta.py complete!")
    log_output("\n#Step 10: Filtering non_chimeric_rep_set_aligned.fasta to remove gaps using 'filter_alignment.py'...")
    os.system('filter_alignment.py -i $PWD/non_chimeric_rep_set_aligned.fasta -m /opt/qiime_software/lanemask_in_1s_and_0s \
               -o $PWD/non_chimeric_pynast_filtered/') #0:00:04.075475
    log_output("filter_alignment.py complete!")
    log_output("\n#Step 11: Building new phylogenetic tree using 'make_phylogeny.py'...")
    os.system('make_phylogeny.py -i $PWD/non_chimeric_pynast_filtered/non_chimeric_rep_set_aligned_pfiltered.fasta \
               -o non_chimeric_rep_set_aligned_pfiltered.tre') #0:07:56.575952
    log_output("make_phylogeny.py complete!")
    log_output("\n#Step 12: Filtering chimeric OTUs from the OTU table using 'filter_otus_from_otu_table.py'...")
    os.system('filter_otus_from_otu_table.py -i $PWD/open_otus_picked/otu_table_mc2_no_pynast_failures_w_tax.biom \
               -o otu_table_mc2_no_pynast_failures_no_chimeras_w_tax.biom -e chimeraslayer_chimeric_seqs.txt') #0:00:15.945285
    log_output("filter_otus_from_otu_table.py complete!")
    log_output("\n#Step 13: Writing biom table summary using 'biom summarize-table'...")
    os.system('biom summarize-table -i otu_table_mc2_no_pynast_failures_no_chimeras_w_tax.biom \
               -o otu_table_mc2_no_pynast_failures_no_chimeras_lowfilter_w_tax_biom_summary_mc2.txt') #0:00:01.602008
    log_output("biom summarize-table complete!")
    return
def summary_view(viewtable):
    """ Function to show biom summary table.

    'yes' (case-insensitive) pages the biom summary through ``less``; any
    other answer re-prompts recursively -- there is deliberately no way to
    decline viewing ("No is not an option!").
    """
    if viewtable.lower() == 'yes':
        os.system('less otu_table_mc2_no_pynast_failures_no_chimeras_lowfilter_w_tax_biom_summary_mc2.txt')
    elif viewtable.lower() == 'no':
        print "No is not an option!"
        table = raw_input("The summary table of the final OTU table is ready. Type 'yes' to view it. \
Once viewed, you can quit by simply typing q. Are you ready? ")
        return summary_view(table)
    else:
        print "I don't understand."
        table = raw_input("The summary table of the final OTU table is ready. Type 'yes' to view it. \
Once viewed, you can quit by simply typing q. Are you ready? ")
        return summary_view(table)
def rarefaction_check(depth):
    """Coerce *depth* to a whole-number string, re-prompting until it is numeric.

    Accepts decimal input ("100.7" becomes "100"); an empty or non-numeric
    answer prints the matching error and asks again recursively.
    """
    try:
        return str(int(float(depth)))
    except ValueError:
        if depth == "":
            print("No number of sequences provided to subsample for rarefaction.")
        else:
            print("Non-integer value given for number of sequences to subsample for rarefaction.")
        answer = raw_input("1) What is the number of sequences to subsample per sample [-d flag]? (No default): ")
        return rarefaction_check(answer)
def summary_plots(depth,merge_metadata):
    """ Create alpha, beta and taxa summary plots (pipeline steps 14-17).

    Rarefies the OTU table once to *depth* sequences per sample, then builds
    taxa summaries and alpha/beta diversity output from the rarefied table
    using the tree produced earlier and the merged mapping file
    *merge_metadata*.
    """
    log_output("\n#Step 14: Performing single rarefaction on OTU table using 'single_rarefaction.py'...")
    os.system('single_rarefaction.py -i otu_table_mc2_no_pynast_failures_no_chimeras_w_tax.biom -o single_rarefied_otu_table.biom -d %s' % depth)
    log_output("single_rarefaction.py complete!")
    log_output("\n#Step 15: Summarizing and plotting taxa using 'summarize_taxa_through_plots.py'...")
    os.system('summarize_taxa_through_plots.py -o taxa_summary -i single_rarefied_otu_table.biom -m %s' % merge_metadata)
    log_output("summarize_taxa_through_plots.py complete!")
    log_output("\n#Step 16: Calculating alpha-diversity using 'alpha_rarefaction.py'...")
    os.system('alpha_rarefaction.py -i single_rarefied_otu_table.biom -o alpha_rarefaction/ -t non_chimeric_rep_set_aligned_pfiltered.tre \
               -m %s --retain_intermediate_files' % merge_metadata)
    log_output("alpha_rarefaction.py complete!")
    log_output("\n#Step 17: Calculating beta-diversity using 'beta_diversity_through_plots.py'...")
    os.system('beta_diversity_through_plots.py -i single_rarefied_otu_table.biom -o beta_diversity/ -t non_chimeric_rep_set_aligned_pfiltered.tre \
               -m %s' % merge_metadata)
    log_output("beta_diversity_through_plots.py complete!")
    return
if __name__ == "__main__":
    # Interactive entry point: collect every pipeline parameter up front, then
    # run the full QIIME workflow end to end. An empty answer selects the
    # documented default; Ctrl+C aborts cleanly at any prompt.
    print "\n\t\t\t\033[1mWelcome to the Microbiome Analysis through Workflow of QIIME, MAWQ program (pronounced 'mock') brought to you by the Lynch Lab!\033[0m"
    print "\tTo run the script with default parameters, just press enter to each question without entering a value. To \
exit the pipeline at any point in time, press Ctrl+C\n\n"
    try:
        inputfile = raw_input("1) Please provide the full name of the input-file (Type help for input-file format): ")
        checked = input_check(inputfile)
        inputfile = checked[1]
        mapping_check(checked[0])
        seq_data = raw_input("1) What's the path to the MiSeq run folder? (Default: /data/MiSeq_16S_data/MiSeqAnalysis) ")
        # --- FLASh parameters ---
        print "\nThe following questions are for flash program: \n"
        flash_q1 = "1) What's the minimum overlap length between reads [-m flag]? (Default: 225, if length of Read 2 > 250) "
        m_min = check_value(raw_input(flash_q1),flash_q1,"integer")
        flash_q2 = "2) What's the read length [-r flag]? (Default: 251) "
        read_len = check_value(raw_input(flash_q2),flash_q2,"integer")
        # --- split_libraries_fastq.py parameters ---
        print "\nThe following questions are for split_libraries_fastq.py script: \n"
        split_q1 = "1) What's the maximum unacceptable Phred quality score [-q flag]? (Default: 30) "
        phred = check_value(raw_input(split_q1),split_q1,"integer")
        split_q2 = "2) What's the max number of consecutive low quality base calls allowed before truncating a read [-r flag]? (Default: 3) "
        max_bad_run = check_value(raw_input(split_q2),split_q2,"integer")
        split_q3 = "3) What's the min number of consecutive high quality base calls to include a read (per single end read) as a fraction \
of the input read length [-p flag]? (Default: 0.75) "
        min_rl_frac = check_value(raw_input(split_q3),split_q3,"float")
        split_q4 = "4) What's the max number of N characters allowed in a sequence to retain it [-n flag]? (Default: 0) "
        n_chars = check_value(raw_input(split_q4),split_q4,"integer")
        split_q5 = "5) What's the type of barcode used [--barcode_type flag]? (Default: 12) "
        barcode = check_value(raw_input(split_q5),split_q5,"integer")
        split_q6 = "6) What's the start seq_ids as ascending integers beginning with start_seq_id [-s flag]? (Default: 0) "
        start_seq = check_value(raw_input(split_q6),split_q6,"integer")
        # --- pick_open_reference_otus.py parameters ---
        print "\nThe following questions are for pick_open_reference_otus.py script: \n"
        otupick_q1 = "1) How many jobs do you wish to run in parallel? (Default: 4) "
        parallel = check_value(raw_input(otupick_q1),otupick_q1,"integer")
        ref_db = raw_input("2) What's the full path to the reference database? \
(Default: /data/Greengenes_Database_May_2013/gg_13_5_otus/rep_set/97_otus.fasta) ")
        # NOTE(review): the prompt below is mis-numbered "2)" (should be "3)").
        prefilt_id = raw_input("2) What's the prefilter_percent_id for sequences to cluster [pass 0.0 to disable]? \
(Default: 0.6) ")
        startTime = datetime.now()
        # --- Run the pipeline stages in order ---
        preprocess_steps(seq_data,m_min,read_len,inputfile)
        split_library(inputfile,phred,max_bad_run,min_rl_frac,n_chars,barcode,start_seq) #0:29:38.925890
        open_otus_till_biom(parallel,ref_db,prefilt_id)
        viewtable = raw_input("The summary table of the final OTU table is ready. Type 'yes' to view it. \
Once viewed, you can quit by simply typing 'q'. Are you ready? ")
        summary_view(viewtable)
        print "\nThe following question is for 'single_rarefaction.py' script: \n"
        depth = raw_input("1) What is the number of sequences to subsample per sample [-d flag]? (No default): ")
        depth = rarefaction_check(depth)
        print "\nThe following question is for 'summarize_taxa_through_plots.py', 'alpha_rarefaction.py', \
and 'beta_diversity_through_plots.py' script: \n"
        merge_metadata = raw_input("1) What is the name of the final mapping data file for all runs [-m flag]? (No default): ")
        merge_metadata_checked = mapping_check([merge_metadata])
        summary_plots(depth,merge_metadata_checked[0])
        # Clean the raw log into wrapper_log_file.txt and remove the scratch log.
        log_parse("wrapper_log_file.txt","logging_module_output.txt")
        os.system('rm logging_module_output.txt')
        print "\n"+"Task Completed! Time it took to complete the task: "+ str(datetime.now()-startTime) #11:42:32.735675
    except KeyboardInterrupt:
        print "\n\nThanks for using (or attempting to use) the pipeline. Good-bye!\n"
"repo_name": "alifar76/MAWQ",
"path": "src/mawq_miseq_localhost.py",
"copies": "1",
"size": "19242",
"license": "mit",
"hash": 1693002575287560000,
"line_mean": 48.5953608247,
"line_max": 240,
"alpha_frac": 0.7088140526,
"autogenerated": false,
"ratio": 2.870654930628077,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4079468983228077,
"avg_score": null,
"num_lines": null
} |
"""Added query_index to ExperimentThing
Revision ID: e566e8e209d1
Revises: 879b6b345bd4
Create Date: 2017-07-29 19:57:15.738850
"""
# revision identifiers, used by Alembic.
revision = 'e566e8e209d1'
down_revision = '879b6b345bd4'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade, e.g. upgrade_production()."""
    handler = globals()["upgrade_%s" % engine_name]
    handler()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade, e.g. downgrade_production()."""
    handler = globals()["downgrade_%s" % engine_name]
    handler()
def upgrade_development():
    """Add query_index column and lookup indexes (development database)."""
    op.add_column('experiment_things', sa.Column('query_index', sa.String(length=256), nullable=True))
    for col in ('object_created', 'query_index'):
        op.create_index(op.f('ix_experiment_things_' + col), 'experiment_things', [col], unique=False)
def downgrade_development():
    """Reverse upgrade_development(): drop the indexes, then the column."""
    for col in ('query_index', 'object_created'):
        op.drop_index(op.f('ix_experiment_things_' + col), table_name='experiment_things')
    op.drop_column('experiment_things', 'query_index')
def upgrade_test():
    """Add query_index column and lookup indexes (test database)."""
    op.add_column('experiment_things', sa.Column('query_index', sa.String(length=256), nullable=True))
    for col in ('object_created', 'query_index'):
        op.create_index(op.f('ix_experiment_things_' + col), 'experiment_things', [col], unique=False)
def downgrade_test():
    """Reverse upgrade_test(): drop the indexes, then the column."""
    for col in ('query_index', 'object_created'):
        op.drop_index(op.f('ix_experiment_things_' + col), table_name='experiment_things')
    op.drop_column('experiment_things', 'query_index')
def upgrade_production():
    """Add query_index column and lookup indexes (production database)."""
    op.add_column('experiment_things', sa.Column('query_index', sa.String(length=256), nullable=True))
    for col in ('object_created', 'query_index'):
        op.create_index(op.f('ix_experiment_things_' + col), 'experiment_things', [col], unique=False)
def downgrade_production():
    """Reverse upgrade_production(): drop the indexes, then the column."""
    for col in ('query_index', 'object_created'):
        op.drop_index(op.f('ix_experiment_things_' + col), table_name='experiment_things')
    op.drop_column('experiment_things', 'query_index')
| {
"repo_name": "c4fcm/CivilServant",
"path": "alembic/versions/e566e8e209d1_added_query_index_to_experimentthing.py",
"copies": "1",
"size": "2955",
"license": "mit",
"hash": -1962591241540579800,
"line_mean": 37.8815789474,
"line_max": 119,
"alpha_frac": 0.6998307953,
"autogenerated": false,
"ratio": 3.484669811320755,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46845006066207545,
"avg_score": null,
"num_lines": null
} |
"""Added reactions
Revision ID: 28182dc2d089
Revises: None
Create Date: 2014-01-03 10:06:20.172703
"""
# revision identifiers, used by Alembic.
revision = '28182dc2d089'
down_revision = '4e953e8c6a7e'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.exc import OperationalError
def upgrade():
    """Create the reactions table linking reactant/product metabolites."""
    try:
        op.create_table(
            'reactions',
            sa.Column('reactid', sa.Integer(), nullable=False),
            sa.Column('reactant', sa.Integer(), nullable=True),
            sa.Column('product', sa.Integer(), nullable=True),
            sa.Column('name', sa.Unicode(), nullable=True),
            sa.ForeignKeyConstraint(['product'], ['metabolites.metid'], ),
            sa.ForeignKeyConstraint(['reactant'], ['metabolites.metid'], ),
            sa.PrimaryKeyConstraint('reactid'),
        )
    except OperationalError as e:
        # Best-effort: log the failure (e.g. table already exists) and continue.
        print(e)
def downgrade():
    """Remove the reactions table (reverse of upgrade)."""
    op.drop_table('reactions')
| {
"repo_name": "NLeSC/MAGMa",
"path": "web/alembic/versions/28182dc2d089_added_reactions.py",
"copies": "1",
"size": "1083",
"license": "apache-2.0",
"hash": -4848831810070612000,
"line_mean": 28.2702702703,
"line_max": 71,
"alpha_frac": 0.6574330563,
"autogenerated": false,
"ratio": 3.5860927152317883,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47435257715317886,
"avg_score": null,
"num_lines": null
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.