response
stringlengths
1
33.1k
instruction
stringlengths
22
582k
Normalize a provided label to be of valid length and characters. Valid label values must be 63 characters or less and must be empty or begin and end with an alphanumeric character ([a-z0-9A-Z]) with dashes (-), underscores (_), dots (.), and alphanumerics between. If the label value is greater than 63 chars once made...
def make_safe_label_value(string: str) -> str: """ Normalize a provided label to be of valid length and characters. Valid label values must be 63 characters or less and must be empty or begin and end with an alphanumeric character ([a-z0-9A-Z]) with dashes (-), underscores (_), dots (.), and alphan...
Transform a datetime string to use as a label. Kubernetes doesn't like ":" in labels, since ISO datetime format uses ":" but not "_" let's replace ":" with "_" :param datetime_obj: datetime.datetime object :return: ISO-like string representing the datetime
def datetime_to_label_safe_datestring(datetime_obj: datetime.datetime) -> str: """ Transform a datetime string to use as a label. Kubernetes doesn't like ":" in labels, since ISO datetime format uses ":" but not "_" let's replace ":" with "_" :param datetime_obj: datetime.datetime object :...
Transform a label back to a datetime object. Kubernetes doesn't permit ":" in labels. ISO datetime format uses ":" but not "_", let's replace ":" with "_" :param string: str :return: datetime.datetime object
def label_safe_datestring_to_datetime(string: str) -> datetime.datetime: """ Transform a label back to a datetime object. Kubernetes doesn't permit ":" in labels. ISO datetime format uses ":" but not "_", let's replace ":" with "_" :param string: str :return: datetime.datetime object "...
Merge objects. :param base_obj: has the base attributes which are overwritten if they exist in the client_obj and remain if they do not exist in the client_obj :param client_obj: the object that the client wants to create. :return: the merged objects
def merge_objects(base_obj, client_obj): """ Merge objects. :param base_obj: has the base attributes which are overwritten if they exist in the client_obj and remain if they do not exist in the client_obj :param client_obj: the object that the client wants to create. :return: the merged obj...
Add field values to existing objects. :param base_obj: an object which has a property `field_name` that is a list :param client_obj: an object which has a property `field_name` that is a list. A copy of this object is returned with `field_name` modified :param field_name: the name of the list field :return: the cl...
def extend_object_field(base_obj, client_obj, field_name): """ Add field values to existing objects. :param base_obj: an object which has a property `field_name` that is a list :param client_obj: an object which has a property `field_name` that is a list. A copy of this object is returned with ...
Normalize a provided label to be of valid length and characters. Valid label values must be 63 characters or less and must be empty or begin and end with an alphanumeric character ([a-z0-9A-Z]) with dashes (-), underscores (_), dots (.), and alphanumerics between. If the label value is greater than 63 chars once made...
def make_safe_label_value(string): """ Normalize a provided label to be of valid length and characters. Valid label values must be 63 characters or less and must be empty or begin and end with an alphanumeric character ([a-z0-9A-Z]) with dashes (-), underscores (_), dots (.), and alphanumerics betw...
Get default value when None.
def default_if_none(arg: bool | None) -> bool: """Get default value when None.""" return arg or False
Get the lineage backend if defined in the configs.
def get_backend() -> LineageBackend | None: """Get the lineage backend if defined in the configs.""" clazz = conf.getimport("lineage", "backend", fallback=None) if clazz: if not issubclass(clazz, LineageBackend): raise TypeError( f"Your custom Lineage class `{clazz.__na...
Conditionally send lineage to the backend. Saves the lineage to XCom and if configured to do so sends it to the backend.
def apply_lineage(func: T) -> T: """ Conditionally send lineage to the backend. Saves the lineage to XCom and if configured to do so sends it to the backend. """ _backend = get_backend() @wraps(func) def wrapper(self, context, *args, **kwargs): self.log.debug("Lineage called wi...
Prepare the lineage inlets and outlets. Inlets can be: * "auto" -> picks up any outlets from direct upstream tasks that have outlets defined, as such that if A -> B -> C and B does not have outlets but A does, these are provided as inlets. * "list of task_ids" -> picks up outlets from the upstream task_ids * "list ...
def prepare_lineage(func: T) -> T: """ Prepare the lineage inlets and outlets. Inlets can be: * "auto" -> picks up any outlets from direct upstream tasks that have outlets defined, as such that if A -> B -> C and B does not have outlets but A does, these are provided as inlets. * "list of ta...
Get singleton listener manager.
def get_listener_manager() -> ListenerManager:
    """Return the process-wide ``ListenerManager`` singleton, creating it lazily on first use."""
    global _listener_manager
    if _listener_manager:
        return _listener_manager
    # First call: build the manager and wire in listeners contributed by plugins.
    _listener_manager = ListenerManager()
    integrate_listener_plugins(_listener_manager)
    return _listener_manager
Execute when dag run state changes to RUNNING.
def on_dag_run_running(dag_run: DagRun, msg: str):
    """Execute when dag run state changes to RUNNING."""
    # Intentionally empty: appears to be a listener hook specification whose
    # implementations are supplied by plugins — TODO confirm.
Execute when dag run state changes to SUCCESS.
def on_dag_run_success(dag_run: DagRun, msg: str):
    """Execute when dag run state changes to SUCCESS."""
    # Intentionally empty: appears to be a listener hook specification whose
    # implementations are supplied by plugins — TODO confirm.
Execute when dag run state changes to FAIL.
def on_dag_run_failed(dag_run: DagRun, msg: str):
    """Execute when dag run state changes to FAIL."""
    # Intentionally empty: appears to be a listener hook specification whose
    # implementations are supplied by plugins — TODO confirm.
Execute when a new dataset is created.
def on_dataset_created(
    dataset: Dataset,
):
    """Execute when a new dataset is created."""
    # Intentionally empty: appears to be a listener hook specification whose
    # implementations are supplied by plugins — TODO confirm.
Execute when dataset change is registered.
def on_dataset_changed(
    dataset: Dataset,
):
    """Execute when dataset change is registered."""
    # Intentionally empty: appears to be a listener hook specification whose
    # implementations are supplied by plugins — TODO confirm.
Execute before Airflow component - jobs like scheduler, worker, or task runner starts. It's guaranteed this will be called before any other plugin method. :param component: Component that calls this method
def on_starting(component):
    """
    Execute before an Airflow component — jobs like scheduler, worker, or task runner — starts.

    It's guaranteed this will be called before any other plugin method.

    :param component: Component that calls this method
    """
    # Intentionally empty: appears to be a listener hook specification whose
    # implementations are supplied by plugins — TODO confirm.
Execute before Airflow component - jobs like scheduler, worker, or task runner stops. It's guaranteed this will be called after any other plugin method. :param component: Component that calls this method
def before_stopping(component):
    """
    Execute before an Airflow component — jobs like scheduler, worker, or task runner — stops.

    It's guaranteed this will be called after any other plugin method.

    :param component: Component that calls this method
    """
    # Intentionally empty: appears to be a listener hook specification whose
    # implementations are supplied by plugins — TODO confirm.
Execute when task state changes to RUNNING. previous_state can be None.
def on_task_instance_running(
    previous_state: TaskInstanceState | None, task_instance: TaskInstance, session: Session | None
):
    """Execute when task state changes to RUNNING. previous_state can be None."""
    # Intentionally empty: appears to be a listener hook specification whose
    # implementations are supplied by plugins — TODO confirm.
Execute when task state changes to SUCCESS. previous_state can be None.
def on_task_instance_success(
    previous_state: TaskInstanceState | None, task_instance: TaskInstance, session: Session | None
):
    """Execute when task state changes to SUCCESS. previous_state can be None."""
    # Intentionally empty: appears to be a listener hook specification whose
    # implementations are supplied by plugins — TODO confirm.
Execute when task state changes to FAIL. previous_state can be None.
def on_task_instance_failed(
    previous_state: TaskInstanceState | None,
    task_instance: TaskInstance,
    error: None | str | BaseException,
    session: Session | None,
):
    """Execute when task state changes to FAIL. previous_state can be None."""
    # Intentionally empty: appears to be a listener hook specification whose
    # implementations are supplied by plugins — TODO confirm.
Add or subtract days from a YYYY-MM-DD. :param ds: anchor date in ``YYYY-MM-DD`` format to add to :param days: number of days to add to the ds, you can use negative values >>> ds_add("2015-01-01", 5) '2015-01-06' >>> ds_add("2015-01-06", -5) '2015-01-01'
def ds_add(ds: str, days: int) -> str: """ Add or subtract days from a YYYY-MM-DD. :param ds: anchor date in ``YYYY-MM-DD`` format to add to :param days: number of days to add to the ds, you can use negative values >>> ds_add("2015-01-01", 5) '2015-01-06' >>> ds_add("2015-01-06", -5) '...
Output datetime string in a given format. :param ds: input string which contains a date :param input_format: input string format. E.g. %Y-%m-%d :param output_format: output string format E.g. %Y-%m-%d >>> ds_format("2015-01-01", "%Y-%m-%d", "%m-%d-%y") '01-01-15' >>> ds_format("1/5/2015", "%m/%d/%Y", "%Y-%m-%d") '20...
def ds_format(ds: str, input_format: str, output_format: str) -> str: """ Output datetime string in a given format. :param ds: input string which contains a date :param input_format: input string format. E.g. %Y-%m-%d :param output_format: output string format E.g. %Y-%m-%d >>> ds_format("201...
Return a human-readable/approximate difference between datetimes. When only one datetime is provided, the comparison will be based on now. :param dt: The datetime to display the diff for :param since: When to display the date from. If ``None`` then the diff is between ``dt`` and now.
def datetime_diff_for_humans(dt: Any, since: DateTime | None = None) -> str: """ Return a human-readable/approximate difference between datetimes. When only one datetime is provided, the comparison will be based on now. :param dt: The datetime to display the diff for :param since: When to display ...
Get DataDog StatsD logger.
def get_dogstatsd_logger(cls) -> SafeDogStatsdLogger: """Get DataDog StatsD logger.""" from datadog import DogStatsd metrics_validator: ListValidator dogstatsd = DogStatsd( host=conf.get("metrics", "statsd_host"), port=conf.getint("metrics", "statsd_port"), namespace=conf.get("...
Assembles the prefix, delimiter, and name and returns it as a string.
def full_name(name: str, *, prefix: str = DEFAULT_METRIC_NAME_PREFIX) -> str:
    """Join *prefix* and *name* with the standard metric-name delimiter and return the result."""
    return prefix + DEFAULT_METRIC_NAME_DELIMITER + name
Return True if the provided name and prefix would result in a name that meets the OpenTelemetry standard. Legal names are defined here: https://opentelemetry.io/docs/reference/specification/metrics/api/#instrument-name-syntax
def name_is_otel_safe(prefix: str, name: str) -> bool: """ Return True if the provided name and prefix would result in a name that meets the OpenTelemetry standard. Legal names are defined here: https://opentelemetry.io/docs/reference/specification/metrics/api/#instrument-name-syntax """ return...
Given an OpenTelemetry Instrument, returns the type of the instrument as a string. :param obj: An OTel Instrument or subclass :returns: The type() of the Instrument without all the nested class info
def _type_as_str(obj: Instrument) -> str: """ Given an OpenTelemetry Instrument, returns the type of the instrument as a string. :param obj: An OTel Instrument or subclass :returns: The type() of the Instrument without all the nested class info """ # type().__name__ will return something like: ...
Verify that the provided name does not exceed OpenTelemetry's maximum length for metric names. :param name: The original metric name :returns: The name, truncated to an OTel-acceptable length if required.
def _get_otel_safe_name(name: str) -> str: """ Verify that the provided name does not exceed OpenTelemetry's maximum length for metric names. :param name: The original metric name :returns: The name, truncated to an OTel-acceptable length if required. """ otel_safe_name = name[:OTEL_NAME_MAX_LE...
Add tags to stat with influxdb standard format if influxdb_tags_enabled is True.
def prepare_stat_with_tags(fn: T) -> T: """Add tags to stat with influxdb standard format if influxdb_tags_enabled is True.""" @wraps(fn) def wrapper( self, stat: str | None = None, *args, tags: dict[str, str] | None = None, **kwargs ) -> Callable[[str], str]: if self.influxdb_tags_enab...
Return logger for StatsD.
def get_statsd_logger(cls) -> SafeStatsdLogger: """Return logger for StatsD.""" # no need to check for the scheduler/statsd_on -> this method is only called when it is set # and previously it would crash with None is callable if it was called without it. from statsd import StatsClient stats_class =...
Check if stat name contains invalid characters; logs and does not emit stats if name is invalid.
def validate_stat(fn: Callable) -> Callable: """Check if stat name contains invalid characters; logs and does not emit stats if name is invalid.""" @wraps(fn) def wrapper(self, stat: str | None = None, *args, **kwargs) -> Callable | None: try: if stat is not None: handle...
Verify that a proposed prefix and name combination will meet OpenTelemetry naming standards. See: https://opentelemetry.io/docs/reference/specification/metrics/api/#instrument-name-syntax :param stat_prefix: The proposed prefix applied to all metric names. :param stat_name: The proposed name. :param max_length: The m...
def stat_name_otel_handler( stat_prefix: str, stat_name: str, max_length: int = OTEL_NAME_MAX_LENGTH, ) -> str: """ Verify that a proposed prefix and name combination will meet OpenTelemetry naming standards. See: https://opentelemetry.io/docs/reference/specification/metrics/api/#instrument-nam...
Validate the metric stat name. Apply changes when necessary and return the transformed stat name.
def stat_name_default_handler( stat_name: str, max_length: int = 250, allowed_chars: Iterable[str] = ALLOWED_CHARACTERS ) -> str: """ Validate the metric stat name. Apply changes when necessary and return the transformed stat name. """ if not isinstance(stat_name, str): raise InvalidSta...
Get Stat Name Handler from airflow.cfg.
def get_current_handler_stat_name_func() -> Callable[[str], str]: """Get Stat Name Handler from airflow.cfg.""" handler = conf.getimport("metrics", "stat_name_handler") if handler is None: if conf.get("metrics", "statsd_influxdb_enabled", fallback=False): handler = partial(stat_name_def...
Filter objects for autogenerating revisions.
def include_object(_, name, type_, *args):
    """Filter objects for autogenerating revisions."""
    # Exclude Celery's bookkeeping tables and Flask-Session's "session" table
    # from autogenerated revisions; everything else is kept.
    is_excluded_table = type_ == "table" and (name == "session" or name.startswith("celery_"))
    return not is_excluded_table
Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output.
def run_migrations_offline(): """Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the g...
Run migrations in 'online' mode. In this scenario we need to create an Engine and associate a connection with the context.
def run_migrations_online(): """Run migrations in 'online' mode. In this scenario we need to create an Engine and associate a connection with the context. """ with contextlib.ExitStack() as stack: connection = config.attributes.get("connection", None) if not connection: ...
Return the primary and unique constraint along with column name. Some tables like `task_instance` are missing the primary key constraint name and the name is auto-generated by the SQL server, so this function helps to retrieve any primary or unique constraint name. :param conn: sql connection object :param table_name...
def get_mssql_table_constraints(conn, table_name) -> dict[str, dict[str, list[str]]]: """ Return the primary and unique constraint along with column name. Some tables like `task_instance` are missing the primary key constraint name and the name is auto-generated by the SQL server, so this function ...
Create Index.
def upgrade():
    """Create the composite XCom lookup index."""
    indexed_columns = ["dag_id", "task_id", "execution_date"]
    op.create_index("idx_xcom_dag_task_date", "xcom", indexed_columns, unique=False)
Drop Index.
def downgrade():
    """Drop the composite XCom lookup index."""
    index_name = "idx_xcom_dag_task_date"
    op.drop_index(index_name, table_name="xcom")
Add pid column to task_instance table.
def upgrade():
    """Add the ``pid`` column to the ``task_instance`` table."""
    pid_column = sa.Column("pid", sa.Integer)
    op.add_column("task_instance", pid_column)
Drop pid column from task_instance table.
def downgrade():
    """Remove the ``pid`` column from the ``task_instance`` table."""
    table = "task_instance"
    op.drop_column(table, "pid")
Fix broken foreign-key constraint for existing SQLite DBs.
def upgrade(): """Fix broken foreign-key constraint for existing SQLite DBs.""" conn = op.get_bind() if conn.dialect.name == "sqlite": # Fix broken foreign-key constraint for existing SQLite DBs. # # Re-define tables and use copy_from to avoid reflection # which would fail be...
Make TaskInstance.pool field not nullable.
def upgrade(): """Make TaskInstance.pool field not nullable.""" with create_session() as session: session.query(TaskInstance).filter(TaskInstance.pool.is_(None)).update( {TaskInstance.pool: "default_pool"}, synchronize_session=False ) # Avoid select updated rows session.comm...
Make TaskInstance.pool field nullable.
def downgrade(): """Make TaskInstance.pool field nullable.""" conn = op.get_bind() if conn.dialect.name == "mssql": op.drop_index(index_name="ti_pool", table_name="task_instance") # use batch_alter_table to support SQLite workaround with op.batch_alter_table("task_instance") as batch_op: ...
Upgrade version.
def upgrade(): """Upgrade version.""" json_type = sa.JSON conn = op.get_bind() if conn.dialect.name != "postgresql": # Mysql 5.7+/MariaDB 10.2.3 has JSON support. Rather than checking for # versions, check for the function existing. try: conn.execute(text("SELECT JSO...
Downgrade version.
def downgrade():
    """Revert the version by removing the ``serialized_dag`` table."""
    table_name = "serialized_dag"
    op.drop_table(table_name)
Apply Add ``root_dag_id`` to ``DAG``
def upgrade():
    """Add the nullable ``root_dag_id`` column to ``dag`` and index it."""
    root_dag_id_column = sa.Column("root_dag_id", StringID(), nullable=True)
    op.add_column("dag", root_dag_id_column)
    op.create_index("idx_root_dag_id", "dag", ["root_dag_id"], unique=False)
Unapply Add ``root_dag_id`` to ``DAG``
def downgrade():
    """Drop the ``root_dag_id`` index and column from ``dag``."""
    # Index first, then the column it covers.
    op.drop_index("idx_root_dag_id", table_name="dag")
    op.drop_column("dag", "root_dag_id")
Change datetime to datetime2(6) when using MSSQL as backend.
def upgrade(): """Change datetime to datetime2(6) when using MSSQL as backend.""" conn = op.get_bind() if conn.dialect.name == "mssql": result = conn.execute( text( """SELECT CASE WHEN CONVERT(VARCHAR(128), SERVERPROPERTY ('productversion')) like '8%' THEN '20...
Change datetime2(6) back to datetime.
def downgrade(): """Change datetime2(6) back to datetime.""" conn = op.get_bind() if conn.dialect.name == "mssql": result = conn.execute( text( """SELECT CASE WHEN CONVERT(VARCHAR(128), SERVERPROPERTY ('productversion')) like '8%' THEN '2000' WHEN CONVERT(VARC...
Return primary and unique constraint along with column name. This function return primary and unique constraint along with column name. some tables like task_instance is missing primary key constraint name and the name is auto-generated by sql server. so this function helps to retrieve any primary or unique constraint...
def get_table_constraints(conn, table_name) -> dict[tuple[str, str], list[str]]: """Return primary and unique constraint along with column name. This function return primary and unique constraint along with column name. some tables like task_instance is missing primary key constraint name and the name ...
Reorder the columns for creating constraint. Preserve primary key ordering ``['task_id', 'dag_id', 'execution_date']`` :param columns: columns retrieved from DB related to constraint :return: ordered column
def reorder_columns(columns): """Reorder the columns for creating constraint. Preserve primary key ordering ``['task_id', 'dag_id', 'execution_date']`` :param columns: columns retrieved from DB related to constraint :return: ordered column """ ordered_columns = [] for column in ["task_i...
Drop a primary key or unique constraint. :param operator: batch_alter_table for the table :param constraint_dict: a dictionary of ((constraint name, constraint type), column name) of table
def drop_constraint(operator, constraint_dict): """Drop a primary key or unique constraint. :param operator: batch_alter_table for the table :param constraint_dict: a dictionary of ((constraint name, constraint type), column name) of table """ for constraint, columns in constraint_dict.items(): ...
Create a primary key or unique constraint. :param operator: batch_alter_table for the table :param constraint_dict: a dictionary of ((constraint name, constraint type), column name) of table
def create_constraint(operator, constraint_dict): """Create a primary key or unique constraint. :param operator: batch_alter_table for the table :param constraint_dict: a dictionary of ((constraint name, constraint type), column name) of table """ for constraint, columns in constraint_dict.items():...
Change type of column execution_date. Helper function changes type of column execution_date by dropping and recreating any primary/unique constraint associated with the column :param conn: sql connection object :param batch_operator: batch_alter_table for the table :param table_name: table name :param type_: DB column...
def modify_execution_date_with_constraint(conn, batch_operator, table_name, type_, nullable) -> None: """Change type of column execution_date. Helper function changes type of column execution_date by dropping and recreating any primary/unique constraint associated with the column :param conn: sql c...
Increase column size from 50 to 256 characters, caused by broker backends that might use unusually large queue names.
def upgrade(): """ Increase column size from 50 to 256 characters, caused by broker backends that might use unusually large queue names. """ # use batch_alter_table to support SQLite workaround with op.batch_alter_table("task_instance") as batch_op: batch_op.alter_column("queue", type_=s...
Revert column size from 256 to 50 characters, might result in data loss.
def downgrade():
    """Shrink the column back from 256 to 50 characters; may result in data loss."""
    shorter_type = sa.String(50)
    # batch_alter_table keeps the operation working on SQLite as well.
    with op.batch_alter_table("task_instance") as batch_op:
        batch_op.alter_column("queue", type_=shorter_type)
Drop the dag_stats table.
def upgrade():
    """Remove the ``dag_stats`` table."""
    table_name = "dag_stats"
    op.drop_table(table_name)
Create the dag_stats table.
def downgrade(): """Create dag_stats table""" op.create_table( "dag_stats", sa.Column("dag_id", sa.String(length=250), nullable=False), sa.Column("state", sa.String(length=50), nullable=False), sa.Column("count", sa.Integer(), nullable=False, default=0), sa.Column("dirty"...
Apply Increase length for connection password
def upgrade(): """Apply Increase length for connection password""" with op.batch_alter_table("connection", schema=None) as batch_op: batch_op.alter_column( "password", existing_type=sa.VARCHAR(length=500), type_=sa.String(length=5000), existing_nullable=Tr...
Unapply Increase length for connection password
def downgrade(): """Unapply Increase length for connection password""" with op.batch_alter_table("connection", schema=None) as batch_op: batch_op.alter_column( "password", existing_type=sa.String(length=5000), type_=sa.VARCHAR(length=500), existing_nullabl...
Apply Add ``DagTags`` table
def upgrade(): """Apply Add ``DagTags`` table""" op.create_table( "dag_tag", sa.Column("name", sa.String(length=100), nullable=False), sa.Column("dag_id", StringID(), nullable=False), sa.ForeignKeyConstraint( ["dag_id"], ["dag.dag_id"], ), ...
Unapply Add ``DagTags`` table
def downgrade():
    """Remove the ``dag_tag`` table added by this revision."""
    table_name = "dag_tag"
    op.drop_table(table_name)
Apply Add ``RenderedTaskInstanceFields`` table
def upgrade(): """Apply Add ``RenderedTaskInstanceFields`` table""" json_type = sa.JSON conn = op.get_bind() if conn.dialect.name != "postgresql": # Mysql 5.7+/MariaDB 10.2.3 has JSON support. Rather than checking for # versions, check for the function existing. try: ...
Drop RenderedTaskInstanceFields table
def downgrade():
    """Remove the ``RenderedTaskInstanceFields`` table added by this revision."""
    # TABLE_NAME is the module-level constant naming the table.
    op.drop_table(TABLE_NAME)
Create DagCode Table.
def upgrade(): """Create DagCode Table.""" from sqlalchemy.orm import declarative_base Base = declarative_base() class SerializedDagModel(Base): __tablename__ = "serialized_dag" # There are other columns here, but these are the only ones we need for the SELECT/UPDATE we are doing ...
Unapply add source code table
def downgrade():
    """Remove the source-code (``dag_code``) table added by this revision."""
    table_name = "dag_code"
    op.drop_table(table_name)
Add Precision to ``execution_date`` in ``RenderedTaskInstanceFields`` table for MySQL
def upgrade():
    """Add precision to ``execution_date`` in the ``RenderedTaskInstanceFields`` table for MySQL."""
    conn = op.get_bind()
    if conn.dialect.name != "mysql":
        # Only the MySQL dialect needs the explicit fractional-seconds precision.
        return
    op.alter_column(
        table_name=TABLE_NAME,
        column_name=COLUMN_NAME,
        type_=mysql.TIMESTAMP(fsp=6),
        nullable=False,
    )
Unapply Add Precision to ``execution_date`` in ``RenderedTaskInstanceFields`` table
def downgrade():
    """Remove the added precision from ``execution_date`` in the ``RenderedTaskInstanceFields`` table."""
    conn = op.get_bind()
    if conn.dialect.name != "mysql":
        # The upgrade only touched MySQL, so there is nothing to revert elsewhere.
        return
    op.alter_column(
        table_name=TABLE_NAME,
        column_name=COLUMN_NAME,
        type_=mysql.TIMESTAMP(),
        nullable=False,
    )
Apply Add ``dag_hash`` Column to ``serialized_dag`` table
def upgrade():
    """Add the ``dag_hash`` column to the ``serialized_dag`` table."""
    # Non-null with a server default so existing rows get a placeholder value.
    dag_hash_column = sa.Column(
        "dag_hash", sa.String(32), nullable=False, server_default="Hash not calculated yet"
    )
    op.add_column("serialized_dag", dag_hash_column)
Unapply Add ``dag_hash`` Column to ``serialized_dag`` table
def downgrade():
    """Remove the ``dag_hash`` column from the ``serialized_dag`` table."""
    table = "serialized_dag"
    op.drop_column(table, "dag_hash")
Create FAB Tables
def upgrade(): """Create FAB Tables""" conn = op.get_bind() inspector = inspect(conn) tables = inspector.get_table_names() if "ab_permission" not in tables: op.create_table( "ab_permission", sa.Column("id", sa.Integer(), nullable=False, primary_key=True), ...
Drop FAB Tables
def downgrade(): """Drop FAB Tables""" conn = op.get_bind() inspector = inspect(conn) tables = inspector.get_table_names() fab_tables = [ "ab_permission", "ab_view_menu", "ab_role", "ab_permission_view", "ab_permission_view_role", "ab_user", "a...
Apply Increase length of ``Flask-AppBuilder`` ``ab_view_menu.name`` column
def upgrade(): """Apply Increase length of ``Flask-AppBuilder`` ``ab_view_menu.name`` column""" conn = op.get_bind() inspector = inspect(conn) tables = inspector.get_table_names() if "ab_view_menu" in tables: if conn.dialect.name == "sqlite": op.execute("PRAGMA foreign_keys=off...
Unapply Increase length of ``Flask-AppBuilder`` ``ab_view_menu.name`` column
def downgrade(): """Unapply Increase length of ``Flask-AppBuilder`` ``ab_view_menu.name`` column""" conn = op.get_bind() inspector = inspect(conn) tables = inspector.get_table_names() if "ab_view_menu" in tables: if conn.dialect.name == "sqlite": op.execute("PRAGMA foreign_keys=...
This function return primary and unique constraint along with column name. Some tables like `task_instance` is missing the primary key constraint name and the name is auto-generated by the SQL server. so this function helps to retrieve any primary or unique constraint name. :param conn: sql connection object :param ta...
def get_table_constraints(conn, table_name) -> dict[tuple[str, str], list[str]]: """ This function return primary and unique constraint along with column name. Some tables like `task_instance` is missing the primary key constraint name and the name is auto-generated by the SQL server. so this functi...
Drop a primary key or unique constraint :param operator: batch_alter_table for the table :param constraint_dict: a dictionary of ((constraint name, constraint type), column name) of table
def drop_column_constraints(operator, column_name, constraint_dict): """ Drop a primary key or unique constraint :param operator: batch_alter_table for the table :param constraint_dict: a dictionary of ((constraint name, constraint type), column name) of table """ for constraint, columns in con...
Create a primary key or unique constraint :param operator: batch_alter_table for the table :param constraint_dict: a dictionary of ((constraint name, constraint type), column name) of table
def create_constraints(operator, column_name, constraint_dict): """ Create a primary key or unique constraint :param operator: batch_alter_table for the table :param constraint_dict: a dictionary of ((constraint name, constraint type), column name) of table """ for constraint, columns in constr...
Apply Remove id column from xcom
def upgrade(): """Apply Remove id column from xcom""" conn = op.get_bind() inspector = inspect(conn) with op.batch_alter_table("xcom") as bop: xcom_columns = [col.get("name") for col in inspector.get_columns("xcom")] if "id" in xcom_columns: if conn.dialect.name == "mssql":...
Unapply Remove id column from xcom
def downgrade(): """Unapply Remove id column from xcom""" conn = op.get_bind() with op.batch_alter_table("xcom") as bop: if conn.dialect.name != "mssql": bop.drop_constraint("pk_xcom", type_="primary") bop.add_column(Column("id", Integer, nullable=False)) bop.create_prima...
Increase column length of pool name from 50 to 256 characters
def upgrade():
    """Widen the pool-name column from 50 to 256 characters."""
    wider_type = sa.String(256, **COLLATION_ARGS)
    # batch_alter_table keeps the operation working on SQLite as well.
    with op.batch_alter_table("slot_pool", table_args=sa.UniqueConstraint("pool")) as batch_op:
        batch_op.alter_column("pool", type_=wider_type)
Revert Increased length of pool name from 256 to 50 characters
def downgrade():
    """Shrink the pool-name column back from 256 to 50 characters."""
    shorter_type = sa.String(50)
    with op.batch_alter_table("slot_pool", table_args=sa.UniqueConstraint("pool")) as batch_op:
        batch_op.alter_column("pool", type_=shorter_type)
Apply Add ``run_type`` column in ``dag_run`` table
def upgrade(): """Apply Add ``run_type`` column in ``dag_run`` table""" run_type_col_type = sa.String(length=50) conn = op.get_bind() inspector = inspect(conn) dag_run_columns = [col.get("name") for col in inspector.get_columns("dag_run")] if "run_type" not in dag_run_columns: # Add nu...
Unapply Add ``run_type`` column in ``dag_run`` table
def downgrade():
    """Remove the ``run_type`` column from the ``dag_run`` table."""
    table = "dag_run"
    op.drop_column(table, "run_type")
Apply Set ``conn_type`` as non-nullable
def upgrade(): """Apply Set ``conn_type`` as non-nullable""" Base = declarative_base() class Connection(Base): __tablename__ = "connection" id = sa.Column(sa.Integer(), primary_key=True) conn_id = sa.Column(sa.String(250)) conn_type = sa.Column(sa.String(500)) # Genera...
Unapply Set ``conn_type`` as non-nullable
def downgrade():
    """Make ``connection.conn_type`` nullable again."""
    current_type = sa.VARCHAR(length=500)
    with op.batch_alter_table("connection", schema=None) as batch_op:
        batch_op.alter_column("conn_type", existing_type=current_type, nullable=True)
Apply Add unique constraint to ``conn_id`` and set it as non-nullable
def upgrade(): """Apply Add unique constraint to ``conn_id`` and set it as non-nullable""" try: with op.batch_alter_table("connection") as batch_op: batch_op.alter_column("conn_id", nullable=False, existing_type=sa.String(250, **COLLATION_ARGS)) batch_op.create_unique_constraint(...
Unapply Add unique constraint to ``conn_id`` and set it as non-nullable
def downgrade(): """Unapply Add unique constraint to ``conn_id`` and set it as non-nullable""" with op.batch_alter_table("connection") as batch_op: batch_op.drop_constraint(constraint_name="unique_conn_id", type_="unique") batch_op.alter_column("conn_id", nullable=True, existing_type=sa.String(...
Apply Add queued by Job ID to TI
def upgrade():
    """Add the nullable ``queued_by_job_id`` column to ``task_instance``."""
    queued_by_column = sa.Column("queued_by_job_id", sa.Integer(), nullable=True)
    with op.batch_alter_table("task_instance") as batch_op:
        batch_op.add_column(queued_by_column)
Unapply Add queued by Job ID to TI
def downgrade():
    """Remove the ``queued_by_job_id`` column from ``task_instance``."""
    with op.batch_alter_table("task_instance") as batch_op:
        batch_op.drop_column("queued_by_job_id")
Apply Add external executor ID to TI
def upgrade():
    """Add the nullable ``external_executor_id`` column to ``task_instance``."""
    executor_id_column = sa.Column("external_executor_id", sa.String(length=250), nullable=True)
    with op.batch_alter_table("task_instance", schema=None) as batch_op:
        batch_op.add_column(executor_id_column)
Unapply Add external executor ID to TI
def downgrade():
    """Remove the ``external_executor_id`` column from ``task_instance``."""
    with op.batch_alter_table("task_instance", schema=None) as batch_op:
        batch_op.drop_column("external_executor_id")
Apply Drop ``KubeResourceVersion`` and ``KubeWorkerId``entifier tables
def upgrade(): """Apply Drop ``KubeResourceVersion`` and ``KubeWorkerId``entifier tables""" conn = op.get_bind() inspector = inspect(conn) tables = inspector.get_table_names() if WORKER_UUID_TABLE in tables: op.drop_table(WORKER_UUID_TABLE) if WORKER_RESOURCEVERSION_TABLE in tables: ...
Unapply Drop ``KubeResourceVersion`` and ``KubeWorkerId``entifier tables
def downgrade(): """Unapply Drop ``KubeResourceVersion`` and ``KubeWorkerId``entifier tables""" conn = op.get_bind() inspector = inspect(conn) tables = inspector.get_table_names() if WORKER_UUID_TABLE not in tables: _add_worker_uuid_table() if WORKER_RESOURCEVERSION_TABLE not in tables...
Apply Add ``scheduling_decision`` to ``DagRun`` and ``DAG``
def upgrade(): """Apply Add ``scheduling_decision`` to ``DagRun`` and ``DAG``""" conn = op.get_bind() is_sqlite = bool(conn.dialect.name == "sqlite") is_mssql = bool(conn.dialect.name == "mssql") if is_sqlite: op.execute("PRAGMA foreign_keys=off") with op.batch_alter_table("dag_run", s...
Unapply Add ``scheduling_decision`` to ``DagRun`` and ``DAG``
def downgrade(): """Unapply Add ``scheduling_decision`` to ``DagRun`` and ``DAG``""" conn = op.get_bind() is_sqlite = bool(conn.dialect.name == "sqlite") if is_sqlite: op.execute("PRAGMA foreign_keys=off") with op.batch_alter_table("dag_run", schema=None) as batch_op: batch_op.drop...
Recreate RenderedTaskInstanceFields table changing timestamp to datetime2(6) when using MSSQL as backend
def upgrade(): """ Recreate RenderedTaskInstanceFields table changing timestamp to datetime2(6) when using MSSQL as backend """ conn = op.get_bind() if conn.dialect.name == "mssql": json_type = sa.Text op.drop_table(TABLE_NAME) op.create_table( TABLE_NAME, ...
Recreate RenderedTaskInstanceFields table changing datetime2(6) to timestamp when using MSSQL as backend
def downgrade(): """ Recreate RenderedTaskInstanceFields table changing datetime2(6) to timestamp when using MSSQL as backend """ conn = op.get_bind() if conn.dialect.name == "mssql": json_type = sa.Text op.drop_table(TABLE_NAME) op.create_table( TABLE_NAME, ...