sample_id stringlengths 21 196 | text stringlengths 105 936k | metadata dict | category stringclasses 6
values |
|---|---|---|---|
apache/airflow:task-sdk/src/airflow/sdk/definitions/timetables/assets.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
import attrs
from airflow.sdk.bases.timetable import BaseTimetable
from airflow.sdk.definitions.asset import AssetAll, BaseAsset
from airflow.sdk.definitions.partition_mappers.identity import IdentityMapper
if TYPE_CHECKING:
from collections.abc import Collection
from airflow.sdk import Asset
from airflow.sdk.definitions.partition_mappers.base import PartitionMapper
@attrs.define
class AssetTriggeredTimetable(BaseTimetable):
"""
Timetable that never schedules anything.
This should not be directly used anywhere, but only set if a DAG is triggered by assets.
:meta private:
"""
asset_condition: BaseAsset = attrs.field(alias="assets")
@attrs.define
class PartitionedAssetTimetable(AssetTriggeredTimetable):
"""Asset-driven timetable that listens for partitioned assets."""
asset_condition: BaseAsset = attrs.field(alias="assets")
partition_mapper_config: dict[BaseAsset, PartitionMapper] = attrs.field(factory=dict)
default_partition_mapper: PartitionMapper = IdentityMapper()
def _coerce_assets(o: Collection[Asset] | BaseAsset) -> BaseAsset:
if isinstance(o, BaseAsset):
return o
return AssetAll(*o)
@attrs.define(kw_only=True)
class AssetOrTimeSchedule(AssetTriggeredTimetable):
"""
Combine time-based scheduling with event-based scheduling.
:param assets: An asset of list of assets, in the same format as
``DAG(schedule=...)`` when using event-driven scheduling. This is used
to evaluate event-based scheduling.
:param timetable: A timetable instance to evaluate time-based scheduling.
"""
asset_condition: BaseAsset = attrs.field(alias="assets", converter=_coerce_assets)
timetable: BaseTimetable
def __attrs_post_init__(self) -> None:
self.active_runs_limit = self.timetable.active_runs_limit
self.can_be_scheduled = self.timetable.can_be_scheduled
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/definitions/timetables/assets.py",
"license": "Apache License 2.0",
"lines": 58,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:task-sdk/src/airflow/sdk/definitions/timetables/events.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
import attrs
from airflow.sdk.bases.timetable import BaseTimetable
if TYPE_CHECKING:
from collections.abc import Iterable
from pendulum import DateTime
@attrs.define(init=False)
class EventsTimetable(BaseTimetable):
"""
Timetable that schedules DAG runs at specific listed datetimes.
Suitable for predictable but truly irregular scheduling, such as sporting
events, or to schedule against National Holidays.
:param event_dates: List of datetimes for the DAG to run at. Duplicates
will be ignored. This must be finite and of reasonable size, as it will
be loaded in its entirety.
:param restrict_to_events: Whether manual runs should use the most recent
event or the current time
:param presorted: if True, event_dates will be assumed to be in ascending
order. Provides modest performance improvement for larger lists of
*event_dates*.
:param description: A name for the timetable to display in the UI. If not
provided explicitly (or *None*) the UI will show "X Events" where X is
the length of *event_dates*.
"""
event_dates: list[DateTime]
restrict_to_events: bool
description: str | None
def __init__(
self,
event_dates: Iterable[DateTime],
*,
restrict_to_events: bool = False,
presorted: bool = False,
description: str | None = None,
) -> None:
self.__attrs_init__( # type: ignore[attr-defined]
sorted(event_dates) if presorted else list(event_dates),
restrict_to_events=restrict_to_events,
description=description,
)
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/definitions/timetables/events.py",
"license": "Apache License 2.0",
"lines": 57,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:task-sdk/src/airflow/sdk/definitions/timetables/interval.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
from dateutil.relativedelta import relativedelta
from airflow.sdk.bases.timetable import BaseTimetable
from airflow.sdk.definitions.timetables._cron import CronMixin
from airflow.sdk.definitions.timetables._delta import DeltaMixin
Delta = datetime.timedelta | relativedelta
class CronDataIntervalTimetable(CronMixin, BaseTimetable):
"""
Timetable that schedules data intervals with a cron expression.
This corresponds to ``schedule=<cron>``, where ``<cron>`` is either
a five/six-segment representation, or one of ``cron_presets``.
The implementation extends on croniter to add timezone awareness. This is
because croniter works only with naive timestamps, and cannot consider DST
when determining the next/previous time.
Using this class is equivalent to supplying a cron expression dire
Don't pass ``@once`` in here; use ``OnceTimetable`` instead.
"""
class DeltaDataIntervalTimetable(DeltaMixin, BaseTimetable):
"""
Timetable that schedules data intervals with a time delta.
This corresponds to ``schedule=<delta>``, where ``<delta>`` is
either a ``datetime.timedelta`` or ``dateutil.relativedelta.relativedelta``
instance.
"""
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/definitions/timetables/interval.py",
"license": "Apache License 2.0",
"lines": 41,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:task-sdk/src/airflow/sdk/definitions/timetables/simple.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.sdk.bases.timetable import BaseTimetable
class NullTimetable(BaseTimetable):
"""
Timetable that never schedules anything.
This corresponds to ``schedule=None``.
"""
can_be_scheduled = False
class OnceTimetable(BaseTimetable):
"""
Timetable that schedules the execution once as soon as possible.
This corresponds to ``schedule="@once"``.
"""
class ContinuousTimetable(BaseTimetable):
"""
Timetable that schedules continually, while still respecting start_date and end_date.
This corresponds to ``schedule="@continuous"``.
"""
active_runs_limit = 1
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/definitions/timetables/simple.py",
"license": "Apache License 2.0",
"lines": 35,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:task-sdk/src/airflow/sdk/definitions/timetables/trigger.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
from types import NoneType
from typing import TYPE_CHECKING
import attrs
from airflow.sdk.bases.timetable import BaseTimetable
from airflow.sdk.definitions.timetables._cron import CronMixin
from airflow.sdk.definitions.timetables._delta import DeltaMixin
if TYPE_CHECKING:
from dateutil.relativedelta import relativedelta
from pendulum.tz.timezone import FixedTimezone, Timezone
@attrs.define
class DeltaTriggerTimetable(DeltaMixin, BaseTimetable):
"""
Timetable that triggers DAG runs according to a cron expression.
This is different from ``DeltaDataIntervalTimetable``, where the delta value
specifies the *data interval* of a DAG run. With this timetable, the data
intervals are specified independently. Also for the same reason, this
timetable kicks off a DAG run immediately at the start of the period,
instead of needing to wait for one data interval to pass.
:param delta: How much time to wait between each run.
:param interval: The data interval of each run. Default is 0.
"""
interval: datetime.timedelta | relativedelta = attrs.field(kw_only=True, default=datetime.timedelta())
@attrs.define
class CronTriggerTimetable(CronMixin, BaseTimetable):
"""
Timetable that triggers Dag runs according to a cron expression.
This is different from ``CronDataIntervalTimetable``, where the cron
expression specifies the *data interval* of a DAG run. With this timetable,
the data intervals are specified independently from the cron expression.
Also for the same reason, this timetable kicks off a DAG run immediately at
the start of the period (similar to POSIX cron), instead of needing to wait
for one data interval to pass.
Don't pass ``@once`` in here; use ``OnceTimetable`` instead.
:param cron: cron string that defines when to run
:param timezone: Which timezone to use to interpret the cron string
:param interval: timedelta that defines the data interval start. Default 0.
*run_immediately* controls, if no *start_time* is given to the Dag, when
the first run of the Dag should be scheduled. It has no effect if there
already exist runs for this Dag.
* If *True*, always run immediately the most recent possible Dag run.
* If *False*, wait to run until the next scheduled time in the future.
* If passed a ``timedelta``, will run the most recent possible Dag run
if that run's ``data_interval_end`` is within timedelta of now.
* If *None*, the timedelta is calculated as 10% of the time between the
most recent past scheduled time and the next scheduled time. E.g. if
running every hour, this would run the previous time if less than 6
minutes had past since the previous run time, otherwise it would wait
until the next hour.
"""
interval: datetime.timedelta | relativedelta = attrs.field(kw_only=True, default=datetime.timedelta())
run_immediately: bool | datetime.timedelta = attrs.field(kw_only=True, default=False)
@attrs.define(init=False)
class MultipleCronTriggerTimetable(BaseTimetable):
"""
Timetable that triggers Dag runs according to multiple cron expressions.
This combines multiple ``CronTriggerTimetable`` instances underneath, and
triggers a Dag run whenever one of the timetables want to trigger a run.
Only at most one run is triggered for any given time, even if more than one
timetable fires at the same time.
"""
timetables: list[CronTriggerTimetable]
def __init__(
self,
*crons: str,
timezone: str | Timezone | FixedTimezone,
interval: datetime.timedelta | relativedelta = datetime.timedelta(),
run_immediately: bool | datetime.timedelta = False,
) -> None:
if not crons:
raise ValueError("cron expression required")
self.__attrs_init__( # type: ignore[attr-defined]
[
CronTriggerTimetable(cron, timezone, interval=interval, run_immediately=run_immediately)
for cron in crons
],
)
@attrs.define
class CronPartitionTimetable(CronTriggerTimetable):
"""
Timetable that triggers Dag runs according to a cron expression.
Creates runs for partition keys.
The cron expression determines the sequence of run dates. And
the partition dates are derived from those according to the ``run_offset``.
The partition key is then formatted using the partition date.
A ``run_offset`` of 1 means the partition_date will be one cron interval
after the run date; negative means the partition date will be one cron
interval prior to the run date.
:param cron: cron string that defines when to run
:param timezone: Which timezone to use to interpret the cron string
:param run_offset: Integer offset that determines which partition date to run for.
The partition key will be derived from the partition date.
:param key_format: How to translate the partition date into a string partition key.
*run_immediately* controls, if no *start_time* is given to the Dag, when
the first run of the Dag should be scheduled. It has no effect if there already exist runs for this Dag.
* If *True*, always run immediately the most recent possible Dag run.
* If *False*, wait to run until the next scheduled time in the future.
* If passed a ``timedelta``, will run the most recent possible Dag run
if that run's ``data_interval_end`` is within timedelta of now.
* If *None*, the timedelta is calculated as 10% of the time between the
most recent past scheduled time and the next scheduled time. E.g. if
running every hour, this would run the previous time if less than 6
minutes had past since the previous run time, otherwise it would wait
until the next hour.
# todo: AIP-76 talk about how we can have auto-reprocessing of partitions
# todo: AIP-76 we could allow a tuple of integer + time-based
"""
run_offset: int | datetime.timedelta | relativedelta | None = None
key_format: str = "%Y-%m-%dT%H:%M:%S" # todo: AIP-76 we can't infer partition date from this, so we need to store it separately
def __init__(
self,
cron: str,
*,
timezone: str | Timezone | FixedTimezone,
run_offset: int | datetime.timedelta | relativedelta | None = None,
run_immediately: bool | datetime.timedelta = False,
key_format: str = "%Y-%m-%dT%H:%M:%S", # todo: AIP-76 we can't infer partition date from this, so we need to store it separately
) -> None:
# super().__init__(cron, timezone=timezone, run_immediately=run_immediately)
if not isinstance(run_offset, (int, NoneType)):
# todo: AIP-76 implement timedelta / relative delta?
raise ValueError("Run offset other than integer not supported yet.")
self.__attrs_init__( # type: ignore[attr-defined]
cron,
timezone=timezone,
run_offset=run_offset,
run_immediately=run_immediately,
key_format=key_format,
)
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/definitions/timetables/trigger.py",
"license": "Apache License 2.0",
"lines": 146,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
apache/airflow:providers/openlineage/tests/system/openlineage/example_openlineage_trigger_dag_deferrable.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Simple DAG that triggers another simple DAG in deferrable mode.
It checks:
- task's trigger_dag_id, trigger_run_id, deferrable attribute
- DAGRun START and COMPLETE events, for the triggered DAG
- automatic injection of OL parent and root info to DAGRun conf
- multiple levels of triggering
"""
from __future__ import annotations
from datetime import datetime
from airflow import DAG
from airflow.providers.standard.operators.bash import BashOperator
from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunOperator
from system.openlineage.expected_events import get_expected_event_file_path
from system.openlineage.operator import OpenLineageTestOperator
DAG_ID = "openlineage_trigger_dag_deferrable"
with DAG(
dag_id=DAG_ID,
start_date=datetime(2021, 1, 1),
schedule=None,
catchup=False,
default_args={"retries": 0},
) as dag:
trigger_dagrun = TriggerDagRunOperator(
task_id="trigger_dagrun",
trigger_dag_id="openlineage_trigger_dag_deferrable_child__notrigger",
wait_for_completion=True,
conf={"some_config": "value1"},
poke_interval=5,
deferrable=True,
)
check_events = OpenLineageTestOperator(
task_id="check_events",
file_path=get_expected_event_file_path(DAG_ID),
allow_duplicate_events_regex="openlineage_trigger_dag_deferrable.trigger_dagrun.event.start",
)
trigger_dagrun >> check_events
with DAG(
dag_id="openlineage_trigger_dag_deferrable_child__notrigger",
start_date=datetime(2021, 1, 1),
schedule=None,
catchup=False,
default_args={"retries": 0},
) as child_dag:
trigger_dagrun2 = TriggerDagRunOperator(
task_id="trigger_dagrun2",
trigger_dag_id="openlineage_trigger_dag_deferrable_child2__notrigger",
wait_for_completion=True,
poke_interval=5,
)
with DAG(
dag_id="openlineage_trigger_dag_deferrable_child2__notrigger",
start_date=datetime(2021, 1, 1),
schedule=None,
catchup=False,
default_args={"retries": 0},
) as child_dag2:
do_nothing_task = BashOperator(task_id="do_nothing_task", bash_command="sleep 10;")
from tests_common.test_utils.system_tests import get_test_run # noqa: E402
# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/tests/system/openlineage/example_openlineage_trigger_dag_deferrable.py",
"license": "Apache License 2.0",
"lines": 77,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:shared/logging/tests/logging/test_percent_formatter.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
from airflow_shared.logging.percent_formatter import PercentFormatRender
class TestPercentFormatRender:
def test_no_callsite(self):
fmter = PercentFormatRender("%(filename)s:%(lineno)d %(message)s")
formatted = fmter(mock.Mock(name="Logger"), "info", {"event": "our msg"})
assert formatted == "(unknown file):0 our msg"
def test_lineno_is_none(self):
fmter = PercentFormatRender("%(filename)s:%(lineno)d %(message)s")
formatted = fmter(
mock.Mock(name="Logger"),
"info",
{"event": "our msg", "filename": "test.py", "lineno": None},
)
assert formatted == "test.py:0 our msg"
| {
"repo_id": "apache/airflow",
"file_path": "shared/logging/tests/logging/test_percent_formatter.py",
"license": "Apache License 2.0",
"lines": 33,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:shared/secrets_backend/src/airflow_shared/secrets_backend/base.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from abc import ABC
class BaseSecretsBackend(ABC):
"""Abstract base class to retrieve Connection object given a conn_id or Variable given a key."""
@staticmethod
def build_path(path_prefix: str, secret_id: str, sep: str = "/") -> str:
"""
Given conn_id, build path for Secrets Backend.
:param path_prefix: Prefix of the path to get secret
:param secret_id: Secret id
:param sep: separator used to concatenate connections_prefix and conn_id. Default: "/"
"""
return f"{path_prefix}{sep}{secret_id}"
def get_conn_value(self, conn_id: str, team_name: str | None = None) -> str | None:
"""
Retrieve from Secrets Backend a string value representing the Connection object.
If the client your secrets backend uses already returns a python dict, you should override
``get_connection`` instead.
:param conn_id: connection id
:param team_name: Team name associated to the task trying to access the connection (if any)
"""
raise NotImplementedError
def get_variable(self, key: str, team_name: str | None = None) -> str | None:
"""
Return value for Airflow Variable.
:param key: Variable Key
:param team_name: Team name associated to the task trying to access the variable (if any)
:return: Variable Value
"""
raise NotImplementedError()
def get_config(self, key: str) -> str | None:
"""
Return value for Airflow Config Key.
:param key: Config Key
:return: Config Value
"""
return None
def _set_connection_class(self, conn_class: type) -> None:
if not isinstance(conn_class, type):
raise TypeError(f"Connection class must be a type/class, got {type(conn_class).__name__}")
self._connection_class = conn_class
def _get_connection_class(self) -> type:
"""Get the Connection class to use for deserialization."""
conn_class = getattr(self, "_connection_class", None)
if conn_class is None:
raise RuntimeError(
"Connection class not set on backend instance. "
"Backends must be instantiated via initialize_secrets_backends() "
"or have _connection_class set manually."
)
return conn_class
@staticmethod
def _deserialize_connection_value(conn_class: type, conn_id: str, value: str):
value = value.strip()
if value[0] == "{":
return conn_class.from_json(value=value, conn_id=conn_id)
# TODO: Only sdk has from_uri defined on it. Is it worthwhile developing the core path or not?
if hasattr(conn_class, "from_uri"):
return conn_class.from_uri(conn_id=conn_id, uri=value)
return conn_class(conn_id=conn_id, uri=value)
def deserialize_connection(self, conn_id: str, value: str):
"""
Given a serialized representation of the airflow Connection, return an instance.
Uses the Connection class set on this class (which should be set to the appropriate Connection class for the execution context).
Uses Connection.from_json() for JSON format, Connection(uri=...) for URI format.
:param conn_id: connection id
:param value: the serialized representation of the Connection object
:return: the deserialized Connection
"""
conn_class = self._get_connection_class()
return self._deserialize_connection_value(conn_class, conn_id, value)
def get_connection(self, conn_id: str, team_name: str | None = None):
"""
Return connection object with a given ``conn_id``.
:param conn_id: connection id
:param team_name: Team name associated to the task trying to access the connection (if any)
:return: Connection object or None
"""
value = self.get_conn_value(conn_id=conn_id, team_name=team_name)
if value:
return self.deserialize_connection(conn_id=conn_id, value=value)
return None
| {
"repo_id": "apache/airflow",
"file_path": "shared/secrets_backend/src/airflow_shared/secrets_backend/base.py",
"license": "Apache License 2.0",
"lines": 98,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
apache/airflow:shared/secrets_backend/tests/secrets_backend/test_base.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow_shared.secrets_backend.base import BaseSecretsBackend
class MockConnection:
"""Mock Connection class for testing deserialize_connection."""
def __init__(self, conn_id: str, uri: str | None = None, **kwargs):
self.conn_id = conn_id
self.uri = uri
self._kwargs = kwargs
@classmethod
def from_json(cls, value: str, conn_id: str):
import json
data = json.loads(value)
return cls(conn_id=conn_id, **data)
@classmethod
def from_uri(cls, conn_id: str, uri: str):
return cls(conn_id=conn_id, uri=uri)
class _TestBackend(BaseSecretsBackend):
def __init__(self, conn_values: dict[str, str] | None = None, variables: dict[str, str] | None = None):
self.conn_values = conn_values or {}
self.variables = variables or {}
def get_conn_value(self, conn_id: str) -> str | None:
return self.conn_values.get(conn_id)
def get_variable(self, key: str) -> str | None:
return self.variables.get(key)
class TestBaseSecretsBackend:
@pytest.mark.parametrize(
("prefix", "secret_id", "sep", "expected"),
[
("prefix", "secret_id", "/", "prefix/secret_id"),
("prefix", "secret_id", ":", "prefix:secret_id"),
],
)
def test_build_path_with_separator(self, prefix, secret_id, sep, expected):
path = BaseSecretsBackend.build_path(prefix, secret_id, sep=sep)
assert path == expected
def test_get_conn_value_not_implemented(self):
backend = BaseSecretsBackend()
with pytest.raises(NotImplementedError):
backend.get_conn_value("test_conn")
def test_get_variable_not_implemented(self):
backend = BaseSecretsBackend()
with pytest.raises(NotImplementedError):
backend.get_variable("test_var")
def test_get_config_returns_none_by_default(self):
backend = BaseSecretsBackend()
assert backend.get_config("test_key") is None
def test_implementation_get_conn_value(self, sample_conn_uri):
backend = _TestBackend(conn_values={"test_conn": sample_conn_uri})
conn_value = backend.get_conn_value("test_conn")
assert conn_value == sample_conn_uri
def test_concrete_implementation_get_conn_value_missing(self):
backend = _TestBackend(conn_values={})
conn_value = backend.get_conn_value("missing_conn")
assert conn_value is None
def test_concrete_implementation_get_variable(self):
backend = _TestBackend(variables={"test_var": "test_value"})
var_value = backend.get_variable("test_var")
assert var_value == "test_value"
def test_concrete_implementation_get_variable_missing(self):
backend = _TestBackend(variables={})
var_value = backend.get_variable("missing_var")
assert var_value is None
@pytest.mark.parametrize(
("conn_id", "expected"),
[
("simple", "simple"),
("with-dash", "with-dash"),
("with_underscore", "with_underscore"),
("with.dot", "with.dot"),
],
)
def test_get_conn_value_with_various_conn_ids(self, conn_id, expected):
backend = _TestBackend(conn_values={conn_id: f"uri_{expected}"})
conn_value = backend.get_conn_value(conn_id)
assert conn_value == f"uri_{expected}"
def test_deserialize_connection_json(self, sample_conn_json):
"""Test deserialize_connection with JSON format through _TestBackend."""
backend = _TestBackend()
backend._set_connection_class(MockConnection)
conn = backend.deserialize_connection("test_conn", sample_conn_json)
assert isinstance(conn, MockConnection)
assert conn.conn_id == "test_conn"
assert conn._kwargs["conn_type"] == "mysql"
def test_deserialize_connection_uri(self, sample_conn_uri):
    """Test deserialize_connection with URI format through _TestBackend."""
    backend = _TestBackend()
    backend._set_connection_class(MockConnection)
    conn = backend.deserialize_connection("test_conn", sample_conn_uri)
    assert isinstance(conn, MockConnection)
    assert conn.conn_id == "test_conn"
    # URI payloads are kept verbatim on the resulting connection object.
    assert conn.uri == sample_conn_uri
| {
"repo_id": "apache/airflow",
"file_path": "shared/secrets_backend/tests/secrets_backend/test_base.py",
"license": "Apache License 2.0",
"lines": 109,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:task-sdk/src/airflow/sdk/bases/secrets_backend.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.sdk._shared.secrets_backend.base import BaseSecretsBackend as _BaseSecretsBackend
class BaseSecretsBackend(_BaseSecretsBackend):
    """Base class for secrets backend with SDK Connection as default."""

    def _get_connection_class(self) -> type:
        # Resolve the SDK Connection class lazily and memoize it on the
        # instance, so the import only happens the first time it is needed.
        cached = getattr(self, "_connection_class", None)
        if cached is not None:
            return cached
        from airflow.sdk.definitions.connection import Connection

        self._connection_class = Connection
        return Connection
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/bases/secrets_backend.py",
"license": "Apache License 2.0",
"lines": 27,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/src/airflow/api_fastapi/compat.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from fastapi import status
# HTTP_422_UNPROCESSABLE_CONTENT was added in Starlette 0.48.0, replacing HTTP_422_UNPROCESSABLE_ENTITY
# to align with RFC 9110 (HTTP Semantics).
#
# FastAPI 0.128.0 (our minimum version) requires starlette>=0.40.0. With "Low dep tests"
# (uv sync --resolution lowest-direct), starlette 0.40.0 is installed, which only has
# HTTP_422_UNPROCESSABLE_ENTITY. So we need this fallback for backward compatibility.
#
# Refs:
# - https://www.starlette.io/release-notes/
# - https://www.rfc-editor.org/rfc/rfc9110#status.422
# Prefer the RFC 9110 constant name; fall back to the pre-0.48.0 Starlette
# spelling when the attribute does not exist.
HTTP_422_UNPROCESSABLE_CONTENT = getattr(
    status,
    "HTTP_422_UNPROCESSABLE_CONTENT",
    status.HTTP_422_UNPROCESSABLE_ENTITY,  # type: ignore[attr-defined]
)

__all__ = ["HTTP_422_UNPROCESSABLE_CONTENT"]
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/compat.py",
"license": "Apache License 2.0",
"lines": 33,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:task-sdk/tests/task_sdk/bases/test_decorator.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import ast
import functools
import importlib.util
import textwrap
from pathlib import Path
import pytest
from airflow.sdk import task
from airflow.sdk.bases.decorator import DecoratedOperator, is_async_callable
RAW_CODE = """
from airflow.sdk import task
@task.kubernetes(
namespace="airflow",
image="python:3.12",
)
def a_task():
##################
return "success A"
"""
class DummyK8sDecoratedOperator(DecoratedOperator):
    # Minimal concrete operator: only the custom operator name is needed so
    # that the @task.kubernetes decorator line can be recognized and stripped.
    custom_operator_name = "@task.kubernetes"
class TestBaseDecorator:
    def test_get_python_source_strips_decorator_and_comment(self, tmp_path: Path):
        """get_python_source() drops the decorator and leaves valid source."""
        # Write RAW_CODE to disk and import it as a real module so that
        # source extraction has an actual file to read from.
        module_path = tmp_path / "tmp_mod.py"
        module_path.write_text(textwrap.dedent(RAW_CODE))
        spec = importlib.util.spec_from_file_location("tmp_mod", module_path)
        assert spec is not None
        assert spec.loader is not None
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        a_task_callable = module.a_task
        op = DummyK8sDecoratedOperator(task_id="t", python_callable=a_task_callable)
        cleaned = op.get_python_source()
        # Decorator & comment should be gone
        assert "@task.kubernetes" not in cleaned
        assert "##################" not in cleaned
        # Returned source must be valid Python
        ast.parse(cleaned)
        assert cleaned.lstrip().splitlines()[0].startswith("def a_task")
def simple_decorator(fn):
    """Transparent pass-through decorator; functools.wraps keeps the
    wrapped callable recoverable via inspect.unwrap."""

    def wrapper(*args, **kwargs):
        return fn(*args, **kwargs)

    return functools.wraps(fn)(wrapper)
def decorator_without_wraps(fn):
    """Pass-through decorator that deliberately omits functools.wraps,
    so the original callable cannot be recovered from the wrapper."""

    def wrapper(*args, **kwargs):
        result = fn(*args, **kwargs)
        return result

    return wrapper
async def async_fn():
    # Baseline positive case: a plain coroutine function.
    return 42
def sync_fn():
    # Baseline negative case: a plain synchronous function.
    return 42
@simple_decorator
async def wrapped_async_fn():
    # Coroutine behind a wraps-aware decorator.
    return 42
@simple_decorator
def wrapped_sync_fn():
    # Sync function behind a wraps-aware decorator.
    return 42
@decorator_without_wraps
async def wrapped_async_fn_no_wraps():
    # Coroutine hidden by a decorator that lacks functools.wraps.
    return 42
@simple_decorator
@simple_decorator
async def multi_wrapped_async_fn():
    # Coroutine behind two layers of wraps-aware decorators.
    return 42
async def async_with_args(x, y):
    # Coroutine used with functools.partial in the partial-based tests.
    return x + y
def sync_with_args(x, y):
    # Sync counterpart used with functools.partial in the tests.
    return x + y
class AsyncCallable:
    # Instances are callables whose __call__ is a coroutine function.
    async def __call__(self):
        return 42
class SyncCallable:
    # Instances are plain synchronous callables.
    def __call__(self):
        return 42
class WrappedAsyncCallable:
    # Async __call__ behind a wraps-aware decorator.
    @simple_decorator
    async def __call__(self):
        return 42
class TestAsyncCallable:
    """Behavior matrix for is_async_callable across plain functions,
    decorated functions, functools.partial objects, and callable classes."""

    def test_plain_async_function(self):
        assert is_async_callable(async_fn)

    def test_plain_sync_function(self):
        assert not is_async_callable(sync_fn)

    def test_wrapped_async_function_with_wraps(self):
        assert is_async_callable(wrapped_async_fn)

    def test_wrapped_sync_function_with_wraps(self):
        assert not is_async_callable(wrapped_sync_fn)

    def test_wrapped_async_function_without_wraps(self):
        """
        Without functools.wraps, inspect.unwrap cannot recover the coroutine.
        This documents expected behavior.
        """
        assert not is_async_callable(wrapped_async_fn_no_wraps)

    def test_multi_wrapped_async_function(self):
        assert is_async_callable(multi_wrapped_async_fn)

    def test_partial_async_function(self):
        fn = functools.partial(async_with_args, 1)
        assert is_async_callable(fn)

    def test_partial_sync_function(self):
        fn = functools.partial(sync_with_args, 1)
        assert not is_async_callable(fn)

    def test_nested_partial_async_function(self):
        # Partials of partials must be unwrapped all the way down.
        fn = functools.partial(
            functools.partial(async_with_args, 1),
            2,
        )
        assert is_async_callable(fn)

    def test_async_callable_class(self):
        assert is_async_callable(AsyncCallable())

    def test_sync_callable_class(self):
        assert not is_async_callable(SyncCallable())

    def test_wrapped_async_callable_class(self):
        assert is_async_callable(WrappedAsyncCallable())

    def test_partial_callable_class(self):
        fn = functools.partial(AsyncCallable())
        assert is_async_callable(fn)

    @pytest.mark.parametrize("value", [None, 42, "string", object()])
    def test_non_callable(self, value):
        assert not is_async_callable(value)

    def test_task_decorator_async_function(self):
        @task
        async def async_task_fn():
            return 42

        assert is_async_callable(async_task_fn)

    def test_task_decorator_sync_function(self):
        @task
        def sync_task_fn():
            return 42

        assert not is_async_callable(sync_task_fn)
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/tests/task_sdk/bases/test_decorator.py",
"license": "Apache License 2.0",
"lines": 146,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:scripts/ci/prek/check_airflowctl_command_coverage.py | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# /// script
# requires-python = ">=3.10,<3.11"
# dependencies = [
# "rich>=13.6.0",
# ]
# ///
"""
Check that all airflowctl CLI commands have integration test coverage by comparing commands from operations.py against test_commands in conftest.py.
"""
from __future__ import annotations
import ast
import re
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.resolve()))
from common_prek_utils import AIRFLOW_ROOT_PATH, console
OPERATIONS_FILE = AIRFLOW_ROOT_PATH / "airflow-ctl" / "src" / "airflowctl" / "api" / "operations.py"
CTL_TESTS_FILE = (
AIRFLOW_ROOT_PATH / "airflow-ctl-tests" / "tests" / "airflowctl_tests" / "test_airflowctl_commands.py"
)
# Operations excluded from CLI (see cli_config.py)
EXCLUDED_OPERATION_CLASSES = {"BaseOperations", "LoginOperations", "VersionOperations"}
EXCLUDED_METHODS = {
"__init__",
"__init_subclass__",
"error",
"_check_flag_and_exit_if_server_response_error",
"bulk",
"export",
}
EXCLUDED_COMMANDS = {
"assets delete-dag-queued-events",
"assets delete-queued-event",
"assets delete-queued-events",
"assets get-by-alias",
"assets get-dag-queued-event",
"assets get-dag-queued-events",
"assets get-queued-events",
"assets list-by-alias",
"assets materialize",
"backfill cancel",
"backfill create",
"backfill create-dry-run",
"backfill get",
"backfill pause",
"backfill unpause",
"connections create-defaults",
"connections test",
"dags delete",
"dags get-import-error",
"dags get-tags",
}
def parse_operations() -> dict[str, list[str]]:
    """Derive the CLI command table from ``*Operations`` classes.

    Parses ``operations.py`` with ``ast`` and returns a mapping of command
    group name -> list of subcommands, built from the public methods of each
    non-excluded ``*Operations`` class (underscores become dashes).
    """
    commands: dict[str, list[str]] = {}
    tree = ast.parse(OPERATIONS_FILE.read_text(), filename=str(OPERATIONS_FILE))
    for node in ast.walk(tree):
        if not (isinstance(node, ast.ClassDef) and node.name.endswith("Operations")):
            continue
        if node.name in EXCLUDED_OPERATION_CLASSES:
            continue
        group_name = node.name.replace("Operations", "").lower()
        commands[group_name] = [
            child.name.replace("_", "-")
            for child in node.body
            if isinstance(child, ast.FunctionDef)
            and child.name not in EXCLUDED_METHODS
            and not child.name.startswith("_")
        ]
    return commands
def parse_tested_commands() -> set[str]:
    """Collect ``<group> <subcommand>`` pairs referenced in the CLI test file."""
    content = CTL_TESTS_FILE.read_text()
    # Match command patterns like "assets list", "dags list-import-errors", etc.
    # Also handles f-strings like f"dagrun get..." or f'dagrun get...'
    pattern = r'f?["\']([a-z]+(?:-[a-z]+)*\s+[a-z]+(?:-[a-z]+)*)'
    tested: set[str] = set()
    for match in re.findall(pattern, content):
        parts = match.split()
        if len(parts) < 2:
            continue
        tested.add(f"{parts[0]} {parts[1]}")
    return tested
def main():
    """Exit 1 listing uncovered CLI commands; exit 0 when all are covered."""
    available = parse_operations()
    tested = parse_tested_commands()
    # A command is "missing" when it is neither tested nor explicitly excluded.
    missing = []
    for group, subcommands in sorted(available.items()):
        for subcommand in sorted(subcommands):
            cmd = f"{group} {subcommand}"
            if cmd not in tested and cmd not in EXCLUDED_COMMANDS:
                missing.append(cmd)
    if missing:
        console.print("[red]ERROR: Commands not covered by integration tests:[/]")
        for cmd in missing:
            console.print(f" [red]- {cmd}[/]")
        console.print()
        console.print("[yellow]Fix by either:[/]")
        console.print(f"1. Add test to {CTL_TESTS_FILE}")
        console.print(f"2. Add to EXCLUDED_COMMANDS in {__file__}")
        sys.exit(1)
    total = sum(len(cmds) for cmds in available.values())
    console.print(
        f"[green]All {total} CLI commands covered ({len(tested)} tested, {len(EXCLUDED_COMMANDS)} excluded)[/]"
    )
    sys.exit(0)
| {
"repo_id": "apache/airflow",
"file_path": "scripts/ci/prek/check_airflowctl_command_coverage.py",
"license": "Apache License 2.0",
"lines": 125,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/standard/src/airflow/providers/standard/utils/openlineage.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from typing import TYPE_CHECKING
from airflow.providers.common.compat.openlineage.check import require_openlineage_version
from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
if TYPE_CHECKING:
from airflow.models import TaskInstance
from airflow.sdk.types import RuntimeTaskInstanceProtocol as RuntimeTI
log = logging.getLogger(__name__)
OPENLINEAGE_PROVIDER_MIN_VERSION = "2.8.0"
def _is_openlineage_provider_accessible() -> bool:
    """
    Check if the OpenLineage provider is accessible.

    This function attempts to import the necessary OpenLineage modules and checks if the provider
    is enabled and the listener is available.

    Returns:
        bool: True if the OpenLineage provider is accessible, False otherwise.
    """
    try:
        # Imported lazily so this module still works when the provider is absent.
        from airflow.providers.openlineage.conf import is_disabled
        from airflow.providers.openlineage.plugins.listener import get_openlineage_listener
    except (ImportError, AttributeError):
        log.debug("OpenLineage provider could not be imported.")
        return False
    if is_disabled():
        log.debug("OpenLineage provider is disabled.")
        return False
    if not get_openlineage_listener():
        log.debug("OpenLineage listener could not be found.")
        return False
    return True
@require_openlineage_version(provider_min_version=OPENLINEAGE_PROVIDER_MIN_VERSION)
def _get_openlineage_parent_info(ti: TaskInstance | RuntimeTI) -> dict[str, str]:
    """Get OpenLineage metadata about the parent task.

    :param ti: task instance the parent/root lineage identifiers are derived from.
    :return: dict with parent and root job/run identifier keys.
    :raises AirflowOptionalProviderFeatureException: via the decorator, when the
        installed OpenLineage provider is older than the required minimum.
    """
    # Imported lazily: the version-check decorator above runs first, so the
    # macros are only imported from a compatible provider.
    from airflow.providers.openlineage.plugins.macros import (
        lineage_job_name,
        lineage_job_namespace,
        lineage_root_job_name,
        lineage_root_job_namespace,
        lineage_root_run_id,
        lineage_run_id,
    )

    return {
        "parentRunId": lineage_run_id(ti),
        "parentJobName": lineage_job_name(ti),
        "parentJobNamespace": lineage_job_namespace(),
        "rootParentRunId": lineage_root_run_id(ti),
        "rootParentJobName": lineage_root_job_name(ti),
        "rootParentJobNamespace": lineage_root_job_namespace(ti),
    }
def _inject_openlineage_parent_info_to_dagrun_conf(
dr_conf: dict | None, ol_parent_info: dict[str, str]
) -> dict:
"""
Safely inject OpenLineage parent and root run metadata into a DAG run configuration.
This function adds parent and root job/run identifiers derived from the given TaskInstance into the
`openlineage` section of the DAG run configuration. If an `openlineage` key already exists, it is
preserved and extended, but no existing parent or root identifiers are overwritten.
The function performs several safety checks:
- If conf is not a dictionary or contains a non-dict `openlineage` section, conf is returned unmodified.
- If `openlineage` section contains any parent/root lineage identifiers, conf is returned unmodified.
Args:
dr_conf: The original DAG run configuration dictionary or None.
ol_parent_info: OpenLineage metadata about the parent task
Returns:
A modified DAG run conf with injected OpenLineage parent and root metadata,
or the original conf if injection is not possible.
"""
current_ol_dr_conf = {}
if isinstance(dr_conf, dict) and dr_conf.get("openlineage"):
current_ol_dr_conf = dr_conf["openlineage"]
if not isinstance(current_ol_dr_conf, dict):
log.warning(
"Existing 'openlineage' section of DagRun conf is not a dictionary; "
"skipping injection of parent metadata."
)
return dr_conf
forbidden_keys = (
"parentRunId",
"parentJobName",
"parentJobNamespace",
"rootParentRunId",
"rootJobName",
"rootJobNamespace",
)
if existing := [k for k in forbidden_keys if k in current_ol_dr_conf]:
log.warning(
"'openlineage' section of DagRun conf already contains parent or root "
"identifiers: `%s`; skipping injection to avoid overwriting existing values.",
", ".join(existing),
)
return dr_conf
return {**(dr_conf or {}), **{"openlineage": {**ol_parent_info, **current_ol_dr_conf}}}
def safe_inject_openlineage_properties_into_dagrun_conf(
    dr_conf: dict | None, ti: TaskInstance | RuntimeTI | None
) -> dict | None:
    """
    Safely inject OpenLineage parent task metadata into a DAG run conf.

    This function checks whether the OpenLineage provider is accessible and supports parent information
    injection. If so, it enriches the DAG run conf with OpenLineage metadata about the parent task
    to improve lineage tracking. The function does not modify other conf fields, will not overwrite
    any existing content, and safely returns the original configuration if OpenLineage is unavailable,
    unsupported, or an error occurs during injection.

    :param dr_conf: The original DAG run configuration dictionary.
    :param ti: The TaskInstance whose metadata may be injected.
    :return: A potentially enriched DAG run conf with OpenLineage parent information,
        or the original conf if injection was skipped or failed.
    """
    try:
        if ti is None:
            log.debug("Task instance not provided - dagrun conf not modified.")
            return dr_conf
        if not _is_openlineage_provider_accessible():
            log.debug("OpenLineage provider not accessible - dagrun conf not modified.")
            return dr_conf
        # May raise AirflowOptionalProviderFeatureException on providers older
        # than OPENLINEAGE_PROVIDER_MIN_VERSION (handled below).
        ol_parent_info = _get_openlineage_parent_info(ti=ti)
        log.info("Injecting openlineage parent task information into dagrun conf.")
        # A shallow copy is passed so the caller's dict is never mutated in place.
        new_conf = _inject_openlineage_parent_info_to_dagrun_conf(
            dr_conf=dr_conf.copy() if isinstance(dr_conf, dict) else dr_conf,
            ol_parent_info=ol_parent_info,
        )
        return new_conf
    except AirflowOptionalProviderFeatureException:
        log.info(
            "Current OpenLineage provider version doesn't support parent information in "
            "the DagRun conf. Upgrade `apache-airflow-providers-openlineage>=%s` to use this feature. "
            "DagRun conf has not been modified by OpenLineage.",
            OPENLINEAGE_PROVIDER_MIN_VERSION,
        )
        return dr_conf
    except Exception as e:
        # Best-effort by design: lineage enrichment must never fail the trigger.
        log.warning(
            "An error occurred while trying to inject OpenLineage information into dagrun conf. "
            "DagRun conf has not been modified by OpenLineage. Error: %s",
            str(e),
        )
        log.debug("Error details: ", exc_info=e)
        return dr_conf
| {
"repo_id": "apache/airflow",
"file_path": "providers/standard/src/airflow/providers/standard/utils/openlineage.py",
"license": "Apache License 2.0",
"lines": 154,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/standard/tests/unit/standard/utils/test_openlineage.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import MagicMock, patch
import pytest
from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
from airflow.providers.standard.utils.openlineage import (
OPENLINEAGE_PROVIDER_MIN_VERSION,
_get_openlineage_parent_info,
_inject_openlineage_parent_info_to_dagrun_conf,
_is_openlineage_provider_accessible,
safe_inject_openlineage_properties_into_dagrun_conf,
)
OL_UTILS_PATH = "airflow.providers.standard.utils.openlineage"
OL_PROVIDER_PATH = "airflow.providers.openlineage"
OL_MACROS_PATH = f"{OL_PROVIDER_PATH}.plugins.macros"
OL_CONF_PATH = f"{OL_PROVIDER_PATH}.conf"
OL_LISTENER_PATH = f"{OL_PROVIDER_PATH}.plugins.listener"
IMPORTLIB_VERSION = "importlib.metadata.version"
@patch(f"{OL_LISTENER_PATH}.get_openlineage_listener")
@patch(f"{OL_CONF_PATH}.is_disabled")
def test_is_openlineage_provider_accessible(mock_is_disabled, mock_get_listener):
mock_is_disabled.return_value = False
mock_get_listener.return_value = True
assert _is_openlineage_provider_accessible() is True
@patch(f"{OL_LISTENER_PATH}.get_openlineage_listener")
@patch(f"{OL_CONF_PATH}.is_disabled")
def test_is_openlineage_provider_disabled(mock_is_disabled, mock_get_listener):
mock_is_disabled.return_value = True
assert _is_openlineage_provider_accessible() is False
@patch(f"{OL_LISTENER_PATH}.get_openlineage_listener")
@patch(f"{OL_CONF_PATH}.is_disabled")
def test_is_openlineage_listener_not_found(mock_is_disabled, mock_get_listener):
mock_is_disabled.return_value = False
mock_get_listener.return_value = None
assert _is_openlineage_provider_accessible() is False
@patch(f"{OL_CONF_PATH}.is_disabled")
def test_is_openlineage_provider_accessible_import_error(mock_is_disabled):
"""Test that ImportError is handled when OpenLineage modules cannot be imported."""
mock_is_disabled.side_effect = RuntimeError("Should not be called.")
with patch.dict(
"sys.modules",
{
OL_CONF_PATH: None,
OL_LISTENER_PATH: None,
},
):
result = _is_openlineage_provider_accessible()
assert result is False
def _mock_ol_parent_info():
"""Create a mock OpenLineage parent info dict."""
return {
"parentRunId": "test-run-id",
"parentJobName": "test-job",
"parentJobNamespace": "test-ns",
"rootParentRunId": "test-root-run-id",
"rootParentJobName": "test-root-job",
"rootParentJobNamespace": "test-root-ns",
}
def test_get_openlineage_parent_info():
"""Test that _get_openlineage_parent_info calls all macros correctly and returns proper structure."""
ti = MagicMock()
expected_values = {
"parentRunId": "parent-run-id-123",
"parentJobName": "parent-job-name",
"parentJobNamespace": "parent-namespace",
"rootParentRunId": "root-run-id-456",
"rootParentJobName": "root-job-name",
"rootParentJobNamespace": "root-namespace",
}
def _mock_version(package):
if package == "apache-airflow-providers-openlineage":
return OPENLINEAGE_PROVIDER_MIN_VERSION # Exactly minimum version
raise Exception(f"Unexpected package: {package}")
with (
patch(f"{OL_MACROS_PATH}.lineage_run_id", return_value=expected_values["parentRunId"]) as mock_run_id,
patch(
f"{OL_MACROS_PATH}.lineage_job_name", return_value=expected_values["parentJobName"]
) as mock_job_name,
patch(
f"{OL_MACROS_PATH}.lineage_job_namespace",
return_value=expected_values["parentJobNamespace"],
) as mock_job_namespace,
patch(
f"{OL_MACROS_PATH}.lineage_root_run_id",
return_value=expected_values["rootParentRunId"],
) as mock_root_run_id,
patch(
f"{OL_MACROS_PATH}.lineage_root_job_name",
return_value=expected_values["rootParentJobName"],
) as mock_root_job_name,
patch(
f"{OL_MACROS_PATH}.lineage_root_job_namespace",
return_value=expected_values["rootParentJobNamespace"],
) as mock_root_job_namespace,
patch(IMPORTLIB_VERSION, side_effect=_mock_version),
):
result = _get_openlineage_parent_info(ti)
# Verify all macros were called correctly
mock_run_id.assert_called_once_with(ti)
mock_job_name.assert_called_once_with(ti)
mock_job_namespace.assert_called_once() # No args
mock_root_run_id.assert_called_once_with(ti)
mock_root_job_name.assert_called_once_with(ti)
mock_root_job_namespace.assert_called_once_with(ti)
# Verify result structure
assert isinstance(result, dict)
assert result == expected_values
assert set(result.keys()) == {
"parentRunId",
"parentJobName",
"parentJobNamespace",
"rootParentRunId",
"rootParentJobName",
"rootParentJobNamespace",
}
@pytest.mark.parametrize(
("provider_version", "should_raise"),
[
("2.7.0", True), # Below minimum
("2.7.9", True), # Below minimum
("2.8.0", False), # Exactly minimum
("2.8.1", False), # Above minimum
],
)
def test_get_openlineage_parent_info_version_check(provider_version, should_raise):
"""Test that _get_openlineage_parent_info raises AirflowOptionalProviderFeatureException when version is insufficient."""
ti = MagicMock()
def _mock_version(package):
if package == "apache-airflow-providers-openlineage":
return provider_version
raise Exception(f"Unexpected package: {package}")
with patch(IMPORTLIB_VERSION, side_effect=_mock_version):
if should_raise:
expected_err = (
f"OpenLineage provider version `{provider_version}` is lower than "
f"required `{OPENLINEAGE_PROVIDER_MIN_VERSION}`, "
"skipping function `_get_openlineage_parent_info` execution"
)
with pytest.raises(AirflowOptionalProviderFeatureException, match=expected_err):
_get_openlineage_parent_info(ti)
else:
# When version is sufficient, mock all macros to allow execution
with (
patch(f"{OL_MACROS_PATH}.lineage_run_id", return_value="run-id"),
patch(f"{OL_MACROS_PATH}.lineage_job_name", return_value="job-name"),
patch(f"{OL_MACROS_PATH}.lineage_job_namespace", return_value="job-ns"),
patch(f"{OL_MACROS_PATH}.lineage_root_run_id", return_value="root-run-id"),
patch(f"{OL_MACROS_PATH}.lineage_root_job_name", return_value="root-job-name"),
patch(f"{OL_MACROS_PATH}.lineage_root_job_namespace", return_value="root-job-ns"),
):
result = _get_openlineage_parent_info(ti)
assert isinstance(result, dict)
assert "parentRunId" in result
@pytest.mark.parametrize("dr_conf", [None, {}])
def test_inject_parent_info_with_none_or_empty_conf(dr_conf):
"""Test injection with None or empty dict creates new openlineage section."""
result = _inject_openlineage_parent_info_to_dagrun_conf(dr_conf, _mock_ol_parent_info())
expected = {"openlineage": _mock_ol_parent_info()}
assert result == expected
@pytest.mark.parametrize("dr_conf", ["conf as string", ["conf_list"], [{"a": 1}, {"b": 2}]])
def test_inject_parent_info_with_wrong_type_conf_raises_error(dr_conf):
"""Test injection with wrong type of conf raises error (we catch it later on)."""
with pytest.raises(TypeError):
_inject_openlineage_parent_info_to_dagrun_conf(dr_conf, _mock_ol_parent_info())
def test_inject_parent_info_with_existing_conf_no_openlineage_key():
    """Test injection with existing conf but no openlineage key."""
    dr_conf = {"some": "other", "config": "value"}
    result = _inject_openlineage_parent_info_to_dagrun_conf(dr_conf, _mock_ol_parent_info())
    # Unrelated top-level keys are preserved alongside the new section.
    expected = {
        "some": "other",
        "config": "value",
        "openlineage": _mock_ol_parent_info(),
    }
    assert result == expected
    # Original conf should not be modified
    assert dr_conf == {"some": "other", "config": "value"}
def test_inject_parent_info_with_existing_openlineage_dict():
"""Test injection with existing openlineage dict merges correctly."""
dr_conf = {
"some": "other",
"openlineage": {
"existing": "value",
"otherKey": "otherValue",
},
}
result = _inject_openlineage_parent_info_to_dagrun_conf(dr_conf, _mock_ol_parent_info())
expected = {
"some": "other",
"openlineage": {
"existing": "value",
"otherKey": "otherValue",
**_mock_ol_parent_info(),
},
}
assert result == expected
# Original conf should not be modified
assert dr_conf == {
"some": "other",
"openlineage": {
"existing": "value",
"otherKey": "otherValue",
},
}
def test_inject_parent_info_with_non_dict_openlineage_returns_unchanged():
    """Test that non-dict openlineage value returns conf unchanged."""
    dr_conf = {"openlineage": "not-a-dict"}
    result = _inject_openlineage_parent_info_to_dagrun_conf(dr_conf, _mock_ol_parent_info())
    assert result == dr_conf
    assert result is dr_conf  # Should return same object
@pytest.mark.parametrize(
"forbidden_key",
[
"parentRunId",
"parentJobName",
"parentJobNamespace",
"rootParentRunId",
"rootJobName",
"rootJobNamespace",
],
)
def test_inject_parent_info_with_existing_forbidden_key_returns_unchanged(forbidden_key):
"""Test that existing forbidden keys prevent injection."""
dr_conf = {
"openlineage": {
forbidden_key: "existing-value",
"otherKey": "value",
}
}
result = _inject_openlineage_parent_info_to_dagrun_conf(dr_conf, _mock_ol_parent_info())
assert result == dr_conf
assert result is dr_conf # Should return same object
def test_inject_parent_info_with_multiple_existing_keys_returns_unchanged():
"""Test that multiple existing forbidden keys are all detected."""
dr_conf = {
"openlineage": {
"parentRunId": "existing-parent-id",
"rootParentJobName": "existing-root-job",
"otherKey": "value",
}
}
result = _inject_openlineage_parent_info_to_dagrun_conf(dr_conf, _mock_ol_parent_info())
assert result == dr_conf
# Original conf should not be modified
assert dr_conf == {
"openlineage": {
"parentRunId": "existing-parent-id",
"rootParentJobName": "existing-root-job",
"otherKey": "value",
}
}
def test_safe_inject_returns_unchanged_when_provider_not_accessible():
    """Test returns original conf when OpenLineage provider is not accessible."""
    dr_conf = {"some": "config"}
    with patch(f"{OL_UTILS_PATH}._is_openlineage_provider_accessible", return_value=False):
        result = safe_inject_openlineage_properties_into_dagrun_conf(dr_conf, MagicMock())
    assert result == dr_conf
    assert result is dr_conf  # Should return same object
def test_safe_inject_correctly_injects_openlineage_info():
"""Test that OpenLineage injection works when OL is available and version is sufficient."""
dr_conf = {"some": "config"}
expected_result = {
"some": "config",
"openlineage": _mock_ol_parent_info(),
}
def _mock_version(package):
if package == "apache-airflow-providers-openlineage":
return OPENLINEAGE_PROVIDER_MIN_VERSION
raise Exception(f"Unexpected package: {package}")
with (
patch(f"{OL_UTILS_PATH}._is_openlineage_provider_accessible", return_value=True),
patch(IMPORTLIB_VERSION, side_effect=_mock_version),
patch(f"{OL_UTILS_PATH}._get_openlineage_parent_info", return_value=_mock_ol_parent_info()),
):
result = safe_inject_openlineage_properties_into_dagrun_conf(dr_conf, MagicMock())
assert result == expected_result
@pytest.mark.parametrize("dr_conf", [None, {}, "not-a-dict", ["a", "b", "c"]])
def test_safe_inject_handles_none_empty_and_non_dict_conf(dr_conf):
"""Test handles None, empty dict, or non-dict conf without raising error."""
def _mock_version(package):
if package == "apache-airflow-providers-openlineage":
return OPENLINEAGE_PROVIDER_MIN_VERSION
raise Exception(f"Unexpected package: {package}")
with (
patch(f"{OL_UTILS_PATH}._is_openlineage_provider_accessible", return_value=True),
patch(IMPORTLIB_VERSION, side_effect=_mock_version),
patch(f"{OL_UTILS_PATH}._get_openlineage_parent_info", return_value=_mock_ol_parent_info()),
):
result = safe_inject_openlineage_properties_into_dagrun_conf(dr_conf, MagicMock())
if dr_conf is None or isinstance(dr_conf, dict):
assert result == {"openlineage": _mock_ol_parent_info()}
else:
assert result == dr_conf
assert result is dr_conf
def test_safe_inject_copies_dict_before_passing():
    """Test that dict is copied before being passed to inject function."""
    dr_conf = {"some": "config", "nested": {"key": "value"}}
    with (
        patch(f"{OL_UTILS_PATH}._is_openlineage_provider_accessible", return_value=True),
        patch(f"{OL_UTILS_PATH}._get_openlineage_parent_info", return_value=_mock_ol_parent_info()),
        patch(f"{OL_UTILS_PATH}._inject_openlineage_parent_info_to_dagrun_conf") as mock_inject,
    ):
        mock_inject.return_value = {**dr_conf, "openlineage": _mock_ol_parent_info()}
        safe_inject_openlineage_properties_into_dagrun_conf(dr_conf, MagicMock())

    mock_inject.assert_called_once()
    forwarded = mock_inject.call_args[1]["dr_conf"]  # passed by keyword
    # Equal content, but a different (shallow-copied) object.
    assert forwarded == dr_conf
    assert forwarded is not dr_conf
@pytest.mark.parametrize(
    "exception", [ValueError("Test error"), KeyError("Missing key"), RuntimeError("Runtime issue")]
)
def test_safe_inject_preserves_original_conf_on_exception(exception):
    """Test that original conf is preserved when any exception occurs during injection."""
    dr_conf = {"key": "value", "nested": {"deep": "data"}}
    with (
        patch(f"{OL_UTILS_PATH}._is_openlineage_provider_accessible", return_value=True),
        patch(f"{OL_UTILS_PATH}._get_openlineage_parent_info", side_effect=exception),
    ):
        result = safe_inject_openlineage_properties_into_dagrun_conf(dr_conf, MagicMock())

    # Unchanged content AND the very same object.
    assert result == {"key": "value", "nested": {"deep": "data"}}
    assert result is dr_conf
@pytest.mark.parametrize(
    ("provider_version", "should_raise"),
    [
        ("2.7.0", True),  # Below minimum
        ("2.7.9", True),  # Below minimum
        ("2.8.0", False),  # Exactly minimum
        ("2.8.1", False),  # Above minimum
        ("3.0.0", False),  # Well above minimum
    ],
)
def test_safe_inject_with_provider_version_check(provider_version, should_raise):
    """Test that version checking works correctly - exception caught when insufficient, works when sufficient."""
    dr_conf = {"some": "config"}
    ti = MagicMock()
    ol_parent_info = _mock_ol_parent_info()

    def _fake_version(package):
        if package != "apache-airflow-providers-openlineage":
            raise Exception(f"Unexpected package: {package}")
        return provider_version

    with (
        patch(f"{OL_UTILS_PATH}._is_openlineage_provider_accessible", return_value=True),
        patch(IMPORTLIB_VERSION, side_effect=_fake_version),
    ):
        if should_raise:
            # Insufficient version: _get_openlineage_parent_info raises internally;
            # the safe wrapper swallows it and the conf comes back unchanged.
            assert safe_inject_openlineage_properties_into_dagrun_conf(dr_conf, ti) == dr_conf
        else:
            # Sufficient version: parent info is injected next to the existing keys.
            with patch(f"{OL_UTILS_PATH}._get_openlineage_parent_info", return_value=ol_parent_info):
                result = safe_inject_openlineage_properties_into_dagrun_conf(dr_conf, ti)
            assert result == {"some": "config", "openlineage": ol_parent_info}
def test_inject_when_provider_not_found():
    """Test that injection handles case when OpenLineage provider package is not found."""
    dr_conf = {"some": "config"}
    ti = MagicMock()
    # The require_openlineage_version decorator raises this when the provider
    # package cannot be located; the safe wrapper must swallow it.
    not_found = AirflowOptionalProviderFeatureException(
        "OpenLineage provider not found or has no version, "
        "skipping function `_get_openlineage_parent_info` execution"
    )
    with (
        patch(f"{OL_UTILS_PATH}._is_openlineage_provider_accessible", return_value=True),
        patch(f"{OL_UTILS_PATH}._get_openlineage_parent_info", side_effect=not_found),
    ):
        result = safe_inject_openlineage_properties_into_dagrun_conf(dr_conf, ti)
    assert result == dr_conf
| {
"repo_id": "apache/airflow",
"file_path": "providers/standard/tests/unit/standard/utils/test_openlineage.py",
"license": "Apache License 2.0",
"lines": 397,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:scripts/in_container/bin/generate_mprocs_config.py | #!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# /// script
# requires-python = ">=3.10"
# dependencies = [
# "rich>=13.6.0",
# "PyYAML>=6.0",
# ]
# ///
"""Generate mprocs configuration dynamically based on environment variables."""
from __future__ import annotations
import os
import sys
import tempfile
import yaml
from rich.console import Console
from rich.panel import Panel
from rich.syntax import Syntax
def get_env_bool(var_name: str, default: str = "false") -> bool:
    """Return True when the variable (or its textual default) equals "true", case-insensitively."""
    raw_value = os.environ.get(var_name, default)
    return raw_value.lower() == "true"
def get_env(var_name: str, default: str = "") -> str:
    """Fetch an environment variable, falling back to ``default`` when unset."""
    value = os.environ.get(var_name)
    return default if value is None else value
def _proc_entry(shell_cmd: str) -> dict:
    """Return one mprocs process entry with the shared restart/scrollback settings."""
    return {"shell": shell_cmd, "restart": "always", "scrollback": 100000}


def _debugpy_cmd(port: str, airflow_args: str) -> str:
    """Wrap an ``airflow`` subcommand in a debugpy listener that waits for a client."""
    return f"debugpy --listen 0.0.0.0:{port} --wait-for-client -m airflow {airflow_args}"


def generate_mprocs_config() -> str:
    """Generate mprocs YAML configuration based on environment variables.

    Which processes are emitted (api-server vs webserver, celery worker, flower,
    edge worker, dag processor) depends on the BREEZE_*/INTEGRATION_* environment
    variables; each process can be switched to a debugpy-wrapped command for
    remote debugging via its BREEZE_DEBUG_* flag.

    Returns:
        The YAML document (a single ``procs`` mapping) as a string.
    """
    procs: dict[str, dict] = {}

    # Scheduler
    if get_env_bool("BREEZE_DEBUG_SCHEDULER"):
        scheduler_cmd = _debugpy_cmd(get_env("BREEZE_DEBUG_SCHEDULER_PORT", "5678"), "scheduler")
    else:
        scheduler_cmd = "airflow scheduler"
    procs["scheduler"] = _proc_entry(scheduler_cmd)

    # API Server (Airflow 3.x+) or Webserver (Airflow 2.x)
    use_airflow_version = get_env("USE_AIRFLOW_VERSION", "")
    if not use_airflow_version.startswith("2."):
        if get_env_bool("BREEZE_DEBUG_APISERVER"):
            api_cmd = _debugpy_cmd(get_env("BREEZE_DEBUG_APISERVER_PORT", "5679"), "api-server -d")
        elif get_env_bool("DEV_MODE"):
            api_cmd = "airflow api-server -d"
        else:
            api_cmd = "airflow api-server"
        procs["api_server"] = _proc_entry(api_cmd)
    else:
        if get_env_bool("BREEZE_DEBUG_WEBSERVER"):
            web_cmd = _debugpy_cmd(get_env("BREEZE_DEBUG_WEBSERVER_PORT", "5680"), "webserver")
        elif get_env_bool("DEV_MODE"):
            web_cmd = "airflow webserver -d"
        else:
            web_cmd = "airflow webserver"
        procs["webserver"] = _proc_entry(web_cmd)

    # Triggerer
    if get_env_bool("BREEZE_DEBUG_TRIGGERER"):
        triggerer_cmd = _debugpy_cmd(get_env("BREEZE_DEBUG_TRIGGERER_PORT", "5681"), "triggerer")
    else:
        triggerer_cmd = "airflow triggerer"
    procs["triggerer"] = _proc_entry(triggerer_cmd)

    # Celery Worker (only when the celery integration is enabled)
    if get_env_bool("INTEGRATION_CELERY"):
        if get_env_bool("BREEZE_DEBUG_CELERY_WORKER"):
            celery_cmd = _debugpy_cmd(get_env("BREEZE_DEBUG_CELERY_WORKER_PORT", "5682"), "celery worker")
        else:
            celery_cmd = "airflow celery worker"
        procs["celery_worker"] = _proc_entry(celery_cmd)

    # Flower (celery integration AND flower explicitly requested)
    if get_env_bool("INTEGRATION_CELERY") and get_env_bool("CELERY_FLOWER"):
        if get_env_bool("BREEZE_DEBUG_FLOWER"):
            flower_cmd = _debugpy_cmd(get_env("BREEZE_DEBUG_FLOWER_PORT", "5683"), "celery flower")
        else:
            flower_cmd = "airflow celery flower"
        procs["flower"] = _proc_entry(flower_cmd)

    # Edge Worker (only when the Edge executor is configured)
    edge_executor = "airflow.providers.edge3.executors.edge_executor.EdgeExecutor"
    if get_env("AIRFLOW__CORE__EXECUTOR", "") == edge_executor:
        if get_env_bool("BREEZE_DEBUG_EDGE"):
            edge_cmd = _debugpy_cmd(
                get_env("BREEZE_DEBUG_EDGE_PORT", "5684"),
                "edge worker --edge-hostname breeze --queues default",
            )
        else:
            # Clear DB-related variables first so the edge worker cannot reach
            # the database directly, then start it with its own log folder.
            edge_cmd = " && ".join(
                [
                    "unset AIRFLOW__DATABASE__SQL_ALCHEMY_CONN || true",
                    "unset AIRFLOW__CELERY__RESULT_BACKEND || true",
                    "unset POSTGRES_HOST_PORT || true",
                    "unset BACKEND || true",
                    "unset POSTGRES_VERSION || true",
                    "export AIRFLOW__LOGGING__BASE_LOG_FOLDER=edge_logs",
                    "airflow edge worker --edge-hostname breeze --queues default",
                ]
            )
        procs["edge_worker"] = _proc_entry(edge_cmd)

    # Dag Processor (standalone mode only)
    if get_env_bool("STANDALONE_DAG_PROCESSOR"):
        if get_env_bool("BREEZE_DEBUG_DAG_PROCESSOR"):
            dag_proc_cmd = _debugpy_cmd(get_env("BREEZE_DEBUG_DAG_PROCESSOR_PORT", "5685"), "dag-processor")
        else:
            dag_proc_cmd = "airflow dag-processor"
        procs["dag_processor"] = _proc_entry(dag_proc_cmd)

    # Always provide an interactive shell pane.
    procs["shell"] = _proc_entry("bash")

    return yaml.dump({"procs": procs}, default_flow_style=False, sort_keys=False)
def main():
    """Entry point: generate the mprocs configuration and write it to disk.

    The output path comes from ``sys.argv[1]`` when given, otherwise
    ``<tempdir>/mprocs.yaml``. With VERBOSE=true a syntax-highlighted preview
    is printed to the terminal.
    """
    # Default to LocalExecutor for non-sqlite backends when nothing is configured.
    if get_env("BACKEND", "") != "sqlite" and not get_env("AIRFLOW__CORE__EXECUTOR"):
        os.environ["AIRFLOW__CORE__EXECUTOR"] = "LocalExecutor"

    config = generate_mprocs_config()

    # Explicit CLI argument wins over the temp-dir default.
    if len(sys.argv) > 1:
        output_path = sys.argv[1]
    else:
        output_path = os.path.join(tempfile.gettempdir(), "mprocs.yaml")

    with open(output_path, "w") as f:
        f.write(config)

    # Fix: use get_env_bool so "True"/"TRUE" are accepted too, consistent with
    # every other boolean environment switch in this script (the previous
    # direct string comparison only matched lowercase "true").
    if get_env_bool("VERBOSE"):
        console = Console()
        console.print(
            f"\n[bold green]✓[/bold green] Generated mprocs configuration at: [cyan]{output_path}[/cyan]"
        )
        # Display configuration with syntax highlighting.
        syntax = Syntax(config, "yaml", theme="monokai", line_numbers=False)
        panel = Panel(
            syntax,
            title="[bold yellow]Configuration Preview[/bold yellow]",
            border_style="blue",
            expand=False,
        )
        console.print(panel)
if __name__ == "__main__":
main()
| {
"repo_id": "apache/airflow",
"file_path": "scripts/in_container/bin/generate_mprocs_config.py",
"license": "Apache License 2.0",
"lines": 189,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/src/airflow/sdk/definitions/_internal/logging_mixin.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Any, TypeVar
import structlog
if TYPE_CHECKING:
from airflow.sdk.types import Logger
_T = TypeVar("_T")
class LoggingMixin:
    """Convenience super-class to have a logger configured with the class name."""

    # Cached logger instance; created lazily on first access.
    _log: Logger | None = None

    # Parent logger used by this class. It should match one of the loggers defined in the
    # `logging_config_class`. By default, this attribute is used to create the final name of the logger, and
    # will prefix the `_logger_name` with a separating dot.
    _log_config_logger_name: str | None = None

    _logger_name: str | None = None

    def __init__(self, context=None):
        self._set_context(context)
        super().__init__()

    @staticmethod
    def _create_logger_name(
        logged_class: type[_T],
        log_config_logger_name: str | None = None,
        class_logger_name: str | None = None,
    ) -> str:
        """
        Generate a logger name for the given `logged_class`.

        The explicit `class_logger_name` wins when provided; otherwise the
        `{class.__module__}.{class.__name__}` of the class is used. A non-empty
        `log_config_logger_name` is prepended with a separating dot.
        """
        if class_logger_name is not None:
            base_name = class_logger_name
        else:
            base_name = f"{logged_class.__module__}.{logged_class.__name__}"
        if not log_config_logger_name:
            return base_name
        if not base_name:
            return log_config_logger_name
        return f"{log_config_logger_name}.{base_name}"

    @classmethod
    def _get_log(cls, obj: Any, clazz: type[_T]) -> Logger:
        # Build the logger once and cache it on the instance (or class).
        if obj._log is None:
            obj._log = structlog.get_logger(
                cls._create_logger_name(
                    logged_class=clazz,
                    log_config_logger_name=obj._log_config_logger_name,
                    class_logger_name=obj._logger_name,
                )
            )
        return obj._log

    @classmethod
    def logger(cls) -> Logger:
        """Return a logger bound to the class itself."""
        return LoggingMixin._get_log(cls, cls)

    @property
    def log(self) -> Logger:
        """Return a logger bound to this instance's class."""
        return LoggingMixin._get_log(self, self.__class__)

    def _set_context(self, context): ...
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/definitions/_internal/logging_mixin.py",
"license": "Apache License 2.0",
"lines": 72,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:dev/breeze/src/airflow_breeze/commands/ui_commands_config.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
# Registry of the "breeze ui" command group, rendered in the CLI help output.
UI_COMMANDS: dict[str, str | list[str]] = {
    "name": "UI commands",
    "commands": [
        "check-translation-completeness",
        "compile-assets",
    ],
}
# Option groups displayed in the help text of each "breeze ui" subcommand.
# Keys are the fully-qualified command invocations; values list named option groups.
UI_PARAMETERS: dict[str, list[dict[str, str | list[str]]]] = {
    "breeze ui check-translation-completeness": [
        {
            "name": "Translation options",
            "options": [
                "--language",
                "--add-missing",
                "--remove-extra",
            ],
        },
    ],
    "breeze ui compile-assets": [
        {
            "name": "Compile ui assets flags",
            "options": [
                "--dev",
                "--force-clean",
            ],
        }
    ],
}
| {
"repo_id": "apache/airflow",
"file_path": "dev/breeze/src/airflow_breeze/commands/ui_commands_config.py",
"license": "Apache License 2.0",
"lines": 45,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:dev/breeze/tests/test_ui_commands.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
from airflow_breeze.commands.ui_commands import (
LocaleFiles,
LocaleKeySet,
LocaleSummary,
compare_keys,
expand_plural_keys,
flatten_keys,
get_plural_base,
)
class TestPluralHandling:
    """Tests for the plural-suffix helpers used by translation completeness checks."""

    def test_get_plural_base_with_suffix(self):
        plural_suffixes = ["_one", "_other"]
        assert get_plural_base("message_one", plural_suffixes) == "message"
        assert get_plural_base("message_other", plural_suffixes) == "message"

    def test_get_plural_base_without_suffix(self):
        assert get_plural_base("message", ["_one", "_other"]) is None

    def test_get_plural_base_with_complex_suffixes(self):
        cldr_suffixes = ["_zero", "_one", "_two", "_few", "_many", "_other"]
        assert get_plural_base("item_zero", cldr_suffixes) == "item"
        assert get_plural_base("item_many", cldr_suffixes) == "item"

    def test_expand_plural_keys_english(self):
        expanded = expand_plural_keys({"message_one", "message_other", "simple"}, "en")
        # English needs both the _one and _other forms; plain keys pass through.
        for expected_key in ("message_one", "message_other", "simple"):
            assert expected_key in expanded

    def test_expand_plural_keys_polish(self):
        expanded = expand_plural_keys({"message_one"}, "pl")
        # Polish uses four plural categories.
        for expected_key in ("message_one", "message_few", "message_many", "message_other"):
            assert expected_key in expanded
class TestFlattenKeys:
    """Tests for flattening nested translation dicts into dotted key paths."""

    def test_flatten_simple_dict(self):
        assert set(flatten_keys({"key1": "value1", "key2": "value2"})) == {"key1", "key2"}

    def test_flatten_nested_dict(self):
        flattened = flatten_keys({"parent": {"child1": "value1", "child2": "value2"}})
        assert set(flattened) == {"parent.child1", "parent.child2"}

    def test_flatten_deeply_nested_dict(self):
        assert flatten_keys({"level1": {"level2": {"level3": "value"}}}) == ["level1.level2.level3"]

    def test_flatten_mixed_dict(self):
        flattened = flatten_keys({"simple": "value", "nested": {"key": "value2"}})
        assert set(flattened) == {"simple", "nested.key"}
class TestCompareKeys:
    """Tests for comparing translation keys across locale files."""

    @staticmethod
    def _write_locale(tmp_path, locale, data):
        """Create <tmp_path>/<locale>/test.json holding the given mapping."""
        locale_dir = tmp_path / locale
        locale_dir.mkdir()
        (locale_dir / "test.json").write_text(json.dumps(data))

    def test_compare_keys_identical(self, tmp_path):
        shared_data = {"greeting": "Hello", "farewell": "Goodbye"}
        self._write_locale(tmp_path, "en", shared_data)
        self._write_locale(tmp_path, "de", shared_data)

        # Point LOCALES_DIR at the temp tree for the duration of the check.
        import airflow_breeze.commands.ui_commands as ui_commands

        saved_dir = ui_commands.LOCALES_DIR
        ui_commands.LOCALES_DIR = tmp_path
        try:
            summary, missing_counts = compare_keys(
                [
                    LocaleFiles(locale="en", files=["test.json"]),
                    LocaleFiles(locale="de", files=["test.json"]),
                ]
            )
            assert "test.json" in summary
            assert summary["test.json"].missing_keys.get("de", []) == []
            assert summary["test.json"].extra_keys.get("de", []) == []
        finally:
            ui_commands.LOCALES_DIR = saved_dir

    def test_compare_keys_with_missing(self, tmp_path):
        self._write_locale(tmp_path, "en", {"greeting": "Hello", "farewell": "Goodbye"})
        self._write_locale(tmp_path, "de", {"greeting": "Hallo"})

        import airflow_breeze.commands.ui_commands as ui_commands

        saved_dir = ui_commands.LOCALES_DIR
        ui_commands.LOCALES_DIR = tmp_path
        try:
            summary, missing_counts = compare_keys(
                [
                    LocaleFiles(locale="en", files=["test.json"]),
                    LocaleFiles(locale="de", files=["test.json"]),
                ]
            )
            assert "test.json" in summary
            assert "farewell" in summary["test.json"].missing_keys.get("de", [])
            assert missing_counts["test.json"]["de"] == 1
        finally:
            ui_commands.LOCALES_DIR = saved_dir

    def test_compare_keys_with_extra(self, tmp_path):
        self._write_locale(tmp_path, "en", {"greeting": "Hello"})
        self._write_locale(tmp_path, "de", {"greeting": "Hallo", "extra": "Extra"})

        import airflow_breeze.commands.ui_commands as ui_commands

        saved_dir = ui_commands.LOCALES_DIR
        ui_commands.LOCALES_DIR = tmp_path
        try:
            summary, missing_counts = compare_keys(
                [
                    LocaleFiles(locale="en", files=["test.json"]),
                    LocaleFiles(locale="de", files=["test.json"]),
                ]
            )
            assert "test.json" in summary
            assert "extra" in summary["test.json"].extra_keys.get("de", [])
        finally:
            ui_commands.LOCALES_DIR = saved_dir
class TestLocaleSummary:
    """Construction sanity check for LocaleSummary."""

    def test_locale_summary_creation(self):
        missing = {"de": ["key1", "key2"]}
        extra = {"de": ["key3"]}
        summary = LocaleSummary(missing_keys=missing, extra_keys=extra)
        assert summary.missing_keys == missing
        assert summary.extra_keys == extra
class TestLocaleFiles:
    """Construction sanity check for LocaleFiles."""

    def test_locale_files_creation(self):
        locale_files = LocaleFiles(locale="en", files=["test.json", "common.json"])
        assert locale_files.locale == "en"
        assert len(locale_files.files) == 2
class TestLocaleKeySet:
    """Construction sanity checks for LocaleKeySet."""

    def test_locale_key_set_with_keys(self):
        key_set = LocaleKeySet(locale="en", keys={"key1", "key2"})
        assert key_set.locale == "en"
        assert key_set.keys == {"key1", "key2"}

    def test_locale_key_set_without_keys(self):
        # keys=None is a valid state (e.g. unreadable locale file).
        key_set = LocaleKeySet(locale="de", keys=None)
        assert key_set.locale == "de"
        assert key_set.keys is None
class TestCountTodos:
    """Tests for counting TODO translation markers in locale data."""

    def test_count_todos_in_string(self):
        from airflow_breeze.commands.ui_commands import count_todos

        assert count_todos("TODO: translate: Hello") == 1
        assert count_todos("Hello") == 0

    def test_count_todos_in_dict(self):
        from airflow_breeze.commands.ui_commands import count_todos

        flat_data = {
            "key1": "TODO: translate: Hello",
            "key2": "Already translated",
            "key3": "TODO: translate: Goodbye",
        }
        assert count_todos(flat_data) == 2

    def test_count_todos_nested(self):
        from airflow_breeze.commands.ui_commands import count_todos

        nested_data = {
            "parent": {
                "child1": "TODO: translate: Hello",
                "child2": "TODO: translate: World",
            },
            "simple": "No TODO",
        }
        assert count_todos(nested_data) == 2
class TestAddMissingTranslations:
    """Tests for filling in missing keys with TODO placeholder values."""

    def test_add_missing_translations(self, tmp_path):
        from airflow_breeze.commands.ui_commands import add_missing_translations

        for locale, data in (
            ("en", {"greeting": "Hello", "farewell": "Goodbye"}),
            ("de", {"greeting": "Hallo"}),
        ):
            locale_dir = tmp_path / locale
            locale_dir.mkdir()
            (locale_dir / "test.json").write_text(json.dumps(data))

        import airflow_breeze.commands.ui_commands as ui_commands

        saved_dir = ui_commands.LOCALES_DIR
        ui_commands.LOCALES_DIR = tmp_path
        try:
            summary = LocaleSummary(missing_keys={"de": ["farewell"]}, extra_keys={"de": []})
            add_missing_translations("de", {"test.json": summary})

            # The German file gains the missing key with a TODO placeholder value.
            updated = json.loads((tmp_path / "de" / "test.json").read_text())
            assert "farewell" in updated
            assert updated["farewell"].startswith("TODO: translate:")
        finally:
            ui_commands.LOCALES_DIR = saved_dir
class TestRemoveExtraTranslations:
    """Tests for pruning keys that exist only in a translated locale."""

    def test_remove_extra_translations(self, tmp_path):
        from airflow_breeze.commands.ui_commands import remove_extra_translations

        de_dir = tmp_path / "de"
        de_dir.mkdir()
        (de_dir / "test.json").write_text(json.dumps({"greeting": "Hallo", "extra": "Extra Key"}))

        import airflow_breeze.commands.ui_commands as ui_commands

        saved_dir = ui_commands.LOCALES_DIR
        ui_commands.LOCALES_DIR = tmp_path
        try:
            summary = LocaleSummary(missing_keys={"de": []}, extra_keys={"de": ["extra"]})
            remove_extra_translations("de", {"test.json": summary})

            # Only the extra key is dropped; genuine translations survive.
            updated = json.loads((de_dir / "test.json").read_text())
            assert "extra" not in updated
            assert "greeting" in updated
        finally:
            ui_commands.LOCALES_DIR = saved_dir
class TestNaturalSorting:
    """Tests that generated locale files keep the eslint-compatible natural key order."""

    def test_natural_sort_matches_eslint(self, tmp_path):
        """Test that keys are sorted like eslint-plugin-jsonc with natural: true (case-insensitive with case-sensitive tiebreaker)."""
        from airflow_breeze.commands.ui_commands import add_missing_translations

        # Mixed-case keys covering exactly the cases where ASCII and natural sort differ.
        source_keys = {
            "assetEvent_few": "1",
            "asset_few": "2",
            "parseDuration": "3",
            "parsedAt": "4",
            "Zebra": "5",
            "apple": "6",
        }
        for locale, data in (("en", source_keys), ("de", {})):
            locale_dir = tmp_path / locale
            locale_dir.mkdir()
            (locale_dir / "test.json").write_text(json.dumps(data))

        import airflow_breeze.commands.ui_commands as ui_commands

        saved_dir = ui_commands.LOCALES_DIR
        ui_commands.LOCALES_DIR = tmp_path
        try:
            summary = LocaleSummary(missing_keys={"de": list(source_keys.keys())}, extra_keys={"de": []})
            add_missing_translations("de", {"test.json": summary})

            ordered_keys = list(json.loads((tmp_path / "de" / "test.json").read_text()).keys())
            # Expected order matches eslint-plugin-jsonc with natural: true:
            # - "apple" < "asset_few" < "assetEvent_few" (case-insensitive: apple < asset < assetevent)
            # - "parseDuration" < "parsedAt" (case-insensitive equal at "parse", then D < d at position 5)
            # - "Zebra" at the end (case-insensitive: z comes last)
            assert ordered_keys == ["apple", "asset_few", "assetEvent_few", "parseDuration", "parsedAt", "Zebra"]
        finally:
            ui_commands.LOCALES_DIR = saved_dir
| {
"repo_id": "apache/airflow",
"file_path": "dev/breeze/tests/test_ui_commands.py",
"license": "Apache License 2.0",
"lines": 271,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/teradata/src/airflow/providers/teradata/hooks/tpt.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
import os
import shutil
import socket
import subprocess
import tempfile
import uuid
from collections.abc import Generator
from contextlib import contextmanager
from paramiko import SSHException
from airflow.providers.ssh.hooks.ssh import SSHHook
from airflow.providers.teradata.hooks.ttu import TtuHook
from airflow.providers.teradata.utils.encryption_utils import (
generate_encrypted_file_with_openssl,
generate_random_password,
)
from airflow.providers.teradata.utils.tpt_util import (
decrypt_remote_file,
execute_remote_command,
remote_secure_delete,
secure_delete,
set_local_file_permissions,
set_remote_file_permissions,
terminate_subprocess,
transfer_file_sftp,
verify_tpt_utility_on_remote_host,
write_file,
)
class TptHook(TtuHook):
"""
Hook for executing Teradata Parallel Transporter (TPT) operations.
This hook provides methods to execute TPT operations both locally and remotely via SSH.
It supports DDL operations using tbuild utility. and data loading operations using tdload.
It extends the `TtuHook` and integrates with Airflow's SSHHook for remote execution.
The TPT operations are used to interact with Teradata databases for DDL operations
such as creating, altering, or dropping tables and high-performance data loading and
DDL operations.
Features:
- Supports both local and remote execution of TPT operations.
- Secure file encryption for remote transfers.
- Comprehensive error handling and logging.
- Resource cleanup and management.
.. seealso::
- :ref:`hook API connection <howto/connection:teradata>`
:param ssh_conn_id: SSH connection ID for remote execution. If None, executes locally.
"""
def __init__(self, ssh_conn_id: str | None = None, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.ssh_conn_id = ssh_conn_id
self.ssh_hook = SSHHook(ssh_conn_id=ssh_conn_id) if ssh_conn_id else None
def execute_ddl(
self,
tpt_script: str | list[str],
remote_working_dir: str,
) -> int:
"""
Execute a DDL statement using TPT.
Args:
tpt_script: TPT script content as string or list of strings
remote_working_dir: Remote working directory for SSH execution
Returns:
Exit code from the TPT operation
Raises:
ValueError: If tpt_script is empty or invalid
RuntimeError: Non-zero tbuild exit status or unexpected execution failure
ConnectionError: SSH connection not established or fails
TimeoutError: SSH connection/network timeout
FileNotFoundError: tbuild binary not found in PATH
"""
if not tpt_script:
raise ValueError("TPT script must not be empty.")
tpt_script_content = "\n".join(tpt_script) if isinstance(tpt_script, list) else tpt_script
# Validate script content
if not tpt_script_content.strip():
raise ValueError("TPT script content must not be empty after processing.")
if self.ssh_hook:
self.log.info("Executing DDL statements via SSH on remote host")
return self._execute_tbuild_via_ssh(tpt_script_content, remote_working_dir)
self.log.info("Executing DDL statements locally")
return self._execute_tbuild_locally(tpt_script_content)
def _execute_tbuild_via_ssh(
    self,
    tpt_script_content: str,
    remote_working_dir: str,
) -> int:
    """
    Execute tbuild command via SSH.

    The script is written locally, encrypted with a one-off password, copied to
    the remote host over SFTP, decrypted there, and only then fed to tbuild, so
    the plaintext never travels over the wire. Remote copies are scrubbed before
    the exit status is checked; local copies are scrubbed in the ``finally``.
    """
    with self.preferred_temp_directory() as tmp_dir:
        local_script_file = os.path.join(tmp_dir, f"tbuild_script_{uuid.uuid4().hex}.sql")
        write_file(local_script_file, tpt_script_content)
        encrypted_file_path = f"{local_script_file}.enc"
        remote_encrypted_script_file = os.path.join(
            remote_working_dir, os.path.basename(encrypted_file_path)
        )
        remote_script_file = os.path.join(remote_working_dir, os.path.basename(local_script_file))
        # Random job name avoids collisions with concurrent tbuild jobs on the host.
        job_name = f"tbuild_job_{uuid.uuid4().hex}"
        try:
            if not self.ssh_hook:
                raise ConnectionError("SSH connection is not established. `ssh_hook` is None or invalid.")
            with self.ssh_hook.get_conn() as ssh_client:
                # Fail fast if tbuild is missing on the remote host.
                verify_tpt_utility_on_remote_host(ssh_client, "tbuild", logging.getLogger(__name__))
                password = generate_random_password()
                generate_encrypted_file_with_openssl(local_script_file, password, encrypted_file_path)
                transfer_file_sftp(
                    ssh_client,
                    encrypted_file_path,
                    remote_encrypted_script_file,
                    logging.getLogger(__name__),
                )
                decrypt_remote_file(
                    ssh_client,
                    remote_encrypted_script_file,
                    remote_script_file,
                    password,
                    logging.getLogger(__name__),
                )
                # Restrict permissions so other users on the host cannot read the script.
                set_remote_file_permissions(ssh_client, remote_script_file, logging.getLogger(__name__))
                tbuild_cmd = ["tbuild", "-f", remote_script_file, job_name]
                self.log.info("Executing tbuild command on remote server: %s", " ".join(tbuild_cmd))
                exit_status, output, error = execute_remote_command(ssh_client, " ".join(tbuild_cmd))
                self.log.info("tbuild command output:\n%s", output)
                self.log.info("tbuild command exited with status %s", exit_status)
                # Clean up remote files before checking exit status
                remote_secure_delete(
                    ssh_client,
                    [remote_encrypted_script_file, remote_script_file],
                    logging.getLogger(__name__),
                )
                if exit_status != 0:
                    raise RuntimeError(f"tbuild command failed with exit code {exit_status}: {error}")
                return exit_status
        except ConnectionError:
            # Re-raise ConnectionError as-is (don't convert to TimeoutError)
            raise
        except (OSError, socket.gaierror) as e:
            # NOTE(review): this treats every OSError as a timeout; that is the
            # established contract of this hook (see docstrings) — keep in mind
            # that non-timeout OS errors are reported the same way.
            self.log.error("SSH connection timed out: %s", str(e))
            raise TimeoutError(
                "SSH connection timed out. Please check the network or server availability."
            ) from e
        except SSHException as e:
            raise ConnectionError(f"SSH error during connection: {str(e)}") from e
        except RuntimeError:
            # Propagate tbuild failures unchanged so callers see the original message.
            raise
        except Exception as e:
            raise RuntimeError(
                f"Unexpected error while executing tbuild script on remote machine: {str(e)}"
            ) from e
        finally:
            # Clean up local files
            secure_delete(encrypted_file_path, logging.getLogger(__name__))
            secure_delete(local_script_file, logging.getLogger(__name__))
def _execute_tbuild_locally(
    self,
    tpt_script_content: str,
) -> int:
    """
    Execute tbuild command locally.

    The script is written to a private temp file, streamed through tbuild with
    combined stdout/stderr logging, and securely deleted afterwards.
    """
    with self.preferred_temp_directory() as tmp_dir:
        local_script_file = os.path.join(tmp_dir, f"tbuild_script_{uuid.uuid4().hex}.sql")
        write_file(local_script_file, tpt_script_content)
        # Set file permission to read-only for the current user (no permissions for group/others)
        set_local_file_permissions(local_script_file, logging.getLogger(__name__))
        job_name = f"tbuild_job_{uuid.uuid4().hex}"
        tbuild_cmd = ["tbuild", "-f", local_script_file, job_name]
        if not shutil.which("tbuild"):
            raise FileNotFoundError("tbuild binary not found in PATH.")
        sp = None
        try:
            self.log.info("Executing tbuild command: %s", " ".join(tbuild_cmd))
            # start_new_session detaches the child from our process group so
            # terminate_subprocess can signal it independently on cleanup.
            sp = subprocess.Popen(
                tbuild_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, start_new_session=True
            )
            # Stream output line by line, remembering lines that look like errors
            # so the failure message is informative.
            error_lines = []
            if sp.stdout is not None:
                for line in iter(sp.stdout.readline, b""):
                    decoded_line = line.decode("UTF-8").strip()
                    self.log.info(decoded_line)
                    if "error" in decoded_line.lower():
                        error_lines.append(decoded_line)
            sp.wait()
            self.log.info("tbuild command exited with return code %s", sp.returncode)
            if sp.returncode != 0:
                error_msg = "\n".join(error_lines) if error_lines else "Unknown error"
                raise RuntimeError(f"tbuild command failed with return code {sp.returncode}: {error_msg}")
            return sp.returncode
        except RuntimeError:
            # Already a meaningful failure message; propagate unchanged.
            raise
        except Exception as e:
            self.log.error("Error executing tbuild command: %s", str(e))
            raise RuntimeError(f"Error executing tbuild command: {str(e)}") from e
        finally:
            secure_delete(local_script_file, logging.getLogger(__name__))
            terminate_subprocess(sp, logging.getLogger(__name__))
def execute_tdload(
    self,
    remote_working_dir: str,
    job_var_content: str | None = None,
    tdload_options: str | None = None,
    tdload_job_name: str | None = None,
) -> int:
    """
    Run a data-movement job through the tdload command-line utility.

    :param remote_working_dir: Working directory on the remote host for SSH execution.
    :param job_var_content: Content of the tdload job variable file.
    :param tdload_options: Extra command-line options passed to tdload.
    :param tdload_job_name: Job name; a random one is generated when omitted.
    :return: Exit code returned by the tdload operation.
    :raises RuntimeError: On non-zero tdload exit status or unexpected failure.
    :raises ConnectionError: If the SSH connection cannot be established.
    :raises TimeoutError: If the SSH connection times out.
    :raises FileNotFoundError: If the tdload binary is not on PATH.
    """
    if not tdload_job_name:
        tdload_job_name = f"tdload_job_{uuid.uuid4().hex}"
    if not self.ssh_hook:
        self.log.info("Executing tdload locally with job name: %s", tdload_job_name)
        return self._execute_tdload_locally(job_var_content, tdload_options, tdload_job_name)
    self.log.info("Executing tdload via SSH on remote host with job name: %s", tdload_job_name)
    return self._execute_tdload_via_ssh(
        remote_working_dir, job_var_content, tdload_options, tdload_job_name
    )
def _execute_tdload_via_ssh(
    self,
    remote_working_dir: str,
    job_var_content: str | None,
    tdload_options: str | None,
    tdload_job_name: str | None,
) -> int:
    """
    Stage the job variable content in a local temp file and run tdload remotely.

    :param remote_working_dir: Working directory on the remote host.
    :param job_var_content: Content for the job variable file (empty string when None).
    :param tdload_options: Extra tdload command options.
    :param tdload_job_name: Name for the tdload job.
    :return: Exit code from the tdload operation.
    """
    with self.preferred_temp_directory() as staging_dir:
        job_var_path = os.path.join(staging_dir, f"tdload_job_var_{uuid.uuid4().hex}.txt")
        write_file(job_var_path, job_var_content if job_var_content else "")
        return self._transfer_to_and_execute_tdload_on_remote(
            job_var_path, remote_working_dir, tdload_options, tdload_job_name
        )
def _transfer_to_and_execute_tdload_on_remote(
    self,
    local_job_var_file: str,
    remote_working_dir: str,
    tdload_options: str | None,
    tdload_job_name: str | None,
) -> int:
    """
    Transfer job variable file to remote host and execute tdload command.

    Mirrors ``_execute_tbuild_via_ssh``: encrypt the file locally, copy it over
    SFTP, decrypt it remotely, run tdload, then scrub both the local and remote
    copies. Remote copies are removed before the exit status is checked.
    """
    encrypted_file_path = f"{local_job_var_file}.enc"
    remote_encrypted_job_file = os.path.join(remote_working_dir, os.path.basename(encrypted_file_path))
    remote_job_file = os.path.join(remote_working_dir, os.path.basename(local_job_var_file))
    try:
        if not self.ssh_hook:
            raise ConnectionError("SSH connection is not established. `ssh_hook` is None or invalid.")
        with self.ssh_hook.get_conn() as ssh_client:
            # Fail fast if tdload is missing on the remote host.
            verify_tpt_utility_on_remote_host(ssh_client, "tdload", logging.getLogger(__name__))
            password = generate_random_password()
            generate_encrypted_file_with_openssl(local_job_var_file, password, encrypted_file_path)
            transfer_file_sftp(
                ssh_client, encrypted_file_path, remote_encrypted_job_file, logging.getLogger(__name__)
            )
            decrypt_remote_file(
                ssh_client,
                remote_encrypted_job_file,
                remote_job_file,
                password,
                logging.getLogger(__name__),
            )
            # Restrict permissions so other users on the host cannot read the job vars.
            set_remote_file_permissions(ssh_client, remote_job_file, logging.getLogger(__name__))
            # Build tdload command more robustly
            tdload_cmd = self._build_tdload_command(remote_job_file, tdload_options, tdload_job_name)
            self.log.info("Executing tdload command on remote server: %s", " ".join(tdload_cmd))
            exit_status, output, error = execute_remote_command(ssh_client, " ".join(tdload_cmd))
            self.log.info("tdload command output:\n%s", output)
            self.log.info("tdload command exited with status %s", exit_status)
            # Clean up remote files before checking exit status
            remote_secure_delete(
                ssh_client, [remote_encrypted_job_file, remote_job_file], logging.getLogger(__name__)
            )
            if exit_status != 0:
                raise RuntimeError(f"tdload command failed with exit code {exit_status}: {error}")
            return exit_status
    except ConnectionError:
        # Re-raise ConnectionError as-is (don't convert to TimeoutError)
        raise
    except (OSError, socket.gaierror) as e:
        # NOTE(review): all OSErrors are reported as timeouts — established
        # contract of this hook's SSH paths.
        self.log.error("SSH connection timed out: %s", str(e))
        raise TimeoutError(
            "SSH connection timed out. Please check the network or server availability."
        ) from e
    except SSHException as e:
        raise ConnectionError(f"SSH error during connection: {str(e)}") from e
    except RuntimeError:
        # Propagate tdload failures unchanged so callers see the original message.
        raise
    except Exception as e:
        raise RuntimeError(
            f"Unexpected error while executing tdload script on remote machine: {str(e)}"
        ) from e
    finally:
        # Clean up local files
        secure_delete(encrypted_file_path, logging.getLogger(__name__))
        secure_delete(local_job_var_file, logging.getLogger(__name__))
def _execute_tdload_locally(
    self,
    job_var_content: str | None,
    tdload_options: str | None,
    tdload_job_name: str | None,
) -> int:
    """
    Execute tdload command locally.

    The job variable content is written to a private temp file, tdload output is
    streamed to the log with combined stdout/stderr, and the file is securely
    deleted afterwards.

    Args:
        job_var_content: Content for the job variable file
        tdload_options: Additional tdload command options
        tdload_job_name: Name for the tdload job

    Returns:
        Exit code from the tdload operation
    """
    with self.preferred_temp_directory() as tmp_dir:
        local_job_var_file = os.path.join(tmp_dir, f"tdload_job_var_{uuid.uuid4().hex}.txt")
        write_file(local_job_var_file, job_var_content or "")
        # Set file permission to read-only for the current user (no permissions for group/others)
        set_local_file_permissions(local_job_var_file, logging.getLogger(__name__))
        # Log file permissions for debugging purposes
        file_permissions = oct(os.stat(local_job_var_file).st_mode & 0o777)
        self.log.debug("Local job variable file permissions: %s", file_permissions)
        # Build tdload command
        tdload_cmd = self._build_tdload_command(local_job_var_file, tdload_options, tdload_job_name)
        if not shutil.which("tdload"):
            raise FileNotFoundError("tdload binary not found in PATH.")
        sp = None
        try:
            # Print a visual separator for clarity in logs
            self.log.info("Executing tdload command: %s", " ".join(tdload_cmd))
            # start_new_session detaches the child from our process group so
            # terminate_subprocess can signal it independently on cleanup.
            sp = subprocess.Popen(
                tdload_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, start_new_session=True
            )
            # Stream output line by line, remembering lines that look like errors
            # so the failure message is informative.
            error_lines = []
            if sp.stdout is not None:
                for line in iter(sp.stdout.readline, b""):
                    decoded_line = line.decode("UTF-8").strip()
                    self.log.info(decoded_line)
                    if "error" in decoded_line.lower():
                        error_lines.append(decoded_line)
            sp.wait()
            self.log.info("tdload command exited with return code %s", sp.returncode)
            if sp.returncode != 0:
                error_msg = "\n".join(error_lines) if error_lines else ""
                if error_msg:
                    raise RuntimeError(
                        f"tdload command failed with return code {sp.returncode}:\n{error_msg}"
                    )
                raise RuntimeError(f"tdload command failed with return code {sp.returncode}")
            return sp.returncode
        except RuntimeError:
            # Already a meaningful failure message; propagate unchanged.
            raise
        except Exception as e:
            self.log.error("Error executing tdload command: %s", str(e))
            raise RuntimeError(f"Error executing tdload command: {str(e)}") from e
        finally:
            secure_delete(local_job_var_file, logging.getLogger(__name__))
            terminate_subprocess(sp, logging.getLogger(__name__))
def _build_tdload_command(
self, job_var_file: str, tdload_options: str | None, tdload_job_name: str | None
) -> list[str]:
"""
Build the tdload command with proper option handling.
Args:
job_var_file: Path to the job variable file
tdload_options: Additional tdload options as a space-separated string
tdload_job_name: Name for the tdload job
Returns:
List of command arguments for tdload
"""
tdload_cmd = ["tdload", "-j", job_var_file]
# Add tdload_options if provided, with proper handling of quoted options
if tdload_options:
# Split options while preserving quoted arguments
import shlex
try:
parsed_options = shlex.split(tdload_options)
tdload_cmd.extend(parsed_options)
except ValueError as e:
self.log.warning(
"Failed to parse tdload_options using shlex, falling back to simple split: %s", str(e)
)
# Fallback to simple split if shlex parsing fails
tdload_cmd.extend(tdload_options.split())
# Add job name if provided (and not empty)
if tdload_job_name:
tdload_cmd.append(tdload_job_name)
return tdload_cmd
def on_kill(self) -> None:
    """
    Handle cleanup when the task is killed.

    Called by Airflow when the hook must terminate, typically on task
    cancellation or shutdown. SSH connections and subprocesses are already
    released by the context managers and ``finally`` blocks of the execute
    methods, so only a log entry is emitted here; the hook point is kept for
    future enhancements.
    """
    self.log.info("TPT Hook cleanup initiated")
@contextmanager
def preferred_temp_directory(self, prefix: str = "tpt_") -> Generator[str, None, None]:
    """
    Yield a short-lived temporary directory, preferring the OS temp location.

    Falls back to the Airflow home directory when the system temp directory is
    missing or not writable. The directory is removed on context exit.

    :param prefix: Prefix for the temporary directory name.
    """
    try:
        candidate = tempfile.gettempdir()
        if not (os.path.isdir(candidate) and os.access(candidate, os.W_OK)):
            raise OSError("OS temp dir not usable")
        base_dir = candidate
    except Exception:
        # Any failure probing the OS temp dir falls back to AIRFLOW_HOME.
        base_dir = self.get_airflow_home_dir()
    with tempfile.TemporaryDirectory(dir=base_dir, prefix=prefix) as scratch:
        yield scratch
def get_airflow_home_dir(self) -> str:
    """Return the Airflow home directory (``AIRFLOW_HOME`` env var or ``~/airflow``)."""
    default_home = os.path.expanduser("~/airflow")
    return os.environ.get("AIRFLOW_HOME", default_home)
| {
"repo_id": "apache/airflow",
"file_path": "providers/teradata/src/airflow/providers/teradata/hooks/tpt.py",
"license": "Apache License 2.0",
"lines": 435,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/teradata/src/airflow/providers/teradata/operators/tpt.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from typing import TYPE_CHECKING
from airflow.models import BaseOperator
from airflow.providers.ssh.hooks.ssh import SSHHook
from airflow.providers.teradata.hooks.teradata import TeradataHook
from airflow.providers.teradata.hooks.tpt import TptHook
from airflow.providers.teradata.utils.tpt_util import (
get_remote_temp_directory,
is_valid_file,
is_valid_remote_job_var_file,
prepare_tdload_job_var_file,
prepare_tpt_ddl_script,
read_file,
)
if TYPE_CHECKING:
from paramiko import SSHClient
from airflow.sdk import Context
class DdlOperator(BaseOperator):
    """
    Operator to execute one or more DDL (Data Definition Language) statements on a Teradata Database.

    This operator is designed to facilitate DDL operations such as creating, altering, or dropping
    tables, indexes, views, or other database objects in a scalable and efficient manner.
    It leverages the TPT (Teradata Parallel Transporter) utility to perform the operations and
    supports templating for SQL statements, allowing dynamic generation of SQL at runtime.

    Key Features:

    - Executes one or more DDL statements sequentially on Teradata using TPT
    - Supports error handling with customizable error code list
    - Supports XCom push to share execution results with downstream tasks
    - Integrates with Airflow's templating engine for dynamic SQL generation
    - Can execute statements via SSH connection if needed

    :param ddl: A list of DDL statements to be executed. Each item should be a valid SQL
        DDL command supported by Teradata.
    :param error_list: Optional integer or list of error codes to ignore during execution.
        If provided, the operator will not fail when these specific error codes occur.
        Example: error_list=3803 or error_list=[3803, 3807]
    :param teradata_conn_id: The connection ID for the Teradata database.
        Defaults to TeradataHook.default_conn_name.
    :param ssh_conn_id: Optional SSH connection ID if the commands need to be executed through SSH.
    :param remote_working_dir: Directory on the remote server where temporary files will be stored.
    :param ddl_job_name: Optional name for the DDL job.
    :raises ValueError: If the ddl parameter or error_list is invalid.
    :raises RuntimeError: If underlying TPT execution (tbuild) fails with non-zero exit status.
    :raises ConnectionError: If remote SSH connection cannot be established.
    :raises TimeoutError: If SSH connection attempt times out.
    :raises FileNotFoundError: If required TPT utility (tbuild) is missing locally or on remote host.

    Example usage::

        # Example of creating tables using DdlOperator
        create_tables = DdlOperator(
            task_id="create_tables_task",
            ddl=[
                "CREATE TABLE my_database.my_table1 (id INT, name VARCHAR(100))",
                "CREATE TABLE my_database.my_table2 (id INT, value FLOAT)",
            ],
            teradata_conn_id="my_teradata_conn",
            error_list=[3803],  # Ignore "Table already exists" errors
            ddl_job_name="create_tables_job",
        )

        # Example of dropping tables using DdlOperator
        drop_tables = DdlOperator(
            task_id="drop_tables_task",
            ddl=["DROP TABLE my_database.my_table1", "DROP TABLE my_database.my_table2"],
            teradata_conn_id="my_teradata_conn",
            error_list=3807,  # Ignore "Object does not exist" errors
            ddl_job_name="drop_tables_job",
        )

        # Example using templated SQL file
        alter_table = DdlOperator(
            task_id="alter_table_task",
            ddl="{{ var.value.get('ddl_directory') }}/alter_table.sql",
            teradata_conn_id="my_teradata_conn",
            ssh_conn_id="my_ssh_conn",
            ddl_job_name="alter_table_job",
        )
    """

    template_fields = ("ddl", "ddl_job_name")
    template_ext = (".sql",)
    ui_color = "#a8e4b1"

    def __init__(
        self,
        *,
        ddl: list[str],
        error_list: int | list[int] | None = None,
        teradata_conn_id: str = TeradataHook.default_conn_name,
        ssh_conn_id: str | None = None,
        remote_working_dir: str | None = None,
        ddl_job_name: str | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.ddl = ddl
        self.error_list = error_list
        self.teradata_conn_id = teradata_conn_id
        self.ssh_conn_id = ssh_conn_id
        self.remote_working_dir = remote_working_dir
        self.ddl_job_name = ddl_job_name
        # Hooks are created lazily in execute() so the operator can be
        # constructed (and templated) without touching any connection.
        self._hook: TptHook | None = None
        self._ssh_hook: SSHHook | None = None

    def execute(self, context: Context) -> int | None:
        """Execute the DDL operations using the TptHook."""
        # Validate the ddl parameter (templating may have produced anything).
        if (
            not self.ddl
            or not isinstance(self.ddl, list)
            or not all(isinstance(stmt, str) and stmt.strip() for stmt in self.ddl)
        ):
            raise ValueError(
                "ddl parameter must be a non-empty list of non-empty strings representing DDL statements."
            )
        # Normalize error_list to a list of ints
        normalized_error_list = self._normalize_error_list(self.error_list)
        self.log.info("Initializing Teradata connection using teradata_conn_id: %s", self.teradata_conn_id)
        self._hook = TptHook(teradata_conn_id=self.teradata_conn_id, ssh_conn_id=self.ssh_conn_id)
        self._ssh_hook = SSHHook(ssh_conn_id=self.ssh_conn_id) if self.ssh_conn_id else None
        try:
            # Prepare TPT script for DDL execution
            tpt_ddl_script = prepare_tpt_ddl_script(
                sql=self.ddl,
                error_list=normalized_error_list,
                source_conn=self._hook.get_conn(),
                job_name=self.ddl_job_name,
            )
            # Set remote working directory if SSH is used. The connection is
            # used as a context manager so the probe connection is closed
            # instead of leaked (matches the hook's own SSH usage).
            if self._ssh_hook and not self.remote_working_dir:
                with self._ssh_hook.get_conn() as ssh_client:
                    self.remote_working_dir = get_remote_temp_directory(
                        ssh_client, logging.getLogger(__name__)
                    )
            # Ensure remote_working_dir has a value even for local execution
            if not self.remote_working_dir:
                self.remote_working_dir = "/tmp"
            return self._hook.execute_ddl(
                tpt_ddl_script,
                self.remote_working_dir,
            )
        except Exception as e:
            self.log.error("Failed to execute DDL operations: %s", str(e))
            raise

    def _normalize_error_list(self, error_list: int | list[int] | None) -> list[int]:
        """
        Normalize error_list parameter to a list of integers.

        Args:
            error_list: An integer, list of integers, or None

        Returns:
            A new list of integers representing error codes to ignore

        Raises:
            ValueError: If error_list is not of the expected type
        """
        if error_list is None:
            return []
        if isinstance(error_list, int):
            return [error_list]
        if isinstance(error_list, list) and all(isinstance(err, int) for err in error_list):
            # Return a copy so later mutation of the caller's list cannot
            # silently change which error codes are ignored.
            return list(error_list)
        raise ValueError(
            f"error_list must be an int or a list of ints, got {type(error_list).__name__}. "
            "Example: error_list=3803 or error_list=[3803, 3807]"
        )

    def on_kill(self):
        """Handle termination signals and ensure the hook is properly cleaned up."""
        self.log.info("Cleaning up TPT DDL connections on task kill")
        if self._hook:
            try:
                self._hook.on_kill()
                self.log.info("TPT DDL hook cleaned up successfully")
            except Exception as e:
                self.log.error("Error cleaning up TPT DDL hook: %s", str(e))
        else:
            self.log.warning("No TptHook initialized to clean up on task kill")
class TdLoadOperator(BaseOperator):
"""
Operator to handle data transfers using Teradata Parallel Transporter (TPT) tdload utility.
This operator supports three main scenarios:
1. Load data from a file to a Teradata table
2. Export data from a Teradata table to a file
3. Transfer data between two Teradata tables (potentially across different databases)
For all scenarios:
:param teradata_conn_id: Connection ID for Teradata database (source for table operations)
For file to table loading:
:param source_file_name: Path to the source file (required for file to table)
:param select_stmt: SQL SELECT statement to filter data (optional)
:param insert_stmt: SQL INSERT statement to use for loading data (optional)
:param target_table: Name of the target table (required for file to table)
:param target_teradata_conn_id: Connection ID for target Teradata database (defaults to teradata_conn_id)
For table to file export:
:param source_table: Name of the source table (required for table to file)
:param target_file_name: Path to the target file (required for table to file)
For table to table transfer:
:param source_table: Name of the source table (required for table to table)
:param select_stmt: SQL SELECT statement to filter data (optional)
:param insert_stmt: SQL INSERT statement to use for loading data (optional)
:param target_table: Name of the target table (required for table to table)
:param target_teradata_conn_id: Connection ID for target Teradata database (required for table to table)
Optional configuration parameters:
:param source_format: Format of source data (default: 'Delimited')
:param target_format: Format of target data (default: 'Delimited')
:param source_text_delimiter: Source text delimiter (default: ',')
:param target_text_delimiter: Target text delimiter (default: ',')
:param tdload_options: Additional options for tdload (optional)
:param tdload_job_name: Name for the tdload job (optional)
:param tdload_job_var_file: Path to tdload job variable file (optional)
:param ssh_conn_id: SSH connection ID for secure file transfer (optional, used for file operations)
:raises ValueError: If parameter combinations are invalid or required files are missing.
:raises RuntimeError: If underlying TPT execution (tdload) fails with non-zero exit status.
:raises ConnectionError: If remote SSH connection cannot be established.
:raises TimeoutError: If SSH connection attempt times out.
:raises FileNotFoundError: If required TPT utility (tdload) is missing locally or on remote host.
Example usage::
# Example usage for file to table:
load_file = TdLoadOperator(
task_id="load_from_file",
source_file_name="/path/to/data.csv",
target_table="my_database.my_table",
target_teradata_conn_id="teradata_target_conn",
insert_stmt="INSERT INTO my_database.my_table (col1, col2) VALUES (?, ?)",
)
# Example usage for table to file:
export_data = TdLoadOperator(
task_id="export_to_file",
source_table="my_database.my_table",
target_file_name="/path/to/export.csv",
teradata_conn_id="teradata_source_conn",
ssh_conn_id="ssh_default",
tdload_job_name="export_job",
)
# Example usage for table to table:
transfer_data = TdLoadOperator(
task_id="transfer_between_tables",
source_table="source_db.source_table",
target_table="target_db.target_table",
teradata_conn_id="teradata_source_conn",
target_teradata_conn_id="teradata_target_conn",
tdload_job_var_file="/path/to/vars.txt",
insert_stmt="INSERT INTO target_db.target_table (col1, col2) VALUES (?, ?)",
)
"""
template_fields = (
"source_table",
"target_table",
"select_stmt",
"insert_stmt",
"source_file_name",
"target_file_name",
"tdload_options",
)
ui_color = "#a8e4b1"
def __init__(
    self,
    *,
    teradata_conn_id: str = TeradataHook.default_conn_name,
    target_teradata_conn_id: str | None = None,
    ssh_conn_id: str | None = None,
    source_table: str | None = None,
    select_stmt: str | None = None,
    insert_stmt: str | None = None,
    target_table: str | None = None,
    source_file_name: str | None = None,
    target_file_name: str | None = None,
    source_format: str = "Delimited",
    target_format: str = "Delimited",
    source_text_delimiter: str = ",",
    target_text_delimiter: str = ",",
    tdload_options: str | None = None,
    tdload_job_name: str | None = None,
    tdload_job_var_file: str | None = None,
    remote_working_dir: str | None = None,
    **kwargs,
) -> None:
    """Store the transfer configuration; hooks are created later in execute()."""
    super().__init__(**kwargs)
    # Connection configuration.
    self.teradata_conn_id = teradata_conn_id
    self.target_teradata_conn_id = target_teradata_conn_id
    self.ssh_conn_id = ssh_conn_id
    # Source / target selection.
    self.source_table = source_table
    self.select_stmt = select_stmt
    self.insert_stmt = insert_stmt
    self.target_table = target_table
    self.source_file_name = source_file_name
    self.target_file_name = target_file_name
    # Data format options.
    self.source_format = source_format
    self.target_format = target_format
    self.source_text_delimiter = source_text_delimiter
    self.target_text_delimiter = target_text_delimiter
    # tdload job configuration.
    self.tdload_options = tdload_options
    self.tdload_job_name = tdload_job_name
    self.tdload_job_var_file = tdload_job_var_file
    self.remote_working_dir = remote_working_dir
    # Hooks are created in execute() once the operation mode is known.
    self._src_hook: TptHook | None = None
    self._dest_hook: TptHook | None = None
def execute(self, context: Context) -> int | None:
    """
    Run the configured TdLoad transfer and return the tdload exit code.

    The operation mode is derived from the parameter combination, the required
    hooks are initialized, and execution is dispatched based on whether a job
    variable file was supplied and whether SSH is in use.
    """
    mode = self._validate_and_determine_mode()
    self._initialize_hooks(mode)
    try:
        job_var_file = self.tdload_job_var_file
        job_var_content = None
        if not job_var_file:
            # No pre-built job variable file: render the content on the fly.
            job_var_content = self._prepare_job_var_content(mode)
            self.log.info("Prepared tdload job variable content for mode '%s'", mode)
        # Set remote working directory if SSH is used
        if self._ssh_hook and not self.remote_working_dir:
            self.remote_working_dir = get_remote_temp_directory(
                self._ssh_hook.get_conn(), logging.getLogger(__name__)
            )
        if not self.remote_working_dir:
            # Guarantee a string value even for purely local execution.
            self.remote_working_dir = "/tmp"
        return self._execute_based_on_configuration(job_var_file, job_var_content, context)
    except Exception as e:
        self.log.error("Failed to execute TdLoad operation in mode '%s': %s", mode, str(e))
        raise
def _validate_and_determine_mode(self) -> str:
"""
Validate parameters and determine the operation mode.
Returns:
A string indicating the operation mode: 'file_to_table', 'table_to_file',
'table_to_table', or 'job_var_file'
Raises:
ValueError: If parameter combinations are invalid
"""
if self.source_table and self.select_stmt:
raise ValueError(
"Both source_table and select_stmt cannot be provided simultaneously. "
"Please provide only one."
)
if self.insert_stmt and not self.target_table:
raise ValueError(
"insert_stmt is provided but target_table is not specified. "
"Please provide a target_table for the insert operation."
)
# Determine the mode of operation based on provided parameters
if self.source_file_name and self.target_table:
mode = "file_to_table"
if self.target_teradata_conn_id is None:
self.target_teradata_conn_id = self.teradata_conn_id
self.log.info(
"Loading data from file '%s' to table '%s'", self.source_file_name, self.target_table
)
elif (self.source_table or self.select_stmt) and self.target_file_name:
mode = "table_to_file"
self.log.info(
"Exporting data from %s to file '%s'",
self.source_table or "custom select statement",
self.target_file_name,
)
elif (self.source_table or self.select_stmt) and self.target_table:
mode = "table_to_table"
if self.target_teradata_conn_id is None:
raise ValueError("For table to table transfer, target_teradata_conn_id must be provided.")
self.log.info(
"Transferring data from %s to table '%s'",
self.source_table or "custom select statement",
self.target_table,
)
else:
if not self.tdload_job_var_file:
raise ValueError(
"Invalid parameter combination for the TdLoadOperator. Please provide one of these valid combinations:\n"
"1. source_file_name and target_table: to load data from a file to a table\n"
"2. source_table/select_stmt and target_file_name: to export data from a table to a file\n"
"3. source_table/select_stmt and target_table: to transfer data between tables\n"
"4. tdload_job_var_file: to use a pre-configured job variable file"
)
mode = "job_var_file"
self.log.info("Using pre-configured job variable file: %s", self.tdload_job_var_file)
return mode
def _initialize_hooks(self, mode: str) -> None:
    """
    Create the hooks required for *mode*.

    A source ``TptHook`` is always created; a destination hook is added for
    the table-targeting modes, and an ``SSHHook`` when an SSH connection id
    is configured.
    """
    self.log.info("Initializing source connection using teradata_conn_id: %s", self.teradata_conn_id)
    self._src_hook = TptHook(teradata_conn_id=self.teradata_conn_id, ssh_conn_id=self.ssh_conn_id)
    needs_destination = mode in ("table_to_table", "file_to_table")
    if needs_destination:
        self.log.info(
            "Initializing destination connection using target_teradata_conn_id: %s",
            self.target_teradata_conn_id,
        )
        self._dest_hook = TptHook(teradata_conn_id=self.target_teradata_conn_id)
    self._ssh_hook = SSHHook(ssh_conn_id=self.ssh_conn_id) if self.ssh_conn_id else None
def _prepare_job_var_content(self, mode: str) -> str:
    """
    Build the tdload job variable file content for *mode*.

    :param mode: The operation mode.
    :return: The rendered job variable content.
    :raises ValueError: If the source hook has not been initialized.
    """
    if not self._src_hook:
        raise ValueError("Source hook not initialized")
    # Resolve connections up front (source first, matching hook initialization order).
    source_conn = self._src_hook.get_conn()
    target_conn = self._dest_hook.get_conn() if self._dest_hook else None
    return prepare_tdload_job_var_file(
        mode=mode,
        source_table=self.source_table,
        select_stmt=self.select_stmt,
        insert_stmt=self.insert_stmt,
        target_table=self.target_table,
        source_file_name=self.source_file_name,
        target_file_name=self.target_file_name,
        source_format=self.source_format,
        target_format=self.target_format,
        source_text_delimiter=self.source_text_delimiter,
        target_text_delimiter=self.target_text_delimiter,
        source_conn=source_conn,
        target_conn=target_conn,
    )
def _execute_based_on_configuration(
self, tdload_job_var_file: str | None, tdload_job_var_content: str | None, context: Context
) -> int | None:
"""Execute TdLoad operation based on SSH and job var file configuration."""
if self._ssh_hook:
if tdload_job_var_file:
with self._ssh_hook.get_conn() as ssh_client:
if is_valid_remote_job_var_file(
ssh_client, tdload_job_var_file, logging.getLogger(__name__)
):
return self._handle_remote_job_var_file(
ssh_client=ssh_client,
file_path=tdload_job_var_file,
context=context,
)
raise ValueError(
f"The provided remote job variables file path '{tdload_job_var_file}' is invalid or does not exist on remote machine."
)
else:
if not self._src_hook:
raise ValueError("Source hook not initialized")
# Ensure remote_working_dir is always a str
remote_working_dir = self.remote_working_dir or "/tmp"
return self._src_hook.execute_tdload(
remote_working_dir,
tdload_job_var_content,
self.tdload_options,
self.tdload_job_name,
)
else:
if tdload_job_var_file:
if is_valid_file(tdload_job_var_file):
return self._handle_local_job_var_file(
file_path=tdload_job_var_file,
context=context,
)
raise ValueError(
f"The provided job variables file path '{tdload_job_var_file}' is invalid or does not exist."
)
if not self._src_hook:
raise ValueError("Source hook not initialized")
# Ensure remote_working_dir is always a str
remote_working_dir = self.remote_working_dir or "/tmp"
return self._src_hook.execute_tdload(
remote_working_dir,
tdload_job_var_content,
self.tdload_options,
self.tdload_job_name,
)
    def _handle_remote_job_var_file(
        self,
        ssh_client: SSHClient,
        file_path: str | None,
        context: Context,
    ) -> int | None:
        """
        Handle execution using a remote job variable file.

        Reads the job variable file from the remote host over SFTP, then runs
        tdload on the remote machine through the source hook's SSH execution path.

        :param ssh_client: Open SSH client connection to the remote host.
        :param file_path: Path of the job variable file on the remote machine.
        :param context: Airflow task context.
        :return: Exit code from the TdLoad operation, or None.
        :raises ValueError: If no file path is given or the source hook is not initialized.
        """
        if not file_path:
            raise ValueError("Please provide a valid job variables file path on the remote machine.")
        try:
            sftp = ssh_client.open_sftp()
            try:
                # SFTP reads return bytes; decode to text before handing to tdload.
                with sftp.open(file_path, "r") as remote_file:
                    tdload_job_var_content = remote_file.read().decode("UTF-8")
                self.log.info("Successfully read remote job variable file: %s", file_path)
            finally:
                # Always release the SFTP channel, even if the read failed.
                sftp.close()
            if self._src_hook:
                # Ensure remote_working_dir is always a str
                remote_working_dir = self.remote_working_dir or "/tmp"
                return self._src_hook._execute_tdload_via_ssh(
                    remote_working_dir,
                    tdload_job_var_content,
                    self.tdload_options,
                    self.tdload_job_name,
                )
            raise ValueError("Source hook not initialized for remote execution.")
        except Exception as e:
            # Log which file failed before propagating to the task's error handling.
            self.log.error("Failed to handle remote job variable file '%s': %s", file_path, str(e))
            raise
    def _handle_local_job_var_file(
        self,
        file_path: str | None,
        context: Context,
    ) -> int | None:
        """
        Handle execution using a local job variable file.

        :param file_path: Path to the local job variable file.
        :param context: Airflow task context.
        :return: Exit code from the TdLoad operation, or None.
        :raises ValueError: If the file path is invalid or the source hook is not initialized.
        """
        if not file_path:
            raise ValueError("Please provide a valid local job variables file path.")
        if not is_valid_file(file_path):
            raise ValueError(f"The job variables file path '{file_path}' is invalid or does not exist.")
        try:
            tdload_job_var_content = read_file(file_path, encoding="UTF-8")
            self.log.info("Successfully read local job variable file: %s", file_path)
            if self._src_hook:
                return self._src_hook._execute_tdload_locally(
                    tdload_job_var_content,
                    self.tdload_options,
                    self.tdload_job_name,
                )
            raise ValueError("Source hook not initialized for local execution.")
        except Exception as e:
            # Log which file failed before propagating to the task's error handling.
            self.log.error("Failed to handle local job variable file '%s': %s", file_path, str(e))
            raise
def on_kill(self):
"""Handle termination signals and ensure all hooks are properly cleaned up."""
self.log.info("Cleaning up TPT tdload connections on task kill")
cleanup_errors = []
# Clean up the source hook if it was initialized
if self._src_hook:
try:
self.log.info("Cleaning up source connection")
self._src_hook.on_kill()
except Exception as e:
cleanup_errors.append(f"Failed to cleanup source hook: {str(e)}")
self.log.error("Error cleaning up source connection: %s", str(e))
# Clean up the destination hook if it was initialized
if self._dest_hook:
try:
self.log.info("Cleaning up destination connection")
self._dest_hook.on_kill()
except Exception as e:
cleanup_errors.append(f"Failed to cleanup destination hook: {str(e)}")
self.log.error("Error cleaning up destination connection: %s", str(e))
# Log any cleanup errors but don't raise them during shutdown
if cleanup_errors:
self.log.warning("Some cleanup operations failed: %s", "; ".join(cleanup_errors))
else:
self.log.info("All TPT connections cleaned up successfully")
| {
"repo_id": "apache/airflow",
"file_path": "providers/teradata/src/airflow/providers/teradata/operators/tpt.py",
"license": "Apache License 2.0",
"lines": 558,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/teradata/src/airflow/providers/teradata/utils/tpt_util.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
import os
import shutil
import stat
import subprocess
import uuid
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from paramiko import SSHClient
class TPTConfig:
    """Configuration constants for TPT operations."""

    # Seconds to wait for subprocess / remote commands before giving up.
    DEFAULT_TIMEOUT = 5
    # Owner read-only (0o400); job variable files may contain credentials.
    FILE_PERMISSIONS_READ_ONLY = 0o400
    # Fallback temporary directories per remote OS family.
    TEMP_DIR_WINDOWS = "C:\\Windows\\Temp"
    TEMP_DIR_UNIX = "/tmp"
def execute_remote_command(ssh_client: SSHClient, command: str) -> tuple[int, str, str]:
    """
    Run a command on the remote host and close all SSH channel files afterwards.

    :param ssh_client: SSH client connection
    :param command: Command to execute
    :return: Tuple of (exit_status, stdout, stderr)
    """
    stdin, stdout, stderr = ssh_client.exec_command(command)
    try:
        # Wait for the remote command to finish, then drain both streams.
        status = stdout.channel.recv_exit_status()
        out_text = stdout.read().decode().strip()
        err_text = stderr.read().decode().strip()
    finally:
        # Close every channel file even when the command or read fails.
        for channel_file in (stdin, stdout, stderr):
            channel_file.close()
    return status, out_text, err_text
def write_file(path: str, content: str) -> None:
    """Write ``content`` to ``path`` as UTF-8 text, replacing any existing file."""
    with open(path, mode="w", encoding="utf-8") as handle:
        handle.write(content)
def secure_delete(file_path: str, logger: logging.Logger | None = None) -> None:
    """
    Securely delete a file using shred if available, otherwise use os.remove.

    :param file_path: Path to the file to be deleted
    :param logger: Optional logger instance
    """
    log = logger or logging.getLogger(__name__)
    if not os.path.exists(file_path):
        # Nothing to delete.
        return
    try:
        if shutil.which("shred") is not None:
            # Overwrite-and-unlink via shred for potentially sensitive content.
            subprocess.run(["shred", "--remove", file_path], check=True, timeout=TPTConfig.DEFAULT_TIMEOUT)
            log.info("Securely removed file using shred: %s", file_path)
        else:
            # shred unavailable: fall back to a plain unlink.
            os.remove(file_path)
            log.info("Removed file: %s", file_path)
    except (OSError, subprocess.CalledProcessError, subprocess.TimeoutExpired) as e:
        # Deletion is best-effort: log and continue.
        log.warning("Failed to remove file %s: %s", file_path, str(e))
def remote_secure_delete(
    ssh_client: SSHClient, remote_files: list[str], logger: logging.Logger | None = None
) -> None:
    """
    Securely delete remote files via SSH. Attempts shred first, falls back to rm if shred is unavailable.

    :param ssh_client: SSH client connection
    :param remote_files: List of remote file paths to delete
    :param logger: Optional logger instance
    """
    logger = logger or logging.getLogger(__name__)
    if not ssh_client or not remote_files:
        return
    try:
        # Detect remote OS once; it decides both the delete command and the shred probe.
        remote_os = get_remote_os(ssh_client, logger)
        windows_remote = remote_os == "windows"
        # Check if shred is available on remote system (UNIX/Linux)
        shred_available = False
        if not windows_remote:
            exit_status, output, _ = execute_remote_command(ssh_client, "command -v shred")
            shred_available = exit_status == 0 and output.strip() != ""
        for file_path in remote_files:
            try:
                if windows_remote:
                    # Windows remote host - use del command
                    replace_slash = file_path.replace("/", "\\")
                    execute_remote_command(
                        ssh_client, f'if exist "{replace_slash}" del /f /q "{replace_slash}"'
                    )
                elif shred_available:
                    # UNIX/Linux with shred
                    # NOTE(review): file_path is interpolated unquoted into the shell
                    # command, so paths with spaces/metacharacters would break; callers
                    # appear to pass controlled temp-file paths — confirm.
                    execute_remote_command(ssh_client, f"shred --remove {file_path}")
                else:
                    # UNIX/Linux without shred - overwrite with zeros, then delete
                    execute_remote_command(
                        ssh_client,
                        f"if [ -f {file_path} ]; then "
                        f"dd if=/dev/zero of={file_path} bs=4096 count=$(($(stat -c '%s' {file_path})/4096+1)) 2>/dev/null; "
                        f"rm -f {file_path}; fi",
                    )
            except Exception as e:
                # Per-file failures are logged; the loop continues with remaining files.
                logger.warning("Failed to process remote file %s: %s", file_path, str(e))
        logger.info("Processed remote files: %s", ", ".join(remote_files))
    except Exception as e:
        logger.warning("Failed to remove remote files: %s", str(e))
def terminate_subprocess(sp: subprocess.Popen | None, logger: logging.Logger | None = None) -> None:
    """
    Terminate a subprocess gracefully with proper error handling.

    :param sp: Subprocess to terminate
    :param logger: Optional logger instance
    """
    log = logger or logging.getLogger(__name__)
    if sp is None or sp.poll() is not None:
        # No process, or it has already exited.
        return
    log.info("Terminating subprocess (PID: %s)", sp.pid)
    try:
        # First ask nicely (SIGTERM) and give it a bounded grace period.
        sp.terminate()
        sp.wait(timeout=TPTConfig.DEFAULT_TIMEOUT)
        log.info("Subprocess terminated gracefully")
        return
    except subprocess.TimeoutExpired:
        log.warning(
            "Subprocess did not terminate gracefully within %d seconds, killing it", TPTConfig.DEFAULT_TIMEOUT
        )
    except Exception as e:
        log.error("Error terminating subprocess: %s", str(e))
        return
    # Grace period expired: escalate to an unconditional kill.
    try:
        sp.kill()
        sp.wait(timeout=2)  # Brief wait after kill
        log.info("Subprocess killed successfully")
    except Exception as e:
        log.error("Error killing subprocess: %s", str(e))
def get_remote_os(ssh_client: SSHClient, logger: logging.Logger | None = None) -> str:
    """
    Detect the operating system of the remote host via SSH.

    :param ssh_client: SSH client connection
    :param logger: Optional logger instance
    :return: Operating system type as string ('windows' or 'unix')
    """
    log = logger or logging.getLogger(__name__)
    if not ssh_client:
        log.warning("No SSH client provided for OS detection")
        return "unix"
    try:
        # On Windows `echo %OS%` expands the variable (e.g. to "Windows_NT");
        # Unix shells echo the literal "%OS%", so the substring test fails there.
        _, stdout_data, _ = execute_remote_command(ssh_client, "echo %OS%")
        return "windows" if "Windows" in stdout_data else "unix"
    except Exception as e:
        # On any failure, default to the Unix conventions.
        log.error("Error detecting remote OS: %s", str(e))
        return "unix"
def set_local_file_permissions(local_file_path: str, logger: logging.Logger | None = None) -> None:
    """
    Set permissions for a local file to be read-only for the owner.

    :param local_file_path: Path to the local file
    :param logger: Optional logger instance
    :raises FileNotFoundError: If the file does not exist
    :raises OSError: If setting permissions fails
    """
    log = logger or logging.getLogger(__name__)
    if not local_file_path:
        log.warning("No file path provided for permission setting")
        return
    if not os.path.exists(local_file_path):
        raise FileNotFoundError(f"File does not exist: {local_file_path}")
    try:
        # 0o400 (owner read-only): the file may hold credentials.
        os.chmod(local_file_path, TPTConfig.FILE_PERMISSIONS_READ_ONLY)
        log.info("Set read-only permissions for file %s", local_file_path)
    except (OSError, PermissionError) as e:
        raise OSError(f"Error setting permissions for local file {local_file_path}: {e}") from e
def _set_windows_file_permissions(
    ssh_client: SSHClient, remote_file_path: str, logger: logging.Logger
) -> None:
    """Set restrictive permissions on Windows remote file."""
    # icacls: strip inherited ACLs, then grant the current user read-only access.
    exit_status, _, stderr_data = execute_remote_command(
        ssh_client, f'icacls "{remote_file_path}" /inheritance:r /grant:r "%USERNAME%":R'
    )
    if exit_status != 0:
        raise RuntimeError(
            f"Failed to set restrictive permissions on Windows remote file {remote_file_path}. "
            f"Exit status: {exit_status}, Error: {stderr_data if stderr_data else 'N/A'}"
        )
    logger.info("Set restrictive permissions (owner read-only) for Windows remote file %s", remote_file_path)
def _set_unix_file_permissions(ssh_client: SSHClient, remote_file_path: str, logger: logging.Logger) -> None:
    """Set read-only permissions on Unix/Linux remote file."""
    # chmod 400: owner read-only.
    exit_status, _, stderr_data = execute_remote_command(ssh_client, f"chmod 400 {remote_file_path}")
    if exit_status != 0:
        raise RuntimeError(
            f"Failed to set permissions (400) on remote file {remote_file_path}. "
            f"Exit status: {exit_status}, Error: {stderr_data if stderr_data else 'N/A'}"
        )
    logger.info("Set read-only permissions for remote file %s", remote_file_path)
def set_remote_file_permissions(
    ssh_client: SSHClient, remote_file_path: str, logger: logging.Logger | None = None
) -> None:
    """
    Set permissions for a remote file to be read-only for the owner.

    :param ssh_client: SSH client connection
    :param remote_file_path: Path to the remote file
    :param logger: Optional logger instance
    :raises RuntimeError: If permission setting fails
    """
    log = logger or logging.getLogger(__name__)
    if not ssh_client or not remote_file_path:
        log.warning(
            "Invalid parameters: ssh_client=%s, remote_file_path=%s", bool(ssh_client), remote_file_path
        )
        return
    try:
        # Probe the remote OS once, then dispatch to the matching helper.
        if get_remote_os(ssh_client, log) == "windows":
            _set_windows_file_permissions(ssh_client, remote_file_path, log)
        else:
            _set_unix_file_permissions(ssh_client, remote_file_path, log)
    except RuntimeError:
        # Helper errors already carry full context; re-raise unchanged.
        raise
    except Exception as e:
        raise RuntimeError(f"Error setting permissions for remote file {remote_file_path}: {e}") from e
def get_remote_temp_directory(ssh_client: SSHClient, logger: logging.Logger | None = None) -> str:
    """
    Get the remote temporary directory path based on the operating system.

    :param ssh_client: SSH client connection
    :param logger: Optional logger instance
    :return: Path to the remote temporary directory
    """
    log = logger or logging.getLogger(__name__)
    try:
        if get_remote_os(ssh_client, log) != "windows":
            # Unix/Linux hosts use the conventional /tmp.
            return TPTConfig.TEMP_DIR_UNIX
        exit_status, temp_dir, _ = execute_remote_command(ssh_client, "echo %TEMP%")
        # A literal "%TEMP%" echoed back means the variable was not expanded.
        if exit_status == 0 and temp_dir and temp_dir != "%TEMP%":
            return temp_dir
        log.warning("Could not get TEMP directory, using default: %s", TPTConfig.TEMP_DIR_WINDOWS)
        return TPTConfig.TEMP_DIR_WINDOWS
    except Exception as e:
        log.warning("Error getting remote temp directory: %s", str(e))
        return TPTConfig.TEMP_DIR_UNIX
def is_valid_file(file_path: str) -> bool:
    """Return True if ``file_path`` refers to an existing regular file."""
    return os.path.isfile(file_path)
def verify_tpt_utility_installed(utility: str) -> None:
    """Verify if a TPT utility (e.g., tbuild) is installed and available in the system's PATH."""
    # shutil.which returns None when no matching executable is found.
    if shutil.which(utility) is None:
        raise FileNotFoundError(
            f"TPT utility '{utility}' is not installed or not available in the system's PATH"
        )
def verify_tpt_utility_on_remote_host(
    ssh_client: SSHClient, utility: str, logger: logging.Logger | None = None
) -> None:
    """
    Verify if a TPT utility (tbuild) is installed on the remote host via SSH.

    :param ssh_client: SSH client connection
    :param utility: Name of the utility to verify
    :param logger: Optional logger instance
    :raises FileNotFoundError: If utility is not found on remote host
    :raises RuntimeError: If verification fails unexpectedly
    """
    log = logger or logging.getLogger(__name__)
    try:
        # `where` on Windows, `which` on Unix — both print the resolved path.
        locator = "where" if get_remote_os(ssh_client, log) == "windows" else "which"
        command = f"{locator} {utility}"
        exit_status, output, error = execute_remote_command(ssh_client, command)
        if exit_status != 0 or not output:
            raise FileNotFoundError(
                f"TPT utility '{utility}' is not installed or not available in PATH on the remote host. "
                f"Command: {command}, Exit status: {exit_status}, "
                f"stderr: {error if error else 'N/A'}"
            )
        log.info("TPT utility '%s' found at: %s", utility, output.split("\n")[0])
    except (FileNotFoundError, RuntimeError):
        raise
    except Exception as e:
        raise RuntimeError(f"Failed to verify TPT utility '{utility}' on remote host: {e}") from e
def prepare_tpt_ddl_script(
    sql: list[str],
    error_list: list[int] | None,
    source_conn: dict[str, Any],
    job_name: str | None = None,
) -> str:
    """
    Prepare a TPT script for executing DDL statements.

    This method generates a TPT script that defines a DDL operator and applies the provided SQL statements.
    It also supports specifying a list of error codes to handle during the operation.

    NOTE(review): the generated script embeds the password from ``source_conn`` in
    plain text; callers are responsible for keeping the script file protected.

    :param sql: A list of DDL statements to execute.
    :param error_list: A list of error codes to handle during the operation.
    :param source_conn: Connection details for the source database (expects 'host', 'login', 'password').
    :param job_name: The name of the TPT job. Defaults to unique name if None.
    :return: A formatted TPT script as a string.
    :raises ValueError: If the SQL statement list is empty.
    """
    if not sql or not isinstance(sql, list):
        raise ValueError("SQL statement list must be a non-empty list")
    # Clean and escape each SQL statement: strip whitespace, drop the trailing ';'
    # (re-added below), and double single quotes for TPT's quoted-string syntax.
    sql_statements = [
        stmt.strip().rstrip(";").replace("'", "''")
        for stmt in sql
        if stmt and isinstance(stmt, str) and stmt.strip()
    ]
    if not sql_statements:
        raise ValueError("No valid SQL statements found in the provided input")
    # Format for TPT APPLY block, indenting after the first line
    apply_sql = ",\n".join(
        [f"('{stmt};')" if i == 0 else f"    ('{stmt};')" for i, stmt in enumerate(sql_statements)]
    )
    if job_name is None:
        # Unique default so concurrent jobs don't collide on the job name.
        job_name = f"airflow_tptddl_{uuid.uuid4().hex}"
    # Format error list for inclusion in the TPT script
    if not error_list:
        error_list_stmt = "ErrorList = ['']"
    else:
        error_list_str = ", ".join([f"'{error}'" for error in error_list])
        error_list_stmt = f"ErrorList = [{error_list_str}]"
    host = source_conn["host"]
    login = source_conn["login"]
    password = source_conn["password"]
    # Whitespace-sensitive TPT job template; keep the layout intact.
    tpt_script = f"""
    DEFINE JOB {job_name}
    DESCRIPTION 'TPT DDL Operation'
    (
        APPLY
        {apply_sql}
        TO OPERATOR ( $DDL ()
        ATTR
        (
            TdpId = '{host}',
            UserName = '{login}',
            UserPassword = '{password}',
            {error_list_stmt}
        )
    );
    );
    """
    return tpt_script
def prepare_tdload_job_var_file(
    mode: str,
    source_table: str | None,
    select_stmt: str | None,
    insert_stmt: str | None,
    target_table: str | None,
    source_file_name: str | None,
    target_file_name: str | None,
    source_format: str,
    target_format: str,
    source_text_delimiter: str,
    target_text_delimiter: str,
    source_conn: dict[str, Any],
    target_conn: dict[str, Any] | None = None,
) -> str:
    """
    Prepare a tdload job variable file based on the specified mode.

    :param mode: The operation mode ('file_to_table', 'table_to_file', or 'table_to_table')
    :param source_table: Name of the source table
    :param select_stmt: SQL SELECT statement for data extraction
    :param insert_stmt: SQL INSERT statement for data loading
    :param target_table: Name of the target table
    :param source_file_name: Path to the source file
    :param target_file_name: Path to the target file
    :param source_format: Format of source data
    :param target_format: Format of target data
    :param source_text_delimiter: Source text delimiter
    :param target_text_delimiter: Target text delimiter
    :param source_conn: Source connection details (expects 'host', 'login', 'password')
    :param target_conn: Target connection details; required for 'table_to_table' mode
    :return: The content of the job variable file
    :raises ValueError: If invalid parameters are provided
    """
    job_vars: dict[str, Any] = {}
    if mode == "file_to_table":
        # Loading a file into a table: only target-side credentials are needed.
        job_vars["TargetTdpId"] = source_conn["host"]
        job_vars["TargetUserName"] = source_conn["login"]
        job_vars["TargetUserPassword"] = source_conn["password"]
        job_vars["TargetTable"] = target_table
        job_vars["SourceFileName"] = source_file_name
        if insert_stmt:
            job_vars["InsertStmt"] = insert_stmt
    elif mode == "table_to_file":
        # Exporting a table to a file: only source-side credentials are needed.
        job_vars["SourceTdpId"] = source_conn["host"]
        job_vars["SourceUserName"] = source_conn["login"]
        job_vars["SourceUserPassword"] = source_conn["password"]
        job_vars["TargetFileName"] = target_file_name
        # An explicit table name takes precedence over a select statement.
        if source_table:
            job_vars["SourceTable"] = source_table
        elif select_stmt:
            job_vars["SourceSelectStmt"] = select_stmt
    elif mode == "table_to_table":
        if target_conn is None:
            raise ValueError("target_conn must be provided for 'table_to_table' mode")
        job_vars["SourceTdpId"] = source_conn["host"]
        job_vars["SourceUserName"] = source_conn["login"]
        job_vars["SourceUserPassword"] = source_conn["password"]
        job_vars["TargetTdpId"] = target_conn["host"]
        job_vars["TargetUserName"] = target_conn["login"]
        job_vars["TargetUserPassword"] = target_conn["password"]
        job_vars["TargetTable"] = target_table
        # An explicit table name takes precedence over a select statement.
        if source_table:
            job_vars["SourceTable"] = source_table
        elif select_stmt:
            job_vars["SourceSelectStmt"] = select_stmt
        if insert_stmt:
            job_vars["InsertStmt"] = insert_stmt
    # Optional format/delimiter settings are appended for every mode when non-empty.
    for key, value in (
        ("SourceFormat", source_format),
        ("TargetFormat", target_format),
        ("SourceTextDelimiter", source_text_delimiter),
        ("TargetTextDelimiter", target_text_delimiter),
    ):
        if value:
            job_vars[key] = value
    # One Key='value' assignment per line, comma-separated, no trailing comma.
    return ",\n".join(f"{key}='{value}'" for key, value in job_vars.items())
def is_valid_remote_job_var_file(
    ssh_client: SSHClient, remote_job_var_file_path: str, logger: logging.Logger | None = None
) -> bool:
    """Check if the given remote job variable file path is a valid file."""
    if not remote_job_var_file_path:
        return False
    sftp_client = ssh_client.open_sftp()
    try:
        # stat() raises FileNotFoundError when the remote path is absent.
        file_stat = sftp_client.stat(remote_job_var_file_path)
        if not file_stat.st_mode:
            return False
        # Accept only regular files (not directories, links, devices, ...).
        return stat.S_ISREG(file_stat.st_mode)
    except FileNotFoundError:
        if logger:
            logger.error("File does not exist on remote at : %s", remote_job_var_file_path)
        return False
    finally:
        sftp_client.close()
def read_file(file_path: str, encoding: str = "UTF-8") -> str:
    """
    Read the content of a file with the specified encoding.

    :param file_path: Path to the file to be read.
    :param encoding: Encoding to use for reading the file.
    :return: Content of the file as a string.
    :raises FileNotFoundError: If ``file_path`` is not an existing regular file.
    """
    if not os.path.isfile(file_path):
        raise FileNotFoundError(f"The file {file_path} does not exist.")
    with open(file_path, encoding=encoding) as handle:
        return handle.read()
def decrypt_remote_file(
    ssh_client: SSHClient,
    remote_enc_file: str,
    remote_dec_file: str,
    password: str,
    logger: logging.Logger | None = None,
) -> int:
    """
    Decrypt a remote file using OpenSSL.

    The cipher parameters (aes-256-cbc, -salt, -pbkdf2) must match those used
    when the file was encrypted.

    :param ssh_client: SSH client connection
    :param remote_enc_file: Path to the encrypted file
    :param remote_dec_file: Path for the decrypted file
    :param password: Decryption password
    :param logger: Optional logger instance
    :return: Exit status of the decryption command
    :raises RuntimeError: If decryption fails
    """
    logger = logger or logging.getLogger(__name__)
    # Detect remote OS — quoting rules differ between cmd.exe and POSIX shells.
    remote_os = get_remote_os(ssh_client, logger)
    windows_remote = remote_os == "windows"
    if windows_remote:
        # Windows - use different quoting and potentially different OpenSSL parameters
        password_escaped = password.replace('"', '""')  # Escape double quotes for Windows
        decrypt_cmd = (
            f'openssl enc -d -aes-256-cbc -salt -pbkdf2 -pass pass:"{password_escaped}" '
            f'-in "{remote_enc_file}" -out "{remote_dec_file}"'
        )
    else:
        # Unix/Linux - use single quote escaping
        password_escaped = password.replace("'", "'\\''")  # Escape single quotes
        # NOTE(review): unlike the Windows branch, the -in/-out paths here are
        # interpolated unquoted, so paths containing spaces would break — confirm
        # that callers only pass controlled paths.
        decrypt_cmd = (
            f"openssl enc -d -aes-256-cbc -salt -pbkdf2 -pass pass:'{password_escaped}' "
            f"-in {remote_enc_file} -out {remote_dec_file}"
        )
    exit_status, stdout_data, stderr_data = execute_remote_command(ssh_client, decrypt_cmd)
    if exit_status != 0:
        raise RuntimeError(
            f"Decryption failed with exit status {exit_status}. Error: {stderr_data if stderr_data else 'N/A'}"
        )
    logger.info("Successfully decrypted remote file %s to %s", remote_enc_file, remote_dec_file)
    return exit_status
def transfer_file_sftp(
    ssh_client: SSHClient, local_path: str, remote_path: str, logger: logging.Logger | None = None
) -> None:
    """
    Transfer a file from local to remote host using SFTP.

    :param ssh_client: SSH client connection
    :param local_path: Local file path
    :param remote_path: Remote file path
    :param logger: Optional logger instance
    :raises FileNotFoundError: If local file does not exist
    :raises RuntimeError: If file transfer fails
    """
    log = logger or logging.getLogger(__name__)
    if not os.path.exists(local_path):
        raise FileNotFoundError(f"Local file does not exist: {local_path}")
    sftp = None
    try:
        sftp = ssh_client.open_sftp()
        sftp.put(local_path, remote_path)
        log.info("Successfully transferred file from %s to %s", local_path, remote_path)
    except Exception as e:
        # Wrap transport errors with both endpoints for easier diagnosis.
        raise RuntimeError(f"Failed to transfer file from {local_path} to {remote_path}: {e}") from e
    finally:
        # Close the SFTP channel even when put() failed after opening it.
        if sftp:
            sftp.close()
| {
"repo_id": "apache/airflow",
"file_path": "providers/teradata/src/airflow/providers/teradata/utils/tpt_util.py",
"license": "Apache License 2.0",
"lines": 547,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/teradata/tests/system/teradata/example_remote_tpt.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example Airflow DAG to show usage of DdlOperator and TdLoadOperator with remote SSH execution.
This DAG assumes Airflow Connections with connection ids `teradata_default` and `ssh_default` already exist locally.
It demonstrates how to use DdlOperator and TdLoadOperator to perform remote Teradata operations via SSH,
including creating, dropping, altering tables and transferring data.
"""
from __future__ import annotations
import datetime
import os
import pytest
from airflow import DAG
try:
from airflow.providers.teradata.operators.tpt import DdlOperator, TdLoadOperator
except ImportError:
pytest.skip("TERADATA provider not available", allow_module_level=True)
# [START tdload_operator_howto_guide]
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
DAG_ID = "example_remote_tpt"
CONN_ID = "teradata_default"
SSH_CONN_ID = "ssh_default"
# Define file paths and table names for the test
SYSTEM_TESTS_DIR = os.path.abspath(os.path.dirname(__file__))
SOURCE_FILE = os.path.join(SYSTEM_TESTS_DIR, "tdload_src_file.txt")
TARGET_FILE = os.path.join(SYSTEM_TESTS_DIR, "tdload_target_file.txt")
# Values rendered into the {{ params.* }} Jinja templates of every task below.
params = {
    "SOURCE_TABLE": "source_table",
    "TARGET_TABLE": "target_table",
    "SOURCE_FILE": SOURCE_FILE,
    "TARGET_FILE": TARGET_FILE,
}
with DAG(
    dag_id=DAG_ID,
    start_date=datetime.datetime(2020, 2, 2),
    schedule="@once",
    catchup=False,
    default_args={"teradata_conn_id": CONN_ID, "params": params, "ssh_conn_id": SSH_CONN_ID},
) as dag:
    # [START ddl_operator_howto_guide_drop_table]
    # Drop tables if they exist
    # error_list: Teradata error codes tolerated by the operator — presumably the
    # "object does not exist" family so a fresh environment doesn't fail. TODO confirm.
    drop_table = DdlOperator(
        task_id="drop_table",
        ddl=[
            "DROP TABLE {{ params.SOURCE_TABLE }};",
            "DROP TABLE {{ params.SOURCE_TABLE }}_UV;",
            "DROP TABLE {{ params.SOURCE_TABLE }}_ET;",
            "DROP TABLE {{ params.SOURCE_TABLE }}_WT;",
            "DROP TABLE {{ params.SOURCE_TABLE }}_Log;",
            "DROP TABLE {{ params.TARGET_TABLE }};",
            "DROP TABLE {{ params.TARGET_TABLE }}_UV;",
            "DROP TABLE {{ params.TARGET_TABLE }}_ET;",
            "DROP TABLE {{ params.TARGET_TABLE }}_WT;",
            "DROP TABLE {{ params.TARGET_TABLE }}_Log;",
        ],
        error_list=[3706, 3803, 3807],
    )
    # [END ddl_operator_howto_guide_drop_table]
    # [START ddl_operator_howto_guide_create_table]
    create_source_table = DdlOperator(
        task_id="create_source_table",
        ddl=[
            "CREATE TABLE {{ params.SOURCE_TABLE }} ( \
            first_name VARCHAR(100), \
            last_name VARCHAR(100), \
            employee_id VARCHAR(10), \
            department VARCHAR(50) \
            );"
        ],
    )
    create_target_table = DdlOperator(
        task_id="create_target_table",
        ddl=[
            "CREATE TABLE {{ params.TARGET_TABLE }} ( \
            first_name VARCHAR(100), \
            last_name VARCHAR(100), \
            employee_id VARCHAR(10), \
            department VARCHAR(50) \
            );"
        ],
    )
    # [END ddl_operator_howto_guide_create_table]
    # [START ddl_operator_howto_guide_create_index]
    create_index_on_source = DdlOperator(
        task_id="create_index_on_source",
        ddl=["CREATE INDEX idx_employee_id (employee_id) ON {{ params.SOURCE_TABLE }};"],
    )
    # [END ddl_operator_howto_guide_create_index]
    # [START tdload_operator_howto_guide_load_from_file]
    # file -> table: pipe-delimited source file into the source table.
    load_file = TdLoadOperator(
        task_id="load_file",
        source_file_name="{{ params.SOURCE_FILE }}",
        target_table="{{ params.SOURCE_TABLE }}",
        source_format="Delimited",
        source_text_delimiter="|",
    )
    # [END tdload_operator_howto_guide_load_from_file]
    # [START tdload_operator_howto_guide_export_data]
    # table -> file: export using ';' as the output delimiter.
    export_data = TdLoadOperator(
        task_id="export_data",
        source_table="{{ params.SOURCE_TABLE }}",
        target_file_name="{{ params.TARGET_FILE }}",
        target_format="Delimited",
        target_text_delimiter=";",
    )
    # [END tdload_operator_howto_guide_export_data]
    # [START tdload_operator_howto_guide_transfer_data]
    # table -> table copy between Teradata connections.
    transfer_data = TdLoadOperator(
        task_id="transfer_data",
        source_table="{{ params.SOURCE_TABLE }}",
        target_table="{{ params.TARGET_TABLE }}",
        target_teradata_conn_id=CONN_ID,
    )
    # [END tdload_operator_howto_guide_transfer_data]
    create_select_dest_table = DdlOperator(
        task_id="create_select_dest_table",
        ddl=[
            "DROP TABLE {{ params.SOURCE_TABLE }}_select_dest;",
            "DROP TABLE {{ params.SOURCE_TABLE }}_select_log;",
            "DROP TABLE {{ params.SOURCE_TABLE }}_select_err1;",
            "DROP TABLE {{ params.SOURCE_TABLE }}_select_err2;",
            "CREATE TABLE {{ params.SOURCE_TABLE }}_select_dest ( \
            first_name VARCHAR(100), \
            last_name VARCHAR(100), \
            employee_id VARCHAR(10), \
            department VARCHAR(50) \
            );",
        ],
        error_list=[3706, 3803, 3807],
    )
    # [START tdload_operator_howto_guide_transfer_data_select_stmt]
    # TdLoadOperator using select statement as source
    transfer_data_select_stmt = TdLoadOperator(
        task_id="transfer_data_select_stmt",
        select_stmt="SELECT * FROM {{ params.SOURCE_TABLE }}",
        target_table="{{ params.SOURCE_TABLE }}_select_dest",
        tdload_options="--LogTable {{ params.SOURCE_TABLE }}_select_log --ErrorTable1 {{ params.SOURCE_TABLE }}_select_err1 --ErrorTable2 {{ params.SOURCE_TABLE }}_select_err2",
        target_teradata_conn_id=CONN_ID,
    )
    # [END tdload_operator_howto_guide_transfer_data_select_stmt]
    # Create table for insert statement test
    create_insert_dest_table = DdlOperator(
        task_id="create_insert_dest_table",
        ddl=[
            "DROP TABLE {{ params.SOURCE_TABLE }}_insert_dest;",
            "DROP TABLE {{ params.SOURCE_TABLE }}_insert_log;",
            "DROP TABLE {{ params.SOURCE_TABLE }}_insert_err1;",
            "DROP TABLE {{ params.SOURCE_TABLE }}_insert_err2;",
            "CREATE TABLE {{ params.SOURCE_TABLE }}_insert_dest ( \
            first_name VARCHAR(100), \
            last_name VARCHAR(100), \
            employee_id VARCHAR(10), \
            department VARCHAR(50) \
            );",
        ],
        error_list=[3706, 3803, 3807],
    )
    # [START tdload_operator_howto_guide_transfer_data_insert_stmt]
    transfer_data_insert_stmt = TdLoadOperator(
        task_id="transfer_data_insert_stmt",
        source_table="{{ params.SOURCE_TABLE }}",
        insert_stmt="INSERT INTO {{ params.SOURCE_TABLE }}_insert_dest VALUES (?, ?, ?, ?)",
        target_table="{{ params.SOURCE_TABLE }}_insert_dest",
        tdload_options="--LogTable {{ params.SOURCE_TABLE }}_insert_log --ErrorTable1 {{ params.SOURCE_TABLE }}_insert_err1 --ErrorTable2 {{ params.SOURCE_TABLE }}_insert_err2",
        tdload_job_name="tdload_job_insert_stmt",
        target_teradata_conn_id=CONN_ID,
    )
    # [END tdload_operator_howto_guide_transfer_data_insert_stmt]
    # [START ddl_operator_howto_guide_rename_table]
    rename_target_table = DdlOperator(
        task_id="rename_target_table",
        ddl=[
            "RENAME TABLE {{ params.TARGET_TABLE }} TO {{ params.TARGET_TABLE }}_renamed;",
            "DROP TABLE {{ params.TARGET_TABLE }}_renamed",
        ],
    )
    # [END ddl_operator_howto_guide_rename_table]
    # [START ddl_operator_howto_guide_drop_index]
    drop_index_on_source = DdlOperator(
        task_id="drop_index_on_source",
        ddl=["DROP INDEX idx_employee_id ON {{ params.SOURCE_TABLE }};"],
        error_list=[3706, 3803, 3807],
    )
    # [END ddl_operator_howto_guide_drop_index]
    # [START ddl_operator_howto_guide_alter_table]
    alter_source_table = DdlOperator(
        task_id="alter_source_table",
        ddl=["ALTER TABLE {{ params.SOURCE_TABLE }} ADD hire_date DATE;"],
    )
    # [END ddl_operator_howto_guide_alter_table]
    # Define the task dependencies
    (
        drop_table
        >> create_source_table
        >> create_target_table
        >> create_index_on_source
        >> load_file
        >> export_data
        >> transfer_data
        >> create_select_dest_table
        >> transfer_data_select_stmt
        >> create_insert_dest_table
        >> transfer_data_insert_stmt
        >> rename_target_table
        >> drop_index_on_source
        >> alter_source_table
    )
    from tests_common.test_utils.watcher import watcher

    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
# [END tdload_operator_howto_guide_remote]
| {
"repo_id": "apache/airflow",
"file_path": "providers/teradata/tests/system/teradata/example_remote_tpt.py",
"license": "Apache License 2.0",
"lines": 229,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/teradata/tests/system/teradata/example_tpt.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example Airflow DAG to show usage of DdlOperator and TdLoadOperator.
This DAG assumes an Airflow Connection with connection id `teradata_default` already exists locally.
It demonstrates how to use DdlOperator to create, drop, alter, and rename Teradata tables and indexes.
It also shows how to load data from a file to a Teradata table, export data from a Teradata table to a file and
transfer data between two Teradata tables (potentially across different databases).
"""
from __future__ import annotations
import datetime
import os
import pytest
from airflow import DAG
try:
from airflow.providers.teradata.operators.tpt import DdlOperator, TdLoadOperator
except ImportError:
pytest.skip("TERADATA provider not available", allow_module_level=True)
# [START tdload_operator_howto_guide]
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
DAG_ID = "example_tpt"
CONN_ID = "teradata_default"
# NOTE(review): SSH_CONN_ID is never referenced in this local example (the
# remote TPT example uses SSH) — confirm whether it is intentional here.
SSH_CONN_ID = "ssh_default"
# Define file paths and table names for the test
SYSTEM_TESTS_DIR = os.path.abspath(os.path.dirname(__file__))
SOURCE_FILE = os.path.join(SYSTEM_TESTS_DIR, "tdload_src_file.txt")
TARGET_FILE = os.path.join(SYSTEM_TESTS_DIR, "tdload_target_file.txt")
# Keys referenced by the operators below through Jinja templating as
# "{{ params.<KEY> }}" (params is injected via default_args).
params = {
    "SOURCE_TABLE": "source_table",
    "TARGET_TABLE": "target_table",
    "SOURCE_FILE": SOURCE_FILE,
    "TARGET_FILE": TARGET_FILE,
}
with DAG(
    dag_id=DAG_ID,
    start_date=datetime.datetime(2020, 2, 2),
    schedule="@once",
    catchup=False,
    default_args={"teradata_conn_id": CONN_ID, "params": params},
) as dag:
    # [START ddl_operator_howto_guide_drop_table]
    # Drop tables if they exist
    drop_table = DdlOperator(
        task_id="drop_table",
        ddl=[
            "DROP TABLE {{ params.SOURCE_TABLE }};",
            "DROP TABLE {{ params.SOURCE_TABLE }}_UV;",
            "DROP TABLE {{ params.SOURCE_TABLE }}_ET;",
            "DROP TABLE {{ params.SOURCE_TABLE }}_WT;",
            "DROP TABLE {{ params.SOURCE_TABLE }}_Log;",
            "DROP TABLE {{ params.TARGET_TABLE }};",
            "DROP TABLE {{ params.TARGET_TABLE }}_UV;",
            "DROP TABLE {{ params.TARGET_TABLE }}_ET;",
            "DROP TABLE {{ params.TARGET_TABLE }}_WT;",
            "DROP TABLE {{ params.TARGET_TABLE }}_Log;",
        ],
        # Teradata error codes tolerated by the operator — presumably
        # "object does not exist"-style errors so the drops succeed on a
        # fresh database. TODO(review): confirm the meaning of each code.
        error_list=[3706, 3803, 3807],
    )
    # [END ddl_operator_howto_guide_drop_table]
    # [START ddl_operator_howto_guide_create_table]
    create_source_table = DdlOperator(
        task_id="create_source_table",
        ddl=[
            "CREATE TABLE {{ params.SOURCE_TABLE }} ( \
                first_name VARCHAR(100), \
                last_name VARCHAR(100), \
                employee_id VARCHAR(10), \
                department VARCHAR(50) \
            );"
        ],
    )
    create_target_table = DdlOperator(
        task_id="create_target_table",
        ddl=[
            "CREATE TABLE {{ params.TARGET_TABLE }} ( \
                first_name VARCHAR(100), \
                last_name VARCHAR(100), \
                employee_id VARCHAR(10), \
                department VARCHAR(50) \
            );"
        ],
    )
    # [END ddl_operator_howto_guide_create_table]
    # [START ddl_operator_howto_guide_create_index]
    create_index_on_source = DdlOperator(
        task_id="create_index_on_source",
        ddl=["CREATE INDEX idx_employee_id (employee_id) ON {{ params.SOURCE_TABLE }};"],
    )
    # [END ddl_operator_howto_guide_create_index]
    # [START tdload_operator_howto_guide_load_from_file]
    # File -> table load: pipe-delimited source file into the source table.
    load_file = TdLoadOperator(
        task_id="load_file",
        source_file_name="{{ params.SOURCE_FILE }}",
        target_table="{{ params.SOURCE_TABLE }}",
        source_format="Delimited",
        source_text_delimiter="|",
    )
    # [END tdload_operator_howto_guide_load_from_file]
    # [START tdload_operator_howto_guide_export_data]
    # Table -> file export: semicolon-delimited output file.
    export_data = TdLoadOperator(
        task_id="export_data",
        source_table="{{ params.SOURCE_TABLE }}",
        target_file_name="{{ params.TARGET_FILE }}",
        target_format="Delimited",
        target_text_delimiter=";",
    )
    # [END tdload_operator_howto_guide_export_data]
    # [START tdload_operator_howto_guide_transfer_data]
    # Table -> table transfer within the same connection.
    transfer_data = TdLoadOperator(
        task_id="transfer_data",
        source_table="{{ params.SOURCE_TABLE }}",
        target_table="{{ params.TARGET_TABLE }}",
        target_teradata_conn_id=CONN_ID,
    )
    # [END tdload_operator_howto_guide_transfer_data]
    # Prepare destination (and clean up tdload log/error tables) for the
    # SELECT-statement transfer test below.
    create_select_dest_table = DdlOperator(
        task_id="create_select_dest_table",
        ddl=[
            "DROP TABLE {{ params.SOURCE_TABLE }}_select_dest;",
            "DROP TABLE {{ params.SOURCE_TABLE }}_select_log;",
            "DROP TABLE {{ params.SOURCE_TABLE }}_select_err1;",
            "DROP TABLE {{ params.SOURCE_TABLE }}_select_err2;",
            "CREATE TABLE {{ params.SOURCE_TABLE }}_select_dest ( \
                first_name VARCHAR(100), \
                last_name VARCHAR(100), \
                employee_id VARCHAR(10), \
                department VARCHAR(50) \
            );",
        ],
        error_list=[3706, 3803, 3807],
    )
    # [START tdload_operator_howto_guide_transfer_data_select_stmt]
    # TdLoadOperator using select statement as source
    transfer_data_select_stmt = TdLoadOperator(
        task_id="transfer_data_select_stmt",
        select_stmt="SELECT * FROM {{ params.SOURCE_TABLE }}",
        target_table="{{ params.SOURCE_TABLE }}_select_dest",
        tdload_options="--LogTable {{ params.SOURCE_TABLE }}_select_log --ErrorTable1 {{ params.SOURCE_TABLE }}_select_err1 --ErrorTable2 {{ params.SOURCE_TABLE }}_select_err2",
        target_teradata_conn_id=CONN_ID,
    )
    # [END tdload_operator_howto_guide_transfer_data_select_stmt]
    # Create table for insert statement test
    create_insert_dest_table = DdlOperator(
        task_id="create_insert_dest_table",
        ddl=[
            "DROP TABLE {{ params.SOURCE_TABLE }}_insert_dest;",
            "DROP TABLE {{ params.SOURCE_TABLE }}_insert_log;",
            "DROP TABLE {{ params.SOURCE_TABLE }}_insert_err1;",
            "DROP TABLE {{ params.SOURCE_TABLE }}_insert_err2;",
            "CREATE TABLE {{ params.SOURCE_TABLE }}_insert_dest ( \
                first_name VARCHAR(100), \
                last_name VARCHAR(100), \
                employee_id VARCHAR(10), \
                department VARCHAR(50) \
            );",
        ],
        error_list=[3706, 3803, 3807],
    )
    # [START tdload_operator_howto_guide_transfer_data_insert_stmt]
    # Transfer using an explicit INSERT statement with positional markers.
    transfer_data_insert_stmt = TdLoadOperator(
        task_id="transfer_data_insert_stmt",
        source_table="{{ params.SOURCE_TABLE }}",
        insert_stmt="INSERT INTO {{ params.SOURCE_TABLE }}_insert_dest VALUES (?, ?, ?, ?)",
        target_table="{{ params.SOURCE_TABLE }}_insert_dest",
        tdload_options="--LogTable {{ params.SOURCE_TABLE }}_insert_log --ErrorTable1 {{ params.SOURCE_TABLE }}_insert_err1 --ErrorTable2 {{ params.SOURCE_TABLE }}_insert_err2",
        tdload_job_name="tdload_job_insert_stmt",
        target_teradata_conn_id=CONN_ID,
    )
    # [END tdload_operator_howto_guide_transfer_data_insert_stmt]
    # [START ddl_operator_howto_guide_rename_table]
    rename_target_table = DdlOperator(
        task_id="rename_target_table",
        ddl=[
            "RENAME TABLE {{ params.TARGET_TABLE }} TO {{ params.TARGET_TABLE }}_renamed;",
            "DROP TABLE {{ params.TARGET_TABLE }}_renamed",
        ],
    )
    # [END ddl_operator_howto_guide_rename_table]
    # [START ddl_operator_howto_guide_drop_index]
    drop_index_on_source = DdlOperator(
        task_id="drop_index_on_source",
        ddl=["DROP INDEX idx_employee_id ON {{ params.SOURCE_TABLE }};"],
        error_list=[3706, 3803, 3807],
    )
    # [END ddl_operator_howto_guide_drop_index]
    # [START ddl_operator_howto_guide_alter_table]
    alter_source_table = DdlOperator(
        task_id="alter_source_table",
        ddl=["ALTER TABLE {{ params.SOURCE_TABLE }} ADD hire_date DATE;"],
    )
    # [END ddl_operator_howto_guide_alter_table]
    # Define the task dependencies
    (
        drop_table
        >> create_source_table
        >> create_target_table
        >> create_index_on_source
        >> load_file
        >> export_data
        >> transfer_data
        >> create_select_dest_table
        >> transfer_data_select_stmt
        >> create_insert_dest_table
        >> transfer_data_insert_stmt
        >> rename_target_table
        >> drop_index_on_source
        >> alter_source_table
    )
    from tests_common.test_utils.watcher import watcher

    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()

from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
# [END tdload_operator_howto_guide]
| {
"repo_id": "apache/airflow",
"file_path": "providers/teradata/tests/system/teradata/example_tpt.py",
"license": "Apache License 2.0",
"lines": 230,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/teradata/tests/unit/teradata/hooks/test_tpt.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from unittest.mock import MagicMock, patch
import pytest
from airflow.providers.teradata.hooks.tpt import TptHook
class TestTptHook:
    """Unit tests for ``TptHook``.

    Covers: constructor SSH wiring, dispatch between local and SSH execution
    for ``execute_ddl``/``execute_tdload``, the local and remote tbuild/tdload
    execution paths (including cleanup on failure), ``_build_tdload_command``
    construction, and ``on_kill``.

    NOTE(review): the ``@patch`` decorator stacks map positionally onto the
    test parameters (bottom decorator -> first parameter); keep the order in
    sync when editing.
    """

    @patch("airflow.providers.teradata.hooks.tpt.SSHHook")
    @patch("airflow.models.Connection")
    def test_init_with_ssh(self, mock_conn, mock_ssh_hook):
        """Constructing with ssh_conn_id stores the id and creates an SSH hook."""
        hook = TptHook(ssh_conn_id="ssh_default")
        assert hook.ssh_conn_id == "ssh_default"
        assert hook.ssh_hook is not None

    @patch("airflow.providers.teradata.hooks.tpt.SSHHook")
    @patch("airflow.models.Connection")
    def test_init_without_ssh(self, mock_conn, mock_ssh_hook):
        """Constructing without ssh_conn_id leaves SSH-related state unset."""
        hook = TptHook()
        assert hook.ssh_conn_id is None
        assert hook.ssh_hook is None

    @patch("airflow.providers.teradata.hooks.tpt.SSHHook")
    @patch("airflow.models.Connection")
    @patch("airflow.providers.teradata.hooks.tpt.TptHook._execute_tbuild_via_ssh")
    @patch("airflow.providers.teradata.hooks.tpt.TptHook._execute_tbuild_locally")
    def test_execute_ddl_dispatch(self, mock_local, mock_ssh, mock_conn, mock_ssh_hook):
        """execute_ddl routes to the local path without SSH, remote path with SSH."""
        # Local execution
        hook = TptHook()
        mock_local.return_value = 0
        assert hook.execute_ddl("SOME DDL", "/tmp") == 0
        mock_local.assert_called_once()
        # SSH execution
        hook = TptHook(ssh_conn_id="ssh_default")
        hook.ssh_hook = MagicMock()
        mock_ssh.return_value = 0
        assert hook.execute_ddl("SOME DDL", "/tmp") == 0
        mock_ssh.assert_called_once()

    def test_execute_ddl_empty_script(self):
        """An empty DDL string is rejected before any execution is attempted."""
        hook = TptHook()
        with pytest.raises(ValueError, match="TPT script must not be empty"):
            hook.execute_ddl("", "/tmp")

    def test_execute_ddl_empty_script_content(self):
        """A whitespace-only DDL string is rejected after processing."""
        hook = TptHook()
        with pytest.raises(ValueError, match="TPT script content must not be empty after processing"):
            hook.execute_ddl(" ", "/tmp")  # Only whitespace

    @patch("airflow.providers.teradata.hooks.tpt.SSHHook")
    @patch("airflow.providers.teradata.hooks.tpt.terminate_subprocess")
    @patch("airflow.providers.teradata.hooks.tpt.secure_delete")
    @patch("airflow.providers.teradata.hooks.tpt.set_local_file_permissions")
    @patch("airflow.providers.teradata.hooks.tpt.subprocess.Popen")
    @patch("airflow.providers.teradata.hooks.tpt.shutil.which", return_value="/usr/bin/tbuild")
    def test_execute_tbuild_locally_success(
        self, mock_which, mock_popen, mock_set_permissions, mock_secure_delete, mock_terminate, mock_ssh_hook
    ):
        """Local tbuild run returning exit code 0 is reported as success."""
        hook = TptHook()
        # Fake subprocess: one line of output, then EOF, exit code 0.
        process = MagicMock()
        process.stdout.readline.side_effect = [b"All good\n", b""]
        process.wait.return_value = None
        process.returncode = 0
        mock_popen.return_value = process
        result = hook._execute_tbuild_locally("CREATE TABLE test (id INT);")
        assert result == 0
        mock_set_permissions.assert_called_once()

    @patch("airflow.providers.teradata.hooks.tpt.SSHHook")
    @patch("airflow.providers.teradata.hooks.tpt.terminate_subprocess")
    @patch("airflow.providers.teradata.hooks.tpt.secure_delete")
    @patch("airflow.providers.teradata.hooks.tpt.set_local_file_permissions")
    @patch("airflow.providers.teradata.hooks.tpt.subprocess.Popen")
    @patch("airflow.providers.teradata.hooks.tpt.shutil.which", return_value="/usr/bin/tbuild")
    def test_execute_tbuild_locally_failure(
        self, mock_which, mock_popen, mock_set_permissions, mock_secure_delete, mock_terminate, mock_ssh_hook
    ):
        """Local tbuild run returning a non-zero exit code raises RuntimeError."""
        hook = TptHook()
        # Fake subprocess: error output, exit code 1.
        process = MagicMock()
        process.stdout.readline.side_effect = [b"error: failed\n", b""]
        process.wait.return_value = None
        process.returncode = 1
        mock_popen.return_value = process
        with pytest.raises(RuntimeError):
            hook._execute_tbuild_locally("CREATE TABLE test (id INT);")
        # Permissions are set on the script file even when execution fails.
        mock_set_permissions.assert_called_once()

    @patch("airflow.providers.teradata.hooks.tpt.SSHHook")
    @patch("airflow.providers.teradata.hooks.tpt.TptHook._execute_tdload_via_ssh")
    @patch("airflow.providers.teradata.hooks.tpt.TptHook._execute_tdload_locally")
    def test_execute_tdload_dispatch(self, mock_local, mock_ssh, mock_ssh_hook):
        """execute_tdload routes to the local path without SSH, remote path with SSH."""
        # Local execution
        hook = TptHook()
        mock_local.return_value = 0
        assert hook.execute_tdload("/tmp", "jobvar") == 0
        mock_local.assert_called_once()
        # SSH execution
        hook = TptHook(ssh_conn_id="ssh_default")
        hook.ssh_hook = MagicMock()
        mock_ssh.return_value = 0
        assert hook.execute_tdload("/tmp", "jobvar") == 0
        mock_ssh.assert_called_once()

    @patch("airflow.providers.teradata.hooks.tpt.SSHHook")
    @patch("airflow.providers.teradata.hooks.tpt.execute_remote_command")
    @patch("airflow.providers.teradata.hooks.tpt.remote_secure_delete")
    @patch("airflow.providers.teradata.hooks.tpt.secure_delete")
    @patch("airflow.providers.teradata.hooks.tpt.set_remote_file_permissions")
    @patch("airflow.providers.teradata.hooks.tpt.decrypt_remote_file")
    @patch("airflow.providers.teradata.hooks.tpt.transfer_file_sftp")
    @patch("airflow.providers.teradata.hooks.tpt.generate_encrypted_file_with_openssl")
    @patch("airflow.providers.teradata.hooks.tpt.generate_random_password")
    @patch("airflow.providers.teradata.hooks.tpt.verify_tpt_utility_on_remote_host")
    @patch("airflow.providers.teradata.hooks.tpt.write_file")
    def test_execute_tbuild_via_ssh_success(
        self,
        mock_write_file,
        mock_verify_tpt,
        mock_gen_password,
        mock_encrypt_file,
        mock_transfer_file,
        mock_decrypt_file,
        mock_set_permissions,
        mock_secure_delete,
        mock_remote_secure_delete,
        mock_execute_remote_command,
        mock_ssh_hook,
    ):
        """Test successful execution of tbuild via SSH"""
        # Setup hook with SSH
        hook = TptHook(ssh_conn_id="ssh_default")
        hook.ssh_hook = MagicMock()
        # Mock SSH client
        mock_ssh_client = MagicMock()
        hook.ssh_hook.get_conn.return_value.__enter__.return_value = mock_ssh_client
        # Mock execute_remote_command
        mock_execute_remote_command.return_value = (0, "DDL executed successfully", "")
        # Mock password generation
        mock_gen_password.return_value = "test_password"
        # Execute the method
        result = hook._execute_tbuild_via_ssh("CREATE TABLE test (id INT);", "/tmp")
        # Assertions: every step of the write/encrypt/transfer/decrypt/run/
        # cleanup pipeline must have happened exactly once.
        assert result == 0
        mock_verify_tpt.assert_called_once_with(
            mock_ssh_client, "tbuild", logging.getLogger("airflow.providers.teradata.hooks.tpt")
        )
        mock_write_file.assert_called_once()
        mock_gen_password.assert_called_once()
        mock_encrypt_file.assert_called_once()
        mock_transfer_file.assert_called_once()
        mock_decrypt_file.assert_called_once()
        mock_set_permissions.assert_called_once()
        mock_execute_remote_command.assert_called_once()
        mock_remote_secure_delete.assert_called_once()
        mock_secure_delete.assert_called()

    @patch("airflow.providers.teradata.hooks.tpt.SSHHook")
    @patch("airflow.providers.teradata.hooks.tpt.execute_remote_command")
    @patch("airflow.providers.teradata.hooks.tpt.remote_secure_delete")
    @patch("airflow.providers.teradata.hooks.tpt.secure_delete")
    @patch("airflow.providers.teradata.hooks.tpt.set_remote_file_permissions")
    @patch("airflow.providers.teradata.hooks.tpt.decrypt_remote_file")
    @patch("airflow.providers.teradata.hooks.tpt.transfer_file_sftp")
    @patch("airflow.providers.teradata.hooks.tpt.generate_encrypted_file_with_openssl")
    @patch("airflow.providers.teradata.hooks.tpt.generate_random_password")
    @patch("airflow.providers.teradata.hooks.tpt.verify_tpt_utility_on_remote_host")
    @patch("airflow.providers.teradata.hooks.tpt.write_file")
    def test_execute_tbuild_via_ssh_failure(
        self,
        mock_write_file,
        mock_verify_tpt,
        mock_gen_password,
        mock_encrypt_file,
        mock_transfer_file,
        mock_decrypt_file,
        mock_set_permissions,
        mock_secure_delete,
        mock_remote_secure_delete,
        mock_execute_remote_command,
        mock_ssh_hook,
    ):
        """Test failed execution of tbuild via SSH"""
        # Setup hook with SSH
        hook = TptHook(ssh_conn_id="ssh_default")
        hook.ssh_hook = MagicMock()
        # Mock SSH client
        mock_ssh_client = MagicMock()
        hook.ssh_hook.get_conn.return_value.__enter__.return_value = mock_ssh_client
        # Mock execute_remote_command with failure
        mock_execute_remote_command.return_value = (1, "DDL failed", "Syntax error")
        # Mock password generation
        mock_gen_password.return_value = "test_password"
        # Execute the method and expect failure
        with pytest.raises(RuntimeError, match="tbuild command failed with exit code 1"):
            hook._execute_tbuild_via_ssh("CREATE TABLE test (id INT);", "/tmp")
        # Verify cleanup was called even on failure
        mock_remote_secure_delete.assert_called_once()
        mock_secure_delete.assert_called()

    @patch("airflow.providers.teradata.hooks.tpt.SSHHook")
    def test_execute_tbuild_via_ssh_no_ssh_hook(self, mock_ssh_hook):
        """Test tbuild via SSH when SSH hook is not initialized"""
        hook = TptHook(ssh_conn_id="ssh_default")
        hook.ssh_hook = None  # Simulate uninitialized SSH hook
        with pytest.raises(ConnectionError, match="SSH connection is not established"):
            hook._execute_tbuild_via_ssh("CREATE TABLE test (id INT);", "/tmp")

    @patch("airflow.providers.teradata.hooks.tpt.SSHHook")
    @patch("airflow.providers.teradata.hooks.tpt.execute_remote_command")
    @patch("airflow.providers.teradata.hooks.tpt.remote_secure_delete")
    @patch("airflow.providers.teradata.hooks.tpt.secure_delete")
    @patch("airflow.providers.teradata.hooks.tpt.set_remote_file_permissions")
    @patch("airflow.providers.teradata.hooks.tpt.decrypt_remote_file")
    @patch("airflow.providers.teradata.hooks.tpt.transfer_file_sftp")
    @patch("airflow.providers.teradata.hooks.tpt.generate_encrypted_file_with_openssl")
    @patch("airflow.providers.teradata.hooks.tpt.generate_random_password")
    @patch("airflow.providers.teradata.hooks.tpt.verify_tpt_utility_on_remote_host")
    def test_transfer_to_and_execute_tdload_on_remote_success(
        self,
        mock_verify_tpt,
        mock_gen_password,
        mock_encrypt_file,
        mock_transfer_file,
        mock_decrypt_file,
        mock_set_permissions,
        mock_secure_delete,
        mock_remote_secure_delete,
        mock_execute_remote_command,
        mock_ssh_hook,
    ):
        """Test successful transfer and execution of tdload on remote host"""
        # Setup hook with SSH
        hook = TptHook(ssh_conn_id="ssh_default")
        hook.ssh_hook = MagicMock()
        # Mock SSH client
        mock_ssh_client = MagicMock()
        hook.ssh_hook.get_conn.return_value.__enter__.return_value = mock_ssh_client
        # Mock execute_remote_command
        mock_execute_remote_command.return_value = (0, "Job executed successfully\n100 rows loaded", "")
        # Mock password generation
        mock_gen_password.return_value = "test_password"
        # Execute the method
        result = hook._transfer_to_and_execute_tdload_on_remote(
            "/tmp/job_var_file.txt", "/remote/tmp", "-v -u", "test_job"
        )
        # Assertions
        assert result == 0
        mock_verify_tpt.assert_called_once_with(
            mock_ssh_client, "tdload", logging.getLogger("airflow.providers.teradata.hooks.tpt")
        )
        mock_gen_password.assert_called_once()
        mock_encrypt_file.assert_called_once()
        mock_transfer_file.assert_called_once()
        mock_decrypt_file.assert_called_once()
        mock_set_permissions.assert_called_once()
        mock_execute_remote_command.assert_called_once()
        mock_remote_secure_delete.assert_called_once()
        mock_secure_delete.assert_called()
        # Verify the command was constructed correctly
        # (second positional argument of execute_remote_command).
        call_args = mock_execute_remote_command.call_args[0][1]
        assert "tdload" in call_args
        assert "-j" in call_args
        assert "-v" in call_args
        assert "-u" in call_args
        assert "test_job" in call_args

    @patch("airflow.providers.teradata.hooks.tpt.SSHHook")
    @patch("airflow.providers.teradata.hooks.tpt.execute_remote_command")
    @patch("airflow.providers.teradata.hooks.tpt.remote_secure_delete")
    @patch("airflow.providers.teradata.hooks.tpt.secure_delete")
    @patch("airflow.providers.teradata.hooks.tpt.set_remote_file_permissions")
    @patch("airflow.providers.teradata.hooks.tpt.decrypt_remote_file")
    @patch("airflow.providers.teradata.hooks.tpt.transfer_file_sftp")
    @patch("airflow.providers.teradata.hooks.tpt.generate_encrypted_file_with_openssl")
    @patch("airflow.providers.teradata.hooks.tpt.generate_random_password")
    @patch("airflow.providers.teradata.hooks.tpt.verify_tpt_utility_on_remote_host")
    def test_transfer_to_and_execute_tdload_on_remote_failure(
        self,
        mock_verify_tpt,
        mock_gen_password,
        mock_encrypt_file,
        mock_transfer_file,
        mock_decrypt_file,
        mock_set_permissions,
        mock_secure_delete,
        mock_remote_secure_delete,
        mock_execute_remote_command,
        mock_ssh_hook,
    ):
        """Test failed transfer and execution of tdload on remote host"""
        # Setup hook with SSH
        hook = TptHook(ssh_conn_id="ssh_default")
        hook.ssh_hook = MagicMock()
        # Mock SSH client
        mock_ssh_client = MagicMock()
        hook.ssh_hook.get_conn.return_value.__enter__.return_value = mock_ssh_client
        # Mock execute_remote_command with failure
        mock_execute_remote_command.return_value = (1, "Job failed", "Connection error")
        # Mock password generation
        mock_gen_password.return_value = "test_password"
        # Execute the method and expect failure
        with pytest.raises(RuntimeError, match="tdload command failed with exit code 1"):
            hook._transfer_to_and_execute_tdload_on_remote(
                "/tmp/job_var_file.txt", "/remote/tmp", "-v", "test_job"
            )
        # Verify cleanup was called even on failure
        mock_remote_secure_delete.assert_called_once()
        mock_secure_delete.assert_called()

    @patch("airflow.providers.teradata.hooks.tpt.SSHHook")
    @patch("airflow.providers.teradata.hooks.tpt.execute_remote_command")
    @patch("airflow.providers.teradata.hooks.tpt.remote_secure_delete")
    @patch("airflow.providers.teradata.hooks.tpt.secure_delete")
    @patch("airflow.providers.teradata.hooks.tpt.set_remote_file_permissions")
    @patch("airflow.providers.teradata.hooks.tpt.decrypt_remote_file")
    @patch("airflow.providers.teradata.hooks.tpt.transfer_file_sftp")
    @patch("airflow.providers.teradata.hooks.tpt.generate_encrypted_file_with_openssl")
    @patch("airflow.providers.teradata.hooks.tpt.generate_random_password")
    @patch("airflow.providers.teradata.hooks.tpt.verify_tpt_utility_on_remote_host")
    def test_transfer_to_and_execute_tdload_on_remote_no_options(
        self,
        mock_verify_tpt,
        mock_gen_password,
        mock_encrypt_file,
        mock_transfer_file,
        mock_decrypt_file,
        mock_set_permissions,
        mock_secure_delete,
        mock_remote_secure_delete,
        mock_execute_remote_command,
        mock_ssh_hook,
    ):
        """Test transfer and execution of tdload on remote host with no options"""
        # Setup hook with SSH
        hook = TptHook(ssh_conn_id="ssh_default")
        hook.ssh_hook = MagicMock()
        # Mock SSH client
        mock_ssh_client = MagicMock()
        hook.ssh_hook.get_conn.return_value.__enter__.return_value = mock_ssh_client
        # Mock execute_remote_command
        mock_execute_remote_command.return_value = (0, "Job executed successfully", "")
        # Mock password generation
        mock_gen_password.return_value = "test_password"
        # Execute the method with valid remote directory but no options
        result = hook._transfer_to_and_execute_tdload_on_remote(
            "/tmp/job_var_file.txt", "/remote/tmp", None, None
        )
        # Assertions
        assert result == 0
        # Verify the command was constructed correctly without options
        call_args = mock_execute_remote_command.call_args[0][1]
        assert "tdload" in call_args
        assert "-j" in call_args
        # Should not contain extra options
        assert "-v" not in call_args
        assert "-u" not in call_args

    @patch("airflow.providers.teradata.hooks.tpt.SSHHook")
    def test_transfer_to_and_execute_tdload_on_remote_no_ssh_hook(self, mock_ssh_hook):
        """Test transfer and execution when SSH hook is not initialized"""
        hook = TptHook(ssh_conn_id="ssh_default")
        hook.ssh_hook = None  # Simulate uninitialized SSH hook
        with pytest.raises(ConnectionError, match="SSH connection is not established"):
            hook._transfer_to_and_execute_tdload_on_remote(
                "/tmp/job_var_file.txt", "/remote/tmp", "-v", "test_job"
            )

    @patch("airflow.providers.teradata.hooks.tpt.SSHHook")
    @patch("airflow.providers.teradata.hooks.tpt.remote_secure_delete")
    @patch("airflow.providers.teradata.hooks.tpt.secure_delete")
    @patch("airflow.providers.teradata.hooks.tpt.set_remote_file_permissions")
    @patch("airflow.providers.teradata.hooks.tpt.decrypt_remote_file")
    @patch("airflow.providers.teradata.hooks.tpt.transfer_file_sftp")
    @patch("airflow.providers.teradata.hooks.tpt.generate_encrypted_file_with_openssl")
    @patch("airflow.providers.teradata.hooks.tpt.generate_random_password")
    @patch(
        "airflow.providers.teradata.hooks.tpt.verify_tpt_utility_on_remote_host",
        side_effect=Exception("TPT utility not found"),
    )
    def test_transfer_to_and_execute_tdload_on_remote_utility_check_fail(
        self,
        mock_verify_tpt,
        mock_gen_password,
        mock_encrypt_file,
        mock_transfer_file,
        mock_decrypt_file,
        mock_set_permissions,
        mock_secure_delete,
        mock_remote_secure_delete,
        mock_ssh_hook,
    ):
        """Test transfer and execution when TPT utility verification fails"""
        # Setup hook with SSH
        hook = TptHook(ssh_conn_id="ssh_default")
        hook.ssh_hook = MagicMock()
        # Mock SSH client
        mock_ssh_client = MagicMock()
        hook.ssh_hook.get_conn.return_value.__enter__.return_value = mock_ssh_client
        # Execute the method and expect failure
        with pytest.raises(
            RuntimeError,
            match="Unexpected error while executing tdload script on remote machine",
        ):
            hook._transfer_to_and_execute_tdload_on_remote(
                "/tmp/job_var_file.txt", "/remote/tmp", "-v", "test_job"
            )
        # Verify cleanup was called even on utility check failure
        mock_secure_delete.assert_called()

    def test_build_tdload_command_basic(self):
        """Test building tdload command with basic parameters"""
        hook = TptHook()
        cmd = hook._build_tdload_command("/tmp/job.txt", None, "test_job")
        assert cmd == ["tdload", "-j", "/tmp/job.txt", "test_job"]

    def test_build_tdload_command_with_options(self):
        """Test building tdload command with options"""
        hook = TptHook()
        cmd = hook._build_tdload_command("/tmp/job.txt", "-v -u", "test_job")
        assert cmd == ["tdload", "-j", "/tmp/job.txt", "-v", "-u", "test_job"]

    def test_build_tdload_command_with_quoted_options(self):
        """Test building tdload command with quoted options"""
        hook = TptHook()
        cmd = hook._build_tdload_command("/tmp/job.txt", "-v --option 'value with spaces'", "test_job")
        assert cmd == ["tdload", "-j", "/tmp/job.txt", "-v", "--option", "value with spaces", "test_job"]

    def test_build_tdload_command_no_job_name(self):
        """Test building tdload command without job name"""
        hook = TptHook()
        cmd = hook._build_tdload_command("/tmp/job.txt", "-v", None)
        assert cmd == ["tdload", "-j", "/tmp/job.txt", "-v"]

    def test_build_tdload_command_empty_job_name(self):
        """Test building tdload command with empty job name"""
        hook = TptHook()
        cmd = hook._build_tdload_command("/tmp/job.txt", "-v", "")
        assert cmd == ["tdload", "-j", "/tmp/job.txt", "-v"]

    @patch("shlex.split", side_effect=ValueError("Invalid quote"))
    def test_build_tdload_command_invalid_options(self, mock_shlex_split):
        """Test building tdload command with invalid quoted options"""
        hook = TptHook()
        cmd = hook._build_tdload_command("/tmp/job.txt", "-v --option 'unclosed quote", "test_job")
        # Should fallback to simple split
        assert cmd == ["tdload", "-j", "/tmp/job.txt", "-v", "--option", "'unclosed", "quote", "test_job"]

    def test_on_kill(self):
        """Test on_kill method"""
        hook = TptHook()
        # Should not raise any exception
        hook.on_kill()
| {
"repo_id": "apache/airflow",
"file_path": "providers/teradata/tests/unit/teradata/hooks/test_tpt.py",
"license": "Apache License 2.0",
"lines": 444,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/teradata/tests/unit/teradata/operators/test_tpt.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Tests for Teradata Parallel Transporter (TPT) operators.
These tests validate the functionality of:
- DdlOperator: For DDL operations on Teradata databases
- TdLoadOperator: For data transfers between files and tables
"""
from __future__ import annotations
from unittest.mock import MagicMock, Mock, patch
import pytest
from airflow.providers.teradata.hooks.tpt import TptHook
from airflow.providers.teradata.operators.tpt import DdlOperator, TdLoadOperator
@pytest.fixture(autouse=True)
def patch_secure_delete():
    """Patch secure_delete for all tests to avoid ValueError from subprocess.run"""
    # Patch the reference used in hooks.tpt, not just tpt_util
    patcher = patch("airflow.providers.teradata.hooks.tpt.secure_delete", return_value=None)
    patcher.start()
    try:
        yield
    finally:
        patcher.stop()
@pytest.fixture(autouse=True)
def patch_binary_checks():
    """Stub out binary discovery so "binary not found" checks always pass."""
    # Always report a path for any looked-up executable.
    with patch(
        "airflow.providers.teradata.hooks.tpt.shutil.which",
        return_value="/usr/bin/mock_binary",
    ):
        yield
@pytest.fixture(autouse=True)
def patch_subprocess():
    """Replace subprocess.Popen so no real TPT binary is ever executed."""
    # Build the fake process up front, then hand it to Popen as its
    # return value for the duration of each test.
    fake_process = MagicMock()
    fake_process.returncode = 0
    fake_process.stdout.readline.side_effect = [b"Mock output\n", b""]
    fake_process.communicate.return_value = (b"Mock stdout", b"")
    with patch(
        "airflow.providers.teradata.hooks.tpt.subprocess.Popen",
        return_value=fake_process,
    ):
        yield
class TestDdlOperator:
    """
    Tests for DdlOperator.
    This test suite validates the DdlOperator functionality for:
    - Executing DDL statements on Teradata databases
    - Parameter validation
    - Error handling and error code management
    - Template rendering
    - Resource cleanup
    """

    def setup_method(self, method):
        # No MagicMock connections needed; use only string conn_ids in tests
        pass

    # ----- DDL Execution Tests -----
    # NOTE: @patch decorators apply bottom-up, so the lowest decorator maps to
    # the first mock parameter after ``self``.
    @patch("airflow.providers.teradata.operators.tpt.TptHook")
    def test_ddl_execution(self, mock_tpt_hook):
        """A list of valid DDL statements executes and returns the hook's exit code (0)."""
        mock_hook_instance = mock_tpt_hook.return_value
        mock_hook_instance.get_conn.return_value = {
            "host": "mock_host",
            "login": "mock_user",
            "password": "mock_pass",
        }
        mock_hook_instance.execute_ddl.return_value = 0
        operator = DdlOperator(
            task_id="test_ddl",
            ddl=["CREATE TABLE test_db.test_table (id INT)", "CREATE INDEX idx ON test_db.test_table (id)"],
            teradata_conn_id="teradata_default",
            ddl_job_name="test_ddl_job",
        )
        result = operator.execute({})
        assert result == 0

    @patch("airflow.providers.teradata.operators.tpt.TptHook")
    def test_ddl_execution_with_multiple_statements(self, mock_tpt_hook):
        """Mixed DDL (tables, index, multi-line table, procedure) executes as one job."""
        mock_hook_instance = mock_tpt_hook.return_value
        mock_hook_instance.get_conn.return_value = {
            "host": "mock_host",
            "login": "mock_user",
            "password": "mock_pass",
        }
        mock_hook_instance.execute_ddl.return_value = 0
        ddl_statements = [
            "CREATE TABLE test_db.customers (customer_id INTEGER, name VARCHAR(100), email VARCHAR(255))",
            "CREATE INDEX idx_customer_name ON test_db.customers (name)",
            """CREATE TABLE test_db.orders (
                order_id INTEGER,
                customer_id INTEGER,
                order_date DATE,
                FOREIGN KEY (customer_id) REFERENCES test_db.customers(customer_id)
            )""",
            "CREATE PROCEDURE test_db.get_customer(IN p_id INTEGER) BEGIN SELECT * FROM test_db.customers WHERE customer_id = p_id; END;",
        ]
        operator = DdlOperator(
            task_id="test_multiple_ddl",
            ddl=ddl_statements,
            teradata_conn_id="teradata_default",
        )
        result = operator.execute({})
        assert result == 0

    # ----- Parameter Validation Tests -----
    def test_ddl_parameter_validation(self):
        """Every malformed ``ddl`` value must raise ValueError before any execution."""
        # Test empty DDL list
        with pytest.raises(ValueError, match="ddl parameter must be a non-empty list"):
            DdlOperator(
                task_id="test_empty_ddl",
                ddl=[],
                teradata_conn_id="teradata_default",
            ).execute({})
        # Test non-list DDL parameter
        with pytest.raises(ValueError, match="ddl parameter must be a non-empty list"):
            DdlOperator(
                task_id="test_non_list_ddl",
                ddl="CREATE TABLE test_table (id INT)",  # string instead of list
                teradata_conn_id="teradata_default",
            ).execute({})
        # Test DDL with empty string
        with pytest.raises(ValueError, match="ddl parameter must be a non-empty list"):
            DdlOperator(
                task_id="test_empty_string_ddl",
                ddl=["CREATE TABLE test_table (id INT)", ""],
                teradata_conn_id="teradata_default",
            ).execute({})
        # Test DDL with None value
        with pytest.raises(ValueError, match="ddl parameter must be a non-empty list"):
            DdlOperator(
                task_id="test_none_ddl",
                ddl=None,
                teradata_conn_id="teradata_default",
            ).execute({})
        # Test DDL with list containing non-string values
        with pytest.raises(ValueError, match="ddl parameter must be a non-empty list"):
            DdlOperator(
                task_id="test_non_string_ddl",
                ddl=["CREATE TABLE test_table (id INT)", 123],
                teradata_conn_id="teradata_default",
            ).execute({})

    @patch("airflow.providers.teradata.operators.tpt.TptHook")
    def test_error_list_validation(self, mock_tpt_hook):
        """error_list accepts an int or a list of ints and rejects anything else."""
        mock_hook_instance = mock_tpt_hook.return_value
        mock_hook_instance.get_conn.return_value = {
            "host": "mock_host",
            "login": "mock_user",
            "password": "mock_pass",
        }
        mock_hook_instance.execute_ddl.return_value = 0
        # Test with integer error code
        operator = DdlOperator(
            task_id="test_int_error_list",
            ddl=["CREATE TABLE test_table (id INT)"],
            error_list=3803,  # single integer
            teradata_conn_id="teradata_default",
        )
        result = operator.execute({})
        assert result == 0
        assert operator.error_list == 3803  # Original value should remain unchanged
        # Test with list of integers
        operator = DdlOperator(
            task_id="test_list_error_list",
            ddl=["CREATE TABLE test_table (id INT)"],
            error_list=[3803, 3807, 5495],  # list of integers
            teradata_conn_id="teradata_default",
        )
        result = operator.execute({})
        assert result == 0
        assert operator.error_list == [3803, 3807, 5495]
        # Test with invalid error_list type (string)
        with pytest.raises(ValueError, match="error_list must be an int or a list of ints"):
            DdlOperator(
                task_id="test_invalid_error_list_string",
                ddl=["CREATE TABLE test_table (id INT)"],
                error_list="3803",  # string instead of int or list
                teradata_conn_id="teradata_default",
            ).execute({})
        # Test with invalid error_list type (dict)
        with pytest.raises(ValueError, match="error_list must be an int or a list of ints"):
            DdlOperator(
                task_id="test_invalid_error_list_dict",
                ddl=["CREATE TABLE test_table (id INT)"],
                error_list={"code": 3803},  # dict instead of int or list
                teradata_conn_id="teradata_default",
            ).execute({})

    # ----- Error Handling Tests -----
    @patch(
        "airflow.providers.teradata.hooks.tpt.TptHook.get_conn",
        side_effect=RuntimeError("Connection not found"),
    )
    def test_ddl_execution_with_error_handling(self, mock_get_conn):
        """A get_conn failure propagates as RuntimeError even with an error_list set."""
        # Configure operator with error list
        operator = DdlOperator(
            task_id="test_ddl_with_errors",
            ddl=[
                "DROP TABLE test_db.nonexistent_table",  # This might generate error 3807 (object not found)
                "CREATE TABLE test_db.new_table (id INT)",
            ],
            error_list=[3807],  # Ignore "object does not exist" errors
            teradata_conn_id="teradata_default",
        )
        # Execute and verify RuntimeError is raised
        with pytest.raises(RuntimeError, match="Connection not found"):
            operator.execute({})

    @patch(
        "airflow.providers.teradata.hooks.tpt.TptHook.get_conn",
        side_effect=RuntimeError("Connection not found"),
    )
    def test_ddl_execution_error(self, mock_get_conn):
        """A plain execution with a broken connection surfaces the RuntimeError."""
        # This test verifies the normal case since we can't easily simulate real DDL errors
        # In a real environment, DDL errors would be handled by the TPT hooks
        # Configure operator
        operator = DdlOperator(
            task_id="test_ddl_execution_error",
            ddl=["CREATE TABLE test_db.test_table (id INT)"],
            teradata_conn_id="teradata_default",
        )
        # Execute and verify RuntimeError is raised
        with pytest.raises(RuntimeError, match="Connection not found"):
            operator.execute({})

    # ----- Resource Cleanup Tests -----
    @patch("airflow.providers.teradata.hooks.tpt.TptHook")
    def test_ddl_on_kill(self, mock_tpt_hook):
        """on_kill delegates cleanup to the hook's own on_kill exactly once."""
        # Set up mocks
        mock_hook_instance = mock_tpt_hook.return_value
        # Configure operator
        operator = DdlOperator(
            task_id="test_ddl_on_kill",
            ddl=["CREATE TABLE test_table (id INT)"],
            teradata_conn_id="teradata_default",
        )
        # Set hook manually
        operator._hook = mock_hook_instance
        # Call on_kill
        operator.on_kill()
        # Verify hook was cleaned up
        mock_hook_instance.on_kill.assert_called_once()

    @patch("airflow.providers.teradata.hooks.tpt.TptHook")
    def test_ddl_on_kill_no_hook(self, mock_tpt_hook):
        """on_kill is a safe no-op when no hook was ever created."""
        # Configure operator
        operator = DdlOperator(
            task_id="test_ddl_on_kill_no_hook",
            ddl=["CREATE TABLE test_table (id INT)"],
            teradata_conn_id="teradata_default",
        )
        # Set hook to None
        operator._hook = None
        # Call on_kill (should not raise any exceptions)
        operator.on_kill()

    # ----- Templating Tests -----
    @patch("airflow.providers.ssh.hooks.ssh.SSHHook")
    @patch("airflow.providers.teradata.operators.tpt.TptHook")
    def test_template_ext(self, mock_tpt_hook, mock_ssh_hook):
        """DdlOperator advertises .sql templating and still executes inline statements."""
        mock_hook_instance = mock_tpt_hook.return_value
        mock_hook_instance.get_conn.return_value = {
            "host": "mock_host",
            "login": "mock_user",
            "password": "mock_pass",
        }
        mock_hook_instance.execute_ddl.return_value = 0
        # Verify template_ext contains .sql
        assert ".sql" in DdlOperator.template_ext
        operator = DdlOperator(
            task_id="test_sql_file",
            ddl=["SELECT * FROM test_table;"],
            teradata_conn_id="teradata_default",
        )
        result = operator.execute({})
        assert result == 0

    # ----- SSL Connection Tests -----
    @patch(
        "airflow.providers.teradata.hooks.ttu.TtuHook.get_conn",
        return_value={"host": "mock_host", "login": "mock_user", "password": "mock_pass"},
    )
    @patch("airflow.models.Connection")
    def test_ddl_with_ssl_connection(self, mock_conn, mock_get_conn):
        """Test DDL operations with SSL-enabled Teradata connection"""
        operator = DdlOperator(
            task_id="test_ddl_with_ssl_connection",
            ddl=["CREATE TABLE test_table (id INT)"],
            teradata_conn_id="teradata_ssl",
        )
        result = operator.execute({})
        assert result == 0
        mock_get_conn.assert_called()
class TestTdLoadOperator:
    """
    Tests for TdLoadOperator.
    This test suite validates the TdLoadOperator functionality across different modes:
    - file_to_table: Loading data from a file to a Teradata table
    - table_to_file: Exporting data from a Teradata table to a file
    - select_stmt_to_file: Exporting data from a SQL SELECT statement to a file
    - table_to_table: Transferring data between two Teradata tables
    It also tests parameter validation, error handling, templating, and resource cleanup.
    """

    def setup_method(self, method):
        # No MagicMock connections needed; use only string conn_ids in tests
        pass

    # ----- Tests for Basic Operation Modes -----
    # NOTE: stacked @patch decorators apply bottom-up; the lowest decorator maps
    # to the first mock parameter after ``self``.
    @patch(
        "airflow.providers.teradata.hooks.ttu.TtuHook.get_conn",
        return_value={"host": "mock_host", "login": "mock_user", "password": "mock_pass"},
    )
    @patch(
        "airflow.providers.teradata.operators.tpt.prepare_tdload_job_var_file",
        return_value="dummy job var content",
    )
    @patch("airflow.providers.teradata.hooks.tpt.TptHook.execute_tdload", return_value=0)
    def test_file_to_table_mode(self, mock_execute_tdload, mock_prepare_job_var, mock_get_conn):
        """Test loading data from a file to a Teradata table (with connection and job var patching)"""
        # Create operator
        operator = TdLoadOperator(
            task_id="test_file_to_table",
            source_file_name="/path/to/data.csv",
            target_table="target_db.target_table",
            teradata_conn_id="teradata_default",
            target_teradata_conn_id="teradata_target",
        )
        # Execute operator
        result = operator.execute({})
        # Assertions
        assert result == 0
        mock_execute_tdload.assert_called_once()
        mock_prepare_job_var.assert_called_once()
        mock_get_conn.assert_called()
        # Verify that the operator initialized correctly
        assert operator._src_hook is not None
        assert operator._dest_hook is not None

    @patch(
        "airflow.providers.teradata.hooks.ttu.TtuHook.get_conn",
        return_value={"host": "mock_host", "login": "mock_user", "password": "mock_pass"},
    )
    @patch(
        "airflow.providers.teradata.operators.tpt.prepare_tdload_job_var_file",
        return_value="dummy job var content",
    )
    @patch("airflow.providers.teradata.hooks.tpt.TptHook.execute_tdload", return_value=0)
    def test_file_to_table_with_default_target_conn(
        self, mock_execute_tdload, mock_prepare_job_var, mock_get_conn
    ):
        """Test file to table loading with default target connection"""
        operator = TdLoadOperator(
            task_id="test_file_to_table_default_target",
            source_file_name="/path/to/data.csv",
            target_table="target_db.target_table",
            teradata_conn_id="teradata_default",
            # No target_teradata_conn_id - should default to teradata_conn_id
        )
        # Execute the operator
        result = operator.execute({})
        # Verify the results
        assert result == 0
        # Verify that target_teradata_conn_id was set to teradata_conn_id
        assert operator.target_teradata_conn_id == "teradata_default"
        # Verify that hooks were initialized
        assert operator._src_hook is not None
        assert operator._dest_hook is not None
        mock_execute_tdload.assert_called_once()
        mock_prepare_job_var.assert_called_once()
        mock_get_conn.assert_called()

    @patch(
        "airflow.providers.teradata.hooks.ttu.TtuHook.get_conn",
        return_value={"host": "mock_host", "login": "mock_user", "password": "mock_pass"},
    )
    @patch(
        "airflow.providers.teradata.operators.tpt.prepare_tdload_job_var_file",
        return_value="dummy job var content",
    )
    @patch("airflow.providers.teradata.hooks.tpt.TptHook.execute_tdload", return_value=0)
    def test_table_to_file_mode(self, mock_execute_tdload, mock_prepare_job_var, mock_get_conn):
        """Test exporting data from a Teradata table to a file"""
        # Configure the operator
        operator = TdLoadOperator(
            task_id="test_table_to_file",
            source_table="source_db.source_table",
            target_file_name="/path/to/export.csv",
            teradata_conn_id="teradata_default",
        )
        # Execute the operator
        result = operator.execute({})
        # Verify the results
        assert result == 0
        # Verify that hooks were initialized correctly
        assert operator._src_hook is not None
        assert operator._dest_hook is None  # No destination hook for table_to_file
        mock_execute_tdload.assert_called_once()
        mock_prepare_job_var.assert_called_once()
        mock_get_conn.assert_called()

    @patch(
        "airflow.providers.teradata.hooks.ttu.TtuHook.get_conn",
        return_value={"host": "mock_host", "login": "mock_user", "password": "mock_pass"},
    )
    @patch(
        "airflow.providers.teradata.operators.tpt.prepare_tdload_job_var_file",
        return_value="dummy job var content",
    )
    @patch("airflow.providers.teradata.hooks.tpt.TptHook.execute_tdload", return_value=0)
    def test_select_stmt_to_file_mode(self, mock_execute_tdload, mock_prepare_job_var, mock_get_conn):
        """Test exporting data from a SELECT statement to a file"""
        # Configure the operator
        operator = TdLoadOperator(
            task_id="test_select_to_file",
            select_stmt="SELECT * FROM source_db.source_table WHERE id > 1000",
            target_file_name="/path/to/export.csv",
            teradata_conn_id="teradata_default",
        )
        # Execute the operator
        result = operator.execute({})
        # Verify the results
        assert result == 0
        # Verify that hooks were initialized correctly
        assert operator._src_hook is not None
        assert operator._dest_hook is None  # No destination hook for select_to_file
        mock_execute_tdload.assert_called_once()
        mock_prepare_job_var.assert_called_once()
        mock_get_conn.assert_called()

    @patch(
        "airflow.providers.teradata.hooks.ttu.TtuHook.get_conn",
        return_value={"host": "mock_host", "login": "mock_user", "password": "mock_pass"},
    )
    @patch(
        "airflow.providers.teradata.operators.tpt.prepare_tdload_job_var_file",
        return_value="dummy job var content",
    )
    @patch("airflow.providers.teradata.hooks.tpt.TptHook.execute_tdload", return_value=0)
    def test_table_to_table_mode(self, mock_execute_tdload, mock_prepare_job_var, mock_get_conn):
        """Test transferring data between two Teradata tables"""
        # Configure the operator
        operator = TdLoadOperator(
            task_id="test_table_to_table",
            source_table="source_db.source_table",
            target_table="target_db.target_table",
            teradata_conn_id="teradata_default",
            target_teradata_conn_id="teradata_target",
        )
        # Execute the operator
        result = operator.execute({})
        # Verify the results
        assert result == 0
        # Verify that both hooks were initialized
        assert operator._src_hook is not None
        assert operator._dest_hook is not None

    # ----- Tests for Advanced Operation Modes -----
    @patch(
        "airflow.providers.teradata.hooks.ttu.TtuHook.get_conn",
        return_value={"host": "mock_host", "login": "mock_user", "password": "mock_pass"},
    )
    @patch(
        "airflow.providers.teradata.operators.tpt.prepare_tdload_job_var_file",
        return_value="dummy job var content",
    )
    @patch("airflow.providers.teradata.hooks.tpt.TptHook.execute_tdload", return_value=0)
    def test_file_to_table_with_insert_stmt(self, mock_execute_tdload, mock_prepare_job_var, mock_get_conn):
        """Test loading from file to table with custom INSERT statement"""
        # Configure the operator with custom INSERT statement
        operator = TdLoadOperator(
            task_id="test_file_to_table_with_insert",
            source_file_name="/path/to/data.csv",
            target_table="target_db.target_table",
            insert_stmt="INSERT INTO target_db.target_table (col1, col2) VALUES (?, ?)",
            teradata_conn_id="teradata_default",
            target_teradata_conn_id="teradata_target",
        )
        # Execute the operator
        result = operator.execute({})
        # Verify the results
        assert result == 0
        # Verify that both hooks were initialized for file_to_table mode
        assert operator._src_hook is not None
        assert operator._dest_hook is not None

    @patch(
        "airflow.providers.teradata.hooks.ttu.TtuHook.get_conn",
        return_value={"host": "mock_host", "login": "mock_user", "password": "mock_pass"},
    )
    @patch(
        "airflow.providers.teradata.operators.tpt.prepare_tdload_job_var_file",
        return_value="dummy job var content",
    )
    @patch("airflow.providers.teradata.hooks.tpt.TptHook.execute_tdload", return_value=0)
    def test_table_to_table_with_select_and_insert(
        self, mock_execute_tdload, mock_prepare_job_var, mock_get_conn
    ):
        """Test transferring data between tables with custom SELECT and INSERT statements"""
        # Configure the operator with custom SELECT and INSERT statements
        operator = TdLoadOperator(
            task_id="test_table_to_table_with_select_insert",
            select_stmt="SELECT col1, col2 FROM source_db.source_table WHERE col3 > 1000",
            target_table="target_db.target_table",
            insert_stmt="INSERT INTO target_db.target_table (col1, col2) VALUES (?, ?)",
            teradata_conn_id="teradata_default",
            target_teradata_conn_id="teradata_target",
        )
        # Execute the operator
        result = operator.execute({})
        # Verify the results
        assert result == 0
        # Verify that both hooks were initialized for table_to_table mode
        assert operator._src_hook is not None
        assert operator._dest_hook is not None

    # ----- Parameter Validation Tests -----
    def test_invalid_parameter_combinations(self):
        """Test validation of invalid parameter combinations"""
        # Test 1: Missing both source and target parameters
        with pytest.raises(ValueError, match="Invalid parameter combination"):
            TdLoadOperator(
                task_id="test_invalid_params",
                teradata_conn_id="teradata_default",
            ).execute({})
        # Test 2: Missing target_teradata_conn_id for table_to_table mode
        with pytest.raises(ValueError, match="target_teradata_conn_id must be provided"):
            TdLoadOperator(
                task_id="test_missing_target_conn",
                source_table="source_db.source_table",
                target_table="target_db.target_table",
                teradata_conn_id="teradata_default",
                # Missing target_teradata_conn_id for table_to_table
            ).execute({})
        # Test 3: Both source_table and select_stmt provided (contradictory sources)
        with pytest.raises(
            ValueError, match="Both source_table and select_stmt cannot be provided simultaneously"
        ):
            TdLoadOperator(
                task_id="test_both_source_and_select",
                source_table="source_db.table",
                select_stmt="SELECT * FROM other_db.table",
                target_file_name="/path/to/export.csv",
                teradata_conn_id="teradata_default",
            ).execute({})
        # Test 4: insert_stmt without target_table
        with pytest.raises(ValueError, match="insert_stmt is provided but target_table is not specified"):
            TdLoadOperator(
                task_id="test_insert_stmt_no_target",
                source_file_name="/path/to/source.csv",
                insert_stmt="INSERT INTO mytable VALUES (?, ?)",
                teradata_conn_id="teradata_default",
            ).execute({})
        # Test 5: Only target_file_name provided (no source)
        with pytest.raises(ValueError, match="Invalid parameter combination"):
            TdLoadOperator(
                task_id="test_no_source_with_target_file",
                target_file_name="/path/to/file.csv",
                teradata_conn_id="teradata_default",
            ).execute({})
        # Test 6: Only source_file_name provided (no target)
        with pytest.raises(ValueError, match="Invalid parameter combination"):
            TdLoadOperator(
                task_id="test_source_file_no_target_table",
                source_file_name="/path/to/source.csv",
                teradata_conn_id="teradata_default",
            ).execute({})

    # ----- Error Handling Tests -----
    @patch(
        "airflow.providers.teradata.hooks.ttu.TtuHook.get_conn",
        side_effect=RuntimeError("Connection not found"),
    )
    def test_error_handling_execute_tdload(self, mock_get_conn):
        """Test error handling with invalid connection ID"""
        operator = TdLoadOperator(
            task_id="test_error_handling",
            source_file_name="/path/to/data.csv",
            target_table="target_db.target_table",
            teradata_conn_id="nonexistent_connection",
            target_teradata_conn_id="teradata_target",
        )
        with pytest.raises((RuntimeError, ValueError, KeyError)):
            operator.execute({})

    @patch(
        "airflow.providers.teradata.hooks.ttu.TtuHook.get_conn",
        side_effect=RuntimeError("Connection not found"),
    )
    def test_error_handling_get_conn(self, mock_get_conn):
        """Test error handling with invalid target connection ID"""
        operator = TdLoadOperator(
            task_id="test_error_handling_conn",
            source_file_name="/path/to/data.csv",
            target_table="target_db.target_table",
            teradata_conn_id="teradata_default",
            target_teradata_conn_id="nonexistent_target_connection",
        )
        with pytest.raises((RuntimeError, ValueError, KeyError)):
            operator.execute({})

    # ----- Resource Cleanup Tests -----
    @patch("airflow.providers.teradata.hooks.tpt.TptHook")
    @patch("airflow.providers.ssh.hooks.ssh.SSHHook")
    def test_on_kill(self, mock_ssh_hook, mock_tpt_hook):
        """Test on_kill method cleans up resources properly"""
        # Set up operator
        operator = TdLoadOperator(
            task_id="test_on_kill",
            source_table="source_db.source_table",
            target_table="target_db.target_table",
            teradata_conn_id="teradata_default",
            target_teradata_conn_id="teradata_target",
        )
        # Set up hooks manually
        operator._src_hook = MagicMock()
        operator._dest_hook = MagicMock()
        # Call on_kill
        operator.on_kill()
        # Verify hooks were cleaned up
        operator._src_hook.on_kill.assert_called_once()
        operator._dest_hook.on_kill.assert_called_once()

    @patch("airflow.providers.teradata.hooks.tpt.TptHook")
    def test_on_kill_no_hooks(self, mock_tpt_hook):
        """Test on_kill method when no hooks are initialized"""
        # Set up operator
        operator = TdLoadOperator(
            task_id="test_on_kill_no_hooks",
            source_table="source_db.source_table",
            target_table="target_db.target_table",
            teradata_conn_id="teradata_default",
            target_teradata_conn_id="teradata_target",
        )
        # Set hooks to None
        operator._src_hook = None
        operator._dest_hook = None
        # Call on_kill (should not raise any exceptions)
        operator.on_kill()

    @patch("airflow.providers.teradata.hooks.tpt.TptHook")
    @patch("airflow.providers.ssh.hooks.ssh.SSHHook")
    def test_on_kill_with_only_src_hook(self, mock_ssh_hook, mock_tpt_hook):
        """Test on_kill with only source hook initialized"""
        # Set up operator
        operator = TdLoadOperator(
            task_id="test_on_kill_src_only",
            source_table="source_db.source_table",
            target_file_name="/path/to/export.csv",  # table_to_file mode
            teradata_conn_id="teradata_default",
        )
        # Set up only source hook
        operator._src_hook = MagicMock()
        operator._dest_hook = None
        # Call on_kill
        operator.on_kill()
        # Verify source hook was cleaned up
        operator._src_hook.on_kill.assert_called_once()

    # ----- Job Variable File Tests -----
    @patch("airflow.providers.teradata.operators.tpt.is_valid_file", return_value=True)
    @patch("airflow.providers.teradata.operators.tpt.read_file", return_value="job var content")
    def test_with_local_job_var_file(self, mock_read_file, mock_is_valid_file):
        """Test using a local job variable file"""
        # Relies on the module-level autouse fixtures to stub out binary checks
        # and subprocess execution for the actual tdload run.
        # Configure operator with only job var file (no source/target parameters needed)
        operator = TdLoadOperator(
            task_id="test_with_job_var_file",
            tdload_job_var_file="/path/to/job_vars.txt",
            teradata_conn_id="teradata_default",
        )
        # Execute
        result = operator.execute({})
        # Verify the execution was successful (returns 0 for success)
        assert result == 0

    @patch("airflow.providers.teradata.operators.tpt.is_valid_file", return_value=True)
    @patch("airflow.providers.teradata.operators.tpt.read_file", return_value="job var content")
    def test_with_local_job_var_file_and_options(self, mock_read_file, mock_is_valid_file):
        """Test using a local job variable file with additional tdload options"""
        # Set up mocks
        with patch("airflow.providers.teradata.hooks.tpt.TptHook") as mock_tpt_hook:
            mock_tpt_hook_instance = mock_tpt_hook.return_value
            mock_tpt_hook_instance._execute_tdload_locally.return_value = 0
            with (
                patch("airflow.providers.teradata.operators.tpt.is_valid_file", return_value=True),
                patch("airflow.providers.teradata.operators.tpt.read_file", return_value="job var content"),
            ):
                # Configure operator with job var file and additional options
                operator = TdLoadOperator(
                    task_id="test_with_job_var_file_and_options",
                    tdload_job_var_file="/path/to/job_vars.txt",
                    tdload_options="-v -u",  # Add verbose and Unicode options
                    tdload_job_name="custom_job_name",
                    teradata_conn_id="teradata_default",
                )
                # Execute
                result = operator.execute({})
                # Verify the execution was successful (returns 0 for success)
                assert result == 0

    @patch("airflow.providers.teradata.hooks.tpt.TptHook")
    @patch("airflow.providers.ssh.hooks.ssh.SSHHook")
    def test_with_invalid_local_job_var_file(self, mock_ssh_hook, mock_tpt_hook):
        """Test with invalid local job variable file path"""
        # Set up mocks
        with patch("airflow.providers.teradata.operators.tpt.is_valid_file", return_value=False):
            # Configure operator
            operator = TdLoadOperator(
                task_id="test_with_invalid_job_var_file",
                tdload_job_var_file="/path/to/nonexistent_file.txt",
                teradata_conn_id="teradata_default",
            )
            # Execute and check for exception
            with pytest.raises(ValueError, match="is invalid or does not exist"):
                operator.execute({})

    # ----- Specific subprocess mocking tests -----
    @patch("airflow.providers.teradata.hooks.tpt.subprocess.Popen")
    @patch("airflow.providers.teradata.hooks.tpt.shutil.which")
    @patch("airflow.models.Connection")
    def test_direct_tdload_execution_mocking(self, mock_conn, mock_which, mock_popen):
        """Test the direct execution of tdload with proper mocking"""
        # Ensure the binary is found
        mock_which.return_value = "/usr/bin/tdload"
        # Mock the subprocess
        mock_process = MagicMock()
        mock_process.returncode = 0
        mock_process.stdout = MagicMock()
        mock_process.stdout.readline.side_effect = [
            b"Starting TDLOAD...\n",
            b"Processing data...\n",
            b"1000 rows loaded successfully\n",
            b"",
        ]
        mock_popen.return_value = mock_process
        # Create the TPT hook directly
        hook = TptHook(teradata_conn_id="teradata_default")
        # Execute the command directly
        result = hook._execute_tdload_locally(
            job_var_content="DEFINE JOB sample_job;\nUSING OPERATOR sel;\nSELECT * FROM source_table;\n",
            tdload_options="-v",
            tdload_job_name="sample_job",
        )
        # Verify the result
        assert result == 0
        mock_popen.assert_called_once()
        # Inspect the exact command line handed to Popen.
        cmd_args = mock_popen.call_args[0][0]
        assert cmd_args[0] == "tdload"
        assert "-j" in cmd_args
        assert "-v" in cmd_args
        assert "sample_job" in cmd_args

    @patch.object(TdLoadOperator, "_src_hook", create=True)
    @patch.object(TdLoadOperator, "_dest_hook", create=True)
    @patch("airflow.providers.teradata.hooks.tpt.TptHook._execute_tdload_locally")
    @patch("airflow.providers.teradata.hooks.tpt.TptHook.__init__", return_value=None)
    @patch("airflow.models.Connection")
    def test_execute_with_local_job_var_file_direct_patch(
        self, mock_conn, mock_hook_init, mock_execute_local, mock_dest_hook, mock_src_hook
    ):
        """Test TdLoadOperator with a local job var file using direct patching (bteq style)"""
        # Arrange
        mock_execute_local.return_value = 0
        operator = TdLoadOperator(
            task_id="test_with_local_job_var_file_direct_patch",
            tdload_job_var_file="/path/to/job_vars.txt",
            teradata_conn_id="teradata_default",
        )
        # Manually set hooks since we bypassed __init__
        operator._src_hook = mock_src_hook
        operator._dest_hook = mock_dest_hook
        operator._src_hook._execute_tdload_locally = mock_execute_local
        # Patch file validation and reading
        with (
            patch("airflow.providers.teradata.operators.tpt.is_valid_file", return_value=True),
            patch("airflow.providers.teradata.operators.tpt.read_file", return_value="job var content"),
        ):
            # Act
            result = operator.execute({})
        # Assert
        mock_execute_local.assert_called_once_with("job var content", None, None)
        assert result == 0

    @patch.object(TdLoadOperator, "_src_hook", create=True)
    @patch.object(TdLoadOperator, "_dest_hook", create=True)
    @patch("airflow.providers.teradata.hooks.tpt.TptHook.__init__", return_value=None)
    @patch("airflow.models.Connection")
    def test_on_kill_direct_patch(self, mock_conn, mock_hook_init, mock_dest_hook, mock_src_hook):
        """Test on_kill method with direct patching (bteq style)"""
        operator = TdLoadOperator(
            task_id="test_on_kill_direct_patch",
            source_table="source_db.source_table",
            target_table="target_db.target_table",
            teradata_conn_id="teradata_default",
            target_teradata_conn_id="teradata_target",
        )
        # Set up mocked attributes
        operator._src_hook = mock_src_hook
        operator._dest_hook = mock_dest_hook
        # Ensure the mocked hooks have on_kill methods
        mock_src_hook.on_kill = Mock()
        mock_dest_hook.on_kill = Mock()
        # Act
        operator.on_kill()
        # Assert
        mock_src_hook.on_kill.assert_called_once()
        mock_dest_hook.on_kill.assert_called_once()

    @patch("airflow.providers.ssh.hooks.ssh.SSHHook")
    @patch.object(TdLoadOperator, "_src_hook", create=True)
    @patch.object(TdLoadOperator, "_dest_hook", create=True)
    @patch("airflow.providers.teradata.hooks.tpt.TptHook.__init__", return_value=None)
    @patch("airflow.models.Connection")
    def test_on_kill_no_hooks_direct_patch(
        self, mock_conn, mock_hook_init, mock_dest_hook, mock_src_hook, mock_ssh_hook
    ):
        """Test on_kill method when no hooks are initialized (bteq style)"""
        operator = TdLoadOperator(
            task_id="test_on_kill_no_hooks_direct_patch",
            source_table="source_db.source_table",
            target_table="target_db.target_table",
            teradata_conn_id="teradata_default",
            target_teradata_conn_id="teradata_target",
        )
        operator._src_hook = None
        operator._dest_hook = None
        # Act
        operator.on_kill()
| {
"repo_id": "apache/airflow",
"file_path": "providers/teradata/tests/unit/teradata/operators/test_tpt.py",
"license": "Apache License 2.0",
"lines": 803,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/teradata/tests/unit/teradata/utils/test_tpt_util.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
import stat
import subprocess
import tempfile
from unittest.mock import Mock, patch
import pytest
from airflow.providers.teradata.utils.tpt_util import (
TPTConfig,
decrypt_remote_file,
execute_remote_command,
get_remote_os,
get_remote_temp_directory,
is_valid_file,
is_valid_remote_job_var_file,
prepare_tdload_job_var_file,
prepare_tpt_ddl_script,
read_file,
remote_secure_delete,
secure_delete,
set_local_file_permissions,
set_remote_file_permissions,
terminate_subprocess,
transfer_file_sftp,
verify_tpt_utility_installed,
verify_tpt_utility_on_remote_host,
write_file,
)
class TestTptUtil:
    """Test cases for TPT utility functions.

    Every test isolates the unit under test with ``unittest.mock``: SSH
    clients, subprocesses, loggers and the local filesystem are mocked, so
    the assertions pin down the exact commands, paths and log messages the
    helpers emit on Unix and Windows remote hosts.
    """
    # --- write_file / secure_delete (local filesystem) ----------------------
    def test_write_file(self):
        """Test write_file function."""
        # Create the temp file just to reserve a path; write_file re-opens it.
        with tempfile.NamedTemporaryFile(mode="w", delete=False) as tmp_file:
            test_path = tmp_file.name
        try:
            test_content = "Test content\nLine 2"
            write_file(test_path, test_content)
            # Verify file was written correctly
            with open(test_path, encoding="utf-8") as f:
                assert f.read() == test_content
        finally:
            if os.path.exists(test_path):
                os.unlink(test_path)
    @patch("os.path.exists")
    def test_secure_delete_file_not_exists(self, mock_exists):
        """Test secure_delete when file doesn't exist."""
        mock_exists.return_value = False
        mock_logger = Mock()
        # Should return early without error
        secure_delete("/nonexistent/file", mock_logger)
        mock_exists.assert_called_once_with("/nonexistent/file")
    @patch("shutil.which")
    @patch("subprocess.run")
    @patch("os.path.exists")
    def test_secure_delete_with_shred(self, mock_exists, mock_subprocess, mock_which):
        """Test secure_delete with shred available."""
        mock_exists.return_value = True
        mock_which.return_value = "/usr/bin/shred"
        mock_logger = Mock()
        secure_delete("/test/file", mock_logger)
        mock_which.assert_called_once_with("shred")
        # shred must be invoked as an argv list (no shell) with the configured timeout.
        mock_subprocess.assert_called_once_with(
            ["shred", "--remove", "/test/file"], check=True, timeout=TPTConfig.DEFAULT_TIMEOUT
        )
        mock_logger.info.assert_called_with("Securely removed file using shred: %s", "/test/file")
    @patch("shutil.which")
    @patch("os.remove")
    @patch("os.path.exists")
    def test_secure_delete_without_shred(self, mock_exists, mock_remove, mock_which):
        """Test secure_delete without shred available."""
        mock_exists.return_value = True
        mock_which.return_value = None
        mock_logger = Mock()
        secure_delete("/test/file", mock_logger)
        mock_which.assert_called_once_with("shred")
        # Falls back to a plain os.remove when shred is absent.
        mock_remove.assert_called_once_with("/test/file")
        mock_logger.info.assert_called_with("Removed file: %s", "/test/file")
    @patch("shutil.which")
    @patch("os.remove")
    @patch("os.path.exists")
    def test_secure_delete_os_error(self, mock_exists, mock_remove, mock_which):
        """Test secure_delete handles OSError gracefully."""
        mock_exists.return_value = True
        mock_which.return_value = None
        mock_remove.side_effect = OSError("Permission denied")
        mock_logger = Mock()
        # Must not raise; the failure is only logged as a warning.
        secure_delete("/test/file", mock_logger)
        mock_logger.warning.assert_called_with(
            "Failed to remove file %s: %s", "/test/file", "Permission denied"
        )
    # --- remote_secure_delete ------------------------------------------------
    def test_remote_secure_delete_no_ssh_client(self):
        """Test remote_secure_delete with no SSH client."""
        mock_logger = Mock()
        remote_secure_delete(None, ["/remote/file"], mock_logger)
        # Should return early without errors
    def test_remote_secure_delete_no_files(self):
        """Test remote_secure_delete with no files."""
        mock_ssh = Mock()
        mock_logger = Mock()
        remote_secure_delete(mock_ssh, [], mock_logger)
        # Should return early without errors
    @patch("airflow.providers.teradata.utils.tpt_util.get_remote_os")
    @patch("airflow.providers.teradata.utils.tpt_util.execute_remote_command")
    def test_remote_secure_delete_with_shred(self, mock_execute_cmd, mock_get_remote_os):
        """Test remote_secure_delete when shred is available."""
        mock_ssh = Mock()
        mock_logger = Mock()
        mock_get_remote_os.return_value = "unix"
        # Mock the command execution for shred availability check and file deletion
        # execute_remote_command returns (exit_status, stdout, stderr) tuples.
        mock_execute_cmd.side_effect = [
            (0, "/usr/bin/shred", ""),  # shred availability check
            (0, "", ""),  # file1 deletion
            (0, "", ""),  # file2 deletion
        ]
        remote_secure_delete(mock_ssh, ["/remote/file1", "/remote/file2"], mock_logger)
        # Should call get_remote_os and execute_remote_command
        mock_get_remote_os.assert_called_once_with(mock_ssh, mock_logger)
        # Each entry is ((positional args),) matching a mock `call` object.
        expected_calls = [
            ((mock_ssh, "command -v shred"),),
            ((mock_ssh, "shred --remove /remote/file1"),),
            ((mock_ssh, "shred --remove /remote/file2"),),
        ]
        assert mock_execute_cmd.call_args_list == expected_calls
        mock_logger.info.assert_called_with("Processed remote files: %s", "/remote/file1, /remote/file2")
    @patch("airflow.providers.teradata.utils.tpt_util.get_remote_os")
    @patch("airflow.providers.teradata.utils.tpt_util.execute_remote_command")
    def test_remote_secure_delete_without_shred(self, mock_execute_cmd, mock_get_remote_os):
        """Test remote_secure_delete when shred is not available."""
        mock_ssh = Mock()
        mock_logger = Mock()
        mock_get_remote_os.return_value = "unix"
        # Mock the command execution for shred not available and fallback deletion
        mock_execute_cmd.side_effect = [
            (1, "", ""),  # shred not available
            (0, "", ""),  # fallback deletion
        ]
        remote_secure_delete(mock_ssh, ["/remote/file"], mock_logger)
        # Should call get_remote_os and execute_remote_command
        mock_get_remote_os.assert_called_once_with(mock_ssh, mock_logger)
        calls = mock_execute_cmd.call_args_list
        assert len(calls) == 2
        assert calls[0][0] == (mock_ssh, "command -v shred")
        # Check that fallback command contains expected elements
        # (overwrite with zeros, then remove — exact command left flexible).
        fallback_cmd = calls[1][0][1]
        assert "dd if=/dev/zero" in fallback_cmd
        assert "rm -f" in fallback_cmd
    @patch("airflow.providers.teradata.utils.tpt_util.get_remote_os")
    @patch("airflow.providers.teradata.utils.tpt_util.execute_remote_command")
    def test_remote_secure_delete_windows(self, mock_execute_cmd, mock_get_remote_os):
        """Test remote_secure_delete on Windows."""
        mock_ssh = Mock()
        mock_logger = Mock()
        mock_get_remote_os.return_value = "windows"
        # Mock the command execution for Windows deletion
        mock_execute_cmd.return_value = (0, "", "")
        remote_secure_delete(mock_ssh, ["/remote/file"], mock_logger)
        # Should call get_remote_os and execute_remote_command for Windows
        mock_get_remote_os.assert_called_once_with(mock_ssh, mock_logger)
        calls = mock_execute_cmd.call_args_list
        assert len(calls) == 1
        windows_cmd = calls[0][0][1]
        # NOTE(review): the assertion implies remote_secure_delete rewrites "/" to
        # "\\" for Windows paths — confirmed only by this expected string.
        assert windows_cmd == 'if exist "\\remote\\file" del /f /q "\\remote\\file"'
    @patch("airflow.providers.teradata.utils.tpt_util.get_remote_os")
    def test_remote_secure_delete_exception(self, mock_get_remote_os):
        """Test remote_secure_delete handles exceptions gracefully."""
        mock_ssh = Mock()
        mock_get_remote_os.side_effect = Exception("SSH error")
        mock_logger = Mock()
        # Must not propagate; only a warning is logged.
        remote_secure_delete(mock_ssh, ["/remote/file"], mock_logger)
        mock_logger.warning.assert_called_with("Failed to remove remote files: %s", "SSH error")
    # --- terminate_subprocess ------------------------------------------------
    def test_terminate_subprocess_none(self):
        """Test terminate_subprocess with None subprocess."""
        mock_logger = Mock()
        terminate_subprocess(None, mock_logger)
        # Should return early without errors
    def test_terminate_subprocess_running(self):
        """Test terminate_subprocess with running subprocess."""
        mock_sp = Mock()
        mock_sp.poll.return_value = None  # Process is running
        mock_sp.pid = 12345
        mock_sp.wait.return_value = 0
        mock_logger = Mock()
        terminate_subprocess(mock_sp, mock_logger)
        mock_sp.terminate.assert_called_once()
        mock_sp.wait.assert_called_with(timeout=TPTConfig.DEFAULT_TIMEOUT)
        mock_logger.info.assert_any_call("Terminating subprocess (PID: %s)", 12345)
        mock_logger.info.assert_any_call("Subprocess terminated gracefully")
    def test_terminate_subprocess_timeout(self):
        """Test terminate_subprocess with timeout."""
        mock_sp = Mock()
        mock_sp.poll.return_value = None
        mock_sp.pid = 12345
        mock_sp.wait.side_effect = [
            subprocess.TimeoutExpired("cmd", 5),
            0,
        ]  # First call times out, second succeeds
        mock_logger = Mock()
        terminate_subprocess(mock_sp, mock_logger)
        # Should call terminate first, then kill
        mock_sp.terminate.assert_called_once()
        mock_sp.kill.assert_called_once()
        mock_logger.warning.assert_called_with(
            "Subprocess did not terminate gracefully within %d seconds, killing it", TPTConfig.DEFAULT_TIMEOUT
        )
        mock_logger.info.assert_any_call("Subprocess killed successfully")
    def test_terminate_subprocess_kill_error(self):
        """Test terminate_subprocess handles kill errors."""
        mock_sp = Mock()
        mock_sp.poll.return_value = None
        mock_sp.pid = 12345
        # First wait() times out (triggers kill), second wait() blows up.
        mock_sp.wait.side_effect = [subprocess.TimeoutExpired("cmd", 5), Exception("Kill failed")]
        mock_logger = Mock()
        terminate_subprocess(mock_sp, mock_logger)
        mock_sp.terminate.assert_called_once()
        mock_sp.kill.assert_called_once()
        mock_logger.error.assert_called_with("Error killing subprocess: %s", "Kill failed")
    def test_terminate_subprocess_not_running(self):
        """Test terminate_subprocess with already terminated subprocess."""
        mock_sp = Mock()
        mock_sp.poll.return_value = 0  # Process has terminated
        mock_logger = Mock()
        terminate_subprocess(mock_sp, mock_logger)
        # Should not attempt to terminate or kill
        mock_sp.terminate.assert_not_called()
        mock_sp.wait.assert_not_called()
    def test_terminate_subprocess_terminate_error(self):
        """Test terminate_subprocess handles terminate errors."""
        mock_sp = Mock()
        mock_sp.poll.return_value = None
        mock_sp.pid = 12345
        mock_sp.terminate.side_effect = Exception("Terminate failed")
        mock_logger = Mock()
        terminate_subprocess(mock_sp, mock_logger)
        mock_sp.terminate.assert_called_once()
        mock_logger.error.assert_called_with("Error terminating subprocess: %s", "Terminate failed")
    # --- verify_tpt_utility_* ------------------------------------------------
    @patch("shutil.which")
    def test_verify_tpt_utility_installed_success(self, mock_which):
        """Test verify_tpt_utility_installed when utility is found."""
        mock_which.return_value = "/usr/bin/tbuild"
        # Should not raise exception
        verify_tpt_utility_installed("tbuild")
        mock_which.assert_called_once_with("tbuild")
    @patch("airflow.providers.teradata.utils.tpt_util.get_remote_os")
    @patch("airflow.providers.teradata.utils.tpt_util.execute_remote_command")
    def test_verify_tpt_utility_on_remote_host_success(self, mock_execute_cmd, mock_get_remote_os):
        """Test verify_tpt_utility_on_remote_host when utility is found."""
        mock_ssh = Mock()
        mock_get_remote_os.return_value = "unix"
        mock_execute_cmd.return_value = (0, "/usr/bin/tbuild", "")
        # Should not raise exception
        verify_tpt_utility_on_remote_host(mock_ssh, "tbuild")
        mock_get_remote_os.assert_called_once()
        # Unix lookups use `which`; Windows uses `where` (next test).
        mock_execute_cmd.assert_called_once_with(mock_ssh, "which tbuild")
    @patch("airflow.providers.teradata.utils.tpt_util.get_remote_os")
    @patch("airflow.providers.teradata.utils.tpt_util.execute_remote_command")
    def test_verify_tpt_utility_on_remote_host_windows(self, mock_execute_cmd, mock_get_remote_os):
        """Test verify_tpt_utility_on_remote_host on Windows."""
        mock_ssh = Mock()
        mock_get_remote_os.return_value = "windows"
        mock_execute_cmd.return_value = (0, "C:\\Program Files\\tbuild.exe", "")
        # Should not raise exception
        verify_tpt_utility_on_remote_host(mock_ssh, "tbuild")
        mock_get_remote_os.assert_called_once()
        mock_execute_cmd.assert_called_once_with(mock_ssh, "where tbuild")
    @patch("airflow.providers.teradata.utils.tpt_util.get_remote_os")
    @patch("airflow.providers.teradata.utils.tpt_util.execute_remote_command")
    def test_verify_tpt_utility_on_remote_host_exception(self, mock_execute_cmd, mock_get_remote_os):
        """Test verify_tpt_utility_on_remote_host handles exceptions."""
        mock_ssh = Mock()
        mock_get_remote_os.side_effect = Exception("SSH connection failed")
        # Underlying failures are wrapped in a RuntimeError naming the utility.
        with pytest.raises(RuntimeError, match="Failed to verify TPT utility 'tbuild'"):
            verify_tpt_utility_on_remote_host(mock_ssh, "tbuild")
    # --- prepare_tpt_ddl_script ----------------------------------------------
    def test_prepare_tpt_ddl_script_basic(self):
        """Test prepare_tpt_ddl_script with basic input."""
        sql = ["CREATE TABLE test (id INT)", "INSERT INTO test VALUES (1)"]
        error_list = [1001, 1002]
        source_conn = {"host": "testhost", "login": "testuser", "password": "testpass"}
        job_name = "test_job"
        result = prepare_tpt_ddl_script(sql, error_list, source_conn, job_name)
        assert "DEFINE JOB test_job" in result
        assert "TdpId = 'testhost'" in result
        assert "UserName = 'testuser'" in result
        assert "UserPassword = 'testpass'" in result
        # Integer error codes are rendered as quoted strings in the script.
        assert "ErrorList = ['1001', '1002']" in result
        # Each SQL statement gets a trailing semicolon inside the script.
        assert "('CREATE TABLE test (id INT);')" in result
        assert "('INSERT INTO test VALUES (1);')" in result
    def test_prepare_tpt_ddl_script_auto_job_name(self):
        """Test prepare_tpt_ddl_script with auto-generated job name."""
        sql = ["CREATE TABLE test (id INT)"]
        source_conn = {"host": "testhost", "login": "testuser", "password": "testpass"}
        result = prepare_tpt_ddl_script(sql, None, source_conn, None)
        # Auto-generated job names share a fixed prefix; suffix is unspecified here.
        assert "DEFINE JOB airflow_tptddl_" in result
        assert "ErrorList = ['']" in result
    def test_prepare_tpt_ddl_script_empty_sql(self):
        """Test prepare_tpt_ddl_script with empty SQL list."""
        source_conn = {"host": "testhost", "login": "testuser", "password": "testpass"}
        with pytest.raises(ValueError, match="SQL statement list must be a non-empty list"):
            prepare_tpt_ddl_script([], None, source_conn)
    def test_prepare_tpt_ddl_script_invalid_sql(self):
        """Test prepare_tpt_ddl_script with invalid SQL input."""
        source_conn = {"host": "testhost", "login": "testuser", "password": "testpass"}
        with pytest.raises(ValueError, match="SQL statement list must be a non-empty list"):
            prepare_tpt_ddl_script("not a list", None, source_conn)
    def test_prepare_tpt_ddl_script_empty_statements(self):
        """Test prepare_tpt_ddl_script with empty SQL statements."""
        sql = ["", " ", None]
        source_conn = {"host": "testhost", "login": "testuser", "password": "testpass"}
        with pytest.raises(ValueError, match="No valid SQL statements found"):
            prepare_tpt_ddl_script(sql, None, source_conn)
    def test_prepare_tpt_ddl_script_sql_escaping(self):
        """Test prepare_tpt_ddl_script properly escapes single quotes."""
        sql = ["INSERT INTO test VALUES ('O''Reilly')"]
        source_conn = {"host": "testhost", "login": "testuser", "password": "testpass"}
        result = prepare_tpt_ddl_script(sql, None, source_conn)
        # Every single quote in the input is doubled again for TPT quoting.
        assert "('INSERT INTO test VALUES (''O''''Reilly'');')" in result
    # --- decrypt_remote_file -------------------------------------------------
    @patch("airflow.providers.teradata.utils.tpt_util.get_remote_os")
    @patch("airflow.providers.teradata.utils.tpt_util.execute_remote_command")
    def test_decrypt_remote_file_success(self, mock_execute_cmd, mock_get_remote_os):
        """Test decrypt_remote_file with successful decryption."""
        mock_ssh = Mock()
        mock_get_remote_os.return_value = "unix"
        mock_execute_cmd.return_value = (0, "", "")
        result = decrypt_remote_file(
            mock_ssh, "/remote/encrypted.file", "/remote/decrypted.file", "password123"
        )
        assert result == 0
        expected_cmd = "openssl enc -d -aes-256-cbc -salt -pbkdf2 -pass pass:'password123' -in /remote/encrypted.file -out /remote/decrypted.file"
        mock_execute_cmd.assert_called_once_with(mock_ssh, expected_cmd)
    @patch("airflow.providers.teradata.utils.tpt_util.get_remote_os")
    @patch("airflow.providers.teradata.utils.tpt_util.execute_remote_command")
    def test_decrypt_remote_file_with_quotes_in_password(self, mock_execute_cmd, mock_get_remote_os):
        """Test decrypt_remote_file with quotes in password."""
        mock_ssh = Mock()
        mock_get_remote_os.return_value = "unix"
        mock_execute_cmd.return_value = (0, "", "")
        decrypt_remote_file(mock_ssh, "/remote/encrypted.file", "/remote/decrypted.file", "pass'word")
        # Should escape single quotes
        # POSIX shell idiom: close quote, escaped quote, reopen quote ('\'').
        expected_cmd = "openssl enc -d -aes-256-cbc -salt -pbkdf2 -pass pass:'pass'\\''word' -in /remote/encrypted.file -out /remote/decrypted.file"
        mock_execute_cmd.assert_called_once_with(mock_ssh, expected_cmd)
    @patch("airflow.providers.teradata.utils.tpt_util.get_remote_os")
    @patch("airflow.providers.teradata.utils.tpt_util.execute_remote_command")
    def test_decrypt_remote_file_windows(self, mock_execute_cmd, mock_get_remote_os):
        """Test decrypt_remote_file on Windows."""
        mock_ssh = Mock()
        mock_get_remote_os.return_value = "windows"
        mock_execute_cmd.return_value = (0, "", "")
        decrypt_remote_file(mock_ssh, "/remote/encrypted.file", "/remote/decrypted.file", 'pass"word')
        # Should escape double quotes for Windows
        expected_cmd = 'openssl enc -d -aes-256-cbc -salt -pbkdf2 -pass pass:"pass""word" -in "/remote/encrypted.file" -out "/remote/decrypted.file"'
        mock_execute_cmd.assert_called_once_with(mock_ssh, expected_cmd)
    @patch("airflow.providers.teradata.utils.tpt_util.get_remote_os")
    @patch("airflow.providers.teradata.utils.tpt_util.execute_remote_command")
    def test_decrypt_remote_file_failure(self, mock_execute_cmd, mock_get_remote_os):
        """Test decrypt_remote_file with decryption failure."""
        mock_ssh = Mock()
        mock_get_remote_os.return_value = "unix"
        mock_execute_cmd.return_value = (1, "", "Bad decrypt")
        with pytest.raises(RuntimeError, match="Decryption failed with exit status 1"):
            decrypt_remote_file(mock_ssh, "/remote/encrypted.file", "/remote/decrypted.file", "password123")
    # --- TPTConfig / execute_remote_command / get_remote_os ------------------
    def test_tpt_config_constants(self):
        """Test TPTConfig constants."""
        assert TPTConfig.DEFAULT_TIMEOUT == 5
        assert TPTConfig.FILE_PERMISSIONS_READ_ONLY == 0o400
        assert TPTConfig.TEMP_DIR_WINDOWS == "C:\\Windows\\Temp"
        assert TPTConfig.TEMP_DIR_UNIX == "/tmp"
    def test_execute_remote_command_success(self):
        """Test execute_remote_command with successful execution."""
        mock_ssh = Mock()
        mock_stdin = Mock()
        mock_stdout = Mock()
        mock_stderr = Mock()
        mock_channel = Mock()
        mock_stdout.channel = mock_channel
        mock_channel.recv_exit_status.return_value = 0
        # Model the read().decode().strip() chain on the paramiko-style streams.
        mock_stdout.read.return_value.decode.return_value.strip.return_value = "output"
        mock_stderr.read.return_value.decode.return_value.strip.return_value = ""
        mock_ssh.exec_command.return_value = (mock_stdin, mock_stdout, mock_stderr)
        exit_status, stdout_data, stderr_data = execute_remote_command(mock_ssh, "test command")
        assert exit_status == 0
        assert stdout_data == "output"
        assert stderr_data == ""
        mock_ssh.exec_command.assert_called_once_with("test command")
        # All three channel files must be closed after use.
        mock_stdin.close.assert_called_once()
        mock_stdout.close.assert_called_once()
        mock_stderr.close.assert_called_once()
    def test_get_remote_os_windows(self):
        """Test get_remote_os detects Windows."""
        mock_ssh = Mock()
        mock_logger = Mock()
        with patch("airflow.providers.teradata.utils.tpt_util.execute_remote_command") as mock_execute:
            mock_execute.return_value = (0, "Windows_NT", "")
            result = get_remote_os(mock_ssh, mock_logger)
            assert result == "windows"
            mock_execute.assert_called_once_with(mock_ssh, "echo %OS%")
    def test_get_remote_os_unix(self):
        """Test get_remote_os detects Unix."""
        mock_ssh = Mock()
        mock_logger = Mock()
        with patch("airflow.providers.teradata.utils.tpt_util.execute_remote_command") as mock_execute:
            # %OS% does not expand on Unix shells, so stdout stays empty.
            mock_execute.return_value = (0, "", "")
            result = get_remote_os(mock_ssh, mock_logger)
            assert result == "unix"
    def test_get_remote_os_exception(self):
        """Test get_remote_os handles exceptions."""
        mock_ssh = Mock()
        mock_logger = Mock()
        with patch("airflow.providers.teradata.utils.tpt_util.execute_remote_command") as mock_execute:
            mock_execute.side_effect = Exception("SSH error")
            result = get_remote_os(mock_ssh, mock_logger)
            # Detection failures default to "unix" and are logged, not raised.
            assert result == "unix"
            mock_logger.error.assert_called_with("Error detecting remote OS: %s", "SSH error")
    # --- file permissions (local and remote) ---------------------------------
    def test_set_local_file_permissions_success(self):
        """Test set_local_file_permissions with successful permission setting."""
        mock_logger = Mock()
        with tempfile.NamedTemporaryFile() as tmp_file:
            set_local_file_permissions(tmp_file.name, mock_logger)
            # Check if permissions were set correctly
            file_stat = os.stat(tmp_file.name)
            assert file_stat.st_mode & 0o777 == 0o400
    def test_set_local_file_permissions_file_not_exists(self):
        """Test set_local_file_permissions with non-existent file."""
        mock_logger = Mock()
        with pytest.raises(FileNotFoundError, match="File does not exist"):
            set_local_file_permissions("/nonexistent/file", mock_logger)
    def test_set_local_file_permissions_empty_path(self):
        """Test set_local_file_permissions with empty path."""
        mock_logger = Mock()
        set_local_file_permissions("", mock_logger)
        mock_logger.warning.assert_called_with("No file path provided for permission setting")
    @patch("airflow.providers.teradata.utils.tpt_util.get_remote_os")
    @patch("airflow.providers.teradata.utils.tpt_util.execute_remote_command")
    def test_set_remote_file_permissions_unix(self, mock_execute_cmd, mock_get_remote_os):
        """Test set_remote_file_permissions on Unix."""
        mock_ssh = Mock()
        mock_logger = Mock()
        mock_get_remote_os.return_value = "unix"
        mock_execute_cmd.return_value = (0, "", "")
        set_remote_file_permissions(mock_ssh, "/remote/file", mock_logger)
        mock_get_remote_os.assert_called_once_with(mock_ssh, mock_logger)
        mock_execute_cmd.assert_called_once_with(mock_ssh, "chmod 400 /remote/file")
    @patch("airflow.providers.teradata.utils.tpt_util.get_remote_os")
    @patch("airflow.providers.teradata.utils.tpt_util.execute_remote_command")
    def test_set_remote_file_permissions_windows(self, mock_execute_cmd, mock_get_remote_os):
        """Test set_remote_file_permissions on Windows."""
        mock_ssh = Mock()
        mock_logger = Mock()
        mock_get_remote_os.return_value = "windows"
        mock_execute_cmd.return_value = (0, "", "")
        set_remote_file_permissions(mock_ssh, "/remote/file", mock_logger)
        mock_get_remote_os.assert_called_once_with(mock_ssh, mock_logger)
        # icacls: strip inheritance, grant read-only to the current user.
        expected_cmd = 'icacls "/remote/file" /inheritance:r /grant:r "%USERNAME%":R'
        mock_execute_cmd.assert_called_once_with(mock_ssh, expected_cmd)
    @patch("airflow.providers.teradata.utils.tpt_util.get_remote_os")
    @patch("airflow.providers.teradata.utils.tpt_util.execute_remote_command")
    def test_set_remote_file_permissions_failure(self, mock_execute_cmd, mock_get_remote_os):
        """Test set_remote_file_permissions with command failure."""
        mock_ssh = Mock()
        mock_logger = Mock()
        mock_get_remote_os.return_value = "unix"
        mock_execute_cmd.return_value = (1, "", "Permission denied")
        with pytest.raises(RuntimeError, match="Failed to set permissions"):
            set_remote_file_permissions(mock_ssh, "/remote/file", mock_logger)
    # --- get_remote_temp_directory -------------------------------------------
    @patch("airflow.providers.teradata.utils.tpt_util.get_remote_os")
    @patch("airflow.providers.teradata.utils.tpt_util.execute_remote_command")
    def test_get_remote_temp_directory_windows(self, mock_execute_cmd, mock_get_remote_os):
        """Test get_remote_temp_directory on Windows."""
        mock_ssh = Mock()
        mock_logger = Mock()
        mock_get_remote_os.return_value = "windows"
        mock_execute_cmd.return_value = (0, "C:\\Users\\User\\AppData\\Local\\Temp", "")
        result = get_remote_temp_directory(mock_ssh, mock_logger)
        assert result == "C:\\Users\\User\\AppData\\Local\\Temp"
        mock_execute_cmd.assert_called_once_with(mock_ssh, "echo %TEMP%")
    @patch("airflow.providers.teradata.utils.tpt_util.get_remote_os")
    def test_get_remote_temp_directory_unix(self, mock_get_remote_os):
        """Test get_remote_temp_directory on Unix."""
        mock_ssh = Mock()
        mock_logger = Mock()
        mock_get_remote_os.return_value = "unix"
        result = get_remote_temp_directory(mock_ssh, mock_logger)
        # Unix path is hard-coded; no remote command should be needed.
        assert result == "/tmp"
    @patch("airflow.providers.teradata.utils.tpt_util.get_remote_os")
    @patch("airflow.providers.teradata.utils.tpt_util.execute_remote_command")
    def test_get_remote_temp_directory_windows_fallback(self, mock_execute_cmd, mock_get_remote_os):
        """Test get_remote_temp_directory Windows fallback."""
        mock_ssh = Mock()
        mock_logger = Mock()
        mock_get_remote_os.return_value = "windows"
        mock_execute_cmd.return_value = (0, "%TEMP%", "")  # Command didn't expand
        result = get_remote_temp_directory(mock_ssh, mock_logger)
        assert result == TPTConfig.TEMP_DIR_WINDOWS
        mock_logger.warning.assert_called_with(
            "Could not get TEMP directory, using default: %s", TPTConfig.TEMP_DIR_WINDOWS
        )
    # --- transfer_file_sftp --------------------------------------------------
    def test_transfer_file_sftp_success(self):
        """Test transfer_file_sftp with successful transfer."""
        mock_ssh = Mock()
        mock_sftp = Mock()
        mock_ssh.open_sftp.return_value = mock_sftp
        mock_logger = Mock()
        with tempfile.NamedTemporaryFile() as tmp_file:
            transfer_file_sftp(mock_ssh, tmp_file.name, "/remote/path/file.txt", mock_logger)
            mock_ssh.open_sftp.assert_called_once()
            mock_sftp.put.assert_called_once_with(tmp_file.name, "/remote/path/file.txt")
            mock_sftp.close.assert_called_once()
    def test_transfer_file_sftp_local_file_not_exists(self):
        """Test transfer_file_sftp with non-existent local file."""
        mock_ssh = Mock()
        mock_logger = Mock()
        with pytest.raises(FileNotFoundError, match="Local file does not exist"):
            transfer_file_sftp(mock_ssh, "/nonexistent/local/file", "/remote/path/file.txt", mock_logger)
    def test_transfer_file_sftp_transfer_error(self):
        """Test transfer_file_sftp with transfer error."""
        mock_ssh = Mock()
        mock_sftp = Mock()
        mock_sftp.put.side_effect = Exception("Transfer failed")
        mock_ssh.open_sftp.return_value = mock_sftp
        mock_logger = Mock()
        with tempfile.NamedTemporaryFile() as tmp_file:
            with pytest.raises(RuntimeError, match="Failed to transfer file"):
                transfer_file_sftp(mock_ssh, tmp_file.name, "/remote/path/file.txt", mock_logger)
            # SFTP session must be closed even when the transfer fails.
            mock_sftp.close.assert_called_once()
    @patch("airflow.providers.teradata.utils.tpt_util.get_remote_os")
    @patch("airflow.providers.teradata.utils.tpt_util.execute_remote_command")
    def test_verify_tpt_utility_on_remote_host_not_found(self, mock_execute_cmd, mock_get_remote_os):
        """Test verify_tpt_utility_on_remote_host when utility is not found."""
        mock_ssh = Mock()
        mock_get_remote_os.return_value = "unix"
        mock_execute_cmd.return_value = (1, "", "command not found")
        with pytest.raises(FileNotFoundError, match="TPT utility 'tdload' is not installed"):
            verify_tpt_utility_on_remote_host(mock_ssh, "tdload")
    # --- is_valid_file -------------------------------------------------------
    def test_is_valid_file_true(self):
        """Test is_valid_file returns True for existing file."""
        with tempfile.NamedTemporaryFile() as tmp_file:
            assert is_valid_file(tmp_file.name) is True
    def test_is_valid_file_false(self):
        """Test is_valid_file returns False for non-existing file."""
        assert is_valid_file("/nonexistent/file") is False
    def test_is_valid_file_directory(self):
        """Test is_valid_file returns False for directory."""
        with tempfile.TemporaryDirectory() as tmp_dir:
            assert is_valid_file(tmp_dir) is False
    @patch("shutil.which")
    def test_verify_tpt_utility_installed_not_found(self, mock_which):
        """Test verify_tpt_utility_installed when utility is not found."""
        mock_which.return_value = None
        with pytest.raises(FileNotFoundError, match="TPT utility 'tdload' is not installed"):
            verify_tpt_utility_installed("tdload")
    # --- prepare_tdload_job_var_file -----------------------------------------
    def test_prepare_tdload_job_var_file_file_to_table(self):
        """Test prepare_tdload_job_var_file for file_to_table mode."""
        source_conn = {"host": "source_host", "login": "source_user", "password": "source_pass"}
        result = prepare_tdload_job_var_file(
            mode="file_to_table",
            source_table=None,
            select_stmt=None,
            insert_stmt="INSERT INTO target_table SELECT * FROM temp",
            target_table="target_table",
            source_file_name="/path/to/source.txt",
            target_file_name=None,
            source_format="TEXT",
            target_format="",
            source_text_delimiter="|",
            target_text_delimiter="",
            source_conn=source_conn,
        )
        # In file_to_table mode the single connection acts as the *target*.
        assert "TargetTdpId='source_host'" in result
        assert "TargetUserName='source_user'" in result
        assert "TargetUserPassword='source_pass'" in result
        assert "TargetTable='target_table'" in result
        assert "SourceFileName='/path/to/source.txt'" in result
        assert "InsertStmt='INSERT INTO target_table SELECT * FROM temp'" in result
        assert "SourceFormat='TEXT'" in result
        assert "SourceTextDelimiter='|'" in result
    def test_prepare_tdload_job_var_file_table_to_file(self):
        """Test prepare_tdload_job_var_file for table_to_file mode."""
        source_conn = {"host": "source_host", "login": "source_user", "password": "source_pass"}
        result = prepare_tdload_job_var_file(
            mode="table_to_file",
            source_table="source_table",
            select_stmt=None,
            insert_stmt=None,
            target_table=None,
            source_file_name=None,
            target_file_name="/path/to/target.txt",
            source_format="",
            target_format="TEXT",
            source_text_delimiter="",
            target_text_delimiter=",",
            source_conn=source_conn,
        )
        assert "SourceTdpId='source_host'" in result
        assert "SourceUserName='source_user'" in result
        assert "SourceUserPassword='source_pass'" in result
        assert "SourceTable='source_table'" in result
        assert "TargetFileName='/path/to/target.txt'" in result
        assert "TargetFormat='TEXT'" in result
        assert "TargetTextDelimiter=','" in result
    def test_prepare_tdload_job_var_file_table_to_file_with_select(self):
        """Test prepare_tdload_job_var_file for table_to_file mode with SELECT statement."""
        source_conn = {"host": "source_host", "login": "source_user", "password": "source_pass"}
        result = prepare_tdload_job_var_file(
            mode="table_to_file",
            source_table=None,
            select_stmt="SELECT * FROM source_table WHERE id > 100",
            insert_stmt=None,
            target_table=None,
            source_file_name=None,
            target_file_name="/path/to/target.txt",
            source_format="",
            target_format="TEXT",
            source_text_delimiter="",
            target_text_delimiter=",",
            source_conn=source_conn,
        )
        # A SELECT statement replaces the table reference entirely.
        assert "SourceSelectStmt='SELECT * FROM source_table WHERE id > 100'" in result
        assert "SourceTable=" not in result
    def test_prepare_tdload_job_var_file_table_to_table(self):
        """Test prepare_tdload_job_var_file for table_to_table mode."""
        source_conn = {"host": "source_host", "login": "source_user", "password": "source_pass"}
        target_conn = {"host": "target_host", "login": "target_user", "password": "target_pass"}
        result = prepare_tdload_job_var_file(
            mode="table_to_table",
            source_table="source_table",
            select_stmt=None,
            insert_stmt="INSERT INTO target_table SELECT * FROM source",
            target_table="target_table",
            source_file_name=None,
            target_file_name=None,
            source_format="",
            target_format="",
            source_text_delimiter="",
            target_text_delimiter="",
            source_conn=source_conn,
            target_conn=target_conn,
        )
        assert "SourceTdpId='source_host'" in result
        assert "TargetTdpId='target_host'" in result
        assert "TargetUserName='target_user'" in result
        assert "TargetUserPassword='target_pass'" in result
        assert "SourceTable='source_table'" in result
        assert "TargetTable='target_table'" in result
        assert "InsertStmt='INSERT INTO target_table SELECT * FROM source'" in result
    def test_prepare_tdload_job_var_file_table_to_table_no_target_conn(self):
        """Test prepare_tdload_job_var_file for table_to_table mode without target_conn."""
        source_conn = {"host": "source_host", "login": "source_user", "password": "source_pass"}
        with pytest.raises(ValueError, match="target_conn must be provided for 'table_to_table' mode"):
            prepare_tdload_job_var_file(
                mode="table_to_table",
                source_table="source_table",
                select_stmt=None,
                insert_stmt=None,
                target_table="target_table",
                source_file_name=None,
                target_file_name=None,
                source_format="",
                target_format="",
                source_text_delimiter="",
                target_text_delimiter="",
                source_conn=source_conn,
                target_conn=None,
            )
    # --- is_valid_remote_job_var_file / read_file ----------------------------
    def test_is_valid_remote_job_var_file_success(self):
        """Test is_valid_remote_job_var_file with valid file."""
        mock_ssh = Mock()
        mock_sftp = Mock()
        mock_file_stat = Mock()
        mock_file_stat.st_mode = stat.S_IFREG | 0o644  # Regular file
        mock_sftp.stat.return_value = mock_file_stat
        mock_ssh.open_sftp.return_value = mock_sftp
        result = is_valid_remote_job_var_file(mock_ssh, "/remote/path/job.var")
        assert result is True
        mock_ssh.open_sftp.assert_called_once()
        mock_sftp.stat.assert_called_once_with("/remote/path/job.var")
        mock_sftp.close.assert_called_once()
    def test_is_valid_remote_job_var_file_not_regular_file(self):
        """Test is_valid_remote_job_var_file with directory."""
        mock_ssh = Mock()
        mock_sftp = Mock()
        mock_file_stat = Mock()
        mock_file_stat.st_mode = stat.S_IFDIR | 0o755  # Directory
        mock_sftp.stat.return_value = mock_file_stat
        mock_ssh.open_sftp.return_value = mock_sftp
        result = is_valid_remote_job_var_file(mock_ssh, "/remote/path/directory")
        assert result is False
    def test_is_valid_remote_job_var_file_not_found(self):
        """Test is_valid_remote_job_var_file with non-existent file."""
        mock_ssh = Mock()
        mock_sftp = Mock()
        mock_sftp.stat.side_effect = FileNotFoundError("File not found")
        mock_ssh.open_sftp.return_value = mock_sftp
        mock_logger = Mock()
        result = is_valid_remote_job_var_file(mock_ssh, "/remote/path/nonexistent", mock_logger)
        assert result is False
        mock_logger.error.assert_called_with(
            "File does not exist on remote at : %s", "/remote/path/nonexistent"
        )
        mock_sftp.close.assert_called_once()
    def test_is_valid_remote_job_var_file_empty_path(self):
        """Test is_valid_remote_job_var_file with empty path."""
        mock_ssh = Mock()
        result = is_valid_remote_job_var_file(mock_ssh, "")
        assert result is False
        # Empty path must short-circuit before any SFTP session is opened.
        mock_ssh.open_sftp.assert_not_called()
    def test_is_valid_remote_job_var_file_none_path(self):
        """Test is_valid_remote_job_var_file with None path."""
        mock_ssh = Mock()
        result = is_valid_remote_job_var_file(mock_ssh, None)
        assert result is False
    def test_read_file_success(self):
        """Test read_file with existing file."""
        test_content = "Test file content\nLine 2\nLine 3"
        with tempfile.NamedTemporaryFile(mode="w", delete=False, encoding="utf-8") as tmp_file:
            tmp_file.write(test_content)
            tmp_file_path = tmp_file.name
        try:
            result = read_file(tmp_file_path)
            assert result == test_content
        finally:
            os.unlink(tmp_file_path)
    def test_read_file_with_encoding(self):
        """Test read_file with specific encoding."""
        test_content = "Test content with special chars: ñáéíóú"
        with tempfile.NamedTemporaryFile(mode="w", delete=False, encoding="latin-1") as tmp_file:
            tmp_file.write(test_content)
            tmp_file_path = tmp_file.name
        try:
            result = read_file(tmp_file_path, encoding="latin-1")
            assert result == test_content
        finally:
            os.unlink(tmp_file_path)
    def test_read_file_not_found(self):
        """Test read_file with non-existent file."""
        with pytest.raises(FileNotFoundError, match="The file /nonexistent/file does not exist"):
            read_file("/nonexistent/file")
| {
"repo_id": "apache/airflow",
"file_path": "providers/teradata/tests/unit/teradata/utils/test_tpt_util.py",
"license": "Apache License 2.0",
"lines": 750,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/google/tests/system/google/gcp_api_client_helpers.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
from typing import Any
from requests.exceptions import HTTPError
from tests_common.test_utils.api_client_helpers import make_authenticated_rest_api_request
def create_connection_request(connection_id: str, connection: dict[str, Any], is_composer: bool = False):
    """Create an Airflow connection through the REST API.

    When ``is_composer`` is True the request is routed through the Cloud
    Composer Airflow API proxy; otherwise the standard authenticated REST
    helper is used. Returns the parsed JSON response when one is available.
    """
    payload = {"connection_id": connection_id, **connection}
    if not is_composer:
        return make_authenticated_rest_api_request(
            path="/api/v2/connections",
            method="POST",
            body=payload,
        )
    # Composer path: go through the Composer hook against the environment's
    # Airflow web server URL.
    from airflow.providers.common.compat.sdk import conf
    from airflow.providers.google.cloud.hooks.cloud_composer import CloudComposerHook

    hook = CloudComposerHook()
    airflow_base_url = conf.get("api", "base_url").rstrip("/")
    response = hook.make_composer_airflow_api_request(
        method="POST",
        airflow_uri=airflow_base_url,
        path="/api/v2/connections",
        data=json.dumps(payload),
    )
    response.raise_for_status()
    # An empty body (e.g. 204-style responses) yields an implicit None.
    if response.text != "":
        return response.json()
def delete_connection_request(connection_id: str, is_composer: bool = False):
    """Delete an Airflow connection through the REST API.

    Routes through the Cloud Composer proxy when ``is_composer`` is True.
    Returns the parsed JSON response when the server sends a body.
    """
    if not is_composer:
        return make_authenticated_rest_api_request(
            path=f"/api/v2/connections/{connection_id}",
            method="DELETE",
        )
    from airflow.providers.common.compat.sdk import conf
    from airflow.providers.google.cloud.hooks.cloud_composer import CloudComposerHook

    hook = CloudComposerHook()
    airflow_base_url = conf.get("api", "base_url").rstrip("/")
    response = hook.make_composer_airflow_api_request(
        method="DELETE", airflow_uri=airflow_base_url, path=f"/api/v2/connections/{connection_id}"
    )
    response.raise_for_status()
    # An empty body yields an implicit None.
    if response.text != "":
        return response.json()
def create_airflow_connection(
    connection_id: str, connection_conf: dict[str, Any], is_composer: bool = False
) -> None:
    """Create (or recreate) an Airflow connection, deleting any existing one first.

    On Airflow 3+ this goes through the REST API; on older versions it writes
    directly to the metadata database via a SQLAlchemy session.

    :param connection_id: Connection ID to create.
    :param connection_conf: Keyword arguments for the ``Connection`` model / API body.
    :param is_composer: Route API calls through the Cloud Composer proxy.
    """
    from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS

    print(f"Removing connection '{connection_id}' if it exists")
    if AIRFLOW_V_3_0_PLUS:
        try:
            delete_connection_request(connection_id=connection_id, is_composer=is_composer)
        except HTTPError:
            # Expected when the connection did not exist yet — not an error.
            print(f"Connection '{connection_id}' does not exist. A new one will be created")
        create_connection_request(
            connection_id=connection_id, connection=connection_conf, is_composer=is_composer
        )
    else:
        from sqlalchemy import delete

        from airflow.models import Connection
        from airflow.settings import Session

        if Session is None:
            raise RuntimeError("Session not configured. Call configure_orm() first.")
        session = Session()
        try:
            # Replace any pre-existing connection with the same ID.
            session.execute(delete(Connection).where(Connection.conn_id == connection_id))
            connection = Connection(conn_id=connection_id, **connection_conf)
            session.add(connection)
            session.commit()
        finally:
            # Bug fix: the session was previously never closed, leaking a DB
            # connection per call.
            session.close()
    print(f"Connection '{connection_id}' created")
def delete_airflow_connection(connection_id: str, is_composer: bool = False) -> None:
    """Delete an Airflow connection by ID.

    On Airflow 3+ this goes through the REST API; on older versions it deletes
    directly from the metadata database via a SQLAlchemy session.

    :param connection_id: Connection ID to delete.
    :param is_composer: Route API calls through the Cloud Composer proxy.
    """
    from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS

    print(f"Removing connection '{connection_id}'")
    if AIRFLOW_V_3_0_PLUS:
        delete_connection_request(connection_id=connection_id, is_composer=is_composer)
    else:
        from sqlalchemy import delete

        from airflow.models import Connection
        from airflow.settings import Session

        if Session is None:
            raise RuntimeError("Session not configured. Call configure_orm() first.")
        session = Session()
        try:
            session.execute(delete(Connection).where(Connection.conn_id == connection_id))
            session.commit()
        finally:
            # Bug fix: the session was previously never closed, leaking a DB
            # connection per call.
            session.close()
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/tests/system/google/gcp_api_client_helpers.py",
"license": "Apache License 2.0",
"lines": 106,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/microsoft/azure/src/airflow/providers/microsoft/azure/triggers/message_bus.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import asyncio
from abc import abstractmethod
from collections.abc import AsyncIterator
from typing import TYPE_CHECKING, Any
from asgiref.sync import sync_to_async
from airflow.providers.microsoft.azure.hooks.asb import MessageHook
from airflow.providers.microsoft.azure.version_compat import AIRFLOW_V_3_0_PLUS
if AIRFLOW_V_3_0_PLUS:
from airflow.triggers.base import BaseEventTrigger, TriggerEvent
else:
from airflow.triggers.base import ( # type: ignore
BaseTrigger as BaseEventTrigger,
TriggerEvent,
)
if TYPE_CHECKING:
from azure.servicebus import ServiceBusReceivedMessage
class BaseAzureServiceBusTrigger(BaseEventTrigger):
    """
    Base trigger for Azure Service Bus message processing.

    This trigger provides common functionality for listening to Azure Service Bus
    queues and topics/subscriptions. It handles connection management and
    async message processing.

    :param poll_interval: Time interval between polling operations (seconds)
    :param azure_service_bus_conn_id: Connection ID for Azure Service Bus
    :param max_wait_time: Maximum time to wait for messages (seconds)
    """

    default_conn_name = "azure_service_bus_default"
    default_max_wait_time = None
    default_poll_interval = 60

    def __init__(
        self,
        poll_interval: float | None = None,
        azure_service_bus_conn_id: str | None = None,
        max_wait_time: float | None = None,
    ) -> None:
        # Bug fix: compare against None explicitly. The previous truthiness
        # checks (`x if x else default`) silently replaced legitimate falsy
        # values — e.g. poll_interval=0 or max_wait_time=0 — with the class
        # defaults.
        self.connection_id = (
            azure_service_bus_conn_id
            if azure_service_bus_conn_id is not None
            else BaseAzureServiceBusTrigger.default_conn_name
        )
        self.max_wait_time = (
            max_wait_time if max_wait_time is not None else BaseAzureServiceBusTrigger.default_max_wait_time
        )
        self.poll_interval = (
            poll_interval if poll_interval is not None else BaseAzureServiceBusTrigger.default_poll_interval
        )
        self.message_hook = MessageHook(azure_service_bus_conn_id=self.connection_id)

    @abstractmethod
    def serialize(self) -> tuple[str, dict[str, Any]]:
        """Serialize the trigger instance."""

    @abstractmethod
    def run(self) -> AsyncIterator[TriggerEvent]:
        """Run the trigger logic."""

    @classmethod
    def _get_message_body(cls, message: ServiceBusReceivedMessage) -> str:
        """Decode a received message body (bytes or an iterable of byte chunks) to str.

        :raises TypeError: if the body is neither bytes nor an iterable of bytes.
        """
        message_body = message.body
        if isinstance(message_body, bytes):
            return message_body.decode("utf-8")
        try:
            # The SDK may deliver the body as a generator of byte sections.
            return "".join(chunk.decode("utf-8") for chunk in message_body)
        except Exception:
            raise TypeError(f"Expected bytes or an iterator of bytes, but got {type(message_body).__name__}")
class AzureServiceBusQueueTrigger(BaseAzureServiceBusTrigger):
    """
    Trigger that watches one or more Azure Service Bus queues.

    Each polling cycle walks the configured queues in order; as soon as one of
    them yields a message it is decoded and emitted as a trigger event, after
    which the cycle restarts following the poll interval.

    Example:
        >>> trigger = AzureServiceBusQueueTrigger(
        ...     queues=["queue1", "queue2"],
        ...     azure_service_bus_conn_id="my_asb_conn",
        ...     poll_interval=30,
        ... )

    :param queues: List of queue names to monitor
    :param poll_interval: Time interval between polling operations (seconds)
    :param azure_service_bus_conn_id: Connection ID for Azure Service Bus
    :param max_wait_time: Maximum time to wait for messages (seconds)
    """

    def __init__(
        self,
        queues: list[str],
        poll_interval: float | None = None,
        azure_service_bus_conn_id: str | None = None,
        max_wait_time: float | None = None,
    ) -> None:
        super().__init__(poll_interval, azure_service_bus_conn_id, max_wait_time)
        self.queues = queues

    def serialize(self) -> tuple[str, dict[str, Any]]:
        """Return the classpath and kwargs needed to recreate this trigger."""
        classpath = f"{type(self).__module__}.{type(self).__qualname__}"
        kwargs = {
            "azure_service_bus_conn_id": self.connection_id,
            "queues": self.queues,
            "poll_interval": self.poll_interval,
            "max_wait_time": self.max_wait_time,
        }
        return classpath, kwargs

    async def run(self) -> AsyncIterator[TriggerEvent]:
        """Continuously poll the queues, emitting one event per received message."""
        read_async = sync_to_async(self.message_hook.read_message)
        while True:
            for queue_name in self.queues:
                received = await read_async(queue_name=queue_name, max_wait_time=self.max_wait_time)
                if not received:
                    continue
                yield TriggerEvent(
                    {
                        "message": BaseAzureServiceBusTrigger._get_message_body(received),
                        "queue": queue_name,
                    }
                )
                # Restart the queue sweep after a hit, same as the original loop.
                break
            await asyncio.sleep(self.poll_interval)
class AzureServiceBusSubscriptionTrigger(BaseAzureServiceBusTrigger):
    """
    Trigger that watches Azure Service Bus topic subscriptions.

    Polls each configured topic (under a single subscription name) in order and
    emits a trigger event for the first message found in every polling cycle.

    Example:
        >>> trigger = AzureServiceBusSubscriptionTrigger(
        ...     topics=["topic1", "topic2"],
        ...     subscription_name="my-subscription",
        ...     azure_service_bus_conn_id="my_asb_conn",
        ... )

    :param topics: List of topic names to monitor
    :param subscription_name: Name of the subscription to use
    :param poll_interval: Time interval between polling operations (seconds)
    :param azure_service_bus_conn_id: Connection ID for Azure Service Bus
    :param max_wait_time: Maximum time to wait for messages (seconds)
    """

    def __init__(
        self,
        topics: list[str],
        subscription_name: str,
        poll_interval: float | None = None,
        azure_service_bus_conn_id: str | None = None,
        max_wait_time: float | None = None,
    ) -> None:
        super().__init__(poll_interval, azure_service_bus_conn_id, max_wait_time)
        self.topics = topics
        self.subscription_name = subscription_name

    def serialize(self) -> tuple[str, dict[str, Any]]:
        """Return the classpath and kwargs needed to recreate this trigger."""
        classpath = f"{type(self).__module__}.{type(self).__qualname__}"
        kwargs = {
            "azure_service_bus_conn_id": self.connection_id,
            "topics": self.topics,
            "subscription_name": self.subscription_name,
            "poll_interval": self.poll_interval,
            "max_wait_time": self.max_wait_time,
        }
        return classpath, kwargs

    async def run(self) -> AsyncIterator[TriggerEvent]:
        """Continuously poll the topic subscriptions, emitting one event per message."""
        read_async = sync_to_async(self.message_hook.read_subscription_message)
        while True:
            for topic_name in self.topics:
                received = await read_async(
                    topic_name=topic_name,
                    subscription_name=self.subscription_name,
                    max_wait_time=self.max_wait_time,
                )
                if not received:
                    continue
                yield TriggerEvent(
                    {
                        "message": BaseAzureServiceBusTrigger._get_message_body(received),
                        "topic": topic_name,
                        "subscription": self.subscription_name,
                    }
                )
                # Restart the topic sweep after a hit, same as the original loop.
                break
            await asyncio.sleep(self.poll_interval)
| {
"repo_id": "apache/airflow",
"file_path": "providers/microsoft/azure/src/airflow/providers/microsoft/azure/triggers/message_bus.py",
"license": "Apache License 2.0",
"lines": 190,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/microsoft/azure/tests/unit/microsoft/azure/triggers/test_message_bus.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import Mock, patch
import pytest
from airflow.providers.microsoft.azure.triggers.message_bus import (
AzureServiceBusQueueTrigger,
AzureServiceBusSubscriptionTrigger,
)
from airflow.triggers.base import TriggerEvent
class TestBaseAzureServiceBusTrigger:
    """Tests for the behavior shared by all Service Bus triggers."""

    def test_init_with_defaults(self):
        """Defaults: no max wait time, 60s poll interval, hook attached."""
        with patch("airflow.providers.microsoft.azure.triggers.message_bus.MessageHook"):
            trigger = AzureServiceBusQueueTrigger(queues=["test_queue"])
            assert trigger.max_wait_time is None
            assert trigger.poll_interval == 60
            assert hasattr(trigger, "message_hook")

    def test_init_with_custom_values(self):
        """Explicitly provided settings are stored as-is."""
        with patch("airflow.providers.microsoft.azure.triggers.message_bus.MessageHook"):
            trigger = AzureServiceBusQueueTrigger(
                queues=["test_queue"],
                poll_interval=30,
                azure_service_bus_conn_id="custom_conn",
                max_wait_time=120,
            )
            assert (trigger.poll_interval, trigger.max_wait_time, trigger.connection_id) == (
                30,
                120,
                "custom_conn",
            )
class TestAzureServiceBusQueueTrigger:
    """Tests for the queue-based trigger."""

    @staticmethod
    async def _first_event(trigger):
        """Consume the trigger's async generator and return its first event."""
        async for event in trigger.run():
            return event

    def test_init(self):
        """Queue names are stored on the trigger."""
        queue_names = ["queue1", "queue2"]
        with patch("airflow.providers.microsoft.azure.triggers.message_bus.MessageHook"):
            trigger = AzureServiceBusQueueTrigger(
                queues=queue_names,
                azure_service_bus_conn_id="test_conn",
            )
            assert trigger.queues == queue_names

    def test_serialize(self):
        """serialize() reports the class path and constructor kwargs."""
        queue_names = ["queue1", "queue2"]
        with patch("airflow.providers.microsoft.azure.triggers.message_bus.MessageHook"):
            trigger = AzureServiceBusQueueTrigger(
                queues=queue_names,
                azure_service_bus_conn_id="test_conn",
            )
            classpath, kwargs = trigger.serialize()
            assert "AzureServiceBusQueueTrigger" in classpath
            assert kwargs["queues"] == queue_names
            assert "azure_service_bus_conn_id" in kwargs

    @pytest.mark.asyncio
    async def test_run_with_message(self):
        """A bytes payload is decoded and emitted as a TriggerEvent."""
        with patch("airflow.providers.microsoft.azure.triggers.message_bus.MessageHook"):
            trigger = AzureServiceBusQueueTrigger(
                queues=["test_queue"],
                poll_interval=0.01,  # keep the test fast
            )
            trigger.message_hook.read_message = Mock(return_value=Mock(body=b"test message"))
            event = await self._first_event(trigger)
            assert isinstance(event, TriggerEvent)
            assert event.payload["message"] == "test message"
            assert event.payload["queue"] == "test_queue"

    @pytest.mark.asyncio
    async def test_run_with_iterator_message(self):
        """A chunked (iterator) payload is joined and emitted as a TriggerEvent."""
        with patch("airflow.providers.microsoft.azure.triggers.message_bus.MessageHook"):
            trigger = AzureServiceBusQueueTrigger(
                queues=["test_queue"],
                poll_interval=0.01,  # keep the test fast
            )
            chunked_body = iter([b"test", b" ", b"iterator", b" ", b"message"])
            trigger.message_hook.read_message = Mock(return_value=Mock(body=chunked_body))
            event = await self._first_event(trigger)
            assert isinstance(event, TriggerEvent)
            assert event.payload["message"] == "test iterator message"
            assert event.payload["queue"] == "test_queue"
class TestAzureServiceBusSubscriptionTrigger:
    """Tests for the topic/subscription-based trigger."""

    @staticmethod
    async def _first_event(trigger):
        """Consume the trigger's async generator and return its first event."""
        async for event in trigger.run():
            return event

    def test_init(self):
        """Topics and subscription name are stored on the trigger."""
        topic_names = ["topic1", "topic2"]
        sub_name = "test-subscription"
        with patch("airflow.providers.microsoft.azure.triggers.message_bus.MessageHook"):
            trigger = AzureServiceBusSubscriptionTrigger(
                topics=topic_names,
                subscription_name=sub_name,
                azure_service_bus_conn_id="test_conn",
            )
            assert trigger.topics == topic_names
            assert trigger.subscription_name == sub_name

    def test_serialize(self):
        """serialize() reports the class path and constructor kwargs."""
        topic_names = ["topic1", "topic2"]
        sub_name = "test-subscription"
        with patch("airflow.providers.microsoft.azure.triggers.message_bus.MessageHook"):
            trigger = AzureServiceBusSubscriptionTrigger(
                topics=topic_names,
                subscription_name=sub_name,
                azure_service_bus_conn_id="test_conn",
            )
            classpath, kwargs = trigger.serialize()
            assert "AzureServiceBusSubscriptionTrigger" in classpath
            assert kwargs["topics"] == topic_names
            assert kwargs["subscription_name"] == sub_name

    @pytest.mark.asyncio
    async def test_run_subscription_with_message(self):
        """A bytes payload is decoded and emitted with topic/subscription metadata."""
        with patch("airflow.providers.microsoft.azure.triggers.message_bus.MessageHook"):
            trigger = AzureServiceBusSubscriptionTrigger(
                topics=["test_topic"],
                subscription_name="test-sub",
                poll_interval=0.01,  # keep the test fast
                azure_service_bus_conn_id="test_conn",
            )
            trigger.message_hook.read_subscription_message = Mock(
                return_value=Mock(body=b"subscription test message")
            )
            event = await self._first_event(trigger)
            assert isinstance(event, TriggerEvent)
            assert event.payload["message"] == "subscription test message"
            assert event.payload["topic"] == "test_topic"
            assert event.payload["subscription"] == "test-sub"

    @pytest.mark.asyncio
    async def test_run_subscription_with_iterator_message(self):
        """A chunked (iterator) payload is joined before being emitted."""
        with patch("airflow.providers.microsoft.azure.triggers.message_bus.MessageHook"):
            trigger = AzureServiceBusSubscriptionTrigger(
                topics=["test_topic"],
                subscription_name="test-sub",
                poll_interval=0.01,  # keep the test fast
                azure_service_bus_conn_id="test_conn",
            )
            chunked_body = iter([b"iterator", b" ", b"subscription"])
            trigger.message_hook.read_subscription_message = Mock(return_value=Mock(body=chunked_body))
            event = await self._first_event(trigger)
            assert isinstance(event, TriggerEvent)
            assert event.payload["message"] == "iterator subscription"
            assert event.payload["topic"] == "test_topic"
            assert event.payload["subscription"] == "test-sub"
class TestIntegrationScenarios:
    """Edge cases and multi-message scenarios."""

    @pytest.mark.asyncio
    async def test_multiple_messages_processing(self):
        """Messages supplied via side_effect are emitted in arrival order."""
        with patch("airflow.providers.microsoft.azure.triggers.message_bus.MessageHook"):
            trigger = AzureServiceBusQueueTrigger(
                queues=["test_queue"],
                poll_interval=0.01,  # keep the test fast
            )
            expected_texts = ["msg1", "msg2", "msg3"]
            queued = [Mock(body=text.encode("utf-8")) for text in expected_texts]
            trigger.message_hook.read_message = Mock(side_effect=queued + [None])
            seen = []
            async for event in trigger.run():
                seen.append(event.payload["message"])
                if len(seen) >= 3:
                    break
            assert seen == expected_texts

    def test_queue_trigger_with_empty_queues_list(self):
        """An empty queue list is accepted and stored as-is."""
        with patch("airflow.providers.microsoft.azure.triggers.message_bus.MessageHook"):
            assert AzureServiceBusQueueTrigger(queues=[]).queues == []

    def test_subscription_trigger_with_empty_topics_list(self):
        """An empty topic list is accepted and stored as-is."""
        with patch("airflow.providers.microsoft.azure.triggers.message_bus.MessageHook"):
            trigger = AzureServiceBusSubscriptionTrigger(
                topics=[], subscription_name="test-sub", azure_service_bus_conn_id="test_conn"
            )
            assert trigger.topics == []

    def test_message_hook_initialization(self):
        """The MessageHook is constructed with the configured connection ID."""
        conn_id = "test_connection"
        with patch("airflow.providers.microsoft.azure.triggers.message_bus.MessageHook") as hook_cls:
            trigger = AzureServiceBusQueueTrigger(queues=["test"], azure_service_bus_conn_id=conn_id)
            hook_cls.assert_called_once_with(azure_service_bus_conn_id=conn_id)
            assert hasattr(trigger, "message_hook")

    def test_message_hook_properly_configured(self):
        """Both the hook construction and connection_id attribute use the conn ID."""
        conn_id = "test_connection"
        with patch("airflow.providers.microsoft.azure.triggers.message_bus.MessageHook") as hook_cls:
            trigger = AzureServiceBusQueueTrigger(queues=["test"], azure_service_bus_conn_id=conn_id)
            hook_cls.assert_called_once_with(azure_service_bus_conn_id=conn_id)
            assert hasattr(trigger, "message_hook")
            assert trigger.connection_id == conn_id
| {
"repo_id": "apache/airflow",
"file_path": "providers/microsoft/azure/tests/unit/microsoft/azure/triggers/test_message_bus.py",
"license": "Apache License 2.0",
"lines": 235,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:dev/breeze/src/airflow_breeze/commands/release_management_validation.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import sys
from pathlib import Path
import click
from airflow_breeze.commands.release_management_group import release_management_group
from airflow_breeze.utils.airflow_release_validator import AirflowReleaseValidator
from airflow_breeze.utils.console import console_print
from airflow_breeze.utils.release_validator import CheckType
@release_management_group.command(
    name="verify-rc-by-pmc",
    help=(
        "[EXPERIMENTAL] Verify a release candidate for PMC voting.\n"
        "\n"
        "Runs the standard PMC verification steps (reproducible builds, SVN files, licenses, signatures, checksums) "
        "with extra safety/ergonomics.\n"
        "\n"
        "Note: This command is experimental; breaking changes might happen without notice. "
        "It is recommended to also follow the manual verification steps and compare results.\n"
        "\n"
        "DEPRECATION NOTICE: All checks except 'reproducible-build' will be deprecated upon full migration "
        "to Apache Trusted Releases (ATR). After migration, only the reproducible build check will remain."
    ),
)
@click.option(
    "--distribution",
    type=click.Choice(["airflow", "airflowctl", "providers", "python-client"]),
    default="airflow",
    help="Distribution type to verify",
)
@click.option(
    "--version",
    help="Release candidate version",
)
@click.option(
    "--task-sdk-version",
    help="Task SDK version",
)
@click.option(
    "--path-to-airflow-svn",
    "-p",
    required=True,
    type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True, path_type=Path),
    envvar="PATH_TO_AIRFLOW_SVN",
    help="Path to directory where release files are checked out from SVN (e.g., ~/code/asf-dist/dev/airflow)",
)
@click.option(
    "--checks",
    help="Comma separated list of checks to run. Available: reproducible-build, svn, licenses, signatures, checksums. All by default.",
)
@click.option(
    "--download-gpg-keys",
    is_flag=True,
    help="Download and import ASF KEYS for signature verification.",
)
@click.option(
    "--update-svn/--no-update-svn",
    is_flag=True,
    default=True,
    help="Run 'svn update' before verification to fetch the latest release files. Default: True.",
)
@click.option(
    "--verbose/--no-verbose",
    is_flag=True,
    default=True,
    help="Show detailed verification output. Enabled by default; use --no-verbose to disable (for development/testing only).",
)
def verify_rc_by_pmc(
    distribution: str,
    version: str,
    task_sdk_version: str | None,
    path_to_airflow_svn: Path,
    checks: str | None,
    download_gpg_keys: bool,
    update_svn: bool,
    verbose: bool,
):
    """Verify a release candidate for PMC voting.

    This is intended to automate (not replace) the manual verification steps described in the
    release guides.

    Notes:
    - Experimental: breaking changes may happen without notice. It is recommended to also follow
      the manual verification steps and compare results.
    - Reproducible build verification checks out the release tag, builds packages using the same
      breeze commands as documented in README_RELEASE_AIRFLOW.md, and compares with SVN artifacts.
    - DEPRECATION: All checks except 'reproducible-build' will be deprecated upon full migration
      to Apache Trusted Releases (ATR). After migration, only the reproducible build check will remain.

    Practical requirements:
    - Run from the Airflow git repository root (must contain the airflow-core/ directory).
    - Ensure you have a full SVN checkout of the relevant release directory.
    - Some checks may require external tools (e.g. gpg, java for Apache RAT, hatch for builds).

    Examples:
        Verify Airflow + Task SDK RC (run all checks):
            breeze release-management verify-rc-by-pmc \\
                --distribution airflow \\
                --version 3.1.3rc1 \\
                --task-sdk-version 1.1.3rc1 \\
                --path-to-airflow-svn ~/asf-dist/dev/airflow

        Verify only signatures + checksums:
            breeze release-management verify-rc-by-pmc \\
                --distribution airflow \\
                --version 3.1.3rc1 \\
                --task-sdk-version 1.1.3rc1 \\
                --path-to-airflow-svn ~/asf-dist/dev/airflow \\
                --checks signatures,checksums
    """
    airflow_repo_root = Path.cwd()
    if not (airflow_repo_root / "airflow-core").exists():
        console_print("[red]Error: Must be run from Airflow repository root[/red]")
        sys.exit(1)
    # Parse the comma-separated --checks value into CheckType members.
    check_list = None
    if checks:
        try:
            check_list = [CheckType(c.strip()) for c in checks.split(",")]
        except ValueError as e:
            console_print(f"[red]Invalid check type: {e}[/red]")
            console_print(f"Available checks: {', '.join([c.value for c in CheckType])}")
            sys.exit(1)
    # Show deprecation warning if running checks other than reproducible-build
    checks_to_run = check_list or list(CheckType)
    deprecated_checks = [c for c in checks_to_run if c != CheckType.REPRODUCIBLE_BUILD]
    if deprecated_checks:
        console_print(
            "[yellow]DEPRECATION WARNING: All checks except 'reproducible-build' will be deprecated "
            "upon full migration to Apache Trusted Releases (ATR). After migration, only the reproducible "
            "build check will remain as the primary automated verification.[/yellow]\n"
        )
    # Validate required options based on distribution type
    if distribution == "providers":
        console_print("[yellow]providers verification not yet implemented[/yellow]")
        sys.exit(1)
    elif distribution == "airflow":
        if not version:
            console_print("[red]Error: --version is required for airflow verification[/red]")
            sys.exit(1)
        validator = AirflowReleaseValidator(
            version=version,
            svn_path=path_to_airflow_svn,
            airflow_repo_root=airflow_repo_root,
            task_sdk_version=task_sdk_version,
            download_gpg_keys=download_gpg_keys,
            update_svn=update_svn,
            verbose=verbose,
        )
    elif distribution == "airflowctl":
        console_print("[yellow]airflowctl verification not yet implemented[/yellow]")
        # Bug fix: previously fell through to validator.validate() with
        # `validator` unbound, raising NameError.
        sys.exit(1)
    elif distribution == "python-client":
        # Bug fix: the message wrongly said "providers"; also exit instead of
        # falling through to an unbound `validator`.
        console_print("[yellow]python-client verification not yet implemented[/yellow]")
        sys.exit(1)
    else:
        console_print(f"[red]Unknown distribution: {distribution}[/red]")
        sys.exit(1)
    if not validator.validate(checks=check_list):
        console_print(f"[red]Verification failed for {distribution}[/red]")
        sys.exit(1)
# Deprecated alias for backwards compatibility (was on main before rename to verify-rc-by-pmc)
# Deprecated alias: kept so existing automation invoking `validate-rc-by-pmc`
# keeps working; it forwards all options to `verify-rc-by-pmc` via ctx.invoke.
# The option list mirrors verify_rc_by_pmc's and must stay in sync with it.
@release_management_group.command(
    name="validate-rc-by-pmc",
    hidden=True,
    help="[DEPRECATED: use verify-rc-by-pmc] Validate a release candidate for PMC voting.",
)
@click.option(
    "--distribution",
    type=click.Choice(["airflow", "airflowctl", "providers", "python-client"]),
    default="airflow",
    help="Distribution type to verify",
)
@click.option("--version", help="Release candidate version")
@click.option("--task-sdk-version", help="Task SDK version")
@click.option(
    "--path-to-airflow-svn",
    "-p",
    required=True,
    type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True, path_type=Path),
    envvar="PATH_TO_AIRFLOW_SVN",
    help="Path to SVN checkout",
)
@click.option("--checks", help="Comma-separated list of checks to run")
@click.option("--download-gpg-keys", is_flag=True, help="Download ASF KEYS")
@click.option("--update-svn/--no-update-svn", is_flag=True, default=True, help="Run 'svn update'")
@click.option("--verbose", is_flag=True, help="Verbose output")
@click.pass_context
def validate_rc_by_pmc(ctx: click.Context, **kwargs):
    """Deprecated alias for verify-rc-by-pmc: warn, then delegate unchanged kwargs."""
    console_print(
        "[yellow]Warning: 'validate-rc-by-pmc' is deprecated and will be removed in a future release. "
        "Use 'verify-rc-by-pmc' instead.[/yellow]"
    )
    ctx.invoke(verify_rc_by_pmc, **kwargs)
| {
"repo_id": "apache/airflow",
"file_path": "dev/breeze/src/airflow_breeze/commands/release_management_validation.py",
"license": "Apache License 2.0",
"lines": 203,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:dev/breeze/src/airflow_breeze/utils/airflow_release_validator.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import filecmp
import shutil
import tarfile
import time
from pathlib import Path
from airflow_breeze.utils.console import console_print
from airflow_breeze.utils.release_validator import CheckType, ReleaseValidator, ValidationResult
from airflow_breeze.utils.run_utils import run_command
class AirflowReleaseValidator(ReleaseValidator):
"""Validator for Apache Airflow release candidates."""
def __init__(
self,
version: str,
svn_path: Path,
airflow_repo_root: Path,
task_sdk_version: str | None = None,
download_gpg_keys: bool = False,
update_svn: bool = True,
verbose: bool = False,
):
super().__init__(
version=version,
svn_path=svn_path,
airflow_repo_root=airflow_repo_root,
download_gpg_keys=download_gpg_keys,
update_svn=update_svn,
verbose=verbose,
)
self.task_sdk_version = task_sdk_version or version
self.version_without_rc = self._strip_rc_suffix(version)
self.task_sdk_version_without_rc = self._strip_rc_suffix(self.task_sdk_version)
@property
def expected_airflow_file_bases(self) -> list[str]:
return [
f"apache_airflow-{self.version_without_rc}-source.tar.gz",
f"apache_airflow-{self.version_without_rc}.tar.gz",
f"apache_airflow-{self.version_without_rc}-py3-none-any.whl",
f"apache_airflow_core-{self.version_without_rc}.tar.gz",
f"apache_airflow_core-{self.version_without_rc}-py3-none-any.whl",
]
@property
def expected_task_sdk_file_bases(self) -> list[str]:
return [
f"apache_airflow_task_sdk-{self.task_sdk_version_without_rc}.tar.gz",
f"apache_airflow_task_sdk-{self.task_sdk_version_without_rc}-py3-none-any.whl",
]
def get_distribution_name(self) -> str:
    """Return the human-readable distribution name used in console output."""
    return "Apache Airflow"

def get_svn_directory(self) -> Path:
    """Return the SVN directory holding the Airflow RC artifacts."""
    return self.svn_path / self.version

def get_task_sdk_svn_directory(self) -> Path:
    """Return the SVN directory holding the Task SDK RC artifacts."""
    return self.svn_path / "task-sdk" / self.task_sdk_version

def get_svn_directories(self) -> list[Path]:
    """Return both Airflow and Task SDK SVN directories."""
    return [self.get_svn_directory(), self.get_task_sdk_svn_directory()]
@staticmethod
def _with_signature_files(bases: list[str]) -> list[str]:
    """Expand artifact base names with their .asc and .sha512 companions."""
    return [name for base in bases for name in (base, f"{base}.asc", f"{base}.sha512")]

def get_expected_files(self) -> list[str]:
    """Return every Airflow file expected in SVN (artifacts + signatures + checksums)."""
    return self._with_signature_files(self.expected_airflow_file_bases)

def get_task_sdk_expected_files(self) -> list[str]:
    """Return every Task SDK file expected in SVN (artifacts + signatures + checksums)."""
    return self._with_signature_files(self.expected_task_sdk_file_bases)
def validate_svn_files(self):
    """Check that every expected release artifact is present in SVN.

    Verifies both the Airflow and the Task SDK SVN directories exist and
    contain all expected tarballs/wheels plus their companion files.
    """
    console_print("\n[bold]SVN File Verification[/bold]")
    start_time = time.time()
    # (label, directory, expected file names) for each distribution.
    targets = [
        ("Airflow", self.get_svn_directory(), set(self.get_expected_files())),
        ("Task SDK", self.get_task_sdk_svn_directory(), set(self.get_task_sdk_expected_files())),
    ]
    details: list[str] = []
    missing: list[str] = []
    total_expected = 0
    for label, svn_dir, expected in targets:
        console_print(f"Checking {label} directory: {svn_dir}")
        if not svn_dir.exists():
            return ValidationResult(
                check_type=CheckType.SVN,
                passed=False,
                message=f"{label} SVN directory not found: {svn_dir}",
                duration_seconds=time.time() - start_time,
            )
        present = {entry.name for entry in svn_dir.iterdir() if entry.is_file()}
        absent = expected - present
        total_expected += len(expected)
        if absent:
            details.append(f"Missing {len(absent)} {label} files:")
            details.extend(f" - {name}" for name in sorted(absent)[:10])
        missing.extend(absent)
    if missing:
        message = f"Missing {len(missing)} files"
    else:
        message = f"All {total_expected} expected files present"
    result = ValidationResult(
        check_type=CheckType.SVN,
        passed=not missing,
        message=message,
        details=details or None,
        duration_seconds=time.time() - start_time,
    )
    self._print_result(result)
    return result
def _compare_archives(self, built_file: Path, svn_file: Path) -> tuple[bool, list[str]]:
    """Compare two archives by content.

    :param built_file: locally built artifact (``.whl`` or ``.tar.gz``).
    :param svn_file: artifact of the same name downloaded from SVN.
    Returns:
        Tuple of (matches, diff_details) where diff_details lists what differs.
    """
    diff_details: list[str] = []
    if built_file.suffix == ".whl":
        import zipfile

        try:
            with zipfile.ZipFile(built_file) as z1, zipfile.ZipFile(svn_file) as z2:
                n1 = set(z1.namelist())
                n2 = set(z2.namelist())
                # Plain set difference - no wrapping comprehension needed.
                only_in_built = n1 - n2
                only_in_svn = n2 - n1
                if only_in_built:
                    diff_details.append(f"Only in built: {', '.join(sorted(only_in_built)[:5])}")
                if only_in_svn:
                    diff_details.append(f"Only in SVN: {', '.join(sorted(only_in_svn)[:5])}")
                # CRC32 is read from the central directory - no decompression.
                for n in n1 & n2:
                    if z1.getinfo(n).CRC != z2.getinfo(n).CRC:
                        diff_details.append(f"Content differs: {n}")
                return (not diff_details, diff_details)
        except Exception as e:
            return (False, [f"Error: {e}"])
    elif built_file.suffix == ".gz":  # tar.gz
        try:
            with tarfile.open(built_file, "r:gz") as t1, tarfile.open(svn_file, "r:gz") as t2:
                m1 = {m.name: m for m in t1.getmembers()}
                m2 = {m.name: m for m in t2.getmembers()}
                names1 = set(m1)
                names2 = set(m2)
                only_in_built = names1 - names2
                only_in_svn = names2 - names1
                if only_in_built:
                    diff_details.append(f"Only in built: {', '.join(sorted(only_in_built)[:5])}")
                if only_in_svn:
                    diff_details.append(f"Only in SVN: {', '.join(sorted(only_in_svn)[:5])}")
                # First pass: compare sizes (fast, metadata only)
                size_mismatches = []
                for name in names1 & names2:
                    if m1[name].size != m2[name].size:
                        size_mismatches.append(name)
                    elif m1[name].issym() and m2[name].issym():
                        if m1[name].linkname != m2[name].linkname:
                            diff_details.append(f"Symlink differs: {name}")
                    elif m1[name].isdir() != m2[name].isdir():
                        diff_details.append(f"Type differs: {name}")
                # Report at most 10 size mismatches (slicing an empty list is a no-op).
                for name in size_mismatches[:10]:
                    diff_details.append(f"Size differs: {name} ({m1[name].size} vs {m2[name].size})")
                # If file lists and sizes all match, archives are equivalent
                return (not diff_details, diff_details)
        except Exception as e:
            return (False, [f"Error: {e}"])
    return (False, ["Unknown archive type"])
def validate_reproducible_build(self):
    """Build packages from source using git checkout and compare with SVN artifacts.

    Temporarily checks out the release tag in the local repository, builds
    the distributions and source tarball, and compares every built file
    against the artifact of the same name in SVN - exact binary compare
    first, then an archive-content compare as fallback. The original
    branch or commit is always restored afterwards, even on failure.
    """
    console_print("\n[bold]Reproducible Build Verification[/bold]")
    start_time = time.time()
    tag = self.version
    repo_root = self.airflow_repo_root
    # Check for uncommitted changes
    status_result = run_command(
        ["git", "status", "--porcelain"],
        cwd=str(repo_root),
        capture_output=True,
        text=True,
        check=False,
    )
    if status_result.stdout.strip():
        return ValidationResult(
            check_type=CheckType.REPRODUCIBLE_BUILD,
            passed=False,
            message="Repository has uncommitted changes",
            details=["Please commit or stash changes before running reproducible build check."],
            duration_seconds=time.time() - start_time,
        )
    # Save current branch name (if on a branch) or HEAD commit
    branch_result = run_command(
        ["git", "rev-parse", "--abbrev-ref", "HEAD"],
        cwd=str(repo_root),
        capture_output=True,
        text=True,
        check=False,
    )
    original_branch = branch_result.stdout.strip() if branch_result.returncode == 0 else None
    # Save current HEAD to restore later
    head_result = run_command(
        ["git", "rev-parse", "HEAD"],
        cwd=str(repo_root),
        capture_output=True,
        text=True,
        check=False,
    )
    if head_result.returncode != 0:
        return ValidationResult(
            check_type=CheckType.REPRODUCIBLE_BUILD,
            passed=False,
            message="Failed to get current HEAD",
            duration_seconds=time.time() - start_time,
        )
    original_head = head_result.stdout.strip()
    # Determine what to display and restore to.
    # ("HEAD" from rev-parse --abbrev-ref means we are in detached-HEAD state.)
    if original_branch and original_branch != "HEAD":
        original_ref = original_branch
        original_display = f"branch '{original_branch}'"
    else:
        original_ref = original_head
        original_display = f"commit {original_head[:12]}"
    # Warn user about branch switch
    console_print(
        f"[yellow]WARNING: This check will temporarily switch from {original_display} "
        f"to tag '{tag}' and should automatically return afterwards.[/yellow]"
    )
    console_print(f"Checking out tag: {tag}")
    checkout_result = run_command(
        ["git", "checkout", tag],
        cwd=str(repo_root),
        check=False,
    )
    if checkout_result.returncode != 0:
        return ValidationResult(
            check_type=CheckType.REPRODUCIBLE_BUILD,
            passed=False,
            message=f"Failed to checkout tag {tag}",
            details=["Hint: Make sure the tag exists. Run 'git fetch --tags' to update."],
            duration_seconds=time.time() - start_time,
        )
    # Initialize result variables
    differences = []
    verified_count = 0
    missing_from_svn = []
    build_failed = False
    try:
        # Clean dist directory (as per manual release process: rm -rf dist/*)
        dist_dir = repo_root / "dist"
        if dist_dir.exists():
            console_print("Cleaning dist directory...")
            shutil.rmtree(dist_dir)
        # NOTE: git clean commented out - it removes .venv and other important files
        # The Docker-based build should handle this in isolation anyway
        # console_print("Cleaning untracked files (git clean -fdx)...")
        # run_command(
        #     ["git", "clean", "-fdx"],
        #     cwd=str(repo_root),
        #     check=False,
        # )
        # Build packages using breeze from the checked-out tag
        console_print("Building packages from source...")
        if not self.build_packages():
            build_failed = True
        else:
            # Compare built packages with SVN
            dist_dir = repo_root / "dist"
            for pattern in ["*.tar.gz", "*.whl"]:
                for built_file in dist_dir.glob(pattern):
                    # Task SDK artifacts live in a separate SVN directory.
                    svn_dir = (
                        self.get_task_sdk_svn_directory()
                        if "task_sdk" in built_file.name
                        else self.get_svn_directory()
                    )
                    svn_file = svn_dir / built_file.name
                    if svn_file.exists():
                        console_print(f"Verifying {built_file.name}...", end=" ")
                        # Default to binary comparison
                        if filecmp.cmp(built_file, svn_file, shallow=False):
                            verified_count += 1
                            console_print("[green]OK[/green]")
                        else:
                            # Compare archive contents
                            matches, diff_details = self._compare_archives(built_file, svn_file)
                            if matches:
                                verified_count += 1
                                console_print("[green]OK (content match)[/green]")
                            else:
                                differences.append(built_file.name)
                                console_print("[red]MISMATCH[/red]")
                                for detail in diff_details[:10]:
                                    console_print(f" {detail}")
                                if len(diff_details) > 10:
                                    console_print(f" ... and {len(diff_details) - 10} more differences")
                    else:
                        missing_from_svn.append(built_file.name)
                        console_print(
                            f"[yellow]Note: {built_file.name} not in SVN (may be expected)[/yellow]"
                        )
    finally:
        # Always restore original branch/HEAD, regardless of success or failure
        console_print(f"Restoring to {original_display}...")
        restore_result = run_command(
            ["git", "checkout", original_ref],
            cwd=str(repo_root),
            check=False,
        )
        if restore_result.returncode == 0:
            console_print(f"[green]Successfully restored to {original_display}[/green]")
        else:
            console_print(
                f"[red]WARNING: Failed to restore to {original_display}. "
                f"Please manually run: git checkout {original_ref}[/red]"
            )
    # Return result after restoring HEAD
    if build_failed:
        result = ValidationResult(
            check_type=CheckType.REPRODUCIBLE_BUILD,
            passed=False,
            message="Failed to build packages",
            duration_seconds=time.time() - start_time,
        )
        self._print_result(result)
        return result
    if not differences:
        message = f"All {verified_count} packages are identical to SVN"
    else:
        message = f"{len(differences)} packages differ from SVN"
    details = None
    if differences:
        details = differences[:]
    if missing_from_svn and self.verbose:
        details = details or []
        details.append(f"Note: {len(missing_from_svn)} built packages not in SVN (may be expected)")
    result = ValidationResult(
        check_type=CheckType.REPRODUCIBLE_BUILD,
        passed=not differences,
        message=message,
        details=details,
        duration_seconds=time.time() - start_time,
    )
    self._print_result(result)
    return result
def validate_licenses(self):
    """Run Apache RAT license check on source tarball.

    Downloads (and checksum-verifies) the Apache RAT jar, extracts the
    release source tarball to a scratch directory, runs RAT over it, and
    parses the report for unapproved/unknown licenses. The scratch
    directory (~500MB) is always removed, even if RAT or the report
    parsing raises.
    """
    console_print("\n[bold]Apache RAT License Verification[/bold]")
    start_time = time.time()
    source_dir = Path("/tmp/apache-airflow-src")
    # Download Apache RAT with checksum verification
    rat_jar = self._download_apache_rat()
    if not rat_jar:
        return ValidationResult(
            check_type=CheckType.LICENSES,
            passed=False,
            message="Failed to download or verify Apache RAT",
            duration_seconds=time.time() - start_time,
        )
    source_tarball = self.get_svn_directory() / f"apache_airflow-{self.version_without_rc}-source.tar.gz"
    if not source_tarball.exists():
        return ValidationResult(
            check_type=CheckType.LICENSES,
            passed=False,
            message=f"Source tarball not found: {source_tarball}",
            duration_seconds=time.time() - start_time,
        )
    console_print(f"Extracting source to {source_dir}...")
    if source_dir.exists():
        shutil.rmtree(source_dir)
    source_dir.mkdir(parents=True)
    try:
        with tarfile.open(source_tarball, "r:gz") as tar:
            for member in tar.getmembers():
                # Drop the leading "apache-airflow-<version>/" path component.
                member.name = "/".join(member.name.split("/")[1:])
                if member.name:
                    tar.extract(member, source_dir, filter="data")
        rat_excludes = source_dir / ".rat-excludes"
        console_print("Running Apache RAT...")
        # Renamed from `result` - it used to be shadowed by the ValidationResult below.
        rat_result = run_command(
            [
                "java",
                "-jar",
                str(rat_jar),
                "--input-exclude-file",
                str(rat_excludes) if rat_excludes.exists() else "/dev/null",
                str(source_dir),
            ],
            check=False,
            capture_output=True,
            text=True,
        )
        # Lines starting with "!" mark files with license problems.
        error_lines = [line.strip() for line in rat_result.stdout.split("\n") if line.strip().startswith("!")]
        unapproved = unknown = 0
        for line in rat_result.stdout.split("\n"):
            if "Unapproved:" in line:
                try:
                    unapproved = int(line.split("Unapproved:")[1].split()[0])
                except (IndexError, ValueError):
                    pass
            if "Unknown:" in line:
                try:
                    unknown = int(line.split("Unknown:")[1].split()[0])
                except (IndexError, ValueError):
                    pass
        details = []
        if error_lines:
            details.append(f"Found {len(error_lines)} license issues:")
            details.extend(error_lines[:10])
            if len(error_lines) > 10:
                details.append(f"... and {len(error_lines) - 10} more")
        if unapproved > 0:
            details.append(f"Unapproved licenses: {unapproved}")
        if unknown > 0:
            details.append(f"Unknown licenses: {unknown}")
        # Show verbose RAT output if requested (stop at the third separator line)
        if self.verbose:
            separator_count = 0
            for line in rat_result.stdout.splitlines():
                if line.strip().startswith("**********"):
                    separator_count += 1
                    if separator_count >= 3:
                        break
                console_print(line)
    finally:
        # Clean up extracted source directory (~500MB). Previously this was
        # skipped when RAT or parsing raised, leaving large temp trees behind.
        if source_dir.exists():
            shutil.rmtree(source_dir)
    passed = not error_lines and unapproved == 0 and unknown == 0
    message = (
        "All files have approved licenses"
        if passed
        else f"Found {len(error_lines)} issues, {unapproved} unapproved, {unknown} unknown"
    )
    result = ValidationResult(
        check_type=CheckType.LICENSES,
        passed=passed,
        message=message,
        details=details or None,
        duration_seconds=time.time() - start_time,
    )
    self._print_result(result)
    return result
def _run_build_command(self, cmd: list[str], failure_message: str) -> bool:
    """Run one build command; on failure print the tail of its output and return False."""
    result = run_command(
        cmd,
        cwd=str(self.airflow_repo_root),
        check=False,
        capture_output=True,
        text=True,
    )
    if result.returncode != 0:
        console_print(f"[red]{failure_message}[/red]")
        if result.stdout:
            console_print(f"[yellow]STDOUT:[/yellow]\n{result.stdout[-2000:]}")
        if result.stderr:
            console_print(f"[yellow]STDERR:[/yellow]\n{result.stderr[-2000:]}")
        return False
    return True

def build_packages(self) -> bool:
    """Build Airflow distributions and source tarball.

    Runs three breeze build steps (Airflow distributions, Task SDK
    distributions, source tarball) and stops at the first failure.
    :return: True when all three steps succeeded.
    """
    console_print("Building Airflow distributions...")
    # Use breeze from the current checkout
    base_cmd = ["breeze"]
    if not self._run_build_command(
        base_cmd
        + [
            "release-management",
            "prepare-airflow-distributions",
            "--distribution-format",
            "both",
        ],
        "Failed to build Airflow distributions",
    ):
        return False
    console_print("Building Task SDK distributions...")
    if not self._run_build_command(
        base_cmd
        + [
            "release-management",
            "prepare-task-sdk-distributions",
            "--distribution-format",
            "both",
        ],
        "Failed to build Task SDK distributions",
    ):
        return False
    console_print("Building source tarball...")
    cmd = base_cmd + [
        "release-management",
        "prepare-tarball",
        "--tarball-type",
        "apache_airflow",
        "--version",
        self.version_without_rc,
    ]
    # RC suffix (if any) is passed separately from the stripped version.
    if version_suffix := self._get_version_suffix():
        cmd.extend(["--version-suffix", version_suffix])
    if not self._run_build_command(cmd, "Failed to build source tarball"):
        return False
    console_print("[green]All packages built successfully[/green]")
    return True
| {
"repo_id": "apache/airflow",
"file_path": "dev/breeze/src/airflow_breeze/utils/airflow_release_validator.py",
"license": "Apache License 2.0",
"lines": 524,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import re
import shutil
import subprocess
import tempfile
import time
from abc import ABC, abstractmethod
from collections.abc import Callable
from dataclasses import dataclass
from enum import Enum
from pathlib import Path
from airflow_breeze.utils.console import console_print
from airflow_breeze.utils.run_utils import run_command
class CheckType(str, Enum):
    """Identifier for each release-verification check.

    Inherits from ``str`` so members compare and serialize as plain strings.
    """

    SVN = "svn"
    REPRODUCIBLE_BUILD = "reproducible-build"
    SIGNATURES = "signatures"
    CHECKSUMS = "checksums"
    LICENSES = "licenses"
@dataclass
class ValidationResult:
    """Outcome of a single verification check."""

    # Which check produced this result.
    check_type: CheckType
    # True when the check succeeded.
    passed: bool
    # One-line human-readable summary.
    message: str
    # Optional extra lines (missing files, hints, ...).
    details: list[str] | None = None
    # Wall-clock duration of the check, if measured.
    duration_seconds: float | None = None
class ReleaseValidator(ABC):
    """Base class for release validators with common functionality for PMC verification."""

    # Apache RAT (Release Audit Tool) binary distribution and its checksum.
    APACHE_RAT_JAR_DOWNLOAD_URL = (
        "https://downloads.apache.org/creadur/apache-rat-0.17/apache-rat-0.17-bin.tar.gz"
    )
    APACHE_RAT_JAR_SHA512_DOWNLOAD_URL = (
        "https://downloads.apache.org/creadur/apache-rat-0.17/apache-rat-0.17-bin.tar.gz.sha512"
    )
    # Official ASF KEYS file with the release managers' public GPG keys.
    GPG_KEYS_URL = "https://dist.apache.org/repos/dist/release/airflow/KEYS"

    def __init__(
        self,
        version: str,
        svn_path: Path,
        airflow_repo_root: Path,
        download_gpg_keys: bool = False,
        update_svn: bool = True,
        verbose: bool = False,
    ):
        """Store validation configuration.

        :param version: release-candidate version being verified.
        :param svn_path: local path of the dist SVN checkout.
        :param airflow_repo_root: root of the local Airflow git repository.
        :param download_gpg_keys: fetch and import the ASF KEYS file first.
        :param update_svn: run ``svn update`` before validating.
        :param verbose: emit extra per-file diagnostics.
        """
        self.version = version
        self.svn_path = svn_path
        self.airflow_repo_root = airflow_repo_root
        self.download_gpg_keys = download_gpg_keys
        self.update_svn = update_svn
        self.verbose = verbose
        # Accumulates one ValidationResult per executed check.
        self.results: list[ValidationResult] = []

    @abstractmethod
    def get_distribution_name(self) -> str:
        """Return the human-readable distribution name for console output."""
        pass

    @abstractmethod
    def get_svn_directory(self) -> Path:
        """Return the primary SVN directory holding the RC artifacts."""
        pass

    @abstractmethod
    def get_expected_files(self) -> list[str]:
        """Return every file name expected in the primary SVN directory."""
        pass

    @abstractmethod
    def build_packages(self, source_dir: Path | None = None) -> bool:
        """Build the distribution packages; return True on success."""
        pass

    @abstractmethod
    def validate_svn_files(self) -> ValidationResult:
        """Check that all expected artifacts are present in SVN."""
        pass

    @abstractmethod
    def validate_reproducible_build(self) -> ValidationResult:
        """Rebuild from source and compare with the SVN artifacts."""
        pass

    @abstractmethod
    def validate_licenses(self) -> ValidationResult:
        """Run the license (Apache RAT) verification."""
        pass

    def get_svn_directories(self) -> list[Path]:
        """Return list of SVN directories to validate. Override for multi-directory validation."""
        return [self.get_svn_directory()]
def validate_signatures(self) -> ValidationResult:
    """Verify GPG signatures for all .asc files."""
    console_print("\n[bold]GPG Signature Verification[/bold]")
    started = time.time()
    # Collect signature files from every existing SVN directory.
    signature_files = [
        asc
        for directory in self.get_svn_directories()
        if directory.exists()
        for asc in directory.glob("*.asc")
    ]
    if not signature_files:
        return ValidationResult(
            check_type=CheckType.SIGNATURES,
            passed=False,
            message="No .asc files found",
            duration_seconds=time.time() - started,
        )
    failures = []
    for signature in signature_files:
        verify = run_command(
            ["gpg", "--verify", str(signature)], check=False, capture_output=True, text=True
        )
        if verify.returncode != 0:
            failures.append(signature.name)
        elif self.verbose:
            # Extract signer from GPG output
            found = re.search(r"Good signature from \"(.*)\"", verify.stderr)
            signer = found.group(1) if found else "Unknown"
            console_print(f" {signature.name}: Valid signature from {signer}")
    if failures:
        message = f"{len(failures)} of {len(signature_files)} signatures failed"
        details = failures[:]
        details.append(
            "Hint: If signatures failed due to missing keys, try running with --download-gpg-keys"
        )
        details.append(f"or download manually from {self.GPG_KEYS_URL}")
    else:
        message = f"All {len(signature_files)} signatures verified"
        details = None
    outcome = ValidationResult(
        check_type=CheckType.SIGNATURES,
        passed=not failures,
        message=message,
        details=details,
        duration_seconds=time.time() - started,
    )
    self._print_result(outcome)
    return outcome
def validate_checksums(self) -> ValidationResult:
    """Verify SHA512 checksums for all .sha512 files.

    For every ``*.sha512`` file found in the SVN directories, recomputes
    the digest of the matching artifact and compares it with the recorded
    value.

    NOTE(review): relies on the external ``shasum`` tool being on PATH
    (standard on macOS); some minimal Linux images ship only
    ``sha512sum`` - confirm the supported environments.
    """
    console_print("\n[bold]SHA512 Checksum Verification[/bold]")
    start_time = time.time()
    sha512_files: list[Path] = []
    for svn_dir in self.get_svn_directories():
        if svn_dir.exists():
            sha512_files.extend(svn_dir.glob("*.sha512"))
    if not sha512_files:
        return ValidationResult(
            check_type=CheckType.CHECKSUMS,
            passed=False,
            message="No .sha512 files found",
            duration_seconds=time.time() - start_time,
        )
    failed = []
    for sha_file in sha512_files:
        # First whitespace-separated token of the file is the recorded digest.
        expected = sha_file.read_text().split()[0]
        # "<artifact>.sha512" covers "<artifact>".
        target_file = sha_file.parent / sha_file.name.replace(".sha512", "")
        if not target_file.exists():
            failed.append(f"{sha_file.name} (target file missing)")
            continue
        result = run_command(
            ["shasum", "-a", "512", str(target_file)], check=False, capture_output=True, text=True
        )
        if result.returncode != 0 or result.stdout.split()[0] != expected:
            failed.append(sha_file.name)
        elif self.verbose:
            console_print(f" {sha_file.name}: OK")
    message = (
        f"All {len(sha512_files)} checksums valid"
        if not failed
        else f"{len(failed)} of {len(sha512_files)} checksums failed"
    )
    result = ValidationResult(
        check_type=CheckType.CHECKSUMS,
        passed=not failed,
        message=message,
        details=failed or None,
        duration_seconds=time.time() - start_time,
    )
    self._print_result(result)
    return result
@property
def check_methods(self) -> dict[CheckType, Callable]:
    """Map each check type to the bound method implementing it."""
    return {
        CheckType.SVN: self.validate_svn_files,
        CheckType.REPRODUCIBLE_BUILD: self.validate_reproducible_build,
        CheckType.SIGNATURES: self.validate_signatures,
        CheckType.CHECKSUMS: self.validate_checksums,
        CheckType.LICENSES: self.validate_licenses,
    }

@property
def all_check_types(self) -> list[CheckType]:
    """Return all available check types in order.

    Order matches README_RELEASE_AIRFLOW.md section order for PMC verification:

    1. Reproducible build - Build from source and compare with SVN artifacts
    2. SVN - Verify expected files exist in SVN
    3. Licenses - Apache RAT license verification
    4. Signatures - GPG signature verification
    5. Checksums - SHA512 checksum verification

    Note: Tests are independent and can run in any order.
    """
    return [
        CheckType.REPRODUCIBLE_BUILD,
        CheckType.SVN,
        CheckType.LICENSES,
        CheckType.SIGNATURES,
        CheckType.CHECKSUMS,
    ]
def _get_prerequisites_for_checks(self, checks: list[CheckType]) -> dict[str, list[CheckType]]:
    """Return mapping of prerequisite -> list of checks that require it."""
    # Define which checks require which prerequisites
    prereq_map = {
        "java": [CheckType.LICENSES],  # Apache RAT requires Java
        "gpg": [CheckType.SIGNATURES],  # GPG signature verification
        "svn": list(CheckType),  # All checks need SVN files
        "docker": [CheckType.REPRODUCIBLE_BUILD],  # Docker builds
        "hatch": [CheckType.REPRODUCIBLE_BUILD],  # Package builds
        "clean_git": [CheckType.REPRODUCIBLE_BUILD],  # No uncommitted changes
    }
    # Keep only prerequisites that at least one of the selected checks needs.
    return {
        prereq: matching
        for prereq, required_by in prereq_map.items()
        if (matching := [check for check in checks if check in required_by])
    }
def validate_prerequisites(self, checks: list[CheckType] | None = None) -> bool:
    """Verify prerequisites based on which checks will be run.

    Only probes the tools actually needed by the selected ``checks``
    (e.g. Java only when the licenses check runs). Optionally imports
    GPG keys and refreshes the SVN checkout, then confirms release
    artifacts are present before any check starts.

    :param checks: checks about to be run; defaults to all check types.
    :return: True when every required prerequisite is satisfied.
    """
    if checks is None:
        checks = self.all_check_types
    console_print("\n[bold]Prerequisites Verification[/bold]")
    failed: list[str] = []
    # NOTE(review): nothing currently appends to `warnings`; the reporting
    # loop below is kept for future use - confirm intent.
    warnings: list[str] = []
    needed_prereqs = self._get_prerequisites_for_checks(checks)
    # Check Java (required for Apache RAT / license checks)
    if "java" in needed_prereqs:
        java_path = shutil.which("java")
        if not java_path:
            failed.append("Java is not installed (required for Apache RAT)")
        elif self.verbose:
            console_print(f" [green]✓[/green] Java: {java_path}")
    # Check GPG (required for signature verification)
    if "gpg" in needed_prereqs:
        gpg_path = shutil.which("gpg")
        if not gpg_path:
            failed.append("GPG is not installed (required for signature verification)")
        elif self.verbose:
            console_print(f" [green]✓[/green] GPG: {gpg_path}")
    # Check SVN (required for release verification)
    if "svn" in needed_prereqs:
        svn_path = shutil.which("svn")
        if not svn_path:
            failed.append("SVN is not installed (required for release verification)")
        elif self.verbose:
            console_print(f" [green]✓[/green] SVN: {svn_path}")
    # Check Docker (required for reproducible builds)
    if "docker" in needed_prereqs:
        docker_path = shutil.which("docker")
        if not docker_path:
            failed.append("Docker is not installed (required for reproducible builds)")
        else:
            # Check if Docker daemon is running
            result = run_command(
                ["docker", "info"],
                check=False,
                capture_output=True,
            )
            if result.returncode != 0:
                failed.append("Docker is installed but not running (start Docker daemon)")
            elif self.verbose:
                console_print(f" [green]✓[/green] Docker: {docker_path} (daemon running)")
    # Check hatch (required for local package builds)
    if "hatch" in needed_prereqs:
        hatch_path = shutil.which("hatch")
        if not hatch_path:
            failed.append(
                "hatch is not installed (required for reproducible builds, install with: uv tool install hatch)"
            )
        elif self.verbose:
            console_print(f" [green]✓[/green] hatch: {hatch_path}")
    # Check for clean git working directory (required for reproducible builds)
    if "clean_git" in needed_prereqs:
        if not self._check_clean_git_working_directory():
            failed.append(
                "Git working directory has uncommitted or staged changes "
                "(reproducible build requires clean checkout to switch tags)"
            )
        elif self.verbose:
            console_print(" [green]✓[/green] Git: working directory clean")
    # Optionally download GPG keys
    if self.download_gpg_keys:
        self._download_gpg_keys()
    if warnings:
        console_print("[yellow]Warnings:[/yellow]")
        for w in warnings:
            console_print(f" - {w}")
    if failed:
        console_print("[red]Prerequisites failed:[/red]")
        for f in failed:
            console_print(f" - {f}")
        console_print("[yellow]Please install missing prerequisites and try again.[/yellow]")
        return False
    # Optionally update SVN checkout
    if self.update_svn:
        if not self._update_svn():
            return False
    else:
        console_print("[yellow]SVN update skipped. The local revision might not be the latest.[/yellow]")
    # Check that release files exist in the SVN directory
    if not self._verify_release_files_exist():
        return False
    console_print("[green]All required prerequisites met[/green]")
    return True
def _download_gpg_keys(self) -> None:
    """Download GPG keys from ASF."""
    console_print("Downloading GPG keys from ASF...")
    with tempfile.NamedTemporaryFile() as keys_file:
        run_command(["wget", "-qO", keys_file.name, self.GPG_KEYS_URL], check=True)
        run_command(["gpg", "--import", keys_file.name], check=True, capture_output=True)
    console_print("[green]GPG keys downloaded and imported[/green]")

def _check_svn_locks(self, svn_dir: Path) -> bool:
    """Check if SVN working copy is locked."""
    # svn status shows 'L' in second column for locked items
    status = run_command(
        ["svn", "status", str(svn_dir)],
        check=False,
        capture_output=True,
        text=True,
    )
    # E155037 means a previous operation left the working copy locked.
    if "E155037" in status.stderr:
        return True
    # SVN status format: columns are [item status][props][lock][history][switched][info][conflict]
    # Lock is in column 3 (index 2), shown as 'L'
    return any(len(line) > 2 and line[2] == "L" for line in status.stdout.splitlines())
def _check_clean_git_working_directory(self) -> bool:
    """Check if git working directory is clean (no uncommitted or staged changes)."""
    status = run_command(
        ["git", "-C", str(self.airflow_repo_root), "status", "--porcelain"],
        check=False,
        capture_output=True,
        text=True,
    )
    # Clean means the command succeeded and produced no status lines.
    return status.returncode == 0 and not status.stdout.strip()
def _update_svn(self) -> bool:
    """Update SVN checkout to ensure we have the latest release files.

    Updates only the release directories returned by
    ``get_svn_directories()``, checking for stale working-copy locks
    first so that ``svn update`` cannot hang.
    """
    # Update only the specific directories needed, not the entire SVN tree
    svn_dirs = self.get_svn_directories()
    for svn_dir in svn_dirs:
        # Check for SVN locks before attempting update (prevents hanging)
        if self._check_svn_locks(svn_dir.parent):
            console_print(f"[red]SVN working copy is locked: {svn_dir.parent}[/red]")
            console_print(
                "\n[yellow]Hint: Run the following to release SVN locks:[/yellow]\n"
                f" svn cleanup {svn_dir.parent}\n"
                "\n[yellow]Or skip SVN update with --no-update-svn if files are already up to date.[/yellow]"
            )
            return False
        console_print(f"Updating SVN checkout: {svn_dir}...")
        result = run_command(
            ["svn", "update", "--set-depth=infinity", str(svn_dir)],
            check=False,
            capture_output=True,
            text=True,
        )
        if result.returncode != 0:
            console_print("[red]Failed to update SVN checkout[/red]")
            if result.stderr:
                console_print(f"[red]{result.stderr.strip()}[/red]")
            console_print(
                "[yellow]Hint: Make sure you have checked out the SVN repository:[/yellow]\n"
                " svn checkout --depth=immediates https://dist.apache.org/repos/dist asf-dist\n"
                " svn update --set-depth=infinity asf-dist/dev/airflow"
            )
            return False
    console_print("[green]SVN checkout updated[/green]")
    return True

def _verify_release_files_exist(self) -> bool:
    """Verify that the SVN directories contain release files."""
    for svn_dir in self.get_svn_directories():
        if not svn_dir.exists():
            console_print(f"[red]SVN directory does not exist: {svn_dir}[/red]")
            console_print(
                "[yellow]Hint: Make sure the version is correct and SVN is checked out.[/yellow]\n"
                " You may need to run with --update-svn to fetch the latest files."
            )
            return False
        # Check for release artifacts (.tar.gz or .whl files)
        release_files = list(svn_dir.glob("*.tar.gz")) + list(svn_dir.glob("*.whl"))
        if not release_files:
            console_print(f"[red]No release files found in: {svn_dir}[/red]")
            console_print(
                "[yellow]The directory exists but contains no release artifacts.\n"
                "This may happen if:\n"
                " - The release was already published and files were moved to the release folder\n"
                " - The SVN checkout is out of date\n"
                " - The version is incorrect\n\n"
                "Hint: Try running with --update-svn to fetch the latest files.[/yellow]"
            )
            return False
        if self.verbose:
            console_print(f" [green]✓[/green] Found {len(release_files)} release files in {svn_dir}")
    return True
def _download_apache_rat(self) -> Path | None:
    """Download and verify Apache RAT jar.

    Returns the path to the jar file, or None if download/verification failed.
    """
    rat_jar = Path("/tmp/apache-rat-0.17/apache-rat-0.17.jar")
    if rat_jar.exists():
        # Cached from a previous run - the tarball only needs extracting once.
        console_print("[green]Apache RAT already present[/green]")
        return rat_jar
    console_print("Downloading Apache RAT...")
    rat_tarball = Path("/tmp/apache-rat-0.17-bin.tar.gz")
    rat_sha512 = Path("/tmp/apache-rat-0.17-bin.tar.gz.sha512")
    # Download tarball
    wget_result = run_command(
        ["wget", "-qO", str(rat_tarball), self.APACHE_RAT_JAR_DOWNLOAD_URL],
        check=False,
        capture_output=True,
    )
    if wget_result.returncode != 0:
        console_print("[red]Failed to download Apache RAT[/red]")
        return None
    # Download and verify checksum
    console_print("Verifying Apache RAT Checksum...")
    sha_download = run_command(
        ["wget", "-qO", str(rat_sha512), self.APACHE_RAT_JAR_SHA512_DOWNLOAD_URL],
        check=False,
        capture_output=True,
    )
    if sha_download.returncode != 0:
        console_print("[red]Failed to download Apache RAT checksum[/red]")
        return None
    sha_result = run_command(["shasum", "-a", "512", str(rat_tarball)], capture_output=True, text=True)
    calculated_sha = sha_result.stdout.split()[0]
    expected_sha = rat_sha512.read_text().split()[0]
    if calculated_sha != expected_sha:
        console_print("[red]Apache RAT checksum verification failed![/red]")
        console_print(f" Expected: {expected_sha[:32]}...")
        console_print(f" Got: {calculated_sha[:32]}...")
        return None
    # Extract
    # NOTE(review): uses subprocess.run directly while the rest of the module
    # goes through run_command (and tarfile is already imported) - confirm
    # whether the inconsistency is intentional.
    subprocess.run(["tar", "-C", "/tmp", "-xzf", str(rat_tarball)], check=True)
    console_print("[green]Apache RAT downloaded and verified[/green]")
    return rat_jar
def validate(self, checks: list[CheckType] | None = None) -> bool:
"""Run validation checks. Override to add prerequisites."""
if checks is None:
checks = self.all_check_types
if not self.validate_prerequisites(checks):
return False
return self._run_checks(checks)
def _run_checks(self, checks: list[CheckType] | None = None) -> bool:
    """Execute the requested checks, record results, and print a summary.

    Returns True only when every executed check passed.
    """
    if checks is None:
        checks = self.all_check_types
    # Remember which checks actually ran so the summary can flag skipped ones.
    self.checks_run = checks
    console_print(f"\n[bold cyan]Validating {self.get_distribution_name()} {self.version}[/bold cyan]")
    console_print(f"SVN Path: {self.svn_path}")
    console_print(f"Airflow Root: {self.airflow_repo_root}")
    # Unknown check types are silently ignored (no method registered for them).
    for method in (self.check_methods[c] for c in checks if c in self.check_methods):
        self.results.append(method())
    self._print_summary()
    return all(result.passed for result in self.results)
def _print_result(self, result: ValidationResult):
    """Print one check result: status line, optional details, optional timing."""
    if result.passed:
        status = "[green]PASSED[/green]"
    else:
        status = "[red]FAILED[/red]"
    console_print(f"Status: {status} - {result.message}")
    for detail in result.details or []:
        console_print(f" {detail}")
    if result.duration_seconds:
        console_print(f"Duration: {result.duration_seconds:.1f}s")
def _print_summary(self):
    """Print the final pass/fail summary for all recorded check results.

    Distinguishes between a full run and a partial run (some check types
    skipped) so a release manager knows whether a +1 vote is justified.
    """
    console_print("\n" + "=" * 70)
    passed_count = sum(1 for r in self.results if r.passed)
    total_count = len(self.results)
    # Check if we ran all available checks; ``checks_run`` is set by
    # _run_checks — fall back to "everything ran" if it is absent.
    all_checks = set(self.all_check_types)
    checks_run = set(getattr(self, "checks_run", all_checks))
    skipped_checks = all_checks - checks_run
    if passed_count == total_count:
        console_print(f"[bold green]ALL CHECKS PASSED ({passed_count}/{total_count})[/bold green]")
        console_print("\nPassed checks:")
        for result in self.results:
            console_print(f"  - {result.check_type.value}: {result.message}")
        if skipped_checks:
            # Partial run: warn that a binding vote needs the rest verified.
            console_print(
                f"\n[yellow]Note: Only {total_count} of {len(all_checks)} checks were run.[/yellow]"
            )
            console_print("Skipped checks:")
            for check in sorted(skipped_checks, key=lambda c: c.value):
                console_print(f"  - {check.value}")
            console_print(
                "\n[yellow]You may vote +1 (binding) only if you have verified "
                "the skipped checks manually or by running them separately.[/yellow]"
            )
        else:
            console_print("\nYou can vote +1 (binding) on this release.")
    else:
        failed_count = total_count - passed_count
        console_print(
            f"[bold red]SOME CHECKS FAILED ({failed_count} failed, {passed_count} passed)[/bold red]"
        )
        console_print("\nFailed checks:")
        for result in self.results:
            if not result.passed:
                console_print(f"  - {result.check_type.value}: {result.message}")
        console_print("\nPlease review failures above before voting.")
    # duration_seconds may be None for checks that did not record timing.
    total_duration = sum(r.duration_seconds or 0 for r in self.results)
    console_print(f"\nTotal validation time: {total_duration:.1f}s")
    console_print("=" * 70)
def _strip_rc_suffix(self, version: str) -> str:
    """Return *version* with any trailing ``rcN`` release-candidate marker removed."""
    rc_marker = re.compile(r"rc\d+$")
    return rc_marker.sub("", version)
def _get_version_suffix(self) -> str:
    """Return the trailing ``rcN`` suffix of ``self.version``, or ``""`` if none.

    The regex anchors at end-of-string, so a missing suffix simply fails to
    match — no separate containment pre-check is needed.
    """
    match = re.search(r"(rc\d+)$", self.version)
    return match.group(1) if match else ""
| {
"repo_id": "apache/airflow",
"file_path": "dev/breeze/src/airflow_breeze/utils/release_validator.py",
"license": "Apache License 2.0",
"lines": 528,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:dev/breeze/src/airflow_breeze/utils/check_release_files.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import itertools
import re
from pathlib import Path
from airflow_breeze.utils.console import get_console
# Dockerfile templates used to smoke-test installation of released artifacts.
# NOTE: a trailing "\" inside a triple-quoted string is a *Python* line
# continuation — the newline is consumed, so the two source lines are joined
# into a single RUN command. A space must therefore precede each backslash.
PROVIDERS_DOCKER = """\
FROM ghcr.io/apache/airflow/main/ci/python3.10
RUN cd airflow-core; uv sync --no-sources
# Install providers
{}
"""
AIRFLOW_DOCKER = """\
FROM python:3.10
RUN pip install "apache-airflow=={}" \
--constraint "https://raw.githubusercontent.com/apache/airflow/constraints-{}/constraints-3.10.txt"
"""
# Fix: added the missing space before the backslash — previously the joined
# command read `"apache-airflow==X"--constraint ...`, a malformed pip call.
TASK_SDK_DOCKER = """\
FROM python:3.10
RUN pip install "apache-airflow-task-sdk=={}" "apache-airflow-core=={}" "apache-airflow=={}" \
--constraint "https://raw.githubusercontent.com/apache/airflow/constraints-{}/constraints-3.10.txt"
"""
AIRFLOW_CTL_DOCKER = """\
FROM python:3.10
# Install airflow-ctl
RUN pip install "apache-airflow-ctl=={}"
"""
PYTHON_CLIENT_DOCKER = """\
FROM python:3.10
# Install python-client
RUN pip install "apache-airflow-client=={}"
"""
def get_packages(packages_file: Path) -> list[tuple[str, str]]:
    """Read (name, version) package tuples from a packages.txt file.

    Each non-empty line is a PyPI project URL, e.g.
    ``https://pypi.org/project/apache-airflow-providers-airbyte/3.1.0rc1/``.

    :param packages_file: path to the packages.txt file.
    :raises SystemExit: when the file is missing or empty.
    """
    try:
        content = packages_file.read_text()
    except FileNotFoundError:
        raise SystemExit(f"List of packages to check is empty. Please add packages to `{packages_file}`")
    if not content:
        raise SystemExit(f"List of packages to check is empty. Please add packages to `{packages_file}`")
    # e.g. https://pypi.org/project/apache-airflow-providers-airbyte/3.1.0rc1/
    packages = []
    for raw_line in content.splitlines():
        # Fix: strip surrounding whitespace first — a trailing space used to
        # defeat rstrip("/") and shift the name/version split by one segment.
        line = raw_line.strip()
        if line:
            _, name, version = line.rstrip("/").rsplit("/", 2)
            packages.append((name, version))
    return packages
def create_docker(txt: str, output_file: Path, release_type: str):
    """Write a Dockerfile for smoke-testing an installation and print usage help.

    The printed `docker run` entrypoint depends on what kind of release is
    being verified.
    """
    output_file.write_text(txt)
    console = get_console()
    console.print("\n[bold]To check installation run:[/bold]")
    # Per-release-type run command; anything unknown falls back to `airflow info`.
    special_commands = {
        "python-client": (
            '--entrypoint "bash" local/airflow "-c" "python -c '
            "'import airflow_client.client; print(airflow_client.client.__version__)'\""
        ),
        "airflow-ctl": '--entrypoint "airflowctl" local/airflow --help',
    }
    command = special_commands.get(release_type, '--entrypoint "airflow" local/airflow info')
    console.print(
        f"""\
docker build -f {output_file} --tag local/airflow .
docker run --rm {command}
docker image rm local/airflow
"""
    )
def check_providers(files: list[str], release_date: str, packages: list[tuple[str, str]]) -> list[str]:
    """Return the provider release files that are expected but absent from *files*."""
    console = get_console()
    console.print("Checking providers from packages.txt:\n")
    # The combined providers source tarball is checked first.
    source_names = expand_name_variations(
        [f"apache_airflow_providers-{release_date}-source.tar.gz"]
    )
    missing: list[str] = list(check_all_files(expected_files=source_names, actual_files=files))
    # Then every individual provider distribution (sdist + wheel).
    for name, version_raw in packages:
        console.print(f"Checking {name} {version_raw}")
        version = strip_rc_suffix(version_raw)
        snake_name = name.replace("-", "_")
        expected = expand_name_variations(
            [
                f"{snake_name}-{version}.tar.gz",
                f"{snake_name}-{version}-py3-none-any.whl",
            ]
        )
        missing += check_all_files(expected_files=expected, actual_files=files)
    return missing
def strip_rc_suffix(version: str) -> str:
    """Return *version* without a trailing ``rcN`` release-candidate marker."""
    rc_marker = re.compile(r"rc\d+$")
    return rc_marker.sub("", version)
def print_status(file: str, is_found: bool):
    """Print a colored OK/MISSING status line for a single file check."""
    console = get_console()
    if is_found:
        console.print(f" - {file}: [green]OK[/green]")
    else:
        console.print(f" - {file}: [red]MISSING[/red]")
def check_all_files(actual_files: list[str], expected_files: list[str]) -> list[str]:
    """Report each expected file's presence in *actual_files*; return the missing ones."""
    missing = []
    for expected in expected_files:
        found = expected in actual_files
        if not found:
            missing.append(expected)
        print_status(file=expected, is_found=found)
    return missing
def check_airflow_release(files: list[str], version: str) -> list[str]:
    """Return the expected Airflow core release files missing from *files*."""
    console = get_console()
    console.print(f"Checking airflow release for version {version}:\n")
    bare_version = strip_rc_suffix(version)
    base_names = [
        f"apache_airflow-{bare_version}.tar.gz",
        f"apache_airflow-{bare_version}-source.tar.gz",
        f"apache_airflow-{bare_version}-py3-none-any.whl",
        f"apache_airflow_core-{bare_version}.tar.gz",
        f"apache_airflow_core-{bare_version}-py3-none-any.whl",
    ]
    return check_all_files(expected_files=expand_name_variations(base_names), actual_files=files)
def check_task_sdk_release(files: list[str], version: str) -> list[str]:
    """Return the expected task-sdk release files missing from *files*."""
    console = get_console()
    console.print(f"Checking task-sdk release for version {version}:\n")
    bare_version = strip_rc_suffix(version)
    base_names = [
        f"apache_airflow_task_sdk-{bare_version}.tar.gz",
        f"apache_airflow_task_sdk-{bare_version}-py3-none-any.whl",
    ]
    return check_all_files(expected_files=expand_name_variations(base_names), actual_files=files)
def expand_name_variations(files: list[str]) -> list[str]:
    """Expand each base file name with its signature (.asc) and checksum (.sha512) variants."""
    expanded = []
    for base in files:
        for suffix in ("", ".asc", ".sha512"):
            expanded.append(base + suffix)
    return sorted(expanded)
def check_airflow_ctl_release(files: list[str], version: str) -> list[str]:
    """Return the expected airflow-ctl release files missing from *files*."""
    console = get_console()
    console.print(f"Checking airflow-ctl release for version {version}:\n")
    bare_version = strip_rc_suffix(version)
    base_names = [
        f"apache_airflow_ctl-{bare_version}-source.tar.gz",
        f"apache_airflow_ctl-{bare_version}.tar.gz",
        f"apache_airflow_ctl-{bare_version}-py3-none-any.whl",
    ]
    return check_all_files(expected_files=expand_name_variations(base_names), actual_files=files)
def check_python_client_release(files: list[str], version: str) -> list[str]:
    """Return the expected python-client release files missing from *files*."""
    console = get_console()
    console.print(f"Checking python-client release for version {version}:\n")
    bare_version = strip_rc_suffix(version)
    base_names = [
        f"apache_airflow_python_client-{bare_version}-source.tar.gz",
        f"apache_airflow_client-{bare_version}.tar.gz",
        f"apache_airflow_client-{bare_version}-py3-none-any.whl",
    ]
    return check_all_files(expected_files=expand_name_variations(base_names), actual_files=files)
def warn_of_missing_files(files: list[str], directory: str):
    """Print a red warning listing every expected file not found in *directory*."""
    console = get_console()
    console.print(
        f"[red]Check failed. Here are the files we expected but did not find in {directory}:[/red]\n"
    )
    for missing_file in files:
        console.print(f" - [red]{missing_file}[/red]")
| {
"repo_id": "apache/airflow",
"file_path": "dev/breeze/src/airflow_breeze/utils/check_release_files.py",
"license": "Apache License 2.0",
"lines": 186,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:dev/breeze/tests/test_check_release_files.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from pathlib import Path
from airflow_breeze.utils.check_release_files import (
check_airflow_ctl_release,
check_airflow_release,
check_providers,
check_python_client_release,
check_task_sdk_release,
)
def test_check_airflow_release_pass():
    """Nothing is missing when every distribution plus .asc/.sha512 is present."""
    base_names = [
        "apache_airflow-2.8.1-py3-none-any.whl",
        "apache_airflow-2.8.1-source.tar.gz",
        "apache_airflow-2.8.1.tar.gz",
        "apache_airflow_core-2.8.1-py3-none-any.whl",
        "apache_airflow_core-2.8.1.tar.gz",
    ]
    files = [base + suffix for base in base_names for suffix in ("", ".asc", ".sha512")]
    assert check_airflow_release(files, version="2.8.1rc2") == []
def test_check_airflow_release_fail():
    """The two absent tarballs are reported as missing."""
    complete_bases = [
        "apache_airflow-2.8.1-py3-none-any.whl",
        "apache_airflow-2.8.1-source.tar.gz",
        "apache_airflow_core-2.8.1-py3-none-any.whl",
    ]
    files = [base + suffix for base in complete_bases for suffix in ("", ".asc", ".sha512")]
    # Tarballs themselves are absent — only their companions are present.
    files += [
        "apache_airflow-2.8.1.tar.gz.asc",
        "apache_airflow-2.8.1.tar.gz.sha512",
        "apache_airflow_core-2.8.1.tar.gz.asc",
        "apache_airflow_core-2.8.1.tar.gz.sha512",
    ]
    missing_files = check_airflow_release(files, version="2.8.1rc2")
    assert missing_files == ["apache_airflow-2.8.1.tar.gz", "apache_airflow_core-2.8.1.tar.gz"]
def test_check_providers_pass():
    """Passes if all provider files are present.

    Fix: the original wrote a packages.txt into ``tmp_path`` that
    ``check_providers`` never reads — the dead fixture and file write
    are removed.
    """
    packages = [
        ("apache-airflow-providers-airbyte", "3.1.0rc1"),
        ("apache-airflow-providers-foo-bar", "9.6.42rc2"),
    ]
    base_names = [
        "apache_airflow_providers-2024-01-01-source.tar.gz",
        "apache_airflow_providers_airbyte-3.1.0.tar.gz",
        "apache_airflow_providers_airbyte-3.1.0-py3-none-any.whl",
        "apache_airflow_providers_foo_bar-9.6.42.tar.gz",
        "apache_airflow_providers_foo_bar-9.6.42-py3-none-any.whl",
    ]
    files = [base + suffix for base in base_names for suffix in ("", ".asc", ".sha512")]
    assert check_providers(files, release_date="2024-01-01", packages=packages) == []
def test_check_providers_failure():
    """Fails if provider files are missing.

    Fix: removed the ``tmp_path`` fixture and the packages.txt write —
    ``check_providers`` receives the packages list directly and never
    read that file.
    """
    packages = [("apache-airflow-providers-spam-egg", "1.2.3rc4")]
    files = [
        "apache_airflow_providers-2024-02-01-source.tar.gz",
        "apache_airflow_providers-2024-02-01-source.tar.gz.asc",
        "apache_airflow_providers-2024-02-01-source.tar.gz.sha512",
        "apache_airflow_providers_spam_egg-1.2.3.tar.gz",
        "apache_airflow_providers_spam_egg-1.2.3.tar.gz.sha512",
        "apache_airflow_providers_spam_egg-1.2.3-py3-none-any.whl",
        "apache_airflow_providers_spam_egg-1.2.3-py3-none-any.whl.asc",
    ]
    assert sorted(check_providers(files, release_date="2024-02-01", packages=packages)) == [
        "apache_airflow_providers_spam_egg-1.2.3-py3-none-any.whl.sha512",
        "apache_airflow_providers_spam_egg-1.2.3.tar.gz.asc",
    ]
def test_check_task_sdk_release_pass():
    """No files reported missing when both task-sdk distributions are complete."""
    base_names = [
        "apache_airflow_task_sdk-1.0.0-py3-none-any.whl",
        "apache_airflow_task_sdk-1.0.0.tar.gz",
    ]
    files = [base + suffix for base in base_names for suffix in ("", ".asc", ".sha512")]
    assert check_task_sdk_release(files, version="1.0.0rc1") == []
def test_check_task_sdk_release_fail():
    """Missing .asc and .sha512 companions are reported."""
    files = [
        "apache_airflow_task_sdk-1.0.0.tar.gz",
        "apache_airflow_task_sdk-1.0.0.tar.gz.asc",
        "apache_airflow_task_sdk-1.0.0-py3-none-any.whl",
        "apache_airflow_task_sdk-1.0.0-py3-none-any.whl.sha512",
    ]
    expected_missing = [
        "apache_airflow_task_sdk-1.0.0-py3-none-any.whl.asc",
        "apache_airflow_task_sdk-1.0.0.tar.gz.sha512",
    ]
    assert sorted(check_task_sdk_release(files, version="1.0.0rc1")) == expected_missing
def test_check_airflow_ctl_release_pass():
    """No files reported missing for a complete airflow-ctl release."""
    base_names = [
        "apache_airflow_ctl-1.2.3-py3-none-any.whl",
        "apache_airflow_ctl-1.2.3-source.tar.gz",
        "apache_airflow_ctl-1.2.3.tar.gz",
    ]
    files = [base + suffix for base in base_names for suffix in ("", ".asc", ".sha512")]
    assert check_airflow_ctl_release(files, version="1.2.3rc2") == []
def test_check_airflow_ctl_release_fail():
    """Every absent airflow-ctl artifact is reported."""
    files = [
        "apache_airflow_ctl-1.2.3-py3-none-any.whl",
        "apache_airflow_ctl-1.2.3-py3-none-any.whl.asc",
        "apache_airflow_ctl-1.2.3-source.tar.gz.asc",
        "apache_airflow_ctl-1.2.3.tar.gz",
        "apache_airflow_ctl-1.2.3.tar.gz.sha512",
    ]
    expected_missing = [
        "apache_airflow_ctl-1.2.3-py3-none-any.whl.sha512",
        "apache_airflow_ctl-1.2.3-source.tar.gz",
        "apache_airflow_ctl-1.2.3-source.tar.gz.sha512",
        "apache_airflow_ctl-1.2.3.tar.gz.asc",
    ]
    assert sorted(check_airflow_ctl_release(files, version="1.2.3rc2")) == expected_missing
def test_check_python_client_release_pass():
    """No files reported missing for a complete python-client release."""
    base_names = [
        "apache_airflow_client-2.5.0-py3-none-any.whl",
        "apache_airflow_client-2.5.0.tar.gz",
        "apache_airflow_python_client-2.5.0-source.tar.gz",
    ]
    files = [base + suffix for base in base_names for suffix in ("", ".asc", ".sha512")]
    assert check_python_client_release(files, version="2.5.0rc3") == []
def test_check_python_client_release_fail():
    """Every absent python-client artifact is reported."""
    files = [
        "apache_airflow_client-2.5.0-py3-none-any.whl",
        "apache_airflow_client-2.5.0-py3-none-any.whl.sha512",
        "apache_airflow_client-2.5.0.tar.gz.asc",
        "apache_airflow_python_client-2.5.0-source.tar.gz",
        "apache_airflow_python_client-2.5.0-source.tar.gz.sha512",
    ]
    expected_missing = [
        "apache_airflow_client-2.5.0-py3-none-any.whl.asc",
        "apache_airflow_client-2.5.0.tar.gz",
        "apache_airflow_client-2.5.0.tar.gz.sha512",
        "apache_airflow_python_client-2.5.0-source.tar.gz.asc",
    ]
    assert sorted(check_python_client_release(files, version="2.5.0rc3")) == expected_missing
| {
"repo_id": "apache/airflow",
"file_path": "dev/breeze/tests/test_check_release_files.py",
"license": "Apache License 2.0",
"lines": 195,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/neo4j/src/airflow/providers/neo4j/sensors/neo4j.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from collections.abc import Callable, Sequence
from operator import itemgetter
from typing import TYPE_CHECKING, Any
from airflow.providers.common.compat.sdk import AirflowException, BaseSensorOperator
from airflow.providers.neo4j.hooks.neo4j import Neo4jHook
if TYPE_CHECKING:
from airflow.sdk import Context
class Neo4jSensor(BaseSensorOperator):
    """
    Poke a Neo4j database with a Cypher query until a condition is satisfied.

    On every poke the query is executed and one value is extracted from the
    first result row. Evaluation order:

    * no rows -> fail when ``fail_on_empty`` else keep poking;
    * ``failure(value)`` is True -> raise;
    * ``success`` given -> its boolean result decides;
    * otherwise the truthiness of the value decides.

    Example
    -------
    .. code-block:: python

        wait_person_exists = Neo4jSensor(
            task_id="wait_person_exists",
            neo4j_conn_id="neo4j_default",
            cypher="MATCH (p:Person) RETURN count(p) > 0",
        )

    :param neo4j_conn_id: Connection ID to use for connecting to Neo4j.
    :param cypher: Cypher statement to execute. (Templated)
    :param parameters: Query parameters. (Templated)
    :param success: Callable receiving the selected value, returning a boolean.
    :param failure: Callable receiving the selected value; True raises an error.
    :param selector: Extracts a single value from the first row of the result.
    :param fail_on_empty: When True, raises if the query returns no rows.
    """

    template_fields: Sequence[str] = ("cypher", "parameters")
    template_fields_renderers = {"cypher": "sql", "parameters": "json"}

    def __init__(
        self,
        *,
        neo4j_conn_id: str = "neo4j_default",
        cypher: str,
        parameters: dict[str, Any] | None = None,
        success: Callable[[Any], bool] | None = None,
        failure: Callable[[Any], bool] | None = None,
        selector: Callable[[tuple[Any, ...]], Any] = itemgetter(0),
        fail_on_empty: bool = False,
        **kwargs: Any,
    ) -> None:
        super().__init__(**kwargs)
        self.neo4j_conn_id = neo4j_conn_id
        self.cypher = cypher
        self.parameters = parameters
        self.success = success
        self.failure = failure
        self.selector = selector
        self.fail_on_empty = fail_on_empty

    @staticmethod
    def _row_to_tuple(record: Any) -> tuple[Any, ...]:
        """Normalize one result row (driver record, mapping or sequence) into a tuple."""
        if record is None:
            return ()
        # Neo4j driver records expose .values(); mappings do too — fall
        # through silently if calling it fails for an unexpected object.
        values_getter = getattr(record, "values", None)
        if values_getter is not None:
            try:
                return tuple(values_getter())
            except Exception:
                pass
        if isinstance(record, dict):
            return tuple(record.values())
        if isinstance(record, (list, tuple)):
            return tuple(record)
        return (record,)

    def poke(self, context: Context) -> bool:
        hook = Neo4jHook(conn_id=self.neo4j_conn_id)
        self.log.info("Executing Cypher: %s (parameters=%s)", self.cypher, self.parameters)
        records = hook.run(self.cypher, self.parameters)
        if not records:
            if self.fail_on_empty:
                raise AirflowException("No rows returned, raising as per parameter 'fail_on_empty=True'")
            return False
        row_values = self._row_to_tuple(records[0])
        if not callable(self.selector):
            raise AirflowException(f"Parameter 'selector' is not callable: {self.selector!r}")
        value = self.selector(row_values)
        # 'failure' always takes precedence over 'success'.
        if self.failure is not None:
            if not callable(self.failure):
                raise AirflowException(f"Parameter 'failure' is not callable: {self.failure!r}")
            if self.failure(value):
                raise AirflowException(f"Failure criteria met: failure({value!r}) returned True")
        if self.success is not None:
            if not callable(self.success):
                raise AirflowException(f"Parameter 'success' is not callable: {self.success!r}")
            return bool(self.success(value))
        return bool(value)
| {
"repo_id": "apache/airflow",
"file_path": "providers/neo4j/src/airflow/providers/neo4j/sensors/neo4j.py",
"license": "Apache License 2.0",
"lines": 106,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/neo4j/tests/system/neo4j/example_neo4j_sensor.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example use of Neo4j Sensor with a Neo4j Operator.
"""
from __future__ import annotations
import os
from datetime import datetime
from airflow import DAG
from airflow.providers.neo4j.operators.neo4j import Neo4jOperator
from airflow.providers.neo4j.sensors.neo4j import Neo4jSensor
# Environment id injected by the system-test harness; may be None when run locally.
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
DAG_ID = "example_neo4j_sensor"

with DAG(
    DAG_ID,
    start_date=datetime(2025, 1, 1),
    schedule=None,  # triggered manually / by the test harness only
    tags=["example"],
    catchup=False,
) as dag:
    # Sensor succeeds once the query's first value equals 1.
    # [START run_query_neo4j_sensor]
    run_query_neo4j_sensor = Neo4jSensor(
        task_id="run_query_neo4j_sensor",
        neo4j_conn_id="neo4j_default",
        cypher="RETURN 1 AS value",
        success=lambda x: x == 1,
        poke_interval=5,
        timeout=60,
    )
    # [END run_query_neo4j_sensor]

    # Creates a node once the sensor above has succeeded.
    run_query_neo4j_operator = Neo4jOperator(
        task_id="run_query_neo4j_operator",
        neo4j_conn_id="neo4j_default",
        parameters={"name": "Tom Hanks"},
        sql="CREATE (actor {name: $name})",
        dag=dag,
    )

    # Sensor gates the operator.
    run_query_neo4j_sensor >> run_query_neo4j_operator

from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/neo4j/tests/system/neo4j/example_neo4j_sensor.py",
"license": "Apache License 2.0",
"lines": 56,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/neo4j/tests/unit/neo4j/sensors/test_neo4j.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
import pytest
from airflow.models.dag import DAG
from airflow.providers.common.compat.sdk import AirflowException, timezone
from airflow.providers.neo4j.sensors.neo4j import Neo4jSensor
DEFAULT_DATE = timezone.datetime(2015, 1, 1)
TEST_DAG_ID = "unit_test_neo4j_dag"
class TestNeo4jSensor:
def setup_method(self):
    """Create a throwaway DAG that sensors under test can attach to."""
    default_args = {"owner": "airflow", "start_date": DEFAULT_DATE}
    self.dag = DAG(TEST_DAG_ID, schedule=None, default_args=default_args)
@mock.patch("airflow.providers.neo4j.sensors.neo4j.Neo4jHook")
def test_neo4j_sensor_smoke_test(self, hook_cls):
    """A truthy first value pokes True and the hook is wired with the conn id."""
    hook_instance = hook_cls.return_value
    hook_instance.run.return_value = [{"person_count": 50}]
    cypher = "MATCH (p:Person) RETURN COUNT(p) AS person_count"
    sensor = Neo4jSensor(task_id="neo4j_sensor_check", neo4j_conn_id="neo4j_default", cypher=cypher)
    assert sensor.poke(mock.MagicMock()) is True
    hook_cls.assert_called_once_with(conn_id="neo4j_default")
    hook_instance.run.assert_called_once_with(cypher, None)
@mock.patch("airflow.providers.neo4j.sensors.neo4j.Neo4jHook")
def test_neo4j_sensor_poke_empty_default(self, hook_cls):
    """An empty result set pokes False by default."""
    hook_cls.return_value.run.return_value = []
    sensor = Neo4jSensor(
        task_id="neo4j_sensor_check",
        neo4j_conn_id="neo4j_default",
        cypher="MATCH (n:NoSuchLabel) RETURN n.id AS id",
        parameters=None,
    )
    assert sensor.poke(mock.MagicMock()) is False
@mock.patch("airflow.providers.neo4j.sensors.neo4j.Neo4jHook")
def test_neo4j_sensor_poke_empty_false(self, hook_cls):
    """An empty result set pokes False when fail_on_empty is explicitly off."""
    hook_cls.return_value.run.return_value = []
    sensor = Neo4jSensor(
        task_id="neo4j_sensor_check",
        neo4j_conn_id="neo4j_default",
        cypher="MATCH (n:NoSuchLabel) RETURN n.id AS id",
        parameters=None,
        fail_on_empty=False,
    )
    assert sensor.poke(mock.MagicMock()) is False
@mock.patch("airflow.providers.neo4j.sensors.neo4j.Neo4jHook")
def test_neo4j_sensor_poke_empty_true(self, hook_cls):
    """An empty result set raises when fail_on_empty is on."""
    hook_cls.return_value.run.return_value = []
    sensor = Neo4jSensor(
        task_id="neo4j_sensor_check",
        neo4j_conn_id="neo4j_default",
        cypher="MATCH (n:NoSuchLabel) RETURN n.id AS id",
        parameters=None,
        fail_on_empty=True,
    )
    with pytest.raises(AirflowException):
        sensor.poke(mock.MagicMock())
@mock.patch("airflow.providers.neo4j.sensors.neo4j.Neo4jHook")
def test_neo4j_sensor_poke_default_true_value(self, hook_cls):
    """Without success/failure callables, a truthy value pokes True."""
    hook_cls.return_value.run.return_value = [{"value": 1}]
    sensor = Neo4jSensor(
        task_id="neo4j_sensor_check",
        neo4j_conn_id="neo4j_default",
        cypher="RETURN 1 AS value",
        parameters=None,
    )
    assert sensor.poke(mock.MagicMock()) is True
@mock.patch("airflow.providers.neo4j.sensors.neo4j.Neo4jHook")
def test_neo4j_sensor_poke_default_false_value(self, hook_cls):
    """Without success/failure callables, a falsy value pokes False."""
    hook_cls.return_value.run.return_value = [{"value": 0}]
    sensor = Neo4jSensor(
        task_id="neo4j_sensor_check",
        neo4j_conn_id="neo4j_default",
        cypher="RETURN 0 AS value",
        parameters=None,
    )
    assert sensor.poke(mock.MagicMock()) is False
@mock.patch("airflow.providers.neo4j.sensors.neo4j.Neo4jHook")
def test_neo4j_sensor_poke_failure_precedence(self, hook_cls):
    """The failure callable is evaluated first and wins over success."""
    hook_cls.return_value.run.return_value = [{"value": 10}]
    on_success = mock.MagicMock(side_effect=lambda v: v == 10)
    on_failure = mock.MagicMock(side_effect=lambda v: v == 10)
    sensor = Neo4jSensor(
        task_id="neo4j_sensor_check",
        neo4j_conn_id="neo4j_default",
        cypher="RETURN 10 AS value",
        success=on_success,
        failure=on_failure,
    )
    with pytest.raises(AirflowException):
        sensor.poke(mock.MagicMock())
    on_failure.assert_called_once()
    on_success.assert_not_called()
@mock.patch("airflow.providers.neo4j.sensors.neo4j.Neo4jHook")
def test_neo4j_sensor_poke_failure_non_callable(self, hook_cls):
    """A non-callable 'failure' parameter raises a descriptive error."""
    hook_cls.return_value.run.return_value = [{"value": 10}]
    sensor = Neo4jSensor(
        task_id="neo4j_sensor_check",
        neo4j_conn_id="neo4j_default",
        cypher="RETURN 10 AS value",
        failure="value = 10",
    )
    with pytest.raises(AirflowException) as ctx:
        sensor.poke(mock.MagicMock())
    assert str(ctx.value) == "Parameter 'failure' is not callable: 'value = 10'"
@mock.patch("airflow.providers.neo4j.sensors.neo4j.Neo4jHook")
def test_neo4j_sensor_poke_failure_default(self, hook_cls):
    """When failure does not trigger, truthiness of the value decides."""
    hook_cls.return_value.run.return_value = [{"value": 1}]
    sensor = Neo4jSensor(
        task_id="neo4j_sensor_check",
        neo4j_conn_id="neo4j_default",
        cypher="RETURN 1 AS value",
        failure=lambda x: x == 0,
    )
    assert sensor.poke(mock.MagicMock()) is True
@mock.patch("airflow.providers.neo4j.sensors.neo4j.Neo4jHook")
def test_neo4j_sensor_poke_success_true(self, mock_neo4j_hook):
mock_neo4j_hook_conn = mock_neo4j_hook.return_value
mock_neo4j_hook_conn.run.return_value = [{"value": 15}]
cypher = "RETURN 15 AS value"
sensor = Neo4jSensor(
task_id="neo4j_sensor_check",
neo4j_conn_id="neo4j_default",
cypher=cypher,
success=lambda x: x > 10,
)
assert sensor.poke(mock.MagicMock()) is True
@mock.patch("airflow.providers.neo4j.sensors.neo4j.Neo4jHook")
def test_neo4j_sensor_poke_success_false(self, mock_neo4j_hook):
mock_neo4j_hook_conn = mock_neo4j_hook.return_value
mock_neo4j_hook_conn.run.return_value = [{"value": 10}]
cypher = "RETURN 10 AS value"
sensor = Neo4jSensor(
task_id="neo4j_sensor_check",
neo4j_conn_id="neo4j_default",
cypher=cypher,
success=lambda x: x > 10,
)
assert sensor.poke(mock.MagicMock()) is False
@mock.patch("airflow.providers.neo4j.sensors.neo4j.Neo4jHook")
def test_neo4j_sensor_poke_success_non_callable(self, mock_neo4j_hook):
mock_neo4j_hook_conn = mock_neo4j_hook.return_value
mock_neo4j_hook_conn.run.return_value = [{"value": 10}]
cypher = "RETURN 10 AS value"
sensor = Neo4jSensor(
task_id="neo4j_sensor_check",
neo4j_conn_id="neo4j_default",
cypher=cypher,
success="value = 10",
)
with pytest.raises(AirflowException) as ctx:
sensor.poke(mock.MagicMock())
assert str(ctx.value) == "Parameter 'success' is not callable: 'value = 10'"
@mock.patch("airflow.providers.neo4j.sensors.neo4j.Neo4jHook")
def test_neo4j_sensor_poke_selector_default(self, mock_neo4j_hook):
mock_neo4j_hook_conn = mock_neo4j_hook.return_value
mock_neo4j_hook_conn.run.return_value = [{"first_name": "John", "last_name": "Doe"}]
cypher = "MATCH (n:Person{id:'John Doe'}) RETURN n.first_name AS first_name, n.last_name AS last_name"
sensor = Neo4jSensor(
task_id="neo4j_sensor_check",
neo4j_conn_id="neo4j_default",
cypher=cypher,
success=lambda x: x == "John",
)
assert sensor.poke(mock.MagicMock()) is True
@mock.patch("airflow.providers.neo4j.sensors.neo4j.Neo4jHook")
def test_neo4j_sensor_poke_selector_custom(self, mock_neo4j_hook):
mock_neo4j_hook_conn = mock_neo4j_hook.return_value
mock_neo4j_hook_conn.run.return_value = [{"first_name": "John", "last_name": "Doe"}]
cypher = "MATCH (n:Person{id:'John Doe'}) RETURN n.first_name AS first_name, n.last_name AS last_name"
sensor = Neo4jSensor(
task_id="neo4j_sensor_check",
neo4j_conn_id="neo4j_default",
cypher=cypher,
success=lambda x: x == "Doe",
selector=lambda x: x[1],
)
assert sensor.poke(mock.MagicMock()) is True
@mock.patch("airflow.providers.neo4j.sensors.neo4j.Neo4jHook")
def test_neo4j_sensor_poke_selector_non_callable(self, mock_neo4j_hook):
mock_neo4j_hook_conn = mock_neo4j_hook.return_value
mock_neo4j_hook_conn.run.return_value = [{"first_name": "John", "last_name": "Doe"}]
cypher = "MATCH (n:Person{id:'John Doe'}) RETURN n.first_name AS first_name, n.last_name AS last_name"
sensor = Neo4jSensor(
task_id="neo4j_sensor_check",
neo4j_conn_id="neo4j_default",
cypher=cypher,
success=lambda x: x == "John",
selector="first_name",
)
with pytest.raises(AirflowException) as ctx:
sensor.poke(mock.MagicMock())
assert str(ctx.value) == "Parameter 'selector' is not callable: 'first_name'"
@mock.patch("airflow.providers.neo4j.sensors.neo4j.Neo4jHook")
def test_neo4j_sensor_poke_templated_parameters(self, mock_neo4j_hook):
mock_neo4j_hook_conn = mock_neo4j_hook.return_value
mock_neo4j_hook_conn.run.return_value = [{"c": 100}]
cypher = "MATCH (n:$node_label) RETURN COUNT(n) as total_person"
sensor = Neo4jSensor(
task_id="neo4j_sensor_check",
neo4j_conn_id="neo4j_default",
cypher=cypher,
parameters={"node_label": "{{ target_node_label }}"},
success=lambda x: x == 100,
)
sensor.render_template_fields(context={"target_node_label": "Person"})
assert sensor.parameters == {"node_label": "Person"}
assert sensor.poke(context=mock.MagicMock()) is True
mock_neo4j_hook_conn.run.assert_called_once_with(
cypher,
{"node_label": "Person"},
)
| {
"repo_id": "apache/airflow",
"file_path": "providers/neo4j/tests/unit/neo4j/sensors/test_neo4j.py",
"license": "Apache License 2.0",
"lines": 238,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:task-sdk/src/airflow/sdk/yaml.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Use libyaml for YAML dump/load operations where possible.
If libyaml is available we will use it -- it is significantly faster.
This module delegates all other properties to the yaml module, so it can be used as:
.. code-block:: python
import airflow.sdk.yaml as yaml
And then be used directly in place of the normal python module.
"""
from __future__ import annotations
from typing import TYPE_CHECKING, Any, BinaryIO, TextIO, cast
if TYPE_CHECKING:
from yaml.error import MarkedYAMLError, YAMLError # noqa: F401
def safe_load(stream: bytes | str | BinaryIO | TextIO) -> Any:
    """Parse *stream* with ``yaml.safe_load`` semantics, preferring the C loader.

    The libyaml-backed ``CSafeLoader`` is used when PyYAML was built against
    libyaml; otherwise the pure-Python ``SafeLoader`` is used.
    """
    # Imported lazily so merely importing this module stays cheap.
    from yaml import load
    try:
        from yaml import CSafeLoader as loader_cls
    except ImportError:
        from yaml import SafeLoader as loader_cls  # type: ignore[assignment]
    return load(stream, loader_cls)
def dump(data: Any, **kwargs) -> str:
    """Serialize *data* like ``yaml.dump`` with the safe dumper, preferring libyaml.

    ``CSafeDumper`` is used when PyYAML was built against libyaml; otherwise
    the pure-Python ``SafeDumper`` is used. Keyword arguments are passed on.
    """
    # Imported lazily so merely importing this module stays cheap.
    from yaml import dump as yaml_dump
    try:
        from yaml import CSafeDumper as dumper_cls
    except ImportError:
        from yaml import SafeDumper as dumper_cls  # type: ignore[assignment]
    return cast("str", yaml_dump(data, Dumper=dumper_cls, **kwargs))
def __getattr__(name):
    """Delegate module attribute access to the ``yaml`` package.

    ``FullLoader`` is special-cased to prefer the faster libyaml-backed
    ``CFullLoader`` when available, falling back to the pure-Python
    ``FullLoader`` otherwise.
    """
    # Delegate anything else to the yaml module
    import yaml

    if name == "FullLoader":
        # Try to use CFullLoader by default.
        # BUG FIX: the previous code computed this getattr() but dropped the
        # result, so the pure-Python FullLoader was always returned.
        return getattr(yaml, "CFullLoader", yaml.FullLoader)
    return getattr(yaml, name)
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/yaml.py",
"license": "Apache License 2.0",
"lines": 53,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/discord/src/airflow/providers/discord/version_compat.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS COPIED MANUALLY IN OTHER PROVIDERS DELIBERATELY TO AVOID ADDING UNNECESSARY
# DEPENDENCIES BETWEEN PROVIDERS. IF YOU WANT TO ADD CONDITIONAL CODE IN YOUR PROVIDER THAT DEPENDS
# ON AIRFLOW VERSION, PLEASE COPY THIS FILE TO THE ROOT PACKAGE OF YOUR PROVIDER AND IMPORT
# THOSE CONSTANTS FROM IT RATHER THAN IMPORTING THEM FROM ANOTHER PROVIDER OR TEST CODE
#
from __future__ import annotations
def get_base_airflow_version_tuple() -> tuple[int, int, int]:
    """Return the installed Airflow version as a ``(major, minor, micro)`` tuple."""
    # Imported here so the module has no import-time dependency on airflow.
    from packaging.version import Version

    from airflow import __version__

    parsed = Version(__version__)
    return (parsed.major, parsed.minor, parsed.micro)
# True when running on Airflow 3.0.0 or newer.
AIRFLOW_V_3_0_PLUS: bool = get_base_airflow_version_tuple() >= (3, 0, 0)
# True when running on Airflow 3.1.0 or newer.
AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
# Public API of this compatibility shim.
__all__ = [
    "AIRFLOW_V_3_0_PLUS",
    "AIRFLOW_V_3_1_PLUS",
]
| {
"repo_id": "apache/airflow",
"file_path": "providers/discord/src/airflow/providers/discord/version_compat.py",
"license": "Apache License 2.0",
"lines": 34,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:shared/configuration/src/airflow_shared/configuration/exceptions.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Exceptions for configuration parsing."""
from __future__ import annotations
class AirflowConfigException(Exception):
    """
    Raise when there is a configuration problem.

    Raised for issues such as failed type conversions of option values and
    missing mandatory options.
    """
| {
"repo_id": "apache/airflow",
"file_path": "shared/configuration/src/airflow_shared/configuration/exceptions.py",
"license": "Apache License 2.0",
"lines": 21,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:shared/configuration/tests/configuration/test_parser.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Tests for shared AirflowConfigParser."""
from __future__ import annotations
import datetime
import json
import os
import re
import textwrap
from configparser import ConfigParser
from enum import Enum
from unittest.mock import patch
import pytest
from airflow_shared.configuration.exceptions import AirflowConfigException
from airflow_shared.configuration.parser import (
AirflowConfigParser as _SharedAirflowConfigParser,
configure_parser_from_configuration_description,
)
class AirflowConfigParser(_SharedAirflowConfigParser):
    """Test parser that extends the shared parser with a minimal description."""

    def __init__(self, default_config: str | None = None, *args, **kwargs):
        # One "test" section with two options, mirrored in the defaults parser.
        description = {
            "test": {
                "options": {
                    "key1": {"default": "default_value"},
                    "key2": {"default": 123},
                }
            }
        }
        defaults = ConfigParser()
        defaults.add_section("test")
        defaults.set("test", "key1", "default_value")
        defaults.set("test", "key2", "123")
        super().__init__(description, defaults, *args, **kwargs)
        self.configuration_description = description
        self._default_values = defaults
        self._suppress_future_warnings = False
        if default_config is not None:
            self._update_defaults_from_string(default_config)

    def _update_defaults_from_string(self, config_string: str):
        """Merge options parsed from *config_string* into the default values."""
        parsed = ConfigParser()
        parsed.read_string(config_string)
        for section_name in parsed.sections():
            if section_name not in self._default_values.sections():
                self._default_values.add_section(section_name)
            for option, value in parsed.items(section_name):
                self._default_values.set(section_name, option, value)
class TestAirflowConfigParser:
"""Test the shared AirflowConfigParser parser methods."""
def test_getboolean(self):
    """Test AirflowConfigParser.getboolean"""
    config = """
[type_validation]
key1 = non_bool_value
[true]
key2 = t
key3 = true
key4 = 1
[false]
key5 = f
key6 = false
key7 = 0
[inline-comment]
key8 = true #123
"""
    conf = AirflowConfigParser(default_config=config)
    # Non-boolean values raise with the offending section/key in the message.
    with pytest.raises(
        AirflowConfigException,
        match=re.escape(
            'Failed to convert value to bool. Please check "key1" key in "type_validation" section. '
            'Current value: "non_bool_value".'
        ),
    ):
        conf.getboolean("type_validation", "key1")
    assert isinstance(conf.getboolean("true", "key3"), bool)
    for truthy_key in ("key2", "key3", "key4"):
        assert conf.getboolean("true", truthy_key) is True
    for falsy_key in ("key5", "key6", "key7"):
        assert conf.getboolean("false", falsy_key) is False
    # The inline comment after the value must not break parsing.
    assert conf.getboolean("inline-comment", "key8") is True
def test_getint(self):
    """Test AirflowConfigParser.getint"""
    config = """
[invalid]
key1 = str
[valid]
key2 = 1
[float]
key3 = 4.096e+07
[decimal]
key4 = 12.34
"""
    conf = AirflowConfigParser(default_config=config)
    # Non-numeric values are rejected with a descriptive error.
    with pytest.raises(
        AirflowConfigException,
        match=re.escape(
            'Failed to convert value to int. Please check "key1" key in "invalid" section. '
            'Current value: "str".'
        ),
    ):
        conf.getint("invalid", "key1")
    plain = conf.getint("valid", "key2")
    assert isinstance(plain, int)
    assert plain == 1
    # Whole-number scientific notation converts losslessly to int.
    scientific = conf.getint("float", "key3")
    assert isinstance(scientific, int)
    assert scientific == 40960000
    # Fractional values must not be silently truncated.
    with pytest.raises(
        AirflowConfigException,
        match=re.escape(
            'Failed to convert value to int. Please check "key4" key in "decimal" section. '
            'Current value: "12.34".'
        ),
    ):
        conf.getint("decimal", "key4")
def test_getfloat(self):
    """Test AirflowConfigParser.getfloat"""
    config = """
[invalid]
key1 = str
[valid]
key2 = 1.23
"""
    conf = AirflowConfigParser(default_config=config)
    # Non-numeric values are rejected with a descriptive error.
    with pytest.raises(
        AirflowConfigException,
        match=re.escape(
            'Failed to convert value to float. Please check "key1" key in "invalid" section. '
            'Current value: "str".'
        ),
    ):
        conf.getfloat("invalid", "key1")
    value = conf.getfloat("valid", "key2")
    assert isinstance(value, float)
    assert value == 1.23
def test_getlist(self):
    """Test AirflowConfigParser.getlist"""
    config = """
[single]
key1 = str
empty =
[many]
key2 = one,two,three
[diffdelimiter]
key3 = one;two;three
"""
    conf = AirflowConfigParser(default_config=config)
    # Single item, empty value, and the default comma delimiter.
    assert conf.getlist("single", "key1") == ["str"]
    assert conf.getlist("single", "empty") == []
    assert conf.getlist("many", "key2") == ["one", "two", "three"]
    # A custom delimiter may be supplied.
    assert conf.getlist("diffdelimiter", "key3", delimiter=";") == ["one", "two", "three"]
    # Missing options honour the fallback verbatim.
    assert conf.getlist("empty", "key0", fallback=None) is None
    assert conf.getlist("empty", "key0", fallback=[]) == []
@pytest.mark.parametrize(
    ("config_str", "expected"),
    [
        pytest.param('{"a": 123}', {"a": 123}, id="dict"),
        pytest.param("[1,2,3]", [1, 2, 3], id="list"),
        pytest.param('"abc"', "abc", id="str"),
        pytest.param("2.1", 2.1, id="num"),
        pytest.param("", None, id="empty"),
    ],
)
def test_getjson(self, config_str, expected):
    """Test AirflowConfigParser.getjson"""
    conf = AirflowConfigParser()
    conf.read_string(
        textwrap.dedent(
            f"""
            [test]
            json = {config_str}
            """
        )
    )
    assert conf.getjson("test", "json") == expected
def test_getenum(self):
    """Test AirflowConfigParser.getenum"""

    class TestEnum(Enum):
        option1 = 1
        option2 = 2
        option3 = 3
        fallback = 4

    # BUG FIX: the third section header previously read "[test3]cmd".
    # configparser happens to ignore trailing text after the closing bracket,
    # so the test still passed, but the stray suffix was a typo.
    config = """
[test1]
option = option1
[test2]
option = option2
[test3]
option = option3
[test4]
option = option4
"""
    test_conf = AirflowConfigParser()
    test_conf.read_string(config)
    assert test_conf.getenum("test1", "option", TestEnum) == TestEnum.option1
    assert test_conf.getenum("test2", "option", TestEnum) == TestEnum.option2
    assert test_conf.getenum("test3", "option", TestEnum) == TestEnum.option3
    # An unknown member resolves to the named fallback when one is given.
    assert test_conf.getenum("test4", "option", TestEnum, fallback="fallback") == TestEnum.fallback
    # Without a fallback the error lists all valid member names.
    with pytest.raises(AirflowConfigException, match=re.escape("option1, option2, option3, fallback")):
        test_conf.getenum("test4", "option", TestEnum)
def test_getenumlist(self):
    """Test AirflowConfigParser.getenumlist"""

    class TestEnum(Enum):
        option1 = 1
        option2 = 2
        option3 = 3
        fallback = 4

    config = """
[test1]
option = option1,option2,option3
[test2]
option = option1,option3
[test3]
option = option1,option4
[test4]
option =
"""
    conf = AirflowConfigParser()
    conf.read_string(config)
    expected = {
        "test1": [TestEnum.option1, TestEnum.option2, TestEnum.option3],
        "test2": [TestEnum.option1, TestEnum.option3],
        # Unknown member names are dropped rather than raising.
        "test3": [TestEnum.option1],
        # An empty value yields an empty list.
        "test4": [],
    }
    for section, members in expected.items():
        assert conf.getenumlist(section, "option", TestEnum) == members
def test_getjson_empty_with_fallback(self):
    """Test AirflowConfigParser.getjson with empty value and fallback"""
    conf = AirflowConfigParser()
    conf.read_string(
        textwrap.dedent(
            """
            [test]
            json =
            """
        )
    )
    # An empty value yields the fallback when given, otherwise None.
    assert conf.getjson("test", "json", fallback={}) == {}
    assert conf.getjson("test", "json") is None
@pytest.mark.parametrize(
    ("fallback"),
    [
        pytest.param({"a": "b"}, id="dict"),
        # fallback is _NOT_ json parsed, but used verbatim
        pytest.param('{"a": "b"}', id="str"),
        pytest.param(None, id="None"),
    ],
)
def test_getjson_fallback(self, fallback):
    """Test AirflowConfigParser.getjson with fallback"""
    conf = AirflowConfigParser()
    # The option does not exist, so the fallback is returned as-is.
    assert conf.getjson("test", "json", fallback=fallback) == fallback
def test_has_option(self):
    """Test AirflowConfigParser.has_option"""
    conf = AirflowConfigParser()
    conf.read_string("""[test]
key1 = value1
""")
    assert conf.has_option("test", "key1")
    # Neither a missing key nor a missing section is reported as present.
    assert not conf.has_option("test", "key_not_exists")
    assert not conf.has_option("section_not_exists", "key1")
def test_remove_option(self):
    """Test AirflowConfigParser.remove_option"""
    overrides = """[test]
key1 = hello
key2 = airflow
"""
    defaults = """[test]
key1 = awesome
key2 = airflow
"""
    conf = AirflowConfigParser(default_config=defaults)
    conf.read_string(overrides)
    assert conf.get("test", "key1") == "hello"
    # Removing only the override exposes the default value again.
    conf.remove_option("test", "key1", remove_default=False)
    assert conf.get("test", "key1") == "awesome"
    # Removing with the default remove_default drops the option entirely.
    conf.remove_option("test", "key2")
    assert not conf.has_option("test", "key2")
def test_get_with_defaults(self):
    """Test AirflowConfigParser.get() with defaults"""
    conf = AirflowConfigParser(
        default_config="""[test]
key1 = default_value
"""
    )
    # No override was read, so get() resolves from the defaults.
    assert conf.get("test", "key1") == "default_value"
def test_get_mandatory_value(self):
    """Test AirflowConfigParser.get_mandatory_value()"""
    conf = AirflowConfigParser()
    conf.add_section("test")
    conf.set("test", "key1", "value1")
    assert conf.get_mandatory_value("test", "key1") == "value1"
    # A missing mandatory option must raise rather than return None.
    with pytest.raises(
        AirflowConfigException, match=re.escape("section/key [test/missing_key] not found in config")
    ):
        conf.get_mandatory_value("test", "missing_key")
def test_sensitive_config_values(self):
    """Test AirflowConfigParser.sensitive_config_values property"""
    test_conf = AirflowConfigParser()
    # Replace the description so the property is derived only from these options.
    test_conf.configuration_description = {
        "test": {
            "options": {
                "password": {"sensitive": True, "default": "secret"},
                "api_key": {"sensitive": True, "default": "key123"},
                "username": {"sensitive": False, "default": "user"},
                "normal_value": {"default": "value"},
            }
        },
        "database": {
            "options": {
                "connection": {"sensitive": True, "default": "sqlite://"},
            }
        },
    }
    # These attributes are cached on the instance; drop any cached values so
    # they are recomputed from the description assigned above.
    if "inversed_deprecated_options" in test_conf.__dict__:
        delattr(test_conf, "inversed_deprecated_options")
    if "sensitive_config_values" in test_conf.__dict__:
        delattr(test_conf, "sensitive_config_values")
    sensitive = test_conf.sensitive_config_values
    assert isinstance(sensitive, set)
    # Only options explicitly flagged "sensitive": True are included.
    assert ("test", "password") in sensitive
    assert ("test", "api_key") in sensitive
    assert ("database", "connection") in sensitive
    assert ("test", "username") not in sensitive
    assert ("test", "normal_value") not in sensitive
def test_deprecated_options(self):
    """Test AirflowConfigParser handles deprecated options"""

    class DeprecatedOptionParser(AirflowConfigParser):
        deprecated_options = {
            ("new_section", "new_key"): ("old_section", "old_key", "2.0.0"),
        }

        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.configuration_description = {}
            self._default_values = ConfigParser()
            self._suppress_future_warnings = False

    conf = DeprecatedOptionParser()
    conf.add_section("old_section")
    conf.set("old_section", "old_key", "old_value")
    # Lookup falls through to the deprecated location, with a warning.
    with pytest.warns(DeprecationWarning, match="old_key"):
        assert conf.get("new_section", "new_key", suppress_warnings=True) == "old_value"
    # With the deprecated fallback disabled, only the new location is consulted.
    with pytest.raises(AirflowConfigException):
        conf.get("new_section", "new_key", lookup_from_deprecated=False, suppress_warnings=True)
def test_deprecated_options_same_section(self):
    """Test deprecated options in the same section"""

    class DeprecatedOptionParser(AirflowConfigParser):
        deprecated_options = {
            ("test", "new_key"): ("test", "old_key", "2.0.0"),
        }

        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.configuration_description = {}
            self._default_values = ConfigParser()
            self._suppress_future_warnings = False

    conf = DeprecatedOptionParser()
    conf.add_section("test")
    conf.set("test", "old_key", "old_value")
    # The deprecated key in the same section is found and warned about.
    with pytest.warns(DeprecationWarning, match="old_key"):
        assert conf.get("test", "new_key", suppress_warnings=True) == "old_value"
def test_deprecated_options_lookup_disabled(self):
    """Test deprecated options with lookup_from_deprecated=False"""

    class DeprecatedOptionParser(AirflowConfigParser):
        deprecated_options = {
            ("new_section", "new_key"): ("old_section", "old_key", "2.0.0"),
        }

        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.configuration_description = {}
            self._default_values = ConfigParser()
            self._suppress_future_warnings = False

    conf = DeprecatedOptionParser()
    conf.add_section("old_section")
    conf.set("old_section", "old_key", "old_value")
    # Without the deprecated fallback the new option is simply missing.
    with pytest.raises(AirflowConfigException):
        conf.get("new_section", "new_key", lookup_from_deprecated=False)
def test_deprecated_options_precedence(self):
    """Test that new option takes precedence over deprecated option"""

    class DeprecatedOptionParser(AirflowConfigParser):
        deprecated_options = {
            ("test", "new_key"): ("test", "old_key", "2.0.0"),
        }

        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.configuration_description = {}
            self._default_values = ConfigParser()
            self._suppress_future_warnings = False

    conf = DeprecatedOptionParser()
    conf.add_section("test")
    conf.set("test", "old_key", "old_value")
    conf.set("test", "new_key", "new_value")
    # When both keys are set, the non-deprecated one wins.
    assert conf.get("test", "new_key") == "new_value"
@pytest.mark.parametrize(
    ("deprecated_options_dict", "kwargs", "new_section_expected_value", "old_section_expected_value"),
    [
        # Mapping points FROM the deprecated location TO its replacement.
        pytest.param(
            {("old_section", "old_key"): ("new_section", "new_key", "2.0.0")},
            {"fallback": None},
            None,
            "value",
            id="deprecated_in_different_section_lookup_enabled",
        ),
        pytest.param(
            {("old_section", "old_key"): ("new_section", "new_key", "2.0.0")},
            {"fallback": None, "lookup_from_deprecated": False},
            None,
            None,
            id="deprecated_in_different_section_lookup_disabled",
        ),
        pytest.param(
            {("new_section", "old_key"): ("new_section", "new_key", "2.0.0")},
            {"fallback": None},
            "value",
            None,
            id="deprecated_in_same_section_lookup_enabled",
        ),
        pytest.param(
            {("new_section", "old_key"): ("new_section", "new_key", "2.0.0")},
            {"fallback": None, "lookup_from_deprecated": False},
            None,
            None,
            id="deprecated_in_same_section_lookup_disabled",
        ),
    ],
)
def test_deprecated_options_with_lookup_from_deprecated(
    self, deprecated_options_dict, kwargs, new_section_expected_value, old_section_expected_value
):
    """Test deprecated options with lookup_from_deprecated parameter"""

    class TestParserWithDeprecated(AirflowConfigParser):
        # Copied so the subclass never mutates the parametrized dict.
        deprecated_options = deprecated_options_dict.copy()

        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.configuration_description = {}
            self._default_values = ConfigParser()
            self._suppress_future_warnings = False

    test_conf = TestParserWithDeprecated()
    test_conf.add_section("new_section")
    test_conf.set("new_section", "new_key", "value")
    # The same-section cases need no extra setup beyond new_section above.
    if ("new_section", "old_key") in deprecated_options_dict:
        pass
    # Only the cross-section cases need the old section to actually exist.
    if ("old_section", "old_key") in deprecated_options_dict:
        test_conf.add_section("old_section")
        test_conf.set("old_section", "old_key", "value")
    result = test_conf.get("new_section", "old_key", **kwargs)
    assert result == new_section_expected_value
    if old_section_expected_value is not None:
        result = test_conf.get("old_section", "old_key", **kwargs)
        assert result == old_section_expected_value
def test_deprecated_options_cmd(self):
    """Test deprecated options with _cmd suffix"""

    class DeprecatedOptionParser(AirflowConfigParser):
        deprecated_options = {
            ("test", "new_key"): ("test", "old_key", "2.0.0"),
        }

        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.configuration_description = {}
            self._default_values = ConfigParser()
            self._suppress_future_warnings = False
            # Mark the deprecated key sensitive so its _cmd variant is honoured.
            self.sensitive_config_values = {("test", "old_key")}

    conf = DeprecatedOptionParser()
    conf.add_section("test")
    conf.set("test", "old_key_cmd", 'echo -n "cmd_value"')
    # The command output of the deprecated _cmd option is used, with a warning.
    with pytest.warns(DeprecationWarning, match="old_key"):
        assert conf.get("test", "new_key", suppress_warnings=False) == "cmd_value"
def test_cmd_from_env_var(self):
    """_CMD environment variables are executed only for sensitive options."""
    config = textwrap.dedent(
        """\
        [testcmdenv]
        itsacommand=NOT OK
        notacommand=OK
        """
    )
    conf = AirflowConfigParser(default_config=config)
    conf.sensitive_config_values.add(("testcmdenv", "itsacommand"))
    with patch.dict(os.environ, {"AIRFLOW__TESTCMDENV__ITSACOMMAND_CMD": 'echo -n "OK"'}):
        # Sensitive option: the _CMD env var's command output wins.
        assert conf.get("testcmdenv", "itsacommand") == "OK"
        # Non-sensitive option: the _CMD mechanism does not apply.
        assert conf.get("testcmdenv", "notacommand") == "OK"
def test_cmd_from_config_file(self):
    """_cmd config options are executed only when the base option is sensitive."""
    config = textwrap.dedent(
        """\
        [test]
        sensitive_key_cmd=echo -n cmd_value
        non_sensitive_key=config_value
        non_sensitive_key_cmd=echo -n cmd_value
        """
    )
    conf = AirflowConfigParser()
    conf.read_string(config)
    conf.sensitive_config_values.add(("test", "sensitive_key"))
    # Sensitive: resolved by running the _cmd command.
    assert conf.get("test", "sensitive_key") == "cmd_value"
    # Non-sensitive: the plain value is returned and the _cmd entry ignored.
    assert conf.get("test", "non_sensitive_key") == "config_value"
def test_secret_from_config_file(self):
    """_secret config options consult the secrets backend for sensitive keys."""

    class SecretBackendParser(AirflowConfigParser):
        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.configuration_description = {}
            self._default_values = ConfigParser()
            self._suppress_future_warnings = False

        def _get_config_value_from_secret_backend(self, config_key: str) -> str | None:
            """Mock secrets backend - return a test value"""
            return "secret_value_from_backend"

    config = textwrap.dedent(
        """\
        [test]
        sensitive_key_secret=test/secret/path
        non_sensitive_key=config_value
        non_sensitive_key_secret=test/secret/path
        """
    )
    conf = SecretBackendParser()
    conf.read_string(config)
    conf.sensitive_config_values.add(("test", "sensitive_key"))
    # Sensitive: resolved through the (mocked) secrets backend.
    assert conf.get("test", "sensitive_key") == "secret_value_from_backend"
    # Non-sensitive: the plain value wins and the _secret entry is ignored.
    assert conf.get("test", "non_sensitive_key") == "config_value"
def test_secret_from_env_var(self):
    """_SECRET env vars consult the secrets backend only for sensitive keys."""

    class SecretBackendParser(AirflowConfigParser):
        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.configuration_description = {}
            self._default_values = ConfigParser()
            self._suppress_future_warnings = False

        def _get_config_value_from_secret_backend(self, config_key: str) -> str | None:
            """Mock secrets backend - return a test value"""
            return "secret_value_from_backend"

    config = textwrap.dedent(
        """\
        [test]
        sensitive_key=config_value
        non_sensitive_key=config_value
        """
    )
    conf = SecretBackendParser()
    conf.read_string(config)
    conf.sensitive_config_values.add(("test", "sensitive_key"))
    # Sensitive: the _SECRET env var routes through the secrets backend.
    with patch.dict(os.environ, {"AIRFLOW__TEST__SENSITIVE_KEY_SECRET": "test/secret/path"}):
        assert conf.get("test", "sensitive_key") == "secret_value_from_backend"
    # Non-sensitive: the _SECRET env var is ignored.
    with patch.dict(os.environ, {"AIRFLOW__TEST__NON_SENSITIVE_KEY_SECRET": "test/secret/path"}):
        assert conf.get("test", "non_sensitive_key") == "config_value"
def test_deprecated_sections(self):
    """Test deprecated sections"""

    class DeprecatedSectionParser(AirflowConfigParser):
        deprecated_sections = {
            "new_section": ("old_section", "2.1"),
        }

        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.configuration_description = {}
            self._default_values = ConfigParser()
            self._default_values.add_section("new_section")
            self._default_values.set("new_section", "val", "new")
            self._suppress_future_warnings = False

    conf = DeprecatedSectionParser()
    conf.add_section("old_section")
    conf.set("old_section", "val", "old_val")
    # Reading via the new section falls back to the old one with a warning.
    with pytest.warns(DeprecationWarning, match="old_section"):
        assert conf.get("new_section", "val", suppress_warnings=True) == "old_val"
    # Reading the old section directly warns that it has been renamed.
    with pytest.warns(FutureWarning, match="old_section"):
        assert conf.get("old_section", "val", suppress_warnings=True) == "old_val"
def test_gettimedelta(self):
    """Conversion of integer-second config values into datetime.timedelta."""
    test_config = """
[invalid]
# non-integer value
key1 = str
# fractional value
key2 = 300.99
# too large value for C int
key3 = 999999999999999
[valid]
# negative value
key4 = -1
# zero
key5 = 0
# positive value
key6 = 300
[default]
# Equals to None
key7 =
"""
    test_conf = AirflowConfigParser(default_config=test_config)
    # Non-integer and fractional values fail the int conversion step.
    with pytest.raises(
        AirflowConfigException,
        match=re.escape(
            'Failed to convert value to int. Please check "key1" key in "invalid" section. '
            'Current value: "str".'
        ),
    ):
        test_conf.gettimedelta("invalid", "key1")
    with pytest.raises(
        AirflowConfigException,
        match=re.escape(
            'Failed to convert value to int. Please check "key2" key in "invalid" section. '
            'Current value: "300.99".'
        ),
    ):
        test_conf.gettimedelta("invalid", "key2")
    # Values too large for timedelta(seconds=...) surface the OverflowError text.
    with pytest.raises(
        AirflowConfigException,
        match=re.escape(
            "Failed to convert value to timedelta in `seconds`. "
            "Python int too large to convert to C int. "
            'Please check "key3" key in "invalid" section. Current value: "999999999999999".'
        ),
    ):
        test_conf.gettimedelta("invalid", "key3")
    # Negative, zero and positive second counts all convert.
    assert isinstance(test_conf.gettimedelta("valid", "key4"), datetime.timedelta)
    assert test_conf.gettimedelta("valid", "key4") == datetime.timedelta(seconds=-1)
    assert isinstance(test_conf.gettimedelta("valid", "key5"), datetime.timedelta)
    assert test_conf.gettimedelta("valid", "key5") == datetime.timedelta(seconds=0)
    assert isinstance(test_conf.gettimedelta("valid", "key6"), datetime.timedelta)
    assert test_conf.gettimedelta("valid", "key6") == datetime.timedelta(seconds=300)
    # An empty value yields None rather than a zero timedelta.
    assert isinstance(test_conf.gettimedelta("default", "key7"), type(None))
    assert test_conf.gettimedelta("default", "key7") is None
def test_getimport(self):
    """Check getimport for importable, empty, invalid, and missing module paths."""
    config_text = """
[test]
valid_module = json.JSONDecoder
empty_module =
invalid_module = non.existent.module.path
"""
    parser = AirflowConfigParser(default_config=config_text)
    # A valid dotpath imports and returns the referenced object.
    imported = parser.getimport("test", "valid_module")
    assert imported is not None
    assert imported == json.JSONDecoder
    # An empty value maps to None.
    assert parser.getimport("test", "empty_module") is None
    # A well-formed but non-importable path raises with a descriptive message.
    expected = (
        'The object could not be loaded. Please check "invalid_module" key in "test" section. '
        'Current value: "non.existent.module.path".'
    )
    with pytest.raises(AirflowConfigException, match=re.escape(expected)):
        parser.getimport("test", "invalid_module")
    # Keys absent from the config raise as well.
    with pytest.raises(AirflowConfigException):
        parser.getimport("test", "missing_module")
def test_get_mandatory_list_value(self):
    """Check list parsing and both error paths of get_mandatory_list_value."""
    config_text = """
[test]
existing_list = one,two,three
"""
    parser = AirflowConfigParser(default_config=config_text)
    # Comma-separated values come back as a list of strings.
    assert parser.get_mandatory_list_value("test", "existing_list") == ["one", "two", "three"]
    # A missing key without fallback raises the config exception...
    with pytest.raises(AirflowConfigException):
        parser.get_mandatory_list_value("test", "missing_key")
    # ...and an explicit None fallback raises ValueError instead.
    with pytest.raises(ValueError, match=r"The value test/missing_key should be set!"):
        parser.get_mandatory_list_value("test", "missing_key", fallback=None)
def test_set_case_insensitive(self):
    """Both set and get must treat section and key names case-insensitively."""
    parser = AirflowConfigParser()
    parser.add_section("test")
    parser.set("test", "key1", "value1")
    # Writing through a different casing overwrites the same option.
    parser.set("TEST", "KEY1", "value2")
    for section, key in (("test", "key1"), ("TEST", "KEY1")):
        assert parser.get(section, key) == "value2"
    # Mixed-case writes are readable through any casing.
    parser.set("Test", "NewKey", "new_value")
    for section, key in (("test", "newkey"), ("TEST", "NEWKEY")):
        assert parser.get(section, key) == "new_value"
def test_configure_parser_from_configuration_description_with_deprecated_options(self):
    """
    Test that configure_parser_from_configuration_description respects deprecated options.
    """
    description = {
        "test_section": {
            "options": {
                "non_deprecated_key": {"default": "non_deprecated_value"},
                "deprecated_key_version": {
                    "default": "deprecated_value_version",
                    "version_deprecated": "3.0.0",
                },
                "deprecated_key_reason": {
                    "default": "deprecated_value_reason",
                    "deprecation_reason": "Some reason",
                },
                "none_default_deprecated": {"default": None, "deprecation_reason": "No default"},
            }
        }
    }
    parser = ConfigParser()
    configure_parser_from_configuration_description(parser, description, {})
    # Only the non-deprecated option is materialized in the parser.
    assert parser.has_option("test_section", "non_deprecated_key")
    assert parser.get("test_section", "non_deprecated_key") == "non_deprecated_value"
    # Options deprecated by version or reason — and those with a None default — are skipped.
    for skipped in ("deprecated_key_version", "deprecated_key_reason", "none_default_deprecated"):
        assert not parser.has_option("test_section", skipped)
def test_get_default_value_deprecated(self):
    """Test 'conf.get' for deprecated options and should not return default value."""

    class TestConfigParser(AirflowConfigParser):
        # Minimal parser whose description marks options deprecated three ways:
        # by reason, by version, and by version with a None default.
        def __init__(self):
            configuration_description = {
                "test_section": {
                    "options": {
                        "deprecated_key": {
                            "default": "some_value",
                            "deprecation_reason": "deprecated",
                        },
                        "deprecated_key2": {
                            "default": "some_value",
                            "version_deprecated": "2.0.0",
                        },
                        "deprecated_key3": {
                            "default": None,
                            "version_deprecated": "2.0.0",
                        },
                        "active_key": {
                            "default": "active_value",
                        },
                    }
                }
            }
            _default_values = ConfigParser()
            # verify configure_parser_from_configuration_description logic of skipping
            # deprecated options when populating the defaults parser
            configure_parser_from_configuration_description(
                _default_values, configuration_description, {}
            )
            # Initialize the shared base directly so the SDK subclass __init__
            # (which reads Core's config.yml) is bypassed.
            _SharedAirflowConfigParser.__init__(self, configuration_description, _default_values)

    test_conf = TestConfigParser()
    deprecated_conf_list = [
        ("test_section", "deprecated_key"),
        ("test_section", "deprecated_key2"),
        ("test_section", "deprecated_key3"),
    ]
    # case 1: using `get` with fallback
    # should return fallback if not found (deprecated defaults must not leak through)
    expected_sentinel = object()
    for section, key in deprecated_conf_list:
        assert test_conf.get(section, key, fallback=expected_sentinel) is expected_sentinel
    # case 2: using `get` without fallback
    # should raise AirflowConfigException
    for section, key in deprecated_conf_list:
        with pytest.raises(
            AirflowConfigException,
            match=re.escape(f"section/key [{section}/{key}] not found in config"),
        ):
            test_conf.get(section, key)
    # case 3: active (non-deprecated) key
    # Active key should be present
    assert test_conf.get("test_section", "active_key") == "active_value"
def test_team_env_var_takes_priority(self):
    """Test that team-specific env var is returned when team_name is provided."""
    global_config = textwrap.dedent(
        """\
        [celery]
        broker_url = redis://global:6379/0
        """
    )
    parser = AirflowConfigParser(default_config=global_config)
    team_env = {"AIRFLOW__TEAM_A___CELERY__BROKER_URL": "redis://team-a:6379/0"}
    with patch.dict(os.environ, team_env):
        # The team-scoped env var wins over the global config value.
        assert parser.get("celery", "broker_url", team_name="team_a") == "redis://team-a:6379/0"
def test_team_config_file_section(self):
    """Test that [team_name=section] in config file is used when team_name is provided."""
    parser = AirflowConfigParser()
    config_body = textwrap.dedent(
        """\
        [celery]
        broker_url = redis://global:6379/0
        [team_a=celery]
        broker_url = redis://team-a:6379/0
        """
    )
    parser.read_string(config_body)
    # The [team_a=celery] section shadows the global [celery] value for team_a.
    assert parser.get("celery", "broker_url", team_name="team_a") == "redis://team-a:6379/0"
def test_team_does_not_fallback_to_global_config(self):
    """Test that team lookup does NOT fall back to global config section or env var."""
    parser = AirflowConfigParser()
    config_body = textwrap.dedent(
        """\
        [celery]
        broker_url = redis://global:6379/0
        """
    )
    parser.read_string(config_body)
    # team_a has no value of its own; the global section must not leak in,
    # so the lookup falls through to defaults and raises.
    with pytest.raises(AirflowConfigException):
        parser.get("celery", "broker_url", team_name="team_a")
def test_team_does_not_fallback_to_global_env_var(self):
    """Test that team lookup does NOT fall back to global env var."""
    parser = AirflowConfigParser()
    global_env = {"AIRFLOW__CELERY__BROKER_URL": "redis://global-env:6379/0"}
    # The global env var must not satisfy a team-scoped lookup.
    with patch.dict(os.environ, global_env), pytest.raises(AirflowConfigException):
        parser.get("celery", "broker_url", team_name="team_a")
def test_team_skips_cmd_lookup(self):
    """Test that _cmd config values are skipped when team_name is provided."""
    parser = AirflowConfigParser()
    parser.read_string(
        textwrap.dedent(
            """\
            [test]
            sensitive_key_cmd = echo -n cmd_value
            """
        )
    )
    parser.sensitive_config_values.add(("test", "sensitive_key"))
    # The _cmd indirection is honoured for global lookups...
    assert parser.get("test", "sensitive_key") == "cmd_value"
    # ...but ignored entirely for team-scoped lookups, which then raise.
    with pytest.raises(AirflowConfigException):
        parser.get("test", "sensitive_key", team_name="team_a")
def test_team_skips_secret_lookup(self):
    """Test that _secret config values are skipped when team_name is provided."""

    class TestParserWithSecretBackend(AirflowConfigParser):
        # Stub parser whose secret-backend lookup always succeeds.
        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.configuration_description = {}
            self._default_values = ConfigParser()
            self._suppress_future_warnings = False

        def _get_config_value_from_secret_backend(self, config_key: str) -> str | None:
            return "secret_value_from_backend"

    parser = TestParserWithSecretBackend()
    parser.read_string(
        textwrap.dedent(
            """\
            [test]
            sensitive_key_secret = test/secret/path
            """
        )
    )
    parser.sensitive_config_values.add(("test", "sensitive_key"))
    # Global lookups resolve through the secret backend...
    assert parser.get("test", "sensitive_key") == "secret_value_from_backend"
    # ...but team-scoped lookups skip it and raise.
    with pytest.raises(AirflowConfigException):
        parser.get("test", "sensitive_key", team_name="team_a")
def test_team_falls_through_to_defaults(self):
    """Test that team lookup falls through to defaults when no team-specific value is set."""
    parser = AirflowConfigParser()
    # The "test"/"key1" default of "default_value" comes from the AirflowConfigParser fixture.
    assert parser.get("test", "key1", team_name="team_a") == "default_value"
def test_team_env_var_format(self):
    """Test the triple-underscore env var format: AIRFLOW__{TEAM}___{SECTION}__{KEY}."""
    parser = AirflowConfigParser()
    team_env = {"AIRFLOW__MY_TEAM___MY_SECTION__MY_KEY": "team_value"}
    with patch.dict(os.environ, team_env):
        assert parser.get("my_section", "my_key", team_name="my_team") == "team_value"
| {
"repo_id": "apache/airflow",
"file_path": "shared/configuration/tests/configuration/test_parser.py",
"license": "Apache License 2.0",
"lines": 864,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:task-sdk/src/airflow/sdk/configuration.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""SDK configuration parser that extends the shared parser."""
from __future__ import annotations
import logging
import os
import pathlib
from configparser import ConfigParser
from io import StringIO
from typing import Any
from airflow.sdk import yaml
from airflow.sdk._shared.configuration.parser import (
AirflowConfigParser as _SharedAirflowConfigParser,
configure_parser_from_configuration_description,
)
from airflow.sdk.execution_time.secrets import _SERVER_DEFAULT_SECRETS_SEARCH_PATH
log = logging.getLogger(__name__)
def _default_config_file_path(file_name: str) -> str:
"""Get path to airflow core config.yml file."""
# TODO: Task SDK will have its own config.yml
# Temporary: SDK uses Core's config files during development
# Option 1: For installed packages
config_path = pathlib.Path(__file__).parent.parent / "config_templates" / file_name
if config_path.exists():
return str(config_path)
# Option 2: Monorepo structure
config_path = (
pathlib.Path(__file__).parent.parent.parent.parent.parent
/ "airflow-core"
/ "src"
/ "airflow"
/ "config_templates"
/ file_name
)
if config_path.exists():
return str(config_path)
raise FileNotFoundError(f"Could not find '{file_name}' in config_templates. ")
def retrieve_configuration_description() -> dict[str, dict[str, Any]]:
    """
    Read Airflow configuration description from Core's YAML file.

    SDK reads airflow core config.yml. Eventually SDK will have its own config.yml
    with only authoring related configs.

    :return: Python dictionary containing configs & their info
    """
    description: dict[str, dict[str, Any]] = {}
    with open(_default_config_file_path("config.yml")) as config_file:
        loaded = yaml.safe_load(config_file)
    description.update(loaded)
    return description
def create_default_config_parser(configuration_description: dict[str, dict[str, Any]]) -> ConfigParser:
    """
    Create default config parser based on configuration description.

    This version expands {AIRFLOW_HOME} in default values but not other
    Core-specific expansion variables (SECRET_KEY, FERNET_KEY, etc.).

    :param configuration_description: configuration description from config.yml
    :return: Default Config Parser with default values
    """
    expansion_vars = get_sdk_expansion_variables()
    default_parser = ConfigParser()
    configure_parser_from_configuration_description(
        default_parser, configuration_description, expansion_vars
    )
    return default_parser
def get_sdk_expansion_variables() -> dict[str, Any]:
    """
    Get variables available for config value expansion in SDK.

    SDK only needs AIRFLOW_HOME for expansion. Core specific variables
    (FERNET_KEY, JWT_SECRET_KEY, etc.) are not needed in the SDK.
    """
    fallback_home = os.path.expanduser("~/airflow")
    return {"AIRFLOW_HOME": os.environ.get("AIRFLOW_HOME", fallback_home)}
def get_airflow_config() -> str:
    """Get path to airflow.cfg file."""
    fallback_home = os.path.expanduser("~/airflow")
    return os.path.join(os.environ.get("AIRFLOW_HOME", fallback_home), "airflow.cfg")
class AirflowSDKConfigParser(_SharedAirflowConfigParser):
    """
    SDK configuration parser that extends the shared parser.

    In Phase 1, this reads Core's config.yml and can optionally read airflow.cfg.
    Eventually SDK will have its own config.yml with only authoring-related configs.
    """

    def __init__(
        self,
        default_config: str | None = None,
        *args,
        **kwargs,
    ):
        """Build the parser from Core's config.yml, then overlay airflow.cfg if present.

        :param default_config: optional ini-format text merged into the defaults
        """
        # Read Core's config.yml (Phase 1: shared config.yml)
        configuration_description = retrieve_configuration_description()
        # Create default values parser
        _default_values = create_default_config_parser(configuration_description)
        super().__init__(configuration_description, _default_values, *args, **kwargs)
        self.configuration_description = configuration_description
        self._default_values = _default_values
        self._suppress_future_warnings = False
        # Optionally load from airflow.cfg if it exists; a broken file is
        # logged and ignored rather than aborting SDK import.
        airflow_config = get_airflow_config()
        if os.path.exists(airflow_config):
            try:
                self.read(airflow_config)
            except Exception as e:
                log.warning("Could not read airflow.cfg from %s: %s", airflow_config, e)
        if default_config is not None:
            self._update_defaults_from_string(default_config)

    def expand_all_configuration_values(self):
        """Expand all configuration values using SDK-specific expansion variables."""
        all_vars = get_sdk_expansion_variables()
        for section in self.sections():
            # ConfigParser.items(section) returns a snapshot list, so the
            # remove/set mutations below do not disturb the iteration.
            for key, value in self.items(section):
                if value is not None:
                    # Remove first so the subsequent set() replaces the option cleanly.
                    if self.has_option(section, key):
                        self.remove_option(section, key)
                    if self.is_template(section, key) or not isinstance(value, str):
                        # Template values are stored verbatim, unexpanded.
                        self.set(section, key, value)
                    else:
                        try:
                            self.set(section, key, value.format(**all_vars))
                        except (KeyError, ValueError, IndexError):
                            # Leave unexpanded if variable not available
                            self.set(section, key, value)

    def load_test_config(self):
        """
        Use the test configuration instead of Airflow defaults.

        Unit tests load values from `unit_tests.cfg` to ensure consistent behavior. Realistically we should
        not have this needed but this is temporary to help fix the tests that use dag_maker and rely on few
        configurations.

        The SDK does not expand template variables (FERNET_KEY, JWT_SECRET_KEY, etc.) because it does not use
        the config fields that require expansion.
        """
        unit_test_config_file = pathlib.Path(_default_config_file_path("unit_tests.cfg"))
        unit_test_config = unit_test_config_file.read_text()
        # Drop everything read so far so unit_tests.cfg fully replaces it.
        self.remove_all_read_configurations()
        with StringIO(unit_test_config) as test_config_file:
            self.read_file(test_config_file)
        # Expand {AIRFLOW_HOME}-style placeholders in the freshly loaded values.
        self.expand_all_configuration_values()
        log.info("Unit test configuration loaded from 'unit_tests.cfg'")

    def remove_all_read_configurations(self):
        """Remove all read configurations, leaving only default values in the config."""
        # sections() excludes the DEFAULT section, so defaults survive this purge.
        for section in self.sections():
            self.remove_section(section)
def get_custom_secret_backend(worker_mode: bool = False):
    """
    Get Secret Backend if defined in airflow.cfg.

    Conditionally selects the section, key and kwargs key based on whether it is called from worker or not.
    This is a convenience function that calls conf._get_custom_secret_backend().
    Uses SDK's conf instead of Core's conf.
    """
    # Import lazily so the module-level __getattr__ builds `conf` on first use.
    from airflow.sdk.configuration import conf

    backend = conf._get_custom_secret_backend(worker_mode=worker_mode)
    return backend
def initialize_secrets_backends(
    default_backends: list[str] = _SERVER_DEFAULT_SECRETS_SEARCH_PATH,
):
    """
    Initialize secrets backend.

    * import secrets backend classes
    * instantiate them and return them in a list

    Uses SDK's conf instead of Core's conf.

    :param default_backends: dotpaths of the backends to load after any custom backend
    :return: list of instantiated secrets backends
    """
    from airflow.sdk._shared.module_loading import import_string

    # Fix: Connection was previously re-imported inside the loop on every
    # iteration (and once more in the custom-backend branch); import it once here.
    from airflow.sdk.definitions.connection import Connection

    # Determine worker mode - if default_backends is not the server default, it's worker mode.
    # This is a simplified check; in practice, worker mode is determined by the caller.
    worker_mode = default_backends != _SERVER_DEFAULT_SECRETS_SEARCH_PATH

    backend_list = []
    custom_secret_backend = get_custom_secret_backend(worker_mode)
    if custom_secret_backend is not None:
        custom_secret_backend._set_connection_class(Connection)
        backend_list.append(custom_secret_backend)
    for class_name in default_backends:
        secrets_backend_cls = import_string(class_name)
        backend = secrets_backend_cls()
        backend._set_connection_class(Connection)
        backend_list.append(backend)
    return backend_list
def ensure_secrets_loaded(
    default_backends: list[str] = _SERVER_DEFAULT_SECRETS_SEARCH_PATH,
) -> list:
    """
    Ensure that all secrets backends are loaded.

    If the secrets_backend_list contains only 2 default backends, reload it.

    :param default_backends: backend dotpaths to use when (re)loading
    :return: list of instantiated secrets backends
    """
    # Fix: worker callers (non-default search path) previously built the full
    # server-default backend list first and then discarded it; build the
    # requested list directly instead. Result is identical, work is halved.
    if default_backends != _SERVER_DEFAULT_SECRETS_SEARCH_PATH:
        return initialize_secrets_backends(default_backends=default_backends)
    secrets_backend_list = initialize_secrets_backends()
    # Only 2 backends means no custom backend was configured; reload to pick one up.
    if len(secrets_backend_list) == 2:
        return initialize_secrets_backends(default_backends=default_backends)
    return secrets_backend_list
def initialize_config() -> AirflowSDKConfigParser:
    """
    Initialize SDK configuration parser.

    Called automatically when SDK is imported.
    """
    parser = AirflowSDKConfigParser()
    # Unit-test mode swaps in unit_tests.cfg for reproducible test behavior.
    if parser.getboolean("core", "unit_test_mode", fallback=False):
        parser.load_test_config()
    return parser
def __getattr__(name: str):
    """Lazily build and cache the module-level ``conf`` on first attribute access."""
    if name != "conf":
        raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
    conf = initialize_config()
    # Cache in module globals so subsequent accesses skip this hook entirely.
    globals()["conf"] = conf
    return conf
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/configuration.py",
"license": "Apache License 2.0",
"lines": 221,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/src/airflow/sdk/definitions/callback.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import inspect
from abc import ABC
from collections.abc import Callable
from typing import Any
import structlog
from airflow.sdk._shared.module_loading import import_string, is_valid_dotpath
log = structlog.getLogger(__name__)
class Callback(ABC):
    """
    Base class for Deadline Alert callbacks.

    Callbacks are used to execute custom logic when a deadline is missed.

    The `callback_callable` can be a Python callable type or a string containing the path to the callable that
    can be used to import the callable. It must be a top-level callable in a module present on the host where
    it will run.

    It will be called with Airflow context and specified kwargs when a deadline is missed.
    """

    # Dotpath string used to import the callable at execution time.
    path: str
    # Extra keyword arguments forwarded to the callable; "context" is reserved.
    kwargs: dict

    def __init__(self, callback_callable: Callable | str, kwargs: dict[str, Any] | None = None):
        self.path = self.get_callback_path(callback_callable)
        if kwargs and "context" in kwargs:
            # "context" is supplied by Airflow at call time; callers must not pre-bind it.
            raise ValueError("context is a reserved kwarg for this class")
        self.kwargs = kwargs or {}

    @classmethod
    def get_callback_path(cls, _callback: str | Callable) -> str:
        """Convert callback to a string path that can be used to import it later."""
        if callable(_callback):
            cls.verify_callable(_callback)
            # TODO: This implementation doesn't support using a lambda function as a callback.
            #       We should consider that in the future, but the addition is non-trivial.
            # Get the reference path to the callable in the form `airflow.models.deadline.get_from_db`
            return f"{_callback.__module__}.{_callback.__qualname__}"
        if not isinstance(_callback, str) or not is_valid_dotpath(_callback.strip()):
            raise ImportError(f"`{_callback}` doesn't look like a valid dot path.")
        stripped_callback = _callback.strip()
        try:
            # The provided callback is a string which appears to be a valid dotpath, attempt to import it.
            callback = import_string(stripped_callback)
            if not callable(callback):
                # The input is a string which can be imported, but is not callable.
                raise AttributeError(f"Provided callback {callback} is not callable.")
            cls.verify_callable(callback)
        except ImportError as e:
            # Logging here instead of failing because it is possible that the code for the callable
            # exists somewhere other than on the DAG processor. We are making a best effort to validate,
            # but can't rule out that it may be available at runtime even if it can not be imported here.
            log.debug(
                "Callback %s is formatted like a callable dotpath, but could not be imported.\n%s",
                stripped_callback,
                e,
            )
        return stripped_callback

    @classmethod
    def verify_callable(cls, callback: Callable):
        """For additional verification of the callable during initialization in subclasses."""
        pass  # No verification needed in the base class

    @classmethod
    def deserialize(cls, data: dict, version):
        """Rebuild an instance from serialize() output; remaining keys become init kwargs."""
        path = data.pop("path")
        return cls(callback_callable=path, **data)

    @classmethod
    def serialized_fields(cls) -> tuple[str, ...]:
        """Attribute names included in serialize(); subclasses extend this tuple."""
        return ("path", "kwargs")

    def serialize(self) -> dict[str, Any]:
        """Return a dict of the serialized fields and their current values."""
        return {f: getattr(self, f) for f in self.serialized_fields()}

    def __eq__(self, other):
        # Equality requires the exact same class, not merely a shared base.
        if type(self) is not type(other):
            return NotImplemented
        return self.serialize() == other.serialize()

    def __hash__(self):
        # Dict-valued fields (e.g. kwargs) are flattened to sorted item tuples
        # so the serialized form becomes hashable; consistent with __eq__.
        serialized = self.serialize()
        hashable_items = []
        for k, v in serialized.items():
            if isinstance(v, dict):
                hashable_items.append((k, tuple(sorted(v.items()))))
            else:
                hashable_items.append((k, v))
        return hash(tuple(sorted(hashable_items)))
class AsyncCallback(Callback):
    """
    Asynchronous callback that runs in the triggerer.

    The `callback_callable` may be an awaitable Python callable or a dotpath string that
    resolves to one; it must be a top-level awaitable callable in a module present on the
    triggerer. It is invoked with the Airflow context plus the configured kwargs when a
    deadline is missed.
    """

    def __init__(self, callback_callable: Callable | str, kwargs: dict | None = None):
        super().__init__(callback_callable=callback_callable, kwargs=kwargs)

    @classmethod
    def verify_callable(cls, callback: Callable):
        # Accept native coroutine functions as well as objects implementing __await__.
        awaitable = inspect.iscoroutinefunction(callback) or hasattr(callback, "__await__")
        if not awaitable:
            raise AttributeError(f"Provided callback {callback} is not awaitable.")
class SyncCallback(Callback):
    """
    Synchronous callback that runs in the specified or default executor.

    The `callback_callable` may be a Python callable or a dotpath string that resolves to
    one; it must be a top-level callable in a module present on the executor. It is invoked
    with the Airflow context plus the configured kwargs when a deadline is missed.
    """

    # Name of the executor the callback should run on; None selects the default executor.
    executor: str | None

    def __init__(
        self, callback_callable: Callable | str, kwargs: dict | None = None, executor: str | None = None
    ):
        super().__init__(callback_callable=callback_callable, kwargs=kwargs)
        self.executor = executor

    @classmethod
    def serialized_fields(cls) -> tuple[str, ...]:
        # Everything the base class serializes, plus the executor routing hint.
        return (*super().serialized_fields(), "executor")
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/definitions/callback.py",
"license": "Apache License 2.0",
"lines": 125,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/tests/task_sdk/definitions/test_callback.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import cast
import pytest
from airflow.sdk._shared.module_loading import qualname
from airflow.sdk.definitions.callback import AsyncCallback, Callback, SyncCallback
from airflow.serialization.serde import deserialize, serialize
async def empty_async_callback_for_deadline_tests():
    """No-op awaitable used across tests of Deadline/DeadlineAlert callback handling."""


def empty_sync_callback_for_deadline_tests():
    """No-op callable used across tests of Deadline/DeadlineAlert callback handling."""


# Canonical dotpath of the async helper, reused as a known-importable callback path.
TEST_CALLBACK_PATH = qualname(empty_async_callback_for_deadline_tests)
# Representative non-empty kwargs payload.
TEST_CALLBACK_KWARGS = {"arg1": "value1"}
# Syntactically valid dotpath that can never be imported.
UNIMPORTABLE_DOT_PATH = "valid.but.nonexistent.path"
class TestCallback:
    """Tests of the shared Callback base behavior via its concrete subclasses."""

    @pytest.mark.parametrize(
        ("subclass", "callable"),
        [
            pytest.param(AsyncCallback, empty_async_callback_for_deadline_tests, id="async"),
            pytest.param(SyncCallback, empty_sync_callback_for_deadline_tests, id="sync"),
        ],
    )
    def test_init_error_reserved_kwarg(self, subclass, callable):
        # "context" is injected by Airflow at invocation time, so pre-binding it must fail.
        with pytest.raises(ValueError, match="context is a reserved kwarg for this class"):
            subclass(callable, {"context": None})

    @pytest.mark.parametrize(
        ("callback_callable", "expected_path"),
        [
            pytest.param(
                empty_sync_callback_for_deadline_tests,
                qualname(empty_sync_callback_for_deadline_tests),
                id="valid_sync_callable",
            ),
            pytest.param(
                empty_async_callback_for_deadline_tests,
                qualname(empty_async_callback_for_deadline_tests),
                id="valid_async_callable",
            ),
            pytest.param(TEST_CALLBACK_PATH, TEST_CALLBACK_PATH, id="valid_path_string"),
            # expected_path None signals "ends with <lambda>" rather than an exact match.
            pytest.param(lambda x: x, None, id="lambda_function"),
            # Surrounding whitespace in a path string is stripped.
            pytest.param(TEST_CALLBACK_PATH + " ", TEST_CALLBACK_PATH, id="path_with_whitespace"),
            # A well-formed dotpath that cannot be imported is returned as-is (best effort).
            pytest.param(UNIMPORTABLE_DOT_PATH, UNIMPORTABLE_DOT_PATH, id="valid_format_not_importable"),
        ],
    )
    def test_get_callback_path_happy_cases(self, callback_callable, expected_path):
        path = Callback.get_callback_path(callback_callable)
        if expected_path is None:
            assert path.endswith("<lambda>")
        else:
            assert path == expected_path

    @pytest.mark.parametrize(
        ("callback_callable", "error_type"),
        [
            pytest.param(42, ImportError, id="not_a_string"),
            pytest.param("", ImportError, id="empty_string"),
            pytest.param("os.path", AttributeError, id="non_callable_module"),
        ],
    )
    def test_get_callback_path_error_cases(self, callback_callable, error_type):
        # Select the expected message fragment for each failure mode.
        expected_message = ""
        if error_type is ImportError:
            expected_message = "doesn't look like a valid dot path."
        elif error_type is AttributeError:
            expected_message = "is not callable."
        with pytest.raises(error_type, match=expected_message):
            Callback.get_callback_path(callback_callable)

    @pytest.mark.parametrize(
        ("callback1_args", "callback2_args", "should_equal"),
        [
            pytest.param(
                (TEST_CALLBACK_PATH, TEST_CALLBACK_KWARGS),
                (TEST_CALLBACK_PATH, TEST_CALLBACK_KWARGS),
                True,
                id="identical",
            ),
            pytest.param(
                (TEST_CALLBACK_PATH, TEST_CALLBACK_KWARGS),
                (UNIMPORTABLE_DOT_PATH, TEST_CALLBACK_KWARGS),
                False,
                id="different_path",
            ),
            pytest.param(
                (TEST_CALLBACK_PATH, TEST_CALLBACK_KWARGS),
                (TEST_CALLBACK_PATH, {"other": "kwargs"}),
                False,
                id="different_kwargs",
            ),
            pytest.param((TEST_CALLBACK_PATH, None), (TEST_CALLBACK_PATH, None), True, id="both_no_kwargs"),
        ],
    )
    def test_callback_equality(self, callback1_args, callback2_args, should_equal):
        # Equality is defined by the serialized (path, kwargs) payloads.
        callback1 = AsyncCallback(*callback1_args)
        callback2 = AsyncCallback(*callback2_args)
        assert (callback1 == callback2) == should_equal

    @pytest.mark.parametrize(
        ("callback_class", "args1", "args2", "should_be_same_hash"),
        [
            pytest.param(
                AsyncCallback,
                (TEST_CALLBACK_PATH, TEST_CALLBACK_KWARGS),
                (TEST_CALLBACK_PATH, TEST_CALLBACK_KWARGS),
                True,
                id="async_identical",
            ),
            pytest.param(
                SyncCallback,
                (TEST_CALLBACK_PATH, TEST_CALLBACK_KWARGS),
                (TEST_CALLBACK_PATH, TEST_CALLBACK_KWARGS),
                True,
                id="sync_identical",
            ),
            pytest.param(
                AsyncCallback,
                (TEST_CALLBACK_PATH, TEST_CALLBACK_KWARGS),
                (UNIMPORTABLE_DOT_PATH, TEST_CALLBACK_KWARGS),
                False,
                id="async_different_path",
            ),
            pytest.param(
                SyncCallback,
                (TEST_CALLBACK_PATH, TEST_CALLBACK_KWARGS),
                (TEST_CALLBACK_PATH, {"other": "kwargs"}),
                False,
                id="sync_different_kwargs",
            ),
            pytest.param(
                AsyncCallback,
                (TEST_CALLBACK_PATH, None),
                (TEST_CALLBACK_PATH, None),
                True,
                id="async_no_kwargs",
            ),
        ],
    )
    def test_callback_hash_and_set_behavior(self, callback_class, args1, args2, should_be_same_hash):
        # Hashes must agree exactly when equality does (hash/eq consistency).
        callback1 = callback_class(*args1)
        callback2 = callback_class(*args2)
        assert (hash(callback1) == hash(callback2)) == should_be_same_hash
class TestAsyncCallback:
    """Tests specific to AsyncCallback: awaitable validation and serde round-trips."""

    @pytest.mark.parametrize(
        ("callback_callable", "kwargs", "expected_path"),
        [
            pytest.param(
                empty_async_callback_for_deadline_tests,
                TEST_CALLBACK_KWARGS,
                TEST_CALLBACK_PATH,
                id="callable",
            ),
            pytest.param(TEST_CALLBACK_PATH, TEST_CALLBACK_KWARGS, TEST_CALLBACK_PATH, id="string_path"),
            # Unimportable-but-well-formed paths are accepted (validated best-effort at runtime).
            pytest.param(
                UNIMPORTABLE_DOT_PATH, TEST_CALLBACK_KWARGS, UNIMPORTABLE_DOT_PATH, id="unimportable_path"
            ),
        ],
    )
    def test_init(self, callback_callable, kwargs, expected_path):
        callback = AsyncCallback(callback_callable, kwargs=kwargs)
        assert callback.path == expected_path
        assert callback.kwargs == kwargs
        assert isinstance(callback, Callback)

    def test_init_error(self):
        # A plain (non-awaitable) function must be rejected at construction time.
        with pytest.raises(AttributeError, match="is not awaitable."):
            AsyncCallback(empty_sync_callback_for_deadline_tests)

    def test_serialize_deserialize(self):
        # Round-trip through serde must preserve equality (path and kwargs).
        callback = AsyncCallback(TEST_CALLBACK_PATH, kwargs=TEST_CALLBACK_KWARGS)
        serialized = serialize(callback)
        deserialized = cast("Callback", deserialize(serialized.copy()))
        assert callback == deserialized
class TestSyncCallback:
    """Tests specific to SyncCallback: executor routing and serde round-trips."""

    @pytest.mark.parametrize(
        ("callback_callable", "executor"),
        [
            pytest.param(empty_sync_callback_for_deadline_tests, "remote", id="with_executor"),
            pytest.param(empty_sync_callback_for_deadline_tests, None, id="without_executor"),
            pytest.param(qualname(empty_sync_callback_for_deadline_tests), None, id="importable_path"),
            pytest.param(UNIMPORTABLE_DOT_PATH, None, id="unimportable_path"),
        ],
    )
    def test_init(self, callback_callable, executor):
        # Fix: the parametrized `callback_callable` was previously ignored and
        # TEST_CALLBACK_PATH always passed instead, so the importable_path and
        # unimportable_path cases never exercised their inputs. Pass it through
        # and derive the expected path from the same resolution logic.
        callback = SyncCallback(callback_callable, kwargs=TEST_CALLBACK_KWARGS, executor=executor)
        assert callback.path == Callback.get_callback_path(callback_callable)
        assert callback.kwargs == TEST_CALLBACK_KWARGS
        assert callback.executor == executor
        assert isinstance(callback, Callback)

    def test_serialize_deserialize(self):
        # Round-trip through serde must preserve equality, including the executor field.
        callback = SyncCallback(TEST_CALLBACK_PATH, kwargs=TEST_CALLBACK_KWARGS, executor="local")
        serialized = serialize(callback)
        deserialized = cast("Callback", deserialize(serialized.copy()))
        assert callback == deserialized
# While DeadlineReference lives in the SDK package, the unit tests to confirm it
# works need database access so they live in the models/test_deadline.py module.
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/tests/task_sdk/definitions/test_callback.py",
"license": "Apache License 2.0",
"lines": 207,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:devel-common/src/tests_common/test_utils/fernet.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import base64
import hashlib
def generate_fernet_key_string(string_key: str = "AIRFLOW_INTEGRATION_TEST") -> str:
    """
    Deterministically derive a Fernet-compatible key from *string_key*.

    The same input always produces the same key: the SHA-256 digest of the
    input (exactly the 32 bytes Fernet requires) encoded as URL-safe base64.
    """
    digest_32_bytes = hashlib.sha256(string_key.encode()).digest()
    encoded_key = base64.urlsafe_b64encode(digest_32_bytes)
    return encoded_key.decode()
| {
"repo_id": "apache/airflow",
"file_path": "devel-common/src/tests_common/test_utils/fernet.py",
"license": "Apache License 2.0",
"lines": 24,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:devel-common/tests/unit/tests_common/test_utils/test_fernet.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from tests_common.test_utils.fernet import generate_fernet_key_string
class TestFernetUtils:
    """Test utils for Fernet encryption."""

    def test_generate_fernet_key_string(self):
        """Key derivation is deterministic for both explicit and default inputs."""
        assert generate_fernet_key_string("TEST_KEY") == "NBJC_zYX6NWNek9v7tVv64YZz4K5sAgpoC4WGkQYv6I="
        assert generate_fernet_key_string() == "BMsag_V7iplH1SIxzrTIbhLRZYOAYd6p0_nPtGdmuxo="
| {
"repo_id": "apache/airflow",
"file_path": "devel-common/tests/unit/tests_common/test_utils/test_fernet.py",
"license": "Apache License 2.0",
"lines": 27,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:dev/breeze/src/airflow_breeze/utils/click_validators.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import re
from datetime import datetime
import click
def validate_release_date(ctx: click.core.Context, param: click.core.Option, value: str) -> str:
"""
Validate that the date follows YYYY-MM-DD[_NN] format.
:param ctx: Click context
:param param: Click parameter
:param value: The value to validate
:return: The validated value
:raises click.BadParameter: If the value doesn't match the required format
"""
if not value:
return value
# Check if the format matches YYYY-MM-DD or YYYY-MM-DD_NN
pattern = r"^\d{4}-\d{2}-\d{2}(_\d{2})?$"
if not re.match(pattern, value):
raise click.BadParameter(
"Date must be in YYYY-MM-DD or YYYY-MM-DD_NN format (e.g., 2025-11-16 or 2025-11-16_01)"
)
# Validate that the date part (YYYY-MM-DD) is a valid date
date_part = value.split("_")[0]
try:
datetime.strptime(date_part, "%Y-%m-%d")
except ValueError:
raise click.BadParameter(f"Invalid date: {date_part}. Please provide a valid date.")
return value
| {
"repo_id": "apache/airflow",
"file_path": "dev/breeze/src/airflow_breeze/utils/click_validators.py",
"license": "Apache License 2.0",
"lines": 44,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:dev/breeze/tests/test_release_date_validation.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow_breeze.utils.click_validators import validate_release_date
class TestPlannedReleaseDateValidation:
    """Test validation of planned release date format YYYY-MM-DD[_NN]."""
    @pytest.mark.parametrize(
        "date_value",
        [
            "2025-11-16",
            "2025-11-16_01",
            "2025-11-16_99",
            "2025-01-01",
            "2024-02-29",  # Leap year
            "2025-12-31",
        ],
    )
    def test_valid_date_formats(self, date_value):
        """Test that valid date formats are accepted and returned unchanged."""
        # The function is a click callback, so we pass None for ctx and param
        result = validate_release_date(None, None, date_value)
        assert result == date_value
    def test_empty_value(self):
        """Test that empty value is accepted."""
        result = validate_release_date(None, None, "")
        assert result == ""
    @pytest.mark.parametrize(
        ("date_value", "error_pattern"),
        [
            ("2025_11_16", "YYYY-MM-DD"),  # Wrong separator (underscores)
            ("2025-11-16_1", "YYYY-MM-DD"),  # Wrong NN format (needs 2 digits)
            ("25-11-16", "YYYY-MM-DD"),  # Wrong year format (needs 4 digits)
            ("2025-13-16", "Invalid date"),  # Invalid month (13)
            ("2025-11-32", "Invalid date"),  # Invalid day (32)
            ("2025-02-30", "Invalid date"),  # Invalid date (Feb 30)
            ("2025-00-01", "Invalid date"),  # Invalid month (0)
            ("2025-11-00", "Invalid date"),  # Invalid day (0)
        ],
    )
    def test_invalid_date_formats(self, date_value, error_pattern):
        """Test that invalid date formats are rejected with the matching message."""
        # Local import keeps BadParameter scoped to the one test that needs it.
        from click import BadParameter
        with pytest.raises(BadParameter, match=error_pattern):
            validate_release_date(None, None, date_value)
| {
"repo_id": "apache/airflow",
"file_path": "dev/breeze/tests/test_release_date_validation.py",
"license": "Apache License 2.0",
"lines": 59,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/amazon/tests/unit/amazon/aws/executors/ecs/test_utils.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Unit tests for AWS ECS Executor Utilities."""
from __future__ import annotations
import datetime
from unittest import mock
from airflow.models.taskinstance import TaskInstanceKey
from airflow.providers.amazon.aws.executors.ecs.utils import (
AllEcsConfigKeys,
EcsExecutorException,
EcsExecutorTask,
EcsQueuedTask,
EcsTaskCollection,
EcsTaskInfo,
RunTaskKwargsConfigKeys,
_recursive_flatten_dict,
camelize_dict_keys,
parse_assign_public_ip,
)
from airflow.utils.state import State
class TestEcsQueuedTask:
    """Test EcsQueuedTask dataclass."""

    def test_ecs_queued_task_creation(self):
        """Every constructor argument is stored verbatim on the dataclass."""
        fields = {
            "key": TaskInstanceKey(dag_id="test_dag", task_id="test_task", run_id="test_run", try_number=1),
            "command": ["echo", "hello"],
            "queue": "default",
            "executor_config": {"key": "value"},
            "attempt_number": 1,
            "next_attempt_time": datetime.datetime.now(),
        }
        queued_task = EcsQueuedTask(**fields)
        for name, value in fields.items():
            assert getattr(queued_task, name) == value
class TestEcsTaskInfo:
    """Test EcsTaskInfo dataclass."""

    def test_ecs_task_info_creation(self):
        """cmd, queue and config are stored unchanged."""
        command = ["echo", "hello"]
        info = EcsTaskInfo(cmd=command, queue="default", config={"key": "value"})
        assert info.cmd == command
        assert info.queue == "default"
        assert info.config == {"key": "value"}
class TestRunTaskKwargsConfigKeys:
    """Test RunTaskKwargsConfigKeys class."""

    def test_config_keys_values(self):
        """Each run-task config attribute resolves to its snake_case config name."""
        expected = {
            "ASSIGN_PUBLIC_IP": "assign_public_ip",
            "CAPACITY_PROVIDER_STRATEGY": "capacity_provider_strategy",
            "CLUSTER": "cluster",
            "CONTAINER_NAME": "container_name",
            "LAUNCH_TYPE": "launch_type",
            "PLATFORM_VERSION": "platform_version",
            "SECURITY_GROUPS": "security_groups",
            "SUBNETS": "subnets",
            "TASK_DEFINITION": "task_definition",
        }
        for attribute, config_name in expected.items():
            assert getattr(RunTaskKwargsConfigKeys, attribute) == config_name
class TestAllEcsConfigKeys:
    """Test AllEcsConfigKeys class."""

    def test_all_config_keys_values(self):
        """Inherited and ECS-specific config attributes resolve to the right strings."""
        expected = {
            # Inherited from RunTaskKwargsConfigKeys
            "ASSIGN_PUBLIC_IP": "assign_public_ip",
            "CLUSTER": "cluster",
            # Defined directly on AllEcsConfigKeys
            "AWS_CONN_ID": "conn_id",
            "CHECK_HEALTH_ON_STARTUP": "check_health_on_startup",
            "MAX_RUN_TASK_ATTEMPTS": "max_run_task_attempts",
            "REGION_NAME": "region_name",
            "RUN_TASK_KWARGS": "run_task_kwargs",
        }
        for attribute, config_name in expected.items():
            assert getattr(AllEcsConfigKeys, attribute) == config_name
class TestEcsExecutorException:
    """Test EcsExecutorException class."""

    def test_ecs_executor_exception_creation(self):
        """The exception carries its message and behaves like a plain Exception."""
        message = "Test error message"
        err = EcsExecutorException(message)
        assert isinstance(err, Exception)
        assert str(err) == message
class TestEcsExecutorTask:
    """Test EcsExecutorTask class."""

    TASK_ARN = "arn:aws:ecs:us-east-1:123456789012:task/test-task"

    @classmethod
    def _make_task(cls, **overrides):
        """Build a RUNNING EcsExecutorTask; keyword overrides replace the defaults."""
        params = {
            "task_arn": cls.TASK_ARN,
            "last_status": "RUNNING",
            "desired_status": "RUNNING",
            "containers": [{"name": "container1", "exit_code": 0}],
        }
        params.update(overrides)
        return EcsExecutorTask(**params)

    def test_ecs_executor_task_creation(self):
        """Every constructor argument is stored on the instance."""
        begun_at = datetime.datetime.now()
        task = self._make_task(
            started_at=begun_at,
            stopped_reason=None,
            external_executor_id="test-executor-id",
        )
        assert task.task_arn == self.TASK_ARN
        assert task.last_status == "RUNNING"
        assert task.desired_status == "RUNNING"
        assert task.containers == [{"name": "container1", "exit_code": 0}]
        assert task.started_at == begun_at
        assert task.stopped_reason is None
        assert task.external_executor_id == "test-executor-id"

    def test_get_task_state_running(self):
        """A RUNNING last status maps straight to State.RUNNING."""
        assert self._make_task().get_task_state() == State.RUNNING

    def test_get_task_state_queued(self):
        """Desired RUNNING but not yet RUNNING is reported as QUEUED."""
        task = self._make_task(last_status="PENDING")
        assert task.get_task_state() == State.QUEUED

    def test_get_task_state_removed_timeout(self):
        """A stopped task that never started is reported as REMOVED."""
        task = self._make_task(last_status="STOPPED", desired_status="STOPPED", started_at=None)
        assert task.get_task_state() == State.REMOVED

    def test_get_task_state_running_not_finished(self):
        """A container without an exit code means the task is still RUNNING."""
        task = self._make_task(containers=[{"name": "container1"}])  # No exit_code
        assert task.get_task_state() == State.RUNNING

    def test_get_task_state_success(self):
        """All containers exiting 0 yields SUCCESS."""
        task = self._make_task(
            last_status="STOPPED",
            desired_status="STOPPED",
            containers=[
                {"name": "container1", "exit_code": 0},
                {"name": "container2", "exit_code": 0},
            ],
            started_at=datetime.datetime.now(),
        )
        assert task.get_task_state() == State.SUCCESS

    def test_get_task_state_failed(self):
        """Any container with a non-zero exit code yields FAILED."""
        task = self._make_task(
            last_status="STOPPED",
            desired_status="STOPPED",
            containers=[
                {"name": "container1", "exit_code": 0},
                {"name": "container2", "exit_code": 1},
            ],
            started_at=datetime.datetime.now(),
        )
        assert task.get_task_state() == State.FAILED

    def test_repr(self):
        """__repr__ shows the ARN, the status transition and the derived state."""
        expected = "(arn:aws:ecs:us-east-1:123456789012:task/test-task, RUNNING->RUNNING, running)"
        assert repr(self._make_task()) == expected
class TestEcsTaskCollection:
    """Test EcsTaskCollection class."""

    def setup_method(self):
        """Create an empty collection and one canonical task/key/info triple."""
        self.collection = EcsTaskCollection()
        self.task_key = TaskInstanceKey(
            dag_id="test_dag", task_id="test_task", run_id="test_run", try_number=1
        )
        self.task_arn = "arn:aws:ecs:us-east-1:123456789012:task/test-task"
        self.task = EcsExecutorTask(
            task_arn=self.task_arn,
            last_status="RUNNING",
            desired_status="RUNNING",
            containers=[{"name": "container1", "exit_code": 0}],
        )
        self.cmd = ["echo", "hello"]
        self.queue = "default"
        self.exec_config = {"key": "value"}

    def _add(self, attempt_number=1):
        """Insert the canonical task into the collection under self.task_key."""
        self.collection.add_task(
            task=self.task,
            airflow_task_key=self.task_key,
            queue=self.queue,
            airflow_cmd=self.cmd,
            exec_config=self.exec_config,
            attempt_number=attempt_number,
        )

    def test_init(self):
        """A fresh collection starts with every internal mapping empty."""
        fresh = EcsTaskCollection()
        assert fresh.key_to_arn == {}
        assert fresh.arn_to_key == {}
        assert fresh.tasks == {}
        assert fresh.key_to_failure_counts == {}
        assert fresh.key_to_task_info == {}

    def test_add_task(self):
        """Adding a task populates all five internal mappings consistently."""
        self._add()
        assert self.collection.key_to_arn[self.task_key] == self.task_arn
        assert self.collection.arn_to_key[self.task_arn] == self.task_key
        assert self.collection.tasks[self.task_arn] == self.task
        assert self.collection.key_to_failure_counts[self.task_key] == 1
        info = self.collection.key_to_task_info[self.task_key]
        assert info.cmd == self.cmd
        assert info.queue == self.queue
        assert info.config == self.exec_config

    def test_update_task(self):
        """update_task replaces the stored task for the same ARN."""
        self._add()
        replacement = EcsExecutorTask(
            task_arn=self.task_arn,
            last_status="STOPPED",
            desired_status="STOPPED",
            containers=[{"name": "container1", "exit_code": 0}],
        )
        self.collection.update_task(replacement)
        stored = self.collection.tasks[self.task_arn]
        assert stored.last_status == "STOPPED"
        assert stored.desired_status == "STOPPED"

    def test_task_by_key(self):
        """task_by_key resolves the Airflow key to the stored task."""
        self._add()
        assert self.collection.task_by_key(self.task_key) == self.task

    def test_task_by_arn(self):
        """task_by_arn resolves the ECS ARN to the stored task."""
        self._add()
        assert self.collection.task_by_arn(self.task_arn) == self.task

    def test_pop_by_key(self):
        """pop_by_key returns the task and purges every internal mapping."""
        self._add()
        assert self.collection.pop_by_key(self.task_key) == self.task
        assert self.task_key not in self.collection.key_to_arn
        assert self.task_arn not in self.collection.arn_to_key
        assert self.task_arn not in self.collection.tasks
        assert self.task_key not in self.collection.key_to_task_info
        assert self.task_key not in self.collection.key_to_failure_counts

    def test_get_all_arns(self):
        """get_all_arns lists the ARNs of every stored task."""
        self._add()
        assert self.collection.get_all_arns() == [self.task_arn]

    def test_get_all_task_keys(self):
        """get_all_task_keys lists the Airflow keys of every stored task."""
        self._add()
        assert self.collection.get_all_task_keys() == [self.task_key]

    def test_failure_count_by_key(self):
        """The initial failure count equals the attempt number used on add."""
        self._add(attempt_number=3)
        assert self.collection.failure_count_by_key(self.task_key) == 3

    def test_increment_failure_count(self):
        """increment_failure_count bumps the per-key counter by one."""
        self._add()
        self.collection.increment_failure_count(self.task_key)
        assert self.collection.key_to_failure_counts[self.task_key] == 2

    def test_info_by_key(self):
        """info_by_key returns the command, queue and config recorded on add."""
        self._add()
        info = self.collection.info_by_key(self.task_key)
        assert info.cmd == self.cmd
        assert info.queue == self.queue
        assert info.config == self.exec_config

    def test_getitem(self):
        """Indexing the collection by ARN returns the stored task."""
        self._add()
        assert self.collection[self.task_arn] == self.task

    def test_len(self):
        """len() reflects the number of stored tasks."""
        assert len(self.collection) == 0
        self._add()
        assert len(self.collection) == 1
class TestRecursiveFlattenDict:
    """Test _recursive_flatten_dict function."""

    def test_flat_dict(self):
        """A flat dictionary comes back unchanged."""
        given = {"a": "value1", "b": "value2"}
        assert _recursive_flatten_dict(given) == {"a": "value1", "b": "value2"}

    def test_nested_dict(self):
        """Keys of a nested dictionary are hoisted to the top level."""
        given = {"a": "value1", "b": {"c": "value2", "d": "value3"}}
        assert _recursive_flatten_dict(given) == {"a": "value1", "c": "value2", "d": "value3"}

    def test_deeply_nested_dict(self):
        """Only the innermost leaf key survives arbitrarily deep nesting."""
        given = {"a": {"b": {"c": {"d": "value"}}}}
        assert _recursive_flatten_dict(given) == {"d": "value"}

    def test_mixed_dict(self):
        """Flat entries and nested entries can be freely combined."""
        given = {"a": "value1", "b": {"c": "value2"}, "d": "value3"}
        assert _recursive_flatten_dict(given) == {"a": "value1", "c": "value2", "d": "value3"}

    def test_empty_dict(self):
        """An empty dictionary flattens to an empty dictionary."""
        assert _recursive_flatten_dict({}) == {}

    def test_dict_with_empty_nested_dict(self):
        """Empty nested dictionaries disappear entirely."""
        given = {"a": "value1", "b": {}}
        assert _recursive_flatten_dict(given) == {"a": "value1"}
class TestParseAssignPublicIp:
    """Test parse_assign_public_ip function."""

    def test_parse_assign_public_ip_true_fargate(self):
        """On Fargate, the string "True" maps to the literal ENABLED."""
        assert parse_assign_public_ip("True", is_launch_type_ec2=False) == "ENABLED"

    def test_parse_assign_public_ip_false_fargate(self):
        """On Fargate, the string "False" maps to the literal DISABLED."""
        assert parse_assign_public_ip("False", is_launch_type_ec2=False) == "DISABLED"

    def test_parse_assign_public_ip_true_ec2(self):
        """The EC2 launch type ignores the setting entirely ("True" input)."""
        assert parse_assign_public_ip("True", is_launch_type_ec2=True) is None

    def test_parse_assign_public_ip_false_ec2(self):
        """The EC2 launch type ignores the setting entirely ("False" input)."""
        assert parse_assign_public_ip("False", is_launch_type_ec2=True) is None

    def test_parse_assign_public_ip_default_fargate(self):
        """The Fargate default of "False" also maps to DISABLED."""
        assert parse_assign_public_ip("False", is_launch_type_ec2=False) == "DISABLED"
class TestCamelizeDictKeys:
    """Test camelize_dict_keys function."""

    def test_camelize_flat_dict(self):
        """Top-level snake_case keys become camelCase."""
        given = {"test_key": "value", "another_key": "value2"}
        assert camelize_dict_keys(given) == {"testKey": "value", "anotherKey": "value2"}

    def test_camelize_nested_dict(self):
        """Camelization recurses into nested dictionaries."""
        given = {"test_key": {"nested_key": "value"}}
        assert camelize_dict_keys(given) == {"testKey": {"nestedKey": "value"}}

    def test_camelize_dict_with_tags(self):
        """Under a 'tags' key, the nested keys are left untouched."""
        given = {"test_key": "value", "tags": {"custom_key": "custom_value"}}
        assert camelize_dict_keys(given) == {"testKey": "value", "tags": {"custom_key": "custom_value"}}

    def test_camelize_dict_with_tags_uppercase(self):
        """An all-caps TAGS key is camelized to tAGS; its nested keys are untouched."""
        given = {"test_key": "value", "TAGS": {"custom_key": "custom_value"}}
        assert camelize_dict_keys(given) == {"testKey": "value", "tAGS": {"custom_key": "custom_value"}}

    def test_camelize_dict_with_mixed_case_tags(self):
        """A mixed-case 'Tags' key is camelized to tags; its nested keys are untouched."""
        given = {"test_key": "value", "Tags": {"custom_key": "custom_value"}}
        assert camelize_dict_keys(given) == {"testKey": "value", "tags": {"custom_key": "custom_value"}}

    def test_camelize_empty_dict(self):
        """An empty dictionary camelizes to an empty dictionary."""
        assert camelize_dict_keys({}) == {}

    def test_camelize_dict_with_non_dict_values(self):
        """Non-dict values are carried over unchanged."""
        given = {"test_key": ["list", "values"], "another_key": 123}
        assert camelize_dict_keys(given) == {"testKey": ["list", "values"], "anotherKey": 123}

    @mock.patch("airflow.providers.amazon.aws.executors.ecs.utils.camelize")
    def test_camelize_dict_keys_with_mock(self, mock_camelize):
        """camelize_dict_keys delegates per-key conversion to camelize()."""
        mock_camelize.side_effect = lambda key, uppercase_first_letter=False: f"camelized_{key}"
        result = camelize_dict_keys({"test_key": {"nested_key": "value"}})
        assert result == {"camelized_test_key": {"camelized_nested_key": "value"}}
        mock_camelize.assert_called()
| {
"repo_id": "apache/airflow",
"file_path": "providers/amazon/tests/unit/amazon/aws/executors/ecs/test_utils.py",
"license": "Apache License 2.0",
"lines": 487,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_11_07.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from cadwyn import ResponseInfo, VersionChange, convert_response_to_previous_version_for, schema
from airflow.api_fastapi.execution_api.datamodels.asset_event import (
AssetEventResponse,
AssetEventsResponse,
DagRunAssetReference,
)
from airflow.api_fastapi.execution_api.datamodels.dagrun import TriggerDAGRunPayload
from airflow.api_fastapi.execution_api.datamodels.taskinstance import DagRun, TIRunContext
class AddPartitionKeyField(VersionChange):
    """Add the `partition_key` field to DagRun model."""
    description = __doc__
    # Cadwyn applies these schema instructions when generating the previous
    # API version: `partition_key` did not exist on any of these models there.
    instructions_to_migrate_to_previous_version = (
        schema(DagRun).field("partition_key").didnt_exist,
        schema(AssetEventResponse).field("partition_key").didnt_exist,
        schema(TriggerDAGRunPayload).field("partition_key").didnt_exist,
        schema(DagRunAssetReference).field("partition_key").didnt_exist,
    )
    @convert_response_to_previous_version_for(TIRunContext) # type: ignore[arg-type]
    def remove_partition_key_from_dag_run(response: ResponseInfo) -> None: # type: ignore[misc]
        """Remove the `partition_key` field from the dag_run object when converting to the previous version."""
        # Guard on type: the body may not contain a serialized dag_run dict.
        if "dag_run" in response.body and isinstance(response.body["dag_run"], dict):
            response.body["dag_run"].pop("partition_key", None)
    @convert_response_to_previous_version_for(AssetEventsResponse) # type: ignore[arg-type]
    def remove_partition_key_from_asset_events(response: ResponseInfo) -> None: # type: ignore[misc]
        """Remove the `partition_key` field from each asset event when converting to the previous version."""
        events = response.body["asset_events"]
        for elem in events:
            elem.pop("partition_key", None)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_11_07.py",
"license": "Apache License 2.0",
"lines": 45,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/tests/unit/api_fastapi/execution_api/versions/v2025_09_23/test_asset_events.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime
import pytest
from airflow._shared.timezones import timezone
from airflow.models.asset import AssetActive, AssetAliasModel, AssetEvent, AssetModel
DEFAULT_DATE = timezone.parse("2021-01-01T00:00:00")
pytestmark = pytest.mark.db_test
@pytest.fixture
def ver_client(client):
    # Pin every request made through this client to the 2025-09-23 calendar
    # API version via the version header.
    client.headers["Airflow-API-Version"] = "2025-09-23"
    return client
@pytest.fixture
def test_asset_events(session):
    # Three asset events (ids 1-3) that differ only by id and timestamp day.
    def make_timestamp(day):
        return datetime(2021, 1, day, tzinfo=timezone.utc)
    common = {
        "asset_id": 1,
        "extra": {"foo": "bar"},
        "source_dag_id": "foo",
        "source_task_id": "bar",
        "source_run_id": "custom",
        "source_map_index": -1,
        "partition_key": None,
    }
    events = [AssetEvent(id=i, timestamp=make_timestamp(i), **common) for i in (1, 2, 3)]
    session.add_all(events)
    session.commit()
    yield events
    # Teardown: delete the events so later tests see a clean table.
    for event in events:
        session.delete(event)
    session.commit()
@pytest.fixture
def test_asset(session):
    # One asset (id 1) plus its AssetActive row, removed again on teardown.
    asset = AssetModel(
        id=1,
        name="test_get_asset_by_name",
        uri="s3://bucket/key",
        group="asset",
        extra={"foo": "bar"},
        created_at=DEFAULT_DATE,
        updated_at=DEFAULT_DATE,
    )
    asset_active = AssetActive.for_asset(asset)
    session.add_all([asset, asset_active])
    session.commit()
    yield asset
    session.delete(asset)
    session.delete(asset_active)
    session.commit()
@pytest.fixture
def test_asset_alias(session, test_asset_events, test_asset):
    """Persist an AssetAliasModel linked to the test asset and its events; clean up on teardown."""
    alias = AssetAliasModel(id=1, name="test_alias")
    alias.asset_events = test_asset_events
    alias.assets.append(test_asset)
    session.add(alias)
    session.commit()

    yield alias

    session.delete(alias)
    session.commit()
class TestGetAssetEventByAsset:
    """GET /execution/asset-events/by-asset on the pinned (older) API version."""

    @pytest.mark.parametrize(
        ("uri", "name"),
        [
            (None, "test_get_asset_by_name"),
            ("s3://bucket/key", None),
            ("s3://bucket/key", "test_get_asset_by_name"),
        ],
    )
    @pytest.mark.usefixtures("test_asset", "test_asset_events")
    def test_should_not_have_partition_key(self, uri, name, ver_client):
        # The 2025-09-23 API version predates partitioned assets, so the
        # serialized events must not expose a "partition_key" field.
        response = ver_client.get(
            "/execution/asset-events/by-asset",
            params={"name": name, "uri": uri},
        )
        assert response.status_code == 200

        # All three fixture events share everything except id and timestamp
        # (event id == day-of-month), so build the expectation from a template.
        expected_events = [
            {
                "id": event_id,
                "extra": {"foo": "bar"},
                "source_task_id": "bar",
                "source_dag_id": "foo",
                "source_run_id": "custom",
                "source_map_index": -1,
                "created_dagruns": [],
                "asset": {
                    "extra": {"foo": "bar"},
                    "group": "asset",
                    "name": "test_get_asset_by_name",
                    "uri": "s3://bucket/key",
                },
                "timestamp": f"2021-01-0{event_id}T00:00:00Z",
            }
            for event_id in (1, 2, 3)
        ]
        assert response.json() == {"asset_events": expected_events}
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/api_fastapi/execution_api/versions/v2025_09_23/test_asset_events.py",
"license": "Apache License 2.0",
"lines": 143,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/cli/hot_reload.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Hot reload utilities for development mode."""
from __future__ import annotations
import os
import signal
import sys
from collections.abc import Callable, Sequence
from pathlib import Path
from typing import TYPE_CHECKING
import structlog
if TYPE_CHECKING:
import subprocess
log = structlog.getLogger(__name__)
def run_with_reloader(
    callback: Callable,
    process_name: str = "process",
) -> None:
    """
    Run a callback function with automatic reloading on file changes.

    Watches the installed ``airflow`` package directory and restarts the
    process whenever a file under it changes. Intended for development mode.

    The ``AIRFLOW_DEV_RELOADER_PID`` environment variable distinguishes the
    supervising reloader (not yet set) from the re-executed child (set).

    :param callback: The function to run. This should be the main entry point
        of the command that needs hot-reload support.
    :param process_name: Name of the process being run (for logging purposes)
    """
    import airflow

    # Default watch paths - watch the airflow source directory
    watch_paths = [Path(airflow.__file__).parent]

    log.info("Starting %s in development mode with hot-reload enabled", process_name)
    log.info("Watching paths: %s", watch_paths)

    if os.environ.get("AIRFLOW_DEV_RELOADER_PID") is not None:
        # Re-executed child process: just run the actual command.
        callback()
        return

    # Main process: mark ourselves as the reloader, then watch and restart.
    os.environ["AIRFLOW_DEV_RELOADER_PID"] = str(os.getpid())
    _run_reloader(watch_paths)
def _terminate_process_tree(
    process: subprocess.Popen[bytes],
    timeout: int = 5,
    force_kill_remaining: bool = True,
) -> None:
    """
    Terminate a process and all its children recursively.

    Uses psutil to ensure all child processes are properly terminated,
    which is important for cleaning up subprocesses like serve-log servers.
    If psutil-based termination fails for an unexpected reason, falls back
    to terminating only the top-level process via subprocess.

    :param process: The subprocess.Popen process to terminate
    :param timeout: Timeout in seconds to wait for graceful termination
    :param force_kill_remaining: If True, force kill processes that don't terminate gracefully
    """
    import subprocess

    import psutil

    try:
        parent = psutil.Process(process.pid)
        # Get all child processes recursively
        children = parent.children(recursive=True)
        # Terminate all children first (SIGTERM), so they get a chance to exit
        # before their parent disappears.
        for child in children:
            try:
                child.terminate()
            except (psutil.NoSuchProcess, psutil.AccessDenied):
                # Child already exited or is not ours to signal — nothing to do.
                pass
        # Terminate the parent
        parent.terminate()
        # Wait for all processes to terminate
        gone, alive = psutil.wait_procs(children + [parent], timeout=timeout)
        # Force kill any remaining processes if requested
        if force_kill_remaining:
            for proc in alive:
                try:
                    log.warning("Force killing process %s", proc.pid)
                    proc.kill()
                except (psutil.NoSuchProcess, psutil.AccessDenied):
                    pass
    except (psutil.NoSuchProcess, psutil.AccessDenied):
        # Process already terminated
        pass
    except Exception as e:
        log.warning("Error terminating process tree: %s", e)
        # Fallback to simple termination of the top-level process only
        # (children may be orphaned in this path).
        try:
            process.terminate()
            process.wait(timeout=timeout)
        except subprocess.TimeoutExpired:
            if force_kill_remaining:
                log.warning("Process did not terminate gracefully, killing...")
                process.kill()
                process.wait()
def _run_reloader(watch_paths: Sequence[str | Path]) -> None:
    """
    Watch for changes and restart the process.

    Watches the provided paths and restarts the process by re-executing the
    Python interpreter with the same arguments. Blocks until interrupted
    (SIGINT/SIGTERM or Ctrl+C).

    :param watch_paths: List of paths to watch for changes.
    """
    import subprocess

    from watchfiles import watch

    # Shared mutable state for the nested closures below.
    process = None
    should_exit = False

    def start_process():
        """Start or restart the subprocess."""
        nonlocal process
        if process is not None:
            log.info("Stopping process and all its children...")
            _terminate_process_tree(process, timeout=5, force_kill_remaining=True)
        log.info("Starting process...")
        # Restart the process by re-executing Python with the same arguments
        # Note: sys.argv is safe here as it comes from the original CLI invocation
        # and is only used in development mode for hot-reloading the same process
        process = subprocess.Popen([sys.executable] + sys.argv)
        return process

    def signal_handler(signum, frame):
        """Handle termination signals."""
        nonlocal should_exit, process
        should_exit = True
        log.info("Received signal %s, shutting down...", signum)
        if process:
            # Graceful shutdown: no force-kill on signal-driven exit.
            _terminate_process_tree(process, timeout=5, force_kill_remaining=False)
        sys.exit(0)

    # Set up signal handlers
    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    # Start the initial process
    process = start_process()
    log.info("Hot-reload enabled. Watching for file changes...")
    log.info("Press Ctrl+C to stop")

    try:
        # watchfiles.watch() yields a set of (change, path) tuples per batch.
        for changes in watch(*watch_paths):
            if should_exit:
                break
            log.info("Detected changes: %s", changes)
            log.info("Reloading...")
            # Restart the process
            process = start_process()
    except KeyboardInterrupt:
        log.info("Shutting down...")
        if process:
            process.terminate()
            process.wait()
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/cli/hot_reload.py",
"license": "Apache License 2.0",
"lines": 158,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/tests/unit/cli/test_hot_reload.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
import sys
from unittest import mock
import pytest
from airflow.cli import hot_reload
class TestHotReload:
    """Tests for hot reload utilities."""

    @mock.patch("airflow.cli.hot_reload._run_reloader")
    def test_run_with_reloader_missing_watchfiles(self, mock_run_reloader):
        """Test that run_with_reloader handles missing watchfiles by raising ImportError."""
        # Simulate watchfiles not being available when _run_reloader tries to import it
        mock_run_reloader.side_effect = ImportError("No module named 'watchfiles'")
        # Clear the reloader PID env var to simulate being the main process
        with mock.patch.dict(os.environ, {}, clear=True):
            with pytest.raises(ImportError):
                hot_reload.run_with_reloader(lambda: None)

    @mock.patch("airflow.cli.hot_reload._run_reloader")
    def test_run_with_reloader_main_process(self, mock_run_reloader):
        """Test run_with_reloader as the main process."""
        # Clear the reloader PID env var to simulate being the main process
        with mock.patch.dict(os.environ, {}, clear=True):
            callback = mock.Mock()
            hot_reload.run_with_reloader(callback)
            # Should set the env var and call _run_reloader
            assert "AIRFLOW_DEV_RELOADER_PID" in os.environ
            mock_run_reloader.assert_called_once()

    def test_run_with_reloader_child_process(self):
        """Test run_with_reloader as a child process."""
        # Set the reloader PID env var to simulate being a child process
        with mock.patch.dict(os.environ, {"AIRFLOW_DEV_RELOADER_PID": "12345"}):
            callback = mock.Mock()
            hot_reload.run_with_reloader(callback)
            # Should just call the callback directly
            callback.assert_called_once()

    @mock.patch("subprocess.Popen")
    @mock.patch("watchfiles.watch")
    def test_run_reloader_starts_process(self, mock_watch, mock_popen):
        """Test that _run_reloader starts a subprocess."""
        mock_process = mock.Mock()
        mock_popen.return_value = mock_process
        mock_watch.return_value = []  # Empty iterator, will exit immediately
        watch_paths = ["/tmp/test"]
        hot_reload._run_reloader(watch_paths)
        # Should have started a process re-executing the current interpreter/argv
        mock_popen.assert_called_once()
        assert mock_popen.call_args[0][0] == [sys.executable] + sys.argv

    @mock.patch("airflow.cli.hot_reload._terminate_process_tree")
    @mock.patch("subprocess.Popen")
    @mock.patch("watchfiles.watch")
    def test_run_reloader_restarts_on_changes(self, mock_watch, mock_popen, mock_terminate):
        """Test that _run_reloader restarts the process on file changes."""
        mock_process = mock.Mock()
        mock_popen.return_value = mock_process
        # Simulate one file change and then exit
        mock_watch.return_value = iter([[("change", "/tmp/test/file.py")]])
        watch_paths = ["/tmp/test"]
        hot_reload._run_reloader(watch_paths)
        # Should have started process twice (initial + restart)
        assert mock_popen.call_count == 2
        # Should have terminated the first process
        mock_terminate.assert_called()
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/cli/test_hot_reload.py",
"license": "Apache License 2.0",
"lines": 79,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/common/compat/tests/unit/common/compat/connection/test_connection.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from unittest import mock
import pytest
from airflow.models.connection import Connection
from airflow.providers.common.compat.connection import get_async_connection
class MockAgetBaseHook:
    """Stand-in for BaseHook exposing the native async ``aget_connection`` API."""

    def __init__(self, *args, **kwargs):
        # Fix: the original spelled this `def __init__(*args, **kargs)`, which
        # silently swallowed `self` into *args and misspelled kwargs. Accept and
        # ignore any constructor arguments, as mock.patch(new_callable=...) expects.
        pass

    async def aget_connection(self, conn_id: str):
        """Return a fixed HTTP Connection regardless of *conn_id*."""
        return Connection(
            conn_id="test_conn",
            conn_type="http",
            password="secret_token_aget",
        )
class MockBaseHook:
    """Stand-in for an older BaseHook that only offers sync ``get_connection``."""

    def __init__(self, *args, **kwargs):
        # Fix: the original spelled this `def __init__(*args, **kargs)`, which
        # silently swallowed `self` into *args and misspelled kwargs. Accept and
        # ignore any constructor arguments, as mock.patch(new_callable=...) expects.
        pass

    def get_connection(self, conn_id: str):
        """Return a fixed HTTP Connection regardless of *conn_id*."""
        return Connection(
            conn_id="test_conn_sync",
            conn_type="http",
            password="secret_token",
        )
class TestGetAsyncConnection:
    """get_async_connection must prefer aget_connection and fall back to get_connection."""

    @mock.patch("airflow.providers.common.compat.connection.BaseHook", new_callable=MockAgetBaseHook)
    @pytest.mark.asyncio
    async def test_get_async_connection_with_aget(self, _, caplog):
        # BaseHook exposes aget_connection -> the native async path is taken.
        with caplog.at_level(logging.DEBUG):
            conn = await get_async_connection("test_conn")
        assert conn.password == "secret_token_aget"
        assert conn.conn_type == "http"
        assert "Get connection using `BaseHook.aget_connection()." in caplog.text

    @mock.patch("airflow.providers.common.compat.connection.BaseHook", new_callable=MockBaseHook)
    @pytest.mark.asyncio
    async def test_get_async_connection_with_get_connection(self, _, caplog):
        # BaseHook lacks aget_connection -> compat shim falls back to the sync API.
        with caplog.at_level(logging.DEBUG):
            conn = await get_async_connection("test_conn")
        assert conn.password == "secret_token"
        assert conn.conn_type == "http"
        assert "Get connection using `BaseHook.get_connection()." in caplog.text
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/compat/tests/unit/common/compat/connection/test_connection.py",
"license": "Apache License 2.0",
"lines": 57,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:task-sdk/src/airflow/sdk/execution_time/sentry/configured.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Configured Sentry integration.
This module must only be imported conditionally since the Sentry SDK is NOT a
required dependency of the Airflow Task SDK. You shouldn't import this module
anyway, but use the parent ``airflow.sdk.execution_time.sentry`` path instead,
where things in this module are re-exported.
"""
from __future__ import annotations
import functools
import importlib
from typing import TYPE_CHECKING, Any
import sentry_sdk
import sentry_sdk.integrations.logging
import structlog
from airflow.sdk.execution_time.sentry.noop import NoopSentry
if TYPE_CHECKING:
from structlog.typing import FilteringBoundLogger as Logger
from airflow.sdk import Context
from airflow.sdk.execution_time.sentry.noop import Run, RunReturn
from airflow.sdk.execution_time.task_runner import RuntimeTaskInstance
from airflow.sdk.types import DagRunProtocol, RuntimeTaskInstanceProtocol
log = structlog.get_logger(logger_name=__name__)
class ConfiguredSentry(NoopSentry):
    """Configure Sentry SDK."""

    # dag_run attributes copied into the Sentry scope as tags.
    SCOPE_DAG_RUN_TAGS = ("data_interval_start", "data_interval_end", "logical_date")
    # task_instance attributes copied into the Sentry scope as tags.
    SCOPE_TASK_INSTANCE_TAGS = ("task_id", "dag_id", "try_number")
    # Options that cannot be expressed as plain config strings; if present in the
    # [sentry] section they are ignored with a warning instead of passed to init().
    UNSUPPORTED_SENTRY_OPTIONS = frozenset(
        (
            "integrations",
            "in_app_include",
            "in_app_exclude",
            "ignore_errors",
            "before_breadcrumb",
        )
    )

    def prepare_to_enrich_errors(self, executor_integration: str) -> None:
        """
        Initialize the Sentry SDK.

        :param executor_integration: optional dotted import path of an
            executor-specific Sentry integration class; it is instantiated and
            passed to ``sentry_sdk.init``. Import failures are logged, not raised.
        """
        from airflow.sdk.configuration import conf

        sentry_sdk.integrations.logging.ignore_logger("airflow.task")

        # LoggingIntegration is set by default.
        integrations = []
        if executor_integration:
            try:
                mod_p, cls_n = executor_integration.rsplit(".", 1)
                integrations.append(getattr(importlib.import_module(mod_p), cls_n)())
            except Exception:
                log.exception("Invalid executor Sentry integration", import_path=executor_integration)

        sentry_config_opts: dict[str, Any] = conf.getsection("sentry") or {}
        if sentry_config_opts:
            # "sentry_on" only gates whether Sentry is enabled at all and is not a
            # valid sentry_sdk.init() option. Fix: pop with a default so a [sentry]
            # section that omits the key does not raise KeyError (consistent with
            # the defaulted dsn pops below).
            sentry_config_opts.pop("sentry_on", None)
            old_way_dsn = sentry_config_opts.pop("sentry_dsn", None)
            new_way_dsn = sentry_config_opts.pop("dsn", None)
            # supported backward compatibility with old way dsn option
            dsn = old_way_dsn or new_way_dsn

            if unsupported_options := self.UNSUPPORTED_SENTRY_OPTIONS.intersection(sentry_config_opts):
                log.warning(
                    "There are unsupported options in [sentry] section",
                    options=unsupported_options,
                )
        else:
            dsn = None

        # before_send / transport are dotted import paths in config; resolve them.
        if before_send := conf.getimport("sentry", "before_send", fallback=None):
            sentry_config_opts["before_send"] = before_send
        if transport := conf.getimport("sentry", "transport", fallback=None):
            sentry_config_opts["transport"] = transport

        if dsn:
            sentry_sdk.init(dsn=dsn, integrations=integrations, **sentry_config_opts)
        else:
            # Setting up Sentry using environment variables.
            log.debug("Defaulting to SENTRY_DSN in environment.")
            sentry_sdk.init(integrations=integrations, **sentry_config_opts)

    def add_tagging(self, dag_run: DagRunProtocol, task_instance: RuntimeTaskInstanceProtocol) -> None:
        """Add tagging for a task_instance."""
        task = task_instance.task

        with sentry_sdk.configure_scope() as scope:
            # Task-instance tags first, then dag-run tags, then the operator name
            # (order is asserted by the test suite).
            for tag_name in self.SCOPE_TASK_INSTANCE_TAGS:
                attribute = getattr(task_instance, tag_name)
                scope.set_tag(tag_name, attribute)
            for tag_name in self.SCOPE_DAG_RUN_TAGS:
                attribute = getattr(dag_run, tag_name)
                scope.set_tag(tag_name, attribute)
            scope.set_tag("operator", task.__class__.__name__)

    def add_breadcrumbs(self, task_instance: RuntimeTaskInstanceProtocol) -> None:
        """Add breadcrumbs inside of a task_instance."""
        from airflow.sdk.execution_time.task_runner import RuntimeTaskInstance

        # One breadcrumb per previously-completed task in the same dag run.
        breadcrumbs = RuntimeTaskInstance.get_task_breadcrumbs(
            dag_id=task_instance.dag_id,
            run_id=task_instance.run_id,
        )
        for breadcrumb in breadcrumbs:
            sentry_sdk.add_breadcrumb(category="completed_tasks", data=breadcrumb, level="info")

    def enrich_errors(self, run: Run) -> Run:
        """
        Decorate errors.

        Wrap :func:`airflow.sdk.execution_time.task_runner.run` to support task
        specific tags and breadcrumbs. Exceptions are captured to Sentry and
        re-raised unchanged.
        """

        @functools.wraps(run)
        def wrapped_run(ti: RuntimeTaskInstance, context: Context, log: Logger) -> RunReturn:
            self.prepare_to_enrich_errors(ti.sentry_integration)
            with sentry_sdk.push_scope():
                try:
                    self.add_tagging(context["dag_run"], ti)
                    self.add_breadcrumbs(ti)
                    return run(ti, context, log)
                except Exception as e:
                    sentry_sdk.capture_exception(e)
                    raise

        return wrapped_run

    def flush(self):
        """Flush any buffered events to Sentry."""
        sentry_sdk.flush()
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/execution_time/sentry/configured.py",
"license": "Apache License 2.0",
"lines": 128,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/src/airflow/sdk/execution_time/sentry/noop.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
if TYPE_CHECKING:
from structlog.typing import FilteringBoundLogger as Logger
from airflow.sdk import Context
from airflow.sdk.execution_time.comms import ToSupervisor
from airflow.sdk.execution_time.task_runner import RuntimeTaskInstance
from airflow.sdk.types import DagRunProtocol, RuntimeTaskInstanceProtocol, TaskInstanceState
RunReturn = tuple[TaskInstanceState, ToSupervisor | None, BaseException | None]
class Run(Protocol):
def __call__(self, ti: RuntimeTaskInstance, context: Context, log: Logger) -> RunReturn: ...
class NoopSentry:
    """Blank class for Sentry; used when the SDK is unavailable or Sentry is disabled."""

    def add_tagging(self, dag_run: DagRunProtocol, task_instance: RuntimeTaskInstanceProtocol) -> None:
        """Blank function for tagging."""

    def add_breadcrumbs(self, task_instance: RuntimeTaskInstanceProtocol) -> None:
        """Blank function for breadcrumbs."""

    def enrich_errors(self, run: Run) -> Run:
        """Return *run* unchanged (the configured variant wraps it with Sentry reporting)."""
        return run

    def flush(self) -> None:
        """Blank function for flushing errors."""
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/execution_time/sentry/noop.py",
"license": "Apache License 2.0",
"lines": 39,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:task-sdk/tests/task_sdk/execution_time/test_sentry.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
import importlib
import sys
import types
from unittest import mock
import pytest
import uuid6
from airflow.providers.standard.operators.python import PythonOperator
from airflow.sdk._shared.module_loading import import_string
from airflow.sdk._shared.timezones import timezone
from airflow.sdk.api.datamodels._generated import DagRun, DagRunState, DagRunType, TaskInstanceState
from airflow.sdk.execution_time.comms import GetTaskBreadcrumbs, TaskBreadcrumbsResult
from airflow.sdk.execution_time.task_runner import RuntimeTaskInstance
from tests_common.test_utils.config import conf_vars
# Shared fixture values for the dag run / task instance under test.
LOGICAL_DATE = timezone.utcnow()
SCHEDULE_INTERVAL = datetime.timedelta(days=1)
DATA_INTERVAL = (LOGICAL_DATE, LOGICAL_DATE + SCHEDULE_INTERVAL)

DAG_ID = "test_dag"
TASK_ID = "test_task"
RUN_ID = "test_run"
OPERATOR = "PythonOperator"
TRY_NUMBER = 0
STATE = TaskInstanceState.SUCCESS
# Shape of one breadcrumb entry as fed to sentry_sdk.add_breadcrumb.
TASK_DATA = {
    "task_id": TASK_ID,
    "state": STATE,
    "operator": OPERATOR,
    "duration": None,
}
def before_send(_):
    """No-op ``before_send`` hook referenced by dotted path in the test config."""
    pass
class CustomIntegration:
    """
    Integration object to use in tests.

    All instances of this class are equal to each other.
    """

    def __hash__(self):  # Implemented to satisfy Ruff.
        return 0

    def __eq__(self, other):
        # Equal by type only, so mock-call comparison matches any instance.
        return type(self) is type(other)
class CustomTransport:
    """Marker transport class referenced by dotted path in the test config."""

    pass
def is_configured(obj):
    """Return True if *obj* is the configured (non-noop) Sentry implementation."""
    from airflow.sdk.execution_time.sentry.configured import ConfiguredSentry

    return isinstance(obj, ConfiguredSentry)
class TestSentryHook:
    """Exercise ConfiguredSentry against a fully mocked ``sentry_sdk`` module."""

    @pytest.fixture
    def dag_run(self):
        # Minimal DagRun carrying the attributes read by add_tagging.
        return DagRun.model_construct(
            dag_id=DAG_ID,
            run_id=RUN_ID,
            logical_date=LOGICAL_DATE,
            data_interval_start=DATA_INTERVAL[0],
            data_interval_end=DATA_INTERVAL[1],
            run_after=max(DATA_INTERVAL),
            start_date=max(DATA_INTERVAL),
            run_type=DagRunType.MANUAL,
            state=DagRunState.RUNNING,
            consumed_asset_events=[],
        )

    @pytest.fixture
    def task_instance(self, dag_run):
        ti_date = timezone.utcnow()
        return RuntimeTaskInstance.model_construct(
            id=uuid6.uuid7(),
            task_id=TASK_ID,
            dag_id=dag_run.dag_id,
            run_id=dag_run.run_id,
            try_number=TRY_NUMBER,
            dag_version_id=uuid6.uuid7(),
            task=PythonOperator(task_id=TASK_ID, python_callable=bool),
            bundle_instance=mock.Mock(),
            start_date=ti_date,
            end_date=ti_date,
            state=STATE,
        )

    @pytest.fixture(scope="class", autouse=True)
    def mock_sentry_sdk(self):
        # Install a fake sentry_sdk into sys.modules so importing the sentry
        # module never requires the real SDK to be installed.
        sentry_sdk_integrations_logging = types.ModuleType("sentry_sdk.integrations.logging")
        sentry_sdk_integrations_logging.ignore_logger = mock.MagicMock()
        sentry_sdk = types.ModuleType("sentry_sdk")
        sentry_sdk.init = mock.MagicMock()
        sentry_sdk.integrations = mock.Mock(logging=sentry_sdk_integrations_logging)
        sentry_sdk.configure_scope = mock.MagicMock()
        sentry_sdk.add_breadcrumb = mock.MagicMock()
        sys.modules["sentry_sdk"] = sentry_sdk
        sys.modules["sentry_sdk.integrations.logging"] = sentry_sdk_integrations_logging
        yield sentry_sdk
        del sys.modules["sentry_sdk"]
        del sys.modules["sentry_sdk.integrations.logging"]

    @pytest.fixture(autouse=True)
    def remove_mock_sentry_sdk(self, mock_sentry_sdk):
        # The module mock above is class-scoped; reset its call records per test.
        yield
        mock_sentry_sdk.integrations.logging.ignore_logger.reset_mock()
        mock_sentry_sdk.init.reset_mock()
        mock_sentry_sdk.configure_scope.reset_mock()
        mock_sentry_sdk.add_breadcrumb.reset_mock()

    @pytest.fixture
    def sentry(self, mock_sentry_sdk):
        # sentry_on + before_send config; reload so the module re-reads the config.
        with conf_vars(
            {
                ("sentry", "sentry_on"): "True",
                ("sentry", "default_integrations"): "False",
                ("sentry", "before_send"): "task_sdk.execution_time.test_sentry.before_send",
            },
        ):
            from airflow.sdk.execution_time import sentry

            importlib.reload(sentry)
            yield sentry.Sentry
        importlib.reload(sentry)

    @pytest.fixture
    def sentry_custom_transport(self, mock_sentry_sdk):
        # sentry_on + transport config; reload so the module re-reads the config.
        with conf_vars(
            {
                ("sentry", "sentry_on"): "True",
                ("sentry", "default_integrations"): "False",
                ("sentry", "transport"): "task_sdk.execution_time.test_sentry.CustomTransport",
            },
        ):
            from airflow.sdk.execution_time import sentry

            importlib.reload(sentry)
            yield sentry.Sentry
        importlib.reload(sentry)

    @pytest.fixture
    def sentry_minimum(self, mock_sentry_sdk):
        """
        Minimum sentry config
        """
        with conf_vars({("sentry", "sentry_on"): "True"}):
            from airflow.sdk.execution_time import sentry

            importlib.reload(sentry)
            yield sentry.Sentry
        importlib.reload(sentry)

    def test_prepare_to_enrich_errors(self, mock_sentry_sdk, sentry):
        assert is_configured(sentry)
        sentry.prepare_to_enrich_errors(executor_integration="")
        assert mock_sentry_sdk.integrations.logging.ignore_logger.mock_calls == [mock.call("airflow.task")]
        assert mock_sentry_sdk.init.mock_calls == [
            mock.call(
                integrations=[],
                default_integrations=False,
                before_send="task_sdk.execution_time.test_sentry.before_send",
            ),
        ]

    def test_prepare_to_enrich_errors_with_executor_integration(self, mock_sentry_sdk, sentry):
        assert is_configured(sentry)
        executor_integration = "task_sdk.execution_time.test_sentry.CustomIntegration"
        sentry.prepare_to_enrich_errors(executor_integration)
        assert mock_sentry_sdk.integrations.logging.ignore_logger.mock_calls == [mock.call("airflow.task")]
        assert mock_sentry_sdk.init.mock_calls == [
            mock.call(
                integrations=[import_string("task_sdk.execution_time.test_sentry.CustomIntegration")()],
                default_integrations=False,
                before_send="task_sdk.execution_time.test_sentry.before_send",
            ),
        ]

    def test_add_tagging(self, mock_sentry_sdk, sentry, dag_run, task_instance):
        """
        Test adding tags.
        """
        sentry.add_tagging(dag_run=dag_run, task_instance=task_instance)
        # Order matters: TI tags, then dag-run tags, then the operator name.
        assert mock_sentry_sdk.configure_scope.mock_calls == [
            mock.call.__call__(),
            mock.call.__call__().__enter__(),
            mock.call.__call__().__enter__().set_tag("task_id", TASK_ID),
            mock.call.__call__().__enter__().set_tag("dag_id", DAG_ID),
            mock.call.__call__().__enter__().set_tag("try_number", TRY_NUMBER),
            mock.call.__call__().__enter__().set_tag("data_interval_start", DATA_INTERVAL[0]),
            mock.call.__call__().__enter__().set_tag("data_interval_end", DATA_INTERVAL[1]),
            mock.call.__call__().__enter__().set_tag("logical_date", LOGICAL_DATE),
            mock.call.__call__().__enter__().set_tag("operator", OPERATOR),
            mock.call.__call__().__exit__(None, None, None),
        ]

    def test_add_breadcrumbs(self, mock_supervisor_comms, mock_sentry_sdk, sentry, task_instance):
        """
        Test adding breadcrumbs.
        """
        mock_supervisor_comms.send.return_value = TaskBreadcrumbsResult.model_construct(
            breadcrumbs=[TASK_DATA],
        )
        sentry.add_breadcrumbs(task_instance=task_instance)
        assert mock_sentry_sdk.add_breadcrumb.mock_calls == [
            mock.call(category="completed_tasks", data=TASK_DATA, level="info"),
        ]
        assert mock_supervisor_comms.send.mock_calls == [
            mock.call(GetTaskBreadcrumbs(dag_id=DAG_ID, run_id=RUN_ID)),
        ]

    def test_custom_transport(self, mock_sentry_sdk, sentry_custom_transport):
        """
        Test transport gets passed to the sentry SDK
        """
        assert is_configured(sentry_custom_transport)
        sentry_custom_transport.prepare_to_enrich_errors(executor_integration="")
        assert mock_sentry_sdk.integrations.logging.ignore_logger.mock_calls == [mock.call("airflow.task")]
        assert mock_sentry_sdk.init.mock_calls == [
            mock.call(
                integrations=[],
                default_integrations=False,
                transport="task_sdk.execution_time.test_sentry.CustomTransport",
            ),
        ]

    def test_minimum_config(self, mock_sentry_sdk, sentry_minimum):
        """
        Test before_send doesn't raise an exception when not set
        """
        assert is_configured(sentry_minimum)
        sentry_minimum.prepare_to_enrich_errors(executor_integration="")
        assert mock_sentry_sdk.integrations.logging.ignore_logger.mock_calls == [mock.call("airflow.task")]
        assert mock_sentry_sdk.init.mock_calls == [mock.call(integrations=[])]
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/tests/task_sdk/execution_time/test_sentry.py",
"license": "Apache License 2.0",
"lines": 227,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/api_fastapi/common/db/task_instances.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from sqlalchemy.orm import joinedload
from sqlalchemy.orm.interfaces import LoaderOption
from airflow.models import Base
from airflow.models.dag_version import DagVersion
from airflow.models.dagrun import DagRun
from airflow.models.taskinstance import TaskInstance
def eager_load_TI_and_TIH_for_validation(orm_model: Base | None = None) -> tuple[LoaderOption, ...]:
"""Construct the eager loading options necessary for both TaskInstanceResponse and TaskInstanceHistoryResponse objects."""
if orm_model is None:
orm_model = TaskInstance
options: tuple[LoaderOption, ...] = (
joinedload(orm_model.dag_version).joinedload(DagVersion.bundle),
joinedload(orm_model.dag_run).options(joinedload(DagRun.dag_model)),
)
if orm_model is TaskInstance:
options += (joinedload(orm_model.task_instance_note),)
return options
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/common/db/task_instances.py",
"license": "Apache License 2.0",
"lines": 34,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:helm-tests/tests/helm_tests/dagprocessor/test_labels_deployment.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import jmespath
from chart_utils.helm_template_generator import render_chart
class TestDagProcessorDeployment:
"""Tests dag-processor deployment labels."""
AIRFLOW_VERSION = "3.0.0"
TEMPLATE_FILE = "templates/dag-processor/dag-processor-deployment.yaml"
def test_should_add_global_labels(self):
"""Test adding only .Values.labels."""
docs = render_chart(
values={
"airflowVersion": self.AIRFLOW_VERSION,
"labels": {"test_global_label": "test_global_label_value"},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_global_label" in jmespath.search("spec.template.metadata.labels", docs[0])
assert (
jmespath.search("spec.template.metadata.labels", docs[0])["test_global_label"]
== "test_global_label_value"
)
def test_should_add_component_specific_labels(self):
"""Test adding only .Values.dagProcessor.labels."""
docs = render_chart(
values={
"airflowVersion": self.AIRFLOW_VERSION,
"dagProcessor": {
"labels": {"test_component_label": "test_component_label_value"},
},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_component_label" in jmespath.search("spec.template.metadata.labels", docs[0])
assert (
jmespath.search("spec.template.metadata.labels", docs[0])["test_component_label"]
== "test_component_label_value"
)
def test_should_merge_global_and_component_specific_labels(self):
"""Test adding both .Values.labels and .Values.dagProcessor.labels."""
docs = render_chart(
values={
"airflowVersion": self.AIRFLOW_VERSION,
"labels": {"test_global_label": "test_global_label_value"},
"dagProcessor": {
"labels": {"test_component_label": "test_component_label_value"},
},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_global_label" in jmespath.search("spec.template.metadata.labels", docs[0])
assert (
jmespath.search("spec.template.metadata.labels", docs[0])["test_global_label"]
== "test_global_label_value"
)
assert "test_component_label" in jmespath.search("spec.template.metadata.labels", docs[0])
assert (
jmespath.search("spec.template.metadata.labels", docs[0])["test_component_label"]
== "test_component_label_value"
)
def test_component_specific_labels_should_override_global_labels(self):
"""Test that component-specific labels take precedence over global labels with the same key."""
docs = render_chart(
values={
"airflowVersion": self.AIRFLOW_VERSION,
"labels": {"common_label": "global_value"},
"dagProcessor": {
"labels": {"common_label": "component_value"},
},
},
show_only=[self.TEMPLATE_FILE],
)
assert "common_label" in jmespath.search("spec.template.metadata.labels", docs[0])
assert jmespath.search("spec.template.metadata.labels", docs[0])["common_label"] == "component_value"
| {
"repo_id": "apache/airflow",
"file_path": "helm-tests/tests/helm_tests/dagprocessor/test_labels_deployment.py",
"license": "Apache License 2.0",
"lines": 89,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:helm-tests/tests/helm_tests/dagprocessor/test_labels_service_account.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import jmespath
from chart_utils.helm_template_generator import render_chart
class TestDagProcessorServiceAccount:
"""Tests dag-processor service account labels."""
AIRFLOW_VERSION = "3.0.0"
TEMPLATE_FILE = "templates/dag-processor/dag-processor-serviceaccount.yaml"
def test_should_add_global_labels(self):
"""Test adding only .Values.labels."""
docs = render_chart(
values={
"airflowVersion": self.AIRFLOW_VERSION,
"labels": {"test_global_label": "test_global_label_value"},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_global_label" in jmespath.search("metadata.labels", docs[0])
assert jmespath.search("metadata.labels", docs[0])["test_global_label"] == "test_global_label_value"
def test_should_add_component_specific_labels(self):
"""Test adding only .Values.dagProcessor.labels."""
docs = render_chart(
values={
"airflowVersion": self.AIRFLOW_VERSION,
"dagProcessor": {
"labels": {"test_component_label": "test_component_label_value"},
},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_component_label" in jmespath.search("metadata.labels", docs[0])
assert (
jmespath.search("metadata.labels", docs[0])["test_component_label"]
== "test_component_label_value"
)
def test_should_merge_global_and_component_specific_labels(self):
"""Test adding both .Values.labels and .Values.dagProcessor.labels."""
docs = render_chart(
values={
"airflowVersion": self.AIRFLOW_VERSION,
"labels": {"test_global_label": "test_global_label_value"},
"dagProcessor": {
"labels": {"test_component_label": "test_component_label_value"},
},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_global_label" in jmespath.search("metadata.labels", docs[0])
assert jmespath.search("metadata.labels", docs[0])["test_global_label"] == "test_global_label_value"
assert "test_component_label" in jmespath.search("metadata.labels", docs[0])
assert (
jmespath.search("metadata.labels", docs[0])["test_component_label"]
== "test_component_label_value"
)
def test_component_specific_labels_should_override_global_labels(self):
"""Test that component-specific labels take precedence over global labels with the same key."""
docs = render_chart(
values={
"airflowVersion": self.AIRFLOW_VERSION,
"labels": {"common_label": "global_value"},
"dagProcessor": {
"labels": {"common_label": "component_value"},
},
},
show_only=[self.TEMPLATE_FILE],
)
assert "common_label" in jmespath.search("metadata.labels", docs[0])
assert jmespath.search("metadata.labels", docs[0])["common_label"] == "component_value"
| {
"repo_id": "apache/airflow",
"file_path": "helm-tests/tests/helm_tests/dagprocessor/test_labels_service_account.py",
"license": "Apache License 2.0",
"lines": 83,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:helm-tests/tests/helm_tests/redis/test_labels_networkpolicy.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import jmespath
from chart_utils.helm_template_generator import render_chart
class TestRedisNetworkPolicy:
"""Tests redis network policy labels."""
AIRFLOW_EXECUTOR = "CeleryExecutor"
TEMPLATE_FILE = "templates/redis/redis-networkpolicy.yaml"
def test_should_add_global_labels(self):
"""Test adding only .Values.labels."""
docs = render_chart(
values={
"executor": self.AIRFLOW_EXECUTOR,
"redis": {"enabled": True},
"networkPolicies": {"enabled": True},
"labels": {"test_global_label": "test_global_label_value"},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_global_label" in jmespath.search("metadata.labels", docs[0])
assert jmespath.search("metadata.labels", docs[0])["test_global_label"] == "test_global_label_value"
def test_should_add_component_specific_labels(self):
"""Test adding only .Values.redis.labels."""
docs = render_chart(
values={
"executor": self.AIRFLOW_EXECUTOR,
"redis": {
"enabled": True,
"labels": {"test_component_label": "test_component_label_value"},
},
"networkPolicies": {"enabled": True},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_component_label" in jmespath.search("metadata.labels", docs[0])
assert (
jmespath.search("metadata.labels", docs[0])["test_component_label"]
== "test_component_label_value"
)
def test_should_merge_global_and_component_specific_labels(self):
"""Test adding both .Values.labels and .Values.redis.labels."""
docs = render_chart(
values={
"executor": self.AIRFLOW_EXECUTOR,
"redis": {
"enabled": True,
"labels": {"test_component_label": "test_component_label_value"},
},
"networkPolicies": {"enabled": True},
"labels": {"test_global_label": "test_global_label_value"},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_global_label" in jmespath.search("metadata.labels", docs[0])
assert jmespath.search("metadata.labels", docs[0])["test_global_label"] == "test_global_label_value"
assert "test_component_label" in jmespath.search("metadata.labels", docs[0])
assert (
jmespath.search("metadata.labels", docs[0])["test_component_label"]
== "test_component_label_value"
)
def test_component_specific_labels_should_override_global_labels(self):
"""Test that component-specific labels take precedence over global labels with the same key."""
docs = render_chart(
values={
"executor": self.AIRFLOW_EXECUTOR,
"redis": {
"enabled": True,
"labels": {"common_label": "component_value"},
},
"networkPolicies": {"enabled": True},
"labels": {"common_label": "global_value"},
},
show_only=[self.TEMPLATE_FILE],
)
assert "common_label" in jmespath.search("metadata.labels", docs[0])
assert jmespath.search("metadata.labels", docs[0])["common_label"] == "component_value"
| {
"repo_id": "apache/airflow",
"file_path": "helm-tests/tests/helm_tests/redis/test_labels_networkpolicy.py",
"license": "Apache License 2.0",
"lines": 91,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:helm-tests/tests/helm_tests/redis/test_labels_service.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import jmespath
from chart_utils.helm_template_generator import render_chart
class TestRedisService:
"""Tests redis service labels."""
AIRFLOW_EXECUTOR = "CeleryExecutor"
TEMPLATE_FILE = "templates/redis/redis-service.yaml"
def test_should_add_global_labels(self):
"""Test adding only .Values.labels."""
docs = render_chart(
values={
"executor": self.AIRFLOW_EXECUTOR,
"redis": {"enabled": True},
"labels": {"test_global_label": "test_global_label_value"},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_global_label" in jmespath.search("metadata.labels", docs[0])
assert jmespath.search("metadata.labels", docs[0])["test_global_label"] == "test_global_label_value"
def test_should_add_component_specific_labels(self):
"""Test adding only .Values.redis.labels."""
docs = render_chart(
values={
"executor": self.AIRFLOW_EXECUTOR,
"redis": {
"enabled": True,
"labels": {"test_component_label": "test_component_label_value"},
},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_component_label" in jmespath.search("metadata.labels", docs[0])
assert (
jmespath.search("metadata.labels", docs[0])["test_component_label"]
== "test_component_label_value"
)
def test_should_merge_global_and_component_specific_labels(self):
"""Test adding both .Values.labels and .Values.redis.labels."""
docs = render_chart(
values={
"executor": self.AIRFLOW_EXECUTOR,
"redis": {
"enabled": True,
"labels": {"test_component_label": "test_component_label_value"},
},
"labels": {"test_global_label": "test_global_label_value"},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_global_label" in jmespath.search("metadata.labels", docs[0])
assert jmespath.search("metadata.labels", docs[0])["test_global_label"] == "test_global_label_value"
assert "test_component_label" in jmespath.search("metadata.labels", docs[0])
assert (
jmespath.search("metadata.labels", docs[0])["test_component_label"]
== "test_component_label_value"
)
def test_component_specific_labels_should_override_global_labels(self):
"""Test that component-specific labels take precedence over global labels with the same key."""
docs = render_chart(
values={
"executor": self.AIRFLOW_EXECUTOR,
"redis": {
"enabled": True,
"labels": {"common_label": "component_value"},
},
"labels": {"common_label": "global_value"},
},
show_only=[self.TEMPLATE_FILE],
)
assert "common_label" in jmespath.search("metadata.labels", docs[0])
assert jmespath.search("metadata.labels", docs[0])["common_label"] == "component_value"
| {
"repo_id": "apache/airflow",
"file_path": "helm-tests/tests/helm_tests/redis/test_labels_service.py",
"license": "Apache License 2.0",
"lines": 87,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:helm-tests/tests/helm_tests/redis/test_labels_serviceaccount.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import jmespath
from chart_utils.helm_template_generator import render_chart
class TestRedisServiceAccount:
"""Tests redis service account labels."""
AIRFLOW_EXECUTOR = "CeleryExecutor"
TEMPLATE_FILE = "templates/redis/redis-serviceaccount.yaml"
def test_should_add_global_labels(self):
"""Test adding only .Values.labels."""
docs = render_chart(
values={
"executor": self.AIRFLOW_EXECUTOR,
"redis": {
"enabled": True,
"serviceAccount": {"create": True},
},
"labels": {"test_global_label": "test_global_label_value"},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_global_label" in jmespath.search("metadata.labels", docs[0])
assert jmespath.search("metadata.labels", docs[0])["test_global_label"] == "test_global_label_value"
def test_should_add_component_specific_labels(self):
"""Test adding only .Values.redis.labels."""
docs = render_chart(
values={
"executor": self.AIRFLOW_EXECUTOR,
"redis": {
"enabled": True,
"serviceAccount": {"create": True},
"labels": {"test_component_label": "test_component_label_value"},
},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_component_label" in jmespath.search("metadata.labels", docs[0])
assert (
jmespath.search("metadata.labels", docs[0])["test_component_label"]
== "test_component_label_value"
)
def test_should_merge_global_and_component_specific_labels(self):
"""Test adding both .Values.labels and .Values.redis.labels."""
docs = render_chart(
values={
"executor": self.AIRFLOW_EXECUTOR,
"redis": {
"enabled": True,
"serviceAccount": {"create": True},
"labels": {"test_component_label": "test_component_label_value"},
},
"labels": {"test_global_label": "test_global_label_value"},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_global_label" in jmespath.search("metadata.labels", docs[0])
assert jmespath.search("metadata.labels", docs[0])["test_global_label"] == "test_global_label_value"
assert "test_component_label" in jmespath.search("metadata.labels", docs[0])
assert (
jmespath.search("metadata.labels", docs[0])["test_component_label"]
== "test_component_label_value"
)
def test_component_specific_labels_should_override_global_labels(self):
"""Test that component-specific labels take precedence over global labels with the same key."""
docs = render_chart(
values={
"executor": self.AIRFLOW_EXECUTOR,
"redis": {
"enabled": True,
"serviceAccount": {"create": True},
"labels": {"common_label": "component_value"},
},
"labels": {"common_label": "global_value"},
},
show_only=[self.TEMPLATE_FILE],
)
assert "common_label" in jmespath.search("metadata.labels", docs[0])
assert jmespath.search("metadata.labels", docs[0])["common_label"] == "component_value"
| {
"repo_id": "apache/airflow",
"file_path": "helm-tests/tests/helm_tests/redis/test_labels_serviceaccount.py",
"license": "Apache License 2.0",
"lines": 93,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:helm-tests/tests/helm_tests/redis/test_labels_statefulset.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import jmespath
from chart_utils.helm_template_generator import render_chart
class TestRedisStatefulSet:
"""Tests redis statefulset labels."""
AIRFLOW_EXECUTOR = "CeleryExecutor"
TEMPLATE_FILE = "templates/redis/redis-statefulset.yaml"
def test_should_add_global_labels_to_metadata(self):
"""Test adding only .Values.labels to metadata.labels."""
docs = render_chart(
values={
"executor": self.AIRFLOW_EXECUTOR,
"redis": {"enabled": True},
"labels": {"test_global_label": "test_global_label_value"},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_global_label" in jmespath.search("metadata.labels", docs[0])
assert jmespath.search("metadata.labels", docs[0])["test_global_label"] == "test_global_label_value"
def test_should_add_global_labels_to_pod_template(self):
"""Test adding only .Values.labels to spec.template.metadata.labels."""
docs = render_chart(
values={
"executor": self.AIRFLOW_EXECUTOR,
"redis": {"enabled": True},
"labels": {"test_global_label": "test_global_label_value"},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_global_label" in jmespath.search("spec.template.metadata.labels", docs[0])
assert (
jmespath.search("spec.template.metadata.labels", docs[0])["test_global_label"]
== "test_global_label_value"
)
def test_should_add_component_specific_labels_to_pod_template(self):
"""Test adding only .Values.redis.labels to spec.template.metadata.labels."""
docs = render_chart(
values={
"executor": self.AIRFLOW_EXECUTOR,
"redis": {
"enabled": True,
"labels": {"test_component_label": "test_component_label_value"},
},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_component_label" in jmespath.search("spec.template.metadata.labels", docs[0])
assert (
jmespath.search("spec.template.metadata.labels", docs[0])["test_component_label"]
== "test_component_label_value"
)
def test_should_merge_global_and_component_specific_labels_in_pod_template(self):
"""Test adding both .Values.labels and .Values.redis.labels to spec.template.metadata.labels."""
docs = render_chart(
values={
"executor": self.AIRFLOW_EXECUTOR,
"redis": {
"enabled": True,
"labels": {"test_component_label": "test_component_label_value"},
},
"labels": {"test_global_label": "test_global_label_value"},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_global_label" in jmespath.search("spec.template.metadata.labels", docs[0])
assert (
jmespath.search("spec.template.metadata.labels", docs[0])["test_global_label"]
== "test_global_label_value"
)
assert "test_component_label" in jmespath.search("spec.template.metadata.labels", docs[0])
assert (
jmespath.search("spec.template.metadata.labels", docs[0])["test_component_label"]
== "test_component_label_value"
)
def test_component_specific_labels_should_override_global_labels(self):
"""Test that component-specific labels take precedence over global labels with the same key."""
docs = render_chart(
values={
"executor": self.AIRFLOW_EXECUTOR,
"redis": {
"enabled": True,
"labels": {"common_label": "component_value"},
},
"labels": {"common_label": "global_value"},
},
show_only=[self.TEMPLATE_FILE],
)
assert "common_label" in jmespath.search("spec.template.metadata.labels", docs[0])
assert jmespath.search("spec.template.metadata.labels", docs[0])["common_label"] == "component_value"
| {
"repo_id": "apache/airflow",
"file_path": "helm-tests/tests/helm_tests/redis/test_labels_statefulset.py",
"license": "Apache License 2.0",
"lines": 105,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:helm-tests/tests/helm_tests/statsd/test_labels_deployment.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import jmespath
from chart_utils.helm_template_generator import render_chart
class TestStatsdDeployment:
"""Tests statsd deployment labels."""
TEMPLATE_FILE = "templates/statsd/statsd-deployment.yaml"
def test_should_add_global_labels_to_metadata(self):
"""Test adding only .Values.labels to metadata.labels."""
docs = render_chart(
values={
"statsd": {"enabled": True},
"labels": {"test_global_label": "test_global_label_value"},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_global_label" in jmespath.search("metadata.labels", docs[0])
assert jmespath.search("metadata.labels", docs[0])["test_global_label"] == "test_global_label_value"
def test_should_add_global_labels_to_pod_template(self):
"""Test adding only .Values.labels to spec.template.metadata.labels."""
docs = render_chart(
values={
"statsd": {"enabled": True},
"labels": {"test_global_label": "test_global_label_value"},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_global_label" in jmespath.search("spec.template.metadata.labels", docs[0])
assert (
jmespath.search("spec.template.metadata.labels", docs[0])["test_global_label"]
== "test_global_label_value"
)
def test_should_add_component_specific_labels_to_pod_template(self):
"""Test adding only .Values.statsd.labels to spec.template.metadata.labels."""
docs = render_chart(
values={
"statsd": {
"enabled": True,
"labels": {"test_component_label": "test_component_label_value"},
},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_component_label" in jmespath.search("spec.template.metadata.labels", docs[0])
assert (
jmespath.search("spec.template.metadata.labels", docs[0])["test_component_label"]
== "test_component_label_value"
)
def test_should_merge_global_and_component_specific_labels_in_pod_template(self):
"""Test adding both .Values.labels and .Values.statsd.labels to spec.template.metadata.labels."""
docs = render_chart(
values={
"statsd": {
"enabled": True,
"labels": {"test_component_label": "test_component_label_value"},
},
"labels": {"test_global_label": "test_global_label_value"},
},
show_only=[self.TEMPLATE_FILE],
)
assert "test_global_label" in jmespath.search("spec.template.metadata.labels", docs[0])
assert (
jmespath.search("spec.template.metadata.labels", docs[0])["test_global_label"]
== "test_global_label_value"
)
assert "test_component_label" in jmespath.search("spec.template.metadata.labels", docs[0])
assert (
jmespath.search("spec.template.metadata.labels", docs[0])["test_component_label"]
== "test_component_label_value"
)
def test_component_specific_labels_should_override_global_labels(self):
"""Test that component-specific labels take precedence over global labels with the same key."""
docs = render_chart(
values={
"statsd": {
"enabled": True,
"labels": {"common_label": "component_value"},
},
"labels": {"common_label": "global_value"},
},
show_only=[self.TEMPLATE_FILE],
)
assert "common_label" in jmespath.search("spec.template.metadata.labels", docs[0])
assert jmespath.search("spec.template.metadata.labels", docs[0])["common_label"] == "component_value"
| {
"repo_id": "apache/airflow",
"file_path": "helm-tests/tests/helm_tests/statsd/test_labels_deployment.py",
"license": "Apache License 2.0",
"lines": 99,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:helm-tests/tests/helm_tests/statsd/test_labels_ingress.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import jmespath
from chart_utils.helm_template_generator import render_chart
class TestStatsdIngress:
    """Tests statsd ingress labels."""

    TEMPLATE_FILE = "templates/statsd/statsd-ingress.yaml"

    def _render_labels(self, values):
        """Render the statsd ingress template and return its metadata labels."""
        docs = render_chart(values=values, show_only=[self.TEMPLATE_FILE])
        return jmespath.search("metadata.labels", docs[0])

    def test_should_add_global_labels(self):
        """Test adding only .Values.labels."""
        labels = self._render_labels(
            {
                "statsd": {"enabled": True},
                "ingress": {"statsd": {"enabled": True}},
                "labels": {"test_global_label": "test_global_label_value"},
            }
        )
        assert "test_global_label" in labels
        assert labels["test_global_label"] == "test_global_label_value"

    def test_should_add_component_specific_labels(self):
        """Test adding only .Values.statsd.labels."""
        labels = self._render_labels(
            {
                "statsd": {
                    "enabled": True,
                    "labels": {"test_component_label": "test_component_label_value"},
                },
                "ingress": {"statsd": {"enabled": True}},
            }
        )
        assert "test_component_label" in labels
        assert labels["test_component_label"] == "test_component_label_value"

    def test_should_merge_global_and_component_specific_labels(self):
        """Test adding both .Values.labels and .Values.statsd.labels."""
        labels = self._render_labels(
            {
                "statsd": {
                    "enabled": True,
                    "labels": {"test_component_label": "test_component_label_value"},
                },
                "ingress": {"statsd": {"enabled": True}},
                "labels": {"test_global_label": "test_global_label_value"},
            }
        )
        assert "test_global_label" in labels
        assert labels["test_global_label"] == "test_global_label_value"
        assert "test_component_label" in labels
        assert labels["test_component_label"] == "test_component_label_value"

    def test_component_specific_labels_should_override_global_labels(self):
        """Test that component-specific labels take precedence over global labels with the same key."""
        labels = self._render_labels(
            {
                "statsd": {
                    "enabled": True,
                    "labels": {"common_label": "component_value"},
                },
                "ingress": {"statsd": {"enabled": True}},
                "labels": {"common_label": "global_value"},
            }
        )
        assert "common_label" in labels
        assert labels["common_label"] == "component_value"
| {
"repo_id": "apache/airflow",
"file_path": "helm-tests/tests/helm_tests/statsd/test_labels_ingress.py",
"license": "Apache License 2.0",
"lines": 86,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:helm-tests/tests/helm_tests/statsd/test_labels_networkpolicy.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import jmespath
from chart_utils.helm_template_generator import render_chart
class TestStatsdNetworkPolicy:
    """Tests statsd network policy labels."""

    TEMPLATE_FILE = "templates/statsd/statsd-networkpolicy.yaml"

    def _render_labels(self, values):
        """Render the statsd network policy template and return its metadata labels."""
        docs = render_chart(values=values, show_only=[self.TEMPLATE_FILE])
        return jmespath.search("metadata.labels", docs[0])

    def test_should_add_global_labels(self):
        """Test adding only .Values.labels."""
        labels = self._render_labels(
            {
                "statsd": {"enabled": True},
                "networkPolicies": {"enabled": True},
                "labels": {"test_global_label": "test_global_label_value"},
            }
        )
        assert "test_global_label" in labels
        assert labels["test_global_label"] == "test_global_label_value"

    def test_should_add_component_specific_labels(self):
        """Test adding only .Values.statsd.labels."""
        labels = self._render_labels(
            {
                "statsd": {
                    "enabled": True,
                    "labels": {"test_component_label": "test_component_label_value"},
                },
                "networkPolicies": {"enabled": True},
            }
        )
        assert "test_component_label" in labels
        assert labels["test_component_label"] == "test_component_label_value"

    def test_should_merge_global_and_component_specific_labels(self):
        """Test adding both .Values.labels and .Values.statsd.labels."""
        labels = self._render_labels(
            {
                "statsd": {
                    "enabled": True,
                    "labels": {"test_component_label": "test_component_label_value"},
                },
                "networkPolicies": {"enabled": True},
                "labels": {"test_global_label": "test_global_label_value"},
            }
        )
        assert "test_global_label" in labels
        assert labels["test_global_label"] == "test_global_label_value"
        assert "test_component_label" in labels
        assert labels["test_component_label"] == "test_component_label_value"

    def test_component_specific_labels_should_override_global_labels(self):
        """Test that component-specific labels take precedence over global labels with the same key."""
        labels = self._render_labels(
            {
                "statsd": {
                    "enabled": True,
                    "labels": {"common_label": "component_value"},
                },
                "networkPolicies": {"enabled": True},
                "labels": {"common_label": "global_value"},
            }
        )
        assert "common_label" in labels
        assert labels["common_label"] == "component_value"
| {
"repo_id": "apache/airflow",
"file_path": "helm-tests/tests/helm_tests/statsd/test_labels_networkpolicy.py",
"license": "Apache License 2.0",
"lines": 86,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:helm-tests/tests/helm_tests/statsd/test_labels_service.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import jmespath
from chart_utils.helm_template_generator import render_chart
class TestStatsdService:
    """Tests statsd service labels."""

    TEMPLATE_FILE = "templates/statsd/statsd-service.yaml"

    def _render_labels(self, values):
        """Render the statsd service template and return its metadata labels."""
        docs = render_chart(values=values, show_only=[self.TEMPLATE_FILE])
        return jmespath.search("metadata.labels", docs[0])

    def test_should_add_global_labels(self):
        """Test adding only .Values.labels."""
        labels = self._render_labels(
            {
                "statsd": {"enabled": True},
                "labels": {"test_global_label": "test_global_label_value"},
            }
        )
        assert "test_global_label" in labels
        assert labels["test_global_label"] == "test_global_label_value"

    def test_should_add_component_specific_labels(self):
        """Test adding only .Values.statsd.labels."""
        labels = self._render_labels(
            {
                "statsd": {
                    "enabled": True,
                    "labels": {"test_component_label": "test_component_label_value"},
                },
            }
        )
        assert "test_component_label" in labels
        assert labels["test_component_label"] == "test_component_label_value"

    def test_should_merge_global_and_component_specific_labels(self):
        """Test adding both .Values.labels and .Values.statsd.labels."""
        labels = self._render_labels(
            {
                "statsd": {
                    "enabled": True,
                    "labels": {"test_component_label": "test_component_label_value"},
                },
                "labels": {"test_global_label": "test_global_label_value"},
            }
        )
        assert "test_global_label" in labels
        assert labels["test_global_label"] == "test_global_label_value"
        assert "test_component_label" in labels
        assert labels["test_component_label"] == "test_component_label_value"

    def test_component_specific_labels_should_override_global_labels(self):
        """Test that component-specific labels take precedence over global labels with the same key."""
        labels = self._render_labels(
            {
                "statsd": {
                    "enabled": True,
                    "labels": {"common_label": "component_value"},
                },
                "labels": {"common_label": "global_value"},
            }
        )
        assert "common_label" in labels
        assert labels["common_label"] == "component_value"
| {
"repo_id": "apache/airflow",
"file_path": "helm-tests/tests/helm_tests/statsd/test_labels_service.py",
"license": "Apache License 2.0",
"lines": 82,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/parameters.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from fastapi import Query
from airflow.configuration import conf
log = logging.getLogger(__name__)
def get_effective_limit(default: int = 100):
    """
    Return a FastAPI dependency that enforces API page limit rules.

    :param default: Default limit if not provided by client.
    """

    def _limit(
        limit: int = Query(
            default,
            ge=0,
        ),
    ) -> int:
        # Both settings are read on every request so that configuration
        # changes take effect without restarting the API server.
        max_val = conf.getint("api", "maximum_page_limit")
        fallback = conf.getint("api", "fallback_page_limit")
        if not limit:
            # A limit of 0 asks for the configured fallback page size.
            return fallback
        if limit <= max_val:
            return limit
        # Clamp oversized requests to the configured maximum.
        log.warning(
            "The limit param value %s passed in API exceeds the configured maximum page limit %s",
            limit,
            max_val,
        )
        return max_val

    return _limit
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/parameters.py",
"license": "Apache License 2.0",
"lines": 45,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/sorting.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from collections.abc import Mapping
from typing import TYPE_CHECKING, Any
from fastapi import HTTPException, status
from sqlalchemy import asc, desc
if TYPE_CHECKING:
from sqlalchemy.orm import InstrumentedAttribute
from sqlalchemy.sql.elements import ColumnElement
def build_ordering(
    order_by: str, *, allowed: Mapping[str, ColumnElement[Any]] | Mapping[str, InstrumentedAttribute[Any]]
) -> ColumnElement[Any]:
    """
    Build an SQLAlchemy ORDER BY expression from the `order_by` parameter.

    :param order_by: Public field name, optionally prefixed with "-" for descending.
    :param allowed: Map of public field to SQLAlchemy column/expression.
    :raises HTTPException: 400 when the field is not present in ``allowed``.
    """
    is_desc = order_by.startswith("-")
    # Strip exactly one leading "-". The previous ``lstrip("-")`` removed ALL
    # leading dashes, so a malformed value such as "--name" was silently
    # accepted as "-name"; with ``removeprefix`` it is now rejected below.
    key = order_by.removeprefix("-")
    if key not in allowed:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Ordering with '{order_by}' is disallowed or the attribute does not exist on the model",
        )
    col = allowed[key]
    return desc(col) if is_desc else asc(col)
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/sorting.py",
"license": "Apache License 2.0",
"lines": 41,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/fab/tests/unit/fab/auth_manager/api_fastapi/test_parameters.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from typing import TYPE_CHECKING
from unittest.mock import patch
if TYPE_CHECKING:
import pytest
from airflow.providers.fab.auth_manager.api_fastapi.parameters import get_effective_limit
class TestParametersDependency:
    """Unit tests for the ``get_effective_limit`` dependency factory."""

    @staticmethod
    def _set_page_limits(conf_mock, maximum: int, fallback: int) -> None:
        """Make the mocked ``conf.getint`` serve the given page-limit options."""
        options = {"maximum_page_limit": maximum, "fallback_page_limit": fallback}
        conf_mock.getint.side_effect = lambda section, option: options[option]

    @patch("airflow.providers.fab.auth_manager.api_fastapi.parameters.conf")
    def test_get_effective_limit_uses_fallback_when_zero(self, conf_mock):
        self._set_page_limits(conf_mock, maximum=100, fallback=25)
        limit_dep = get_effective_limit()
        assert limit_dep(limit=0) == 25
        conf_mock.getint.assert_any_call("api", "maximum_page_limit")
        conf_mock.getint.assert_any_call("api", "fallback_page_limit")

    @patch("airflow.providers.fab.auth_manager.api_fastapi.parameters.conf")
    def test_get_effective_limit_clamps_to_max_and_logs_warning(
        self, conf_mock, caplog: pytest.LogCaptureFixture
    ):
        self._set_page_limits(conf_mock, maximum=50, fallback=20)
        caplog.set_level(logging.WARNING, logger="airflow.providers.fab.auth_manager.api_fastapi.parameters")
        result = get_effective_limit()(limit=1000)
        assert result == 50
        assert any("exceeds the configured maximum page limit" in rec.getMessage() for rec in caplog.records)

    @patch("airflow.providers.fab.auth_manager.api_fastapi.parameters.conf")
    def test_get_effective_limit_returns_given_value_when_within_range(self, conf_mock):
        self._set_page_limits(conf_mock, maximum=200, fallback=10)
        assert get_effective_limit()(limit=150) == 150
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/tests/unit/fab/auth_manager/api_fastapi/test_parameters.py",
"license": "Apache License 2.0",
"lines": 55,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/fab/tests/unit/fab/auth_manager/api_fastapi/test_sorting.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from fastapi import HTTPException
from sqlalchemy import column, func
from sqlalchemy.dialects import postgresql
from sqlalchemy.sql.elements import ColumnElement
from airflow.providers.fab.auth_manager.api_fastapi.sorting import build_ordering
def _sql(expr) -> str:
    """Render *expr* as PostgreSQL SQL text for string-based assertions."""
    compiled = expr.compile(dialect=postgresql.dialect())
    return str(compiled)
def test_build_ordering_returns_asc_by_default():
    """A bare field name produces an ascending ORDER BY expression."""
    allowed = {"name": column("name"), "id": column("id")}
    expr = build_ordering("name", allowed=allowed)
    assert isinstance(expr, ColumnElement)
    rendered = _sql(expr)
    assert "name" in rendered
    assert "ASC" in rendered and "DESC" not in rendered
def test_build_ordering_desc_when_prefixed_with_dash():
    """A "-" prefix on the field name produces a descending expression."""
    allowed = {"name": column("name"), "id": column("id")}
    rendered = _sql(build_ordering("-id", allowed=allowed))
    assert "id" in rendered
    assert "DESC" in rendered
def test_build_ordering_supports_sql_expressions():
    """The allowed mapping may contain arbitrary SQL expressions (e.g., lower(name))."""
    rendered = _sql(build_ordering("-name_i", allowed={"name_i": func.lower(column("name"))}))
    assert "lower(" in rendered
    assert "DESC" in rendered
def test_build_ordering_raises_http_400_for_disallowed_key():
    """Fields missing from the allowed mapping are rejected with HTTP 400."""
    with pytest.raises(HTTPException) as ex:
        build_ordering("unknown", allowed={"name": column("name")})
    assert ex.value.status_code == 400
    detail = str(ex.value.detail)
    assert "disallowed" in detail or "does not exist" in detail
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/tests/unit/fab/auth_manager/api_fastapi/test_sorting.py",
"license": "Apache License 2.0",
"lines": 52,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-ctl/src/airflowctl/ctl/commands/dag_command.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import sys
from typing import Literal
import rich
from airflowctl.api.client import NEW_API_CLIENT, ClientKind, ServerResponseError, provide_api_client
from airflowctl.api.datamodels.generated import DAGPatchBody
from airflowctl.ctl.console_formatting import AirflowConsole
def update_dag_state(
    dag_id: str,
    operation: Literal["pause", "unpause"],
    api_client,
    output: str,
):
    """Update DAG state (pause/unpause)."""
    # is_paused is True only for the "pause" operation.
    patch_body = DAGPatchBody(is_paused=operation == "pause")
    try:
        response = api_client.dags.update(dag_id=dag_id, dag_body=patch_body)
    except ServerResponseError as e:
        rich.print(f"[red]Error while trying to {operation} Dag {dag_id}: {e}[/red]")
        sys.exit(1)
    result = response.model_dump()
    rich.print(f"[green]Dag {operation} successful {dag_id}[/green]")
    rich.print("[green]Further Dag details:[/green]")
    AirflowConsole().print_as(
        data=[result],
        output=output,
    )
    return result
@provide_api_client(kind=ClientKind.CLI)
def pause(args, api_client=NEW_API_CLIENT) -> dict:
    """
    Pause a DAG.

    :param args: Parsed CLI arguments; must provide ``dag_id`` and ``output``.
    :param api_client: API client injected by ``provide_api_client``.
    :return: The updated DAG details as a dict.
    """
    # Fixed: was annotated ``-> None`` although the response dict is returned
    # (the CLI tests assert on the returned value).
    return update_dag_state(
        dag_id=args.dag_id,
        operation="pause",
        api_client=api_client,
        output=args.output,
    )
@provide_api_client(kind=ClientKind.CLI)
def unpause(args, api_client=NEW_API_CLIENT) -> dict:
    """
    Unpause a DAG.

    :param args: Parsed CLI arguments; must provide ``dag_id`` and ``output``.
    :param api_client: API client injected by ``provide_api_client``.
    :return: The updated DAG details as a dict.
    """
    # Fixed: was annotated ``-> None`` although the response dict is returned
    # (the CLI tests assert on the returned value).
    return update_dag_state(
        dag_id=args.dag_id,
        operation="unpause",
        api_client=api_client,
        output=args.output,
    )
| {
"repo_id": "apache/airflow",
"file_path": "airflow-ctl/src/airflowctl/ctl/commands/dag_command.py",
"license": "Apache License 2.0",
"lines": 63,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-ctl/tests/airflow_ctl/ctl/commands/test_dag_command.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
import pytest
from airflowctl.api.client import ClientKind
from airflowctl.api.datamodels.generated import DAGResponse
from airflowctl.ctl import cli_parser
from airflowctl.ctl.commands import dag_command
class TestDagCommands:
    """Tests for the ``dags pause``/``dags unpause`` CLI commands."""

    parser = cli_parser.get_parser()
    dag_id = "test_dag"
    dag_display_name = "dag_display_name"

    # Fields shared by both canned responses; only ``is_paused`` differs.
    _COMMON_RESPONSE_FIELDS = {
        "dag_id": dag_id,
        "dag_display_name": dag_display_name,
        "last_parsed_time": datetime.datetime(2024, 12, 31, 23, 59, 59),
        "last_expired": datetime.datetime(2025, 1, 1, 0, 0, 0),
        "fileloc": "fileloc",
        "relative_fileloc": "relative_fileloc",
        "description": "description",
        "timetable_summary": "timetable_summary",
        "timetable_description": "timetable_description",
        "tags": [],
        "max_active_tasks": 1,
        "max_active_runs": 1,
        "max_consecutive_failed_dag_runs": 1,
        "has_task_concurrency_limits": True,
        "has_import_errors": True,
        "next_dagrun_logical_date": datetime.datetime(2025, 1, 1, 0, 0, 0),
        "next_dagrun_data_interval_start": datetime.datetime(2025, 1, 1, 0, 0, 0),
        "next_dagrun_data_interval_end": datetime.datetime(2025, 1, 1, 0, 0, 0),
        "next_dagrun_run_after": datetime.datetime(2025, 1, 1, 0, 0, 0),
        "owners": ["apache-airflow"],
        "file_token": "file_token",
        "bundle_name": "bundle_name",
        "is_stale": False,
    }
    dag_response_paused = DAGResponse(is_paused=False, **_COMMON_RESPONSE_FIELDS)
    dag_response_unpaused = DAGResponse(is_paused=True, **_COMMON_RESPONSE_FIELDS)

    def test_pause_dag(self, api_client_maker, monkeypatch):
        api_client = api_client_maker(
            path=f"/api/v2/dags/{self.dag_id}",
            response_json=self.dag_response_paused.model_dump(mode="json"),
            expected_http_status_code=200,
            kind=ClientKind.CLI,
        )
        assert self.dag_response_paused.is_paused is False
        args = self.parser.parse_args(["dags", "pause", self.dag_id])
        result = dag_command.pause(args, api_client=api_client)
        assert result["is_paused"] is False

    def test_pause_fail(self, api_client_maker, monkeypatch):
        api_client = api_client_maker(
            path=f"/api/v2/dags/{self.dag_id}",
            response_json={"detail": "DAG not found"},
            expected_http_status_code=404,
            kind=ClientKind.CLI,
        )
        args = self.parser.parse_args(["dags", "pause", self.dag_id])
        with pytest.raises(SystemExit):
            dag_command.pause(args, api_client=api_client)

    def test_unpause_dag(self, api_client_maker, monkeypatch):
        api_client = api_client_maker(
            path=f"/api/v2/dags/{self.dag_id}",
            response_json=self.dag_response_unpaused.model_dump(mode="json"),
            expected_http_status_code=200,
            kind=ClientKind.CLI,
        )
        assert self.dag_response_unpaused.is_paused is True
        args = self.parser.parse_args(["dags", "unpause", self.dag_id])
        result = dag_command.unpause(args, api_client=api_client)
        assert result["is_paused"] is True

    def test_unpause_fail(self, api_client_maker, monkeypatch):
        api_client = api_client_maker(
            path=f"/api/v2/dags/{self.dag_id}",
            response_json={"detail": "DAG not found"},
            expected_http_status_code=404,
            kind=ClientKind.CLI,
        )
        args = self.parser.parse_args(["dags", "unpause", self.dag_id])
        with pytest.raises(SystemExit):
            dag_command.unpause(args, api_client=api_client)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-ctl/tests/airflow_ctl/ctl/commands/test_dag_command.py",
"license": "Apache License 2.0",
"lines": 129,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:dev/breeze/tests/test_use_airflow_version.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import re
import pytest
from airflow_breeze.global_constants import GITHUB_REPO_BRANCH_PATTERN, PR_NUMBER_PATTERN
@pytest.mark.parametrize(
    ("value", "should_match_pr", "should_match_repo"),
    [
        pytest.param("57219", True, False, id="pr_number"),
        pytest.param("12345", True, False, id="another_pr_number"),
        pytest.param("jason810496/airflow:ci/breeze/compile-ui-assets", False, True, id="repo_branch"),
        pytest.param("apache/airflow:main", False, True, id="apache_repo"),
        pytest.param("2.7.3", False, False, id="version_number"),
        pytest.param("wheel", False, False, id="wheel"),
        pytest.param("sdist", False, False, id="sdist"),
        pytest.param("none", False, False, id="none"),
        pytest.param("57219abc", False, False, id="pr_with_chars"),
        pytest.param("abc57219", False, False, id="chars_with_number"),
    ],
)
def test_pr_number_pattern(value, should_match_pr, should_match_repo):
    """Test that PR number pattern correctly matches PR numbers only."""
    # Check both patterns with the same expectation logic.
    checks = [
        (PR_NUMBER_PATTERN, should_match_pr, "PR pattern"),
        (GITHUB_REPO_BRANCH_PATTERN, should_match_repo, "repo pattern"),
    ]
    for pattern, should_match, label in checks:
        match = re.match(pattern, value)
        if should_match:
            assert match is not None, f"Expected {value} to match {label}"
        else:
            assert match is None, f"Expected {value} to NOT match {label}"
| {
"repo_id": "apache/airflow",
"file_path": "dev/breeze/tests/test_use_airflow_version.py",
"license": "Apache License 2.0",
"lines": 47,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:helm-tests/tests/helm_tests/security/test_rbac_pod_launcher.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import jmespath
import pytest
from chart_utils.helm_template_generator import render_chart
class TestPodLauncher:
    """Tests RBAC Pod Launcher."""

    @pytest.mark.parametrize(
        ("rbac_create", "allow_pod_launching", "multi_ns", "expected_kind", "expected_name"),
        [
            (True, True, False, "Role", "release-name-pod-launcher-role"),
            (True, True, True, "ClusterRole", "default-release-name-pod-launcher-role"),
            (True, False, False, None, None),
            (False, True, False, None, None),
        ],
    )
    def test_pod_launcher_role(
        self, rbac_create, allow_pod_launching, multi_ns, expected_kind, expected_name
    ):
        """The Role/ClusterRole renders only when RBAC and pod launching are both enabled."""
        docs = render_chart(
            values={
                "rbac": {"create": rbac_create},
                "allowPodLaunching": allow_pod_launching,
                "multiNamespaceMode": multi_ns,
            },
            show_only=["templates/rbac/pod-launcher-role.yaml"],
        )
        if expected_kind is None:
            assert docs == []
            return
        rendered = docs[0]
        assert rendered["kind"] == expected_kind
        assert rendered["metadata"]["name"] == expected_name

    @pytest.mark.parametrize(
        (
            "rbac_create",
            "allow_pod_launching",
            "executor",
            "triggerer_enabled",
            "multi_ns",
            "expected_subjects",
        ),
        [
            # Only scheduler and worker SAs for KubernetesExecutor, CeleryExecutor
            (
                True,
                True,
                "CeleryExecutor,KubernetesExecutor",
                False,
                False,
                ["release-name-airflow-scheduler", "release-name-airflow-worker"],
            ),
            # Add triggerer SA if enabled
            (
                True,
                True,
                "CeleryExecutor,KubernetesExecutor",
                True,
                False,
                [
                    "release-name-airflow-scheduler",
                    "release-name-airflow-worker",
                    "release-name-airflow-triggerer",
                ],
            ),
            # RoleBinding not created if allowPodLaunching is False
            (True, False, "CeleryExecutor,KubernetesExecutor", False, False, []),
            # RoleBinding not created if rbac.create is False
            (False, True, "CeleryExecutor,KubernetesExecutor", False, False, []),
        ],
    )
    def test_pod_launcher_rolebinding(
        self,
        rbac_create,
        allow_pod_launching,
        executor,
        triggerer_enabled,
        multi_ns,
        expected_subjects,
    ):
        """The RoleBinding subjects track the executor and triggerer configuration."""
        docs = render_chart(
            values={
                "rbac": {"create": rbac_create},
                "allowPodLaunching": allow_pod_launching,
                "executor": executor,
                "triggerer": {"enabled": triggerer_enabled},
                "multiNamespaceMode": multi_ns,
            },
            show_only=["templates/rbac/pod-launcher-rolebinding.yaml"],
        )
        if not (rbac_create and allow_pod_launching):
            assert docs == []
            return
        actual = jmespath.search("subjects[*].name", docs[0]) if docs else []
        assert sorted(actual) == sorted(expected_subjects)
| {
"repo_id": "apache/airflow",
"file_path": "helm-tests/tests/helm_tests/security/test_rbac_pod_launcher.py",
"license": "Apache License 2.0",
"lines": 109,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/amazon/tests/system/amazon/aws/example_mwaa_airflow2.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime
import boto3
from airflow.providers.amazon.aws.hooks.mwaa import MwaaHook
from airflow.providers.amazon.aws.hooks.sts import StsHook
from airflow.providers.amazon.aws.operators.mwaa import MwaaTriggerDagRunOperator
from airflow.providers.amazon.aws.sensors.mwaa import MwaaDagRunSensor, MwaaTaskSensor
from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
if AIRFLOW_V_3_0_PLUS:
from airflow.sdk import DAG, chain, task
else:
# Airflow 2 path
from airflow.decorators import task # type: ignore[attr-defined,no-redef]
from airflow.models.baseoperator import chain # type: ignore[attr-defined,no-redef]
from airflow.models.dag import DAG # type: ignore[attr-defined,no-redef,assignment]
from system.amazon.aws.utils import SystemTestContextBuilder
DAG_ID = "example_mwaa_airflow2"
# Externally fetched variables:
EXISTING_ENVIRONMENT_NAME_KEY = "ENVIRONMENT_NAME"
EXISTING_DAG_ID_KEY = "DAG_ID"
EXISTING_TASK_ID_KEY = "TASK_ID"
ROLE_WITHOUT_INVOKE_REST_API_ARN_KEY = "ROLE_WITHOUT_INVOKE_REST_API_ARN"
sys_test_context_task = (
SystemTestContextBuilder()
# NOTE: Creating a functional MWAA environment is time-consuming and requires
# manually creating and configuring an S3 bucket for DAG storage and a VPC with
# private subnets which is out of scope for this demo. To simplify this demo and
# make it run in a reasonable time, an existing MWAA environment already
# containing a DAG is required.
# Here's a quick start guide to create an MWAA environment using AWS CloudFormation:
# https://docs.aws.amazon.com/mwaa/latest/userguide/quick-start.html
# If creating the environment using the AWS console, make sure to have a VPC with
# at least 1 private subnet to be able to select the VPC while going through the
# environment creation steps in the console wizard.
# Make sure to set the environment variables with appropriate values
.add_variable(EXISTING_ENVIRONMENT_NAME_KEY)
.add_variable(EXISTING_DAG_ID_KEY)
.add_variable(ROLE_WITHOUT_INVOKE_REST_API_ARN_KEY)
.add_variable(EXISTING_TASK_ID_KEY)
.build()
)
@task
def unpause_dag(env_name: str, dag_id: str):
    """Unpause *dag_id* in the given MWAA environment; return True when it is unpaused."""
    hook = MwaaHook()
    api_response = hook.invoke_rest_api(
        env_name=env_name,
        path=f"/dags/{dag_id}",
        method="PATCH",
        body={"is_paused": False},
    )
    # The PATCH response echoes the DAG resource; success means it is no longer paused.
    return not api_response["RestApiResponse"]["is_paused"]
# This task in the system test verifies that the MwaaHook's IAM fallback mechanism continues to work with
# the live MWAA API. This fallback depends on parsing a specific error message from the MWAA API, so we
# want to ensure we find out if the API response format ever changes. Unit tests cover this with mocked
# responses, but this system test validates against the real API.
@task
def test_iam_fallback(role_to_assume_arn, mwaa_env_name):
    """Exercise MwaaHook's IAM fallback by calling the MWAA API from a restricted role."""
    # Assume the role that lacks InvokeRestApi permission (see module comments above),
    # forcing the hook down its IAM fallback path.
    assumed_role = StsHook().conn.assume_role(
        RoleArn=role_to_assume_arn, RoleSessionName="MwaaSysTestIamFallback"
    )
    credentials = assumed_role["Credentials"]
    session = boto3.Session(
        aws_access_key_id=credentials["AccessKeyId"],
        aws_secret_access_key=credentials["SecretAccessKey"],
        aws_session_token=credentials["SessionToken"],
    )
    mwaa_hook = MwaaHook()
    # Swap in a client built from the assumed-role credentials.
    mwaa_hook.conn = session.client("mwaa")
    response = mwaa_hook.invoke_rest_api(env_name=mwaa_env_name, path="/dags", method="GET")
    return "dags" in response["RestApiResponse"]
# Wire up the DAG: one synchronous trigger path (operator waits for completion)
# and one fire-and-forget path that is monitored by sensors.
with DAG(
    dag_id=DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),
    catchup=False,
) as dag:
    test_context = sys_test_context_task()
    env_name = test_context[EXISTING_ENVIRONMENT_NAME_KEY]
    trigger_dag_id = test_context[EXISTING_DAG_ID_KEY]
    task_id = test_context[EXISTING_TASK_ID_KEY]
    restricted_role_arn = test_context[ROLE_WITHOUT_INVOKE_REST_API_ARN_KEY]
    # [START howto_operator_mwaa_trigger_dag_run]
    trigger_dag_run = MwaaTriggerDagRunOperator(
        task_id="trigger_dag_run",
        env_name=env_name,
        trigger_dag_id=trigger_dag_id,
        wait_for_completion=True,
    )
    # [END howto_operator_mwaa_trigger_dag_run]
    # [START howto_sensor_mwaa_task]
    wait_for_task = MwaaTaskSensor(
        task_id="wait_for_task",
        external_env_name=env_name,
        external_dag_id=trigger_dag_id,
        external_task_id=task_id,
        poke_interval=5,
    )
    # [END howto_sensor_mwaa_task]
    # [START howto_sensor_mwaa_dag_run]
    wait_for_dag_run = MwaaDagRunSensor(
        task_id="wait_for_dag_run",
        external_env_name=env_name,
        external_dag_id=trigger_dag_id,
        # Pull the remote run id from the XCom pushed by trigger_dag_run.
        external_dag_run_id="{{ task_instance.xcom_pull(task_ids='trigger_dag_run')['RestApiResponse']['dag_run_id'] }}",
        poke_interval=5,
    )
    # [END howto_sensor_mwaa_dag_run]
    trigger_dag_run_dont_wait = MwaaTriggerDagRunOperator(
        task_id="trigger_dag_run_dont_wait",
        env_name=env_name,
        trigger_dag_id=trigger_dag_id,
        wait_for_completion=False,
    )
    wait_for_task_concurrent = MwaaTaskSensor(
        task_id="wait_for_task_concurrent",
        external_env_name=env_name,
        external_dag_id=trigger_dag_id,
        external_task_id=task_id,
        poke_interval=5,
    )
    # Fan out: setup tasks and both trigger paths run once the context is built.
    test_context >> [
        unpause_dag(env_name, trigger_dag_id),
        test_iam_fallback(restricted_role_arn, env_name),
        trigger_dag_run,
        trigger_dag_run_dont_wait,
    ]
    chain(trigger_dag_run, wait_for_task, wait_for_dag_run)
    chain(trigger_dag_run_dont_wait, wait_for_task_concurrent)
    from tests_common.test_utils.watcher import watcher
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
from tests_common.test_utils.system_tests import get_test_run # noqa: E402
# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/amazon/tests/system/amazon/aws/example_mwaa_airflow2.py",
"license": "Apache License 2.0",
"lines": 148,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/apache/impala/tests/unit/apache/impala/hooks/test_impala_sql.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
from unittest import mock
from unittest.mock import MagicMock, patch
import pytest
from sqlalchemy.engine.url import make_url
from airflow.models import Connection
from airflow.providers.apache.impala.hooks.impala import ImpalaHook
DEFAULT_CONN_ID = "impala_default"
DEFAULT_HOST = "localhost"
DEFAULT_PORT = 21050
DEFAULT_LOGIN = "user"
DEFAULT_PASSWORD = "pass"
DEFAULT_SCHEMA = "default_db"
@pytest.fixture
def mock_connection(create_connection_without_db) -> Connection:
    """Create an Airflow Connection for Impala and register it without touching the DB."""
    conn = Connection(
        conn_id=DEFAULT_CONN_ID,
        conn_type="impala",
        host=DEFAULT_HOST,
        login=DEFAULT_LOGIN,
        # Deliberately unset here; tests that need a password assign it directly.
        password=None,
        port=DEFAULT_PORT,
        schema=DEFAULT_SCHEMA,
    )
    create_connection_without_db(conn)
    return conn
@pytest.fixture
def impala_hook() -> ImpalaHook:
    """Return an ImpalaHook bound to the default test connection id."""
    hook = ImpalaHook(impala_conn_id=DEFAULT_CONN_ID)
    return hook
def get_cursor_descriptions(fields: list[str]) -> list[tuple[str]]:
    """Build minimal DB-API cursor ``description`` entries: one name-only tuple per column."""
    descriptions = []
    for column_name in fields:
        descriptions.append((column_name,))
    return descriptions
@pytest.mark.parametrize(
    ("host", "login", "password", "port", "schema", "extra_dict", "expected_query"),
    [
        # No extras -> empty query dict on the URL.
        (
            "localhost",
            "user",
            "pass",
            21050,
            "default_db",
            {},
            {},
        ),
        # SSL flag passes through to the query unchanged.
        (
            "impala-secure.company.com",
            "secure_user",
            "secret",
            21050,
            "analytics",
            {"use_ssl": "True"},
            {"use_ssl": "True"},
        ),
        # Kerberos: no password, auth settings forwarded as query params.
        (
            "impala-kerberos.company.com",
            "kerb_user",
            None,
            21050,
            "sales",
            {"auth_mechanism": "GSSAPI", "kerberos_service_name": "impala"},
            {"auth_mechanism": "GSSAPI", "kerberos_service_name": "impala"},
        ),
        # Non-string extras are stringified in the resulting query.
        (
            "impala.company.com",
            "timeout_user",
            "pw123",
            21050,
            "warehouse",
            {"timeout": 30},
            {"timeout": "30"},
        ),
    ],
)
def test_sqlalchemy_url_property(
    impala_hook, mock_connection, host, login, password, port, schema, extra_dict, expected_query
):
    """Tests various custom configurations passed via the 'extra' field."""
    mock_connection.host = host
    mock_connection.login = login
    mock_connection.password = password
    mock_connection.port = port
    mock_connection.schema = schema
    mock_connection.extra = json.dumps(extra_dict) if extra_dict else None
    with patch.object(impala_hook, "get_connection", return_value=mock_connection):
        url = impala_hook.sqlalchemy_url
    # A missing password must surface as an empty string on the URL, not None.
    expected_password = password or ""
    assert url.drivername == "impala"
    assert url.username == login
    assert url.password == expected_password
    assert url.host == host
    assert url.port == port
    assert url.database == schema
    assert url.query == expected_query
@pytest.mark.parametrize(
    ("sql", "expected_rows"),
    [
        ("SELECT * FROM users", [("Alice", 1), ("Bob", 2)]),
        ("SELECT 1", [(1,)]),
    ],
)
def test_impala_run_query(impala_hook, mock_connection, sql, expected_rows):
    """hook.run() must execute the SQL exactly once and return the handler's result."""
    # Fake DB-API cursor that yields the canned rows.
    cursor = MagicMock()
    cursor.fetchall.return_value = expected_rows
    cursor.description = get_cursor_descriptions([f"col{i}" for i in range(len(expected_rows[0]))])
    type(cursor).rowcount = mock.PropertyMock(return_value=len(expected_rows))
    mock_conn = MagicMock()
    mock_conn.host = mock_connection.host
    mock_conn.login = mock_connection.login
    mock_conn.password = mock_connection.password
    mock_conn.schema = mock_connection.schema
    mock_conn.cursor.return_value = cursor
    # Patch both the hook module's connect() and the Airflow connection lookup.
    with patch("airflow.providers.apache.impala.hooks.impala.connect", return_value=mock_conn):
        with patch.object(impala_hook, "get_connection", return_value=mock_conn):
            result = impala_hook.run(sql, handler=lambda cur: cur.fetchall())
    cursor.execute.assert_called_once_with(sql)
    assert result == expected_rows
def test_get_sqlalchemy_engine(impala_hook, mock_connection, mocker):
    """get_sqlalchemy_engine() must build the engine from the connection-derived URL."""
    mock_create_engine = mocker.patch("airflow.providers.common.sql.hooks.sql.create_engine", autospec=True)
    mock_engine = MagicMock()
    mock_create_engine.return_value = mock_engine
    with patch.object(impala_hook, "get_connection", return_value=mock_connection):
        engine = impala_hook.get_sqlalchemy_engine()
    assert engine is mock_engine
    # Inspect the URL object handed to create_engine rather than a rendered string.
    call_args = mock_create_engine.call_args[1]
    actual_url = call_args["url"]
    assert actual_url.drivername == "impala"
    assert actual_url.host == DEFAULT_HOST
    assert actual_url.username == DEFAULT_LOGIN
    assert actual_url.password == (mock_connection.password or "")
    assert actual_url.port == DEFAULT_PORT
    assert actual_url.database == DEFAULT_SCHEMA
    assert actual_url.query == {}
def test_get_url(impala_hook, mock_connection):
    """Ensure get_uri() returns correct formatted URI for Impala connection"""
    mock_connection.host = "impala.company.com"
    mock_connection.port = 21050
    mock_connection.login = "user"
    mock_connection.password = "secret"
    mock_connection.schema = "analytics"
    mock_connection.extra = json.dumps({"use_ssl": "True", "auth_mechanism": "PLAIN"})
    with patch.object(impala_hook, "get_connection", return_value=mock_connection):
        uri = impala_hook.get_uri()
    expected_uri = "impala://user:secret@impala.company.com:21050/analytics?use_ssl=True&auth_mechanism=PLAIN"
    # Compare parsed URL objects so query-parameter ordering cannot break the test.
    assert make_url(uri) == make_url(expected_uri)
@pytest.mark.parametrize("sql", ["", " ", "\n"])
def test_run_with_empty_sql(impala_hook, sql):
    """Empty or whitespace-only SQL must be rejected with a ValueError."""
    with pytest.raises(ValueError, match="List of SQL statements is empty"):
        impala_hook.run(sql)
@pytest.fixture
def impala_hook_with_timeout(create_connection_without_db):
    """ImpalaHook whose registered connection carries a query timeout in its extras."""
    conn = Connection(
        conn_id="impala_with_timeout",
        conn_type="impala",
        host=DEFAULT_HOST,
        login=DEFAULT_LOGIN,
        password=DEFAULT_PASSWORD,
        port=DEFAULT_PORT,
        schema=DEFAULT_SCHEMA,
        # Timeout (seconds) forwarded to the driver via connection extras.
        extra=json.dumps({"timeout": 10}),
    )
    create_connection_without_db(conn)
    return ImpalaHook(impala_conn_id="impala_with_timeout")
def test_execution_timeout_exceeded(impala_hook_with_timeout):
    """A TimeoutError raised by run() propagates unchanged to the caller."""
    test_sql = "SELECT * FROM big_table"
    # NOTE(review): this patches ImpalaHook.run itself and then calls run, so it
    # only exercises the mock, not any real timeout logic — consider patching the
    # underlying connect/cursor instead.
    with patch(
        "airflow.providers.apache.impala.hooks.impala.ImpalaHook.run",
        side_effect=TimeoutError("Query exceeded execution timeout"),
    ):
        with pytest.raises(TimeoutError, match="Query exceeded execution timeout"):
            impala_hook_with_timeout.run(sql=test_sql)
| {
"repo_id": "apache/airflow",
"file_path": "providers/apache/impala/tests/unit/apache/impala/hooks/test_impala_sql.py",
"license": "Apache License 2.0",
"lines": 189,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/teams.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.api_fastapi.core_api.base import BaseModel
class TeamResponse(BaseModel):
    """Base serializer for Team."""
    # Team name; the only attribute exposed by this serializer.
    name: str
class TeamCollectionResponse(BaseModel):
    """Team collection serializer for responses."""
    teams: list[TeamResponse]
    # Total matching teams before pagination was applied.
    total_entries: int
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/teams.py",
"license": "Apache License 2.0",
"lines": 25,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/src/airflow/api_fastapi/core_api/routes/ui/teams.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Annotated
from fastapi import Depends, HTTPException, status
from sqlalchemy import select
from airflow.api_fastapi.common.db.common import SessionDep, paginated_select
from airflow.api_fastapi.common.parameters import (
QueryLimit,
QueryOffset,
SortParam,
)
from airflow.api_fastapi.common.router import AirflowRouter
from airflow.api_fastapi.core_api.datamodels.ui.teams import TeamCollectionResponse, TeamResponse
from airflow.api_fastapi.core_api.security import (
ReadableTeamsFilterDep,
requires_authenticated,
)
from airflow.configuration import conf
from airflow.models.team import Team
teams_router = AirflowRouter(tags=["Teams"], prefix="/teams")
@teams_router.get(
    path="",
    dependencies=[Depends(requires_authenticated())],
)
def list_teams(
    limit: QueryLimit,
    offset: QueryOffset,
    order_by: Annotated[
        SortParam,
        Depends(SortParam(["name"], Team).dynamic_depends()),
    ],
    readable_teams_filter: ReadableTeamsFilterDep,
    session: SessionDep,
) -> TeamCollectionResponse:
    """Return the paginated list of teams the current user may read."""
    # Teams only make sense in multi-team deployments; reject the call otherwise.
    if not conf.getboolean("core", "multi_team"):
        raise HTTPException(
            status.HTTP_403_FORBIDDEN, "Multi-team mode is not configured in the Airflow environment"
        )
    select_stmt, total_entries = paginated_select(
        statement=select(Team),
        filters=[readable_teams_filter],
        order_by=order_by,
        offset=offset,
        limit=limit,
        session=session,
    )
    # NOTE(review): session.scalars(select(Team)) yields Team ORM objects, so the
    # non-Team branch looks unreachable here — confirm whether row mappings can occur.
    teams = [
        TeamResponse(**row._mapping) if not isinstance(row, Team) else row
        for row in session.scalars(select_stmt)
    ]
    return TeamCollectionResponse(
        teams=teams,
        total_entries=total_entries,
    )
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/core_api/routes/ui/teams.py",
"license": "Apache License 2.0",
"lines": 69,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/tests/unit/api_fastapi/core_api/routes/ui/test_teams.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow.models.team import Team
from tests_common.test_utils.asserts import assert_queries_count
from tests_common.test_utils.config import conf_vars
from tests_common.test_utils.db import (
clear_db_teams,
)
pytestmark = pytest.mark.db_test
def _clean_db():
    # Remove all Team rows so each test starts from an empty table.
    clear_db_teams()
@pytest.fixture(autouse=True)
def clean_db(session):
    # Guarantee an empty teams table both before and after every test in this module.
    _clean_db()
    yield
    _clean_db()
class TestListTeams:
    @conf_vars({("core", "multi_team"): "true"})
    def test_should_response_200(self, test_client, session):
        """All teams are listed with a correct total count when multi-team is enabled."""
        session.add(Team(name="team1"))
        session.add(Team(name="team2"))
        session.add(Team(name="team3"))
        session.commit()
        # Guard the endpoint against query-count regressions.
        with assert_queries_count(3):
            response = test_client.get("/teams", params={})
        assert response.status_code == 200
        assert response.json() == {
            "teams": [
                {"name": "team1"},
                {"name": "team2"},
                {"name": "team3"},
            ],
            "total_entries": 3,
        }
    @conf_vars({("core", "multi_team"): "true"})
    def test_should_response_401(self, unauthenticated_test_client):
        # Authentication is required even when multi-team is enabled.
        response = unauthenticated_test_client.get("/teams", params={})
        assert response.status_code == 401
    def test_should_response_403_flag_off(self, test_client):
        # Without the multi_team flag the endpoint refuses authenticated callers too.
        response = test_client.get("/teams", params={})
        assert response.status_code == 403
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/api_fastapi/core_api/routes/ui/test_teams.py",
"license": "Apache License 2.0",
"lines": 57,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/datamodels/roles.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from pydantic import Field
from airflow.api_fastapi.core_api.base import BaseModel, StrictBaseModel
class Action(BaseModel):
    """Outgoing representation of an action (permission name)."""
    # Action (permission) name as stored by FAB.
    name: str
class Resource(BaseModel):
    """Outgoing representation of a resource."""
    # Name of the resource the action applies to.
    name: str
class ActionResource(BaseModel):
    """Pairing of an action with a resource."""
    # The permitted action.
    action: Action
    # The resource it applies to.
    resource: Resource
class Role(BaseModel):
    """Lightweight role reference used by /users schemas."""
    # Role name only; full details live in RoleResponse.
    name: str
class RoleBody(StrictBaseModel):
    """Incoming payload for creating/updating a role."""
    # Role name must be non-empty.
    name: str = Field(min_length=1)
    # Exposed to API clients under the key "actions" (input alias), while the
    # internal attribute keeps the FAB-style name "permissions".
    permissions: list[ActionResource] = Field(
        default_factory=list, alias="actions", validation_alias="actions"
    )
class RoleResponse(BaseModel):
    """Outgoing representation of a role and its permissions."""
    name: str
    # Serialized back out under the key "actions", mirroring RoleBody's input alias.
    permissions: list[ActionResource] = Field(default_factory=list, serialization_alias="actions")
class RoleCollectionResponse(BaseModel):
    """Outgoing representation of a paginated collection of roles."""
    roles: list[RoleResponse]
    # Total roles before pagination was applied.
    total_entries: int
class PermissionCollectionResponse(BaseModel):
    """Outgoing representation of a paginated collection of permissions."""
    # Serialized out under the key "actions" for consistency with the role schemas.
    permissions: list[ActionResource] = Field(default_factory=list, serialization_alias="actions")
    total_entries: int
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/datamodels/roles.py",
"license": "Apache License 2.0",
"lines": 50,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/routes/roles.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from fastapi import Depends, Path, Query, status
from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc
from airflow.providers.fab.auth_manager.api_fastapi.datamodels.roles import (
PermissionCollectionResponse,
RoleBody,
RoleCollectionResponse,
RoleResponse,
)
from airflow.providers.fab.auth_manager.api_fastapi.parameters import get_effective_limit
from airflow.providers.fab.auth_manager.api_fastapi.routes.router import fab_router
from airflow.providers.fab.auth_manager.api_fastapi.security import requires_fab_custom_view
from airflow.providers.fab.auth_manager.api_fastapi.services.roles import FABAuthManagerRoles
from airflow.providers.fab.auth_manager.cli_commands.utils import get_application_builder
from airflow.providers.fab.www.security import permissions
@fab_router.post(
    "/roles",
    responses=create_openapi_http_exception_doc(
        [
            status.HTTP_400_BAD_REQUEST,
            status.HTTP_401_UNAUTHORIZED,
            status.HTTP_403_FORBIDDEN,
            status.HTTP_409_CONFLICT,
            status.HTTP_500_INTERNAL_SERVER_ERROR,
        ]
    ),
    # Caller must hold the FAB "POST on Role" permission.
    dependencies=[Depends(requires_fab_custom_view("POST", permissions.RESOURCE_ROLE))],
)
def create_role(body: RoleBody) -> RoleResponse:
    """Create a new role (actions can be empty)."""
    # Run inside the FAB application-builder context so the security manager is available.
    with get_application_builder():
        return FABAuthManagerRoles.create_role(body=body)
@fab_router.get(
    "/roles",
    response_model=RoleCollectionResponse,
    responses=create_openapi_http_exception_doc(
        [
            status.HTTP_400_BAD_REQUEST,
            status.HTTP_401_UNAUTHORIZED,
            status.HTTP_403_FORBIDDEN,
            status.HTTP_500_INTERNAL_SERVER_ERROR,
        ]
    ),
    dependencies=[Depends(requires_fab_custom_view("GET", permissions.RESOURCE_ROLE))],
)
def get_roles(
    order_by: str = Query("name", description="Field to order by. Prefix with '-' for descending."),
    limit: int = Depends(get_effective_limit()),
    offset: int = Query(0, ge=0, description="Number of items to skip before starting to collect results."),
) -> RoleCollectionResponse:
    """List roles with pagination and ordering."""
    # Run inside the FAB application-builder context so the security manager is available.
    with get_application_builder():
        return FABAuthManagerRoles.get_roles(order_by=order_by, limit=limit, offset=offset)
@fab_router.delete(
    "/roles/{name}",
    responses=create_openapi_http_exception_doc(
        [
            status.HTTP_401_UNAUTHORIZED,
            status.HTTP_403_FORBIDDEN,
            status.HTTP_404_NOT_FOUND,
        ]
    ),
    dependencies=[Depends(requires_fab_custom_view("DELETE", permissions.RESOURCE_ROLE))],
)
def delete_role(name: str = Path(..., min_length=1)) -> None:
    """Delete an existing role."""
    # 404 handling is delegated to the service layer.
    with get_application_builder():
        return FABAuthManagerRoles.delete_role(name=name)
@fab_router.get(
    "/roles/{name}",
    responses=create_openapi_http_exception_doc(
        [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN, status.HTTP_404_NOT_FOUND]
    ),
    dependencies=[Depends(requires_fab_custom_view("GET", permissions.RESOURCE_ROLE))],
)
def get_role(name: str = Path(..., min_length=1)) -> RoleResponse:
    """Get an existing role."""
    # 404 handling is delegated to the service layer.
    with get_application_builder():
        return FABAuthManagerRoles.get_role(name=name)
@fab_router.patch(
    "/roles/{name}",
    responses=create_openapi_http_exception_doc(
        [
            status.HTTP_400_BAD_REQUEST,
            status.HTTP_401_UNAUTHORIZED,
            status.HTTP_403_FORBIDDEN,
            status.HTTP_404_NOT_FOUND,
        ]
    ),
    # NOTE(review): authorization checks "PUT" although the route is PATCH —
    # presumably matching FAB's edit-permission mapping; confirm intentional.
    dependencies=[Depends(requires_fab_custom_view("PUT", permissions.RESOURCE_ROLE))],
)
def patch_role(
    body: RoleBody,
    name: str = Path(..., min_length=1),
    update_mask: str | None = Query(None, description="Comma-separated list of fields to update"),
) -> RoleResponse:
    """Update an existing role."""
    with get_application_builder():
        return FABAuthManagerRoles.patch_role(name=name, body=body, update_mask=update_mask)
@fab_router.get(
    "/permissions",
    response_model=PermissionCollectionResponse,
    responses=create_openapi_http_exception_doc(
        [
            status.HTTP_401_UNAUTHORIZED,
            status.HTTP_403_FORBIDDEN,
            status.HTTP_500_INTERNAL_SERVER_ERROR,
        ]
    ),
    dependencies=[Depends(requires_fab_custom_view("GET", permissions.RESOURCE_ROLE))],
)
def get_permissions(
    limit: int = Depends(get_effective_limit()),
    offset: int = Query(0, ge=0, description="Number of items to skip before starting to collect results."),
) -> PermissionCollectionResponse:
    """List all action-resource (permission) pairs."""
    with get_application_builder():
        return FABAuthManagerRoles.get_permissions(limit=limit, offset=offset)
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/routes/roles.py",
"license": "Apache License 2.0",
"lines": 134,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/security.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from fastapi import Depends, HTTPException, status
from airflow.api_fastapi.app import get_auth_manager
from airflow.api_fastapi.core_api.security import get_user
def requires_fab_custom_view(method: str, resource_name: str):
    """Build a FastAPI dependency that enforces FAB custom-view authorization.

    :param method: HTTP-style method name checked against the auth manager.
    :param resource_name: FAB resource the protected view belongs to.
    """
    def _check(user=Depends(get_user)):
        # 403 when the auth manager denies the (method, resource) pair for this user.
        if not get_auth_manager().is_authorized_custom_view(
            method=method, resource_name=resource_name, user=user
        ):
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
    return _check
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/security.py",
"license": "Apache License 2.0",
"lines": 27,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/services/roles.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from fastapi import HTTPException, status
from sqlalchemy import func, select
from sqlalchemy.orm import joinedload
from airflow.providers.fab.auth_manager.api_fastapi.datamodels.roles import (
Action as ActionModel,
ActionResource,
PermissionCollectionResponse,
Resource as ResourceModel,
RoleBody,
RoleCollectionResponse,
RoleResponse,
)
from airflow.providers.fab.auth_manager.api_fastapi.sorting import build_ordering
from airflow.providers.fab.auth_manager.models import Permission, Role
from airflow.providers.fab.www.utils import get_fab_auth_manager
if TYPE_CHECKING:
from airflow.providers.fab.auth_manager.security_manager.override import FabAirflowSecurityManagerOverride
class FABAuthManagerRoles:
"""Service layer for FAB Auth Manager role operations (create, validate, sync)."""
    @staticmethod
    def _check_action_and_resource(
        security_manager: FabAirflowSecurityManagerOverride,
        perms: list[tuple[str, str]],
    ) -> None:
        """Validate that every (action, resource) pair exists; 400 on the first unknown name."""
        for action_name, resource_name in perms:
            if not security_manager.get_action(action_name):
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail=f"The specified action: {action_name!r} was not found",
                )
            if not security_manager.get_resource(resource_name):
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail=f"The specified resource: {resource_name!r} was not found",
                )
    @classmethod
    def create_role(cls, body: RoleBody) -> RoleResponse:
        """Create a role with the requested permissions; 409 when the name is taken."""
        security_manager = get_fab_auth_manager().security_manager
        existing = security_manager.find_role(name=body.name)
        if existing:
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail=f"Role with name {body.name!r} already exists; please update with the PATCH endpoint",
            )
        # Validate every (action, resource) pair before mutating anything.
        perms: list[tuple[str, str]] = [(ar.action.name, ar.resource.name) for ar in (body.permissions or [])]
        cls._check_action_and_resource(security_manager, perms)
        security_manager.bulk_sync_roles([{"role": body.name, "perms": perms}])
        # Re-fetch to confirm the sync actually persisted the role.
        created = security_manager.find_role(name=body.name)
        if not created:
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail="Role was not created due to an unexpected error.",
            )
        return RoleResponse.model_validate(created)
    @classmethod
    def get_roles(cls, *, order_by: str, limit: int, offset: int) -> RoleCollectionResponse:
        """Return one page of roles plus the unpaginated total count."""
        security_manager = get_fab_auth_manager().security_manager
        session = security_manager.session
        # Count before pagination so total_entries reflects the whole table.
        total_entries = session.scalars(select(func.count(Role.id))).one()
        ordering = build_ordering(order_by, allowed={"name": Role.name, "role_id": Role.id})
        stmt = select(Role).order_by(ordering).offset(offset).limit(limit)
        roles = session.scalars(stmt).unique().all()
        return RoleCollectionResponse(
            roles=[RoleResponse.model_validate(r) for r in roles],
            total_entries=total_entries,
        )
@classmethod
def delete_role(cls, name: str) -> None:
security_manager = get_fab_auth_manager().security_manager
existing = security_manager.find_role(name=name)
if not existing:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Role with name {name!r} does not exist.",
)
security_manager.delete_role(existing)
@classmethod
def get_role(cls, name: str) -> RoleResponse:
security_manager = get_fab_auth_manager().security_manager
existing = security_manager.find_role(name=name)
if not existing:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Role with name {name!r} does not exist.",
)
return RoleResponse.model_validate(existing)
@classmethod
def patch_role(cls, body: RoleBody, name: str, update_mask: str | None = None) -> RoleResponse:
security_manager = get_fab_auth_manager().security_manager
existing = security_manager.find_role(name=name)
if not existing:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Role with name {name!r} does not exist.",
)
if update_mask:
fields_to_update = {f.strip() for f in update_mask.split(",") if f.strip()}
update_data = RoleResponse.model_validate(existing)
for field in fields_to_update:
if field == "actions":
update_data.permissions = body.permissions
elif hasattr(body, field):
setattr(update_data, field, getattr(body, field))
else:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"'{field}' in update_mask is unknown",
)
else:
update_data = RoleResponse(name=body.name, permissions=body.permissions or [])
perms: list[tuple[str, str]] = [(ar.action.name, ar.resource.name) for ar in (body.permissions or [])]
cls._check_action_and_resource(security_manager, perms)
security_manager.bulk_sync_roles([{"role": name, "perms": perms}])
new_name = update_data.name
if new_name and new_name != existing.name:
security_manager.update_role(role_id=existing.id, name=new_name)
return RoleResponse.model_validate(update_data)
@classmethod
def get_permissions(cls, *, limit: int, offset: int) -> PermissionCollectionResponse:
security_manager = get_fab_auth_manager().security_manager
session = security_manager.session
total_entries = session.scalars(select(func.count(Permission.id))).one()
query = (
select(Permission)
.options(joinedload(Permission.action), joinedload(Permission.resource))
.offset(offset)
.limit(limit)
)
permissions = session.scalars(query).all()
return PermissionCollectionResponse(
permissions=[
ActionResource(
action=ActionModel(name=p.action.name), resource=ResourceModel(name=p.resource.name)
)
for p in permissions
],
total_entries=total_entries,
)
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/services/roles.py",
"license": "Apache License 2.0",
"lines": 156,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/fab/tests/unit/fab/auth_manager/api_fastapi/datamodels/test_roles.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import types
import pytest
from pydantic import ValidationError
from airflow.providers.fab.auth_manager.api_fastapi.datamodels.roles import (
Action,
ActionResource,
PermissionCollectionResponse,
Resource,
RoleBody,
RoleCollectionResponse,
RoleResponse,
)
class TestRoleModels:
    """Unit tests for the FAB role/permission pydantic datamodels."""

    def test_rolebody_accepts_actions_alias_and_maps_to_permissions(self):
        payload = {
            "name": "viewer",
            "actions": [
                {"action": {"name": "can_read"}, "resource": {"name": "DAG"}},
                {"action": {"name": "can_read"}, "resource": {"name": "Connection"}},
            ],
        }
        parsed = RoleBody.model_validate(payload)
        assert parsed.name == "viewer"
        # The "actions" validation alias must populate the `permissions` field.
        assert len(parsed.permissions) == 2
        first = parsed.permissions[0]
        assert first.action.name == "can_read"
        assert first.resource.name == "DAG"

    def test_rolebody_defaults_permissions_to_empty_when_actions_missing(self):
        parsed = RoleBody.model_validate({"name": "empty"})
        assert parsed.name == "empty"
        assert parsed.permissions == []

    def test_rolebody_name_min_length_enforced(self):
        with pytest.raises(ValidationError):
            RoleBody.model_validate({"name": "", "actions": []})

    def test_roleresponse_serializes_permissions_under_actions_alias(self):
        pair = ActionResource(action=Action(name="can_read"), resource=Resource(name="DAG"))
        response = RoleResponse(name="viewer", permissions=[pair])
        serialized = response.model_dump(by_alias=True)
        # The serialization alias renames `permissions` to `actions` on dump.
        assert "actions" in serialized
        assert "permissions" not in serialized
        assert serialized["name"] == "viewer"
        assert serialized["actions"][0]["action"]["name"] == "can_read"
        assert serialized["actions"][0]["resource"]["name"] == "DAG"

    def test_roleresponse_model_validate_from_simple_namespace(self):
        # The service layer hands over plain objects rather than dicts, so
        # attribute-based validation must work too.
        source = types.SimpleNamespace(
            name="viewer",
            permissions=[
                types.SimpleNamespace(
                    action=types.SimpleNamespace(name="can_read"),
                    resource=types.SimpleNamespace(name="DAG"),
                )
            ],
        )
        response = RoleResponse.model_validate(source)
        assert response.name == "viewer"
        assert response.permissions
        head = response.permissions[0]
        assert head.action.name == "can_read"

    def test_rolecollection_response_dump_and_counts(self):
        pair = ActionResource(action=Action(name="can_read"), resource=Resource(name="DAG"))
        collection = RoleCollectionResponse(
            roles=[RoleResponse(name="viewer", permissions=[pair])],
            total_entries=1,
        )
        serialized = collection.model_dump(by_alias=True)
        assert serialized["total_entries"] == 1
        assert isinstance(serialized["roles"], list)
        first_role = serialized["roles"][0]
        assert first_role["name"] == "viewer"
        assert "actions" in first_role
        assert "permissions" not in first_role

    def test_rolecollection_model_validate_from_objects(self):
        source = types.SimpleNamespace(
            roles=[
                types.SimpleNamespace(
                    name="admin",
                    permissions=[
                        types.SimpleNamespace(
                            action=types.SimpleNamespace(name="can_read"),
                            resource=types.SimpleNamespace(name="DAG"),
                        )
                    ],
                )
            ],
            total_entries=1,
        )
        collection = RoleCollectionResponse.model_validate(source)
        assert collection.total_entries == 1
        assert len(collection.roles) == 1
        assert collection.roles[0].name == "admin"
        assert collection.roles[0].permissions[0].action.name == "can_read"

    def test_rolecollection_missing_total_entries_raises(self):
        with pytest.raises(ValidationError):
            RoleCollectionResponse.model_validate({"roles": []})

    def test_permission_collection_response_valid(self):
        pair = ActionResource(action=Action(name="can_read"), resource=Resource(name="DAG"))
        collection = PermissionCollectionResponse(permissions=[pair], total_entries=1)
        serialized = collection.model_dump()
        assert serialized["total_entries"] == 1
        assert isinstance(serialized["permissions"], list)
        assert serialized["permissions"][0]["action"]["name"] == "can_read"
        assert serialized["permissions"][0]["resource"]["name"] == "DAG"

    def test_permission_collection_response_model_validate_from_objects(self):
        source = types.SimpleNamespace(
            permissions=[
                types.SimpleNamespace(
                    action=types.SimpleNamespace(name="can_read"),
                    resource=types.SimpleNamespace(name="DAG"),
                )
            ],
            total_entries=1,
        )
        collection = PermissionCollectionResponse.model_validate(source)
        assert collection.total_entries == 1
        assert len(collection.permissions) == 1
        assert collection.permissions[0].action.name == "can_read"

    def test_permission_collection_missing_total_entries_raises(self):
        with pytest.raises(ValidationError):
            PermissionCollectionResponse.model_validate({"permissions": []})
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/tests/unit/fab/auth_manager/api_fastapi/datamodels/test_roles.py",
"license": "Apache License 2.0",
"lines": 149,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/fab/tests/unit/fab/auth_manager/api_fastapi/routes/test_roles.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from contextlib import nullcontext as _noop_cm
from unittest.mock import ANY, MagicMock, patch
import pytest
from fastapi import HTTPException, status
from airflow.providers.fab.auth_manager.api_fastapi.datamodels.roles import (
Action,
ActionResource,
PermissionCollectionResponse,
Resource,
RoleCollectionResponse,
RoleResponse,
)
@pytest.mark.db_test
class TestRoles:
    """Route-level tests for the ``/fab/v1`` role and permission endpoints.

    Every test stacks three ``patch`` decorators; ``mock.patch`` decorators
    apply bottom-up, so the mock parameters bind in reverse decorator order:
    ``mock_get_application_builder`` (bottom-most patch) first, then
    ``mock_get_auth_manager``, then ``mock_roles`` / ``mock_permissions``.
    Authorization is driven by ``is_authorized_custom_view`` on a mocked auth
    manager; the ``FABAuthManagerRoles`` service layer itself is mocked out.
    """

    # POST /roles
    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_create_role(
        self, mock_get_application_builder, mock_get_auth_manager, mock_roles, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        dummy_out = RoleResponse(name="my_new_role", permissions=[])
        mock_roles.create_role.return_value = dummy_out
        with as_user():
            resp = test_client.post("/fab/v1/roles", json={"name": "my_new_role", "actions": []})
        assert resp.status_code == 200
        assert resp.json() == dummy_out.model_dump(by_alias=True)
        mock_roles.create_role.assert_called_once()

    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_create_role_forbidden(
        self, mock_get_application_builder, mock_get_auth_manager, mock_roles, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = False
        mock_get_auth_manager.return_value = mgr
        with as_user():
            resp = test_client.post("/fab/v1/roles", json={"name": "r", "actions": []})
        assert resp.status_code == 403
        mock_roles.create_role.assert_not_called()

    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_create_role_validation_422_empty_name(
        self, mock_get_application_builder, mock_get_auth_manager, mock_roles, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        with as_user():
            resp = test_client.post("/fab/v1/roles", json={"name": "", "actions": []})
        assert resp.status_code == 422
        mock_roles.create_role.assert_not_called()

    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_create_role_validation_422_missing_name(
        self, mock_get_application_builder, mock_get_auth_manager, mock_roles, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        with as_user():
            resp = test_client.post("/fab/v1/roles", json={"actions": []})
        assert resp.status_code == 422
        mock_roles.create_role.assert_not_called()

    # GET /roles
    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    @patch("airflow.providers.fab.auth_manager.api_fastapi.parameters.conf")
    def test_get_roles_success_defaults(
        self,
        conf_mock,
        mock_get_application_builder,
        mock_get_auth_manager,
        mock_roles,
        test_client,
        as_user,
    ):
        # conf lookups back the pagination limits read by the route's parameters.
        conf_mock.getint.side_effect = lambda section, option: {
            "maximum_page_limit": 500,
            "fallback_page_limit": 25,
        }[option]
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        dummy = RoleCollectionResponse(
            roles=[RoleResponse(name="viewer", permissions=[])],
            total_entries=1,
            limit=100,
            offset=0,
        )
        mock_roles.get_roles.return_value = dummy
        with as_user():
            resp = test_client.get("/fab/v1/roles")
        assert resp.status_code == 200
        assert resp.json() == dummy.model_dump(by_alias=True)
        mock_roles.get_roles.assert_called_once_with(order_by="name", limit=100, offset=0)

    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    @patch("airflow.providers.fab.auth_manager.api_fastapi.parameters.conf")
    def test_get_roles_passes_params_and_clamps_limit(
        self,
        conf_mock,
        mock_get_application_builder,
        mock_get_auth_manager,
        mock_roles,
        test_client,
        as_user,
    ):
        conf_mock.getint.side_effect = lambda section, option: {
            "maximum_page_limit": 50,
            "fallback_page_limit": 20,
        }[option]
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        dummy = RoleCollectionResponse(roles=[], total_entries=0, limit=50, offset=7)
        mock_roles.get_roles.return_value = dummy
        with as_user():
            resp = test_client.get("/fab/v1/roles", params={"order_by": "-name", "limit": 1000, "offset": 7})
        assert resp.status_code == 200
        assert resp.json() == dummy.model_dump(by_alias=True)
        mock_roles.get_roles.assert_called_once_with(order_by="-name", limit=50, offset=7)

    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    @patch("airflow.providers.fab.auth_manager.api_fastapi.parameters.conf")
    def test_get_roles_uses_fallback_when_limit_zero(
        self,
        conf_mock,
        mock_get_application_builder,
        mock_get_auth_manager,
        mock_roles,
        test_client,
        as_user,
    ):
        conf_mock.getint.side_effect = lambda section, option: {
            "maximum_page_limit": 100,
            "fallback_page_limit": 33,
        }[option]
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        dummy = RoleCollectionResponse(roles=[], total_entries=0, limit=33, offset=0)
        mock_roles.get_roles.return_value = dummy
        with as_user():
            resp = test_client.get("/fab/v1/roles", params={"limit": 0})
        assert resp.status_code == 200
        assert resp.json() == dummy.model_dump(by_alias=True)
        mock_roles.get_roles.assert_called_once_with(order_by="name", limit=33, offset=0)

    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_get_roles_forbidden(
        self, mock_get_application_builder, mock_get_auth_manager, mock_roles, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = False
        mock_get_auth_manager.return_value = mgr
        with as_user():
            resp = test_client.get("/fab/v1/roles")
        assert resp.status_code == 403
        mock_roles.get_roles.assert_not_called()

    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_get_roles_validation_422_negative_offset(
        self, mock_get_application_builder, mock_get_auth_manager, mock_roles, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        with as_user():
            resp = test_client.get("/fab/v1/roles", params={"offset": -1})
        assert resp.status_code == 422
        mock_roles.get_roles.assert_not_called()

    # DELETE /roles/{name}
    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_delete_role(
        self, mock_get_application_builder, mock_get_auth_manager, mock_roles, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_roles.delete_role.return_value = None
        mock_get_auth_manager.return_value = mgr
        with as_user():
            resp = test_client.delete("/fab/v1/roles/roleA")
        assert resp.status_code == 200
        mock_roles.delete_role.assert_called_once_with(name="roleA")

    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_delete_role_forbidden(
        self, mock_get_application_builder, mock_get_auth_manager, mock_roles, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = False
        mock_get_auth_manager.return_value = mgr
        with as_user():
            resp = test_client.delete("/fab/v1/roles/roleA")
        assert resp.status_code == 403
        mock_roles.delete_role.assert_not_called()

    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_delete_role_validation_404_not_found(
        self, mock_get_application_builder, mock_get_auth_manager, mock_roles, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        # The service raising HTTPException must propagate to the response status.
        mock_roles.delete_role.side_effect = HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Role with name 'non_existent_role' does not exist.",
        )
        with as_user():
            resp = test_client.delete("/fab/v1/roles/non_existent_role")
        assert resp.status_code == 404
        mock_roles.delete_role.assert_called_once_with(name="non_existent_role")

    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_delete_role_validation_404_empty_name(
        self, mock_get_application_builder, mock_get_auth_manager, mock_roles, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        with as_user():
            resp = test_client.delete("/fab/v1/roles/")
        assert resp.status_code == 404
        mock_roles.delete_role.assert_not_called()

    # GET /roles/{name}
    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_get_role(
        self, mock_get_application_builder, mock_get_auth_manager, mock_roles, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        dummy_out = RoleResponse(name="roleA", permissions=[])
        mock_roles.get_role.return_value = dummy_out
        with as_user():
            resp = test_client.get("/fab/v1/roles/roleA")
        assert resp.status_code == 200
        assert resp.json() == dummy_out.model_dump(by_alias=True)
        mock_roles.get_role.assert_called_once_with(name="roleA")

    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_get_role_forbidden(
        self, mock_get_application_builder, mock_get_auth_manager, mock_roles, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = False
        mock_get_auth_manager.return_value = mgr
        with as_user():
            resp = test_client.get("/fab/v1/roles/roleA")
        assert resp.status_code == 403
        mock_roles.get_role.assert_not_called()

    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_get_role_validation_404_not_found(
        self, mock_get_application_builder, mock_get_auth_manager, mock_roles, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        mock_roles.get_role.side_effect = HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Role with name 'non_existent_role' does not exist.",
        )
        with as_user():
            resp = test_client.get("/fab/v1/roles/non_existent_role")
        assert resp.status_code == 404
        mock_roles.get_role.assert_called_once_with(name="non_existent_role")

    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_get_role_validation_404_empty_name(
        self, mock_get_application_builder, mock_get_auth_manager, mock_roles, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        with as_user():
            resp = test_client.get("/fab/v1/roles/")
        assert resp.status_code == 404
        mock_roles.get_role.assert_not_called()

    # PATCH /roles/{name}
    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_patch_role(
        self, mock_get_application_builder, mock_get_auth_manager, mock_roles, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        dummy_out = RoleResponse(name="roleA", permissions=[])
        mock_roles.patch_role.return_value = dummy_out
        with as_user():
            resp = test_client.patch("/fab/v1/roles/roleA", json={"name": "roleA", "actions": []})
        assert resp.status_code == 200
        assert resp.json() == dummy_out.model_dump(by_alias=True)
        mock_roles.patch_role.assert_called_once_with(name="roleA", body=ANY, update_mask=None)

    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_patch_role_with_update_mask(
        self, mock_get_application_builder, mock_get_auth_manager, mock_roles, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        dummy_out = RoleResponse(name="roleA", permissions=[])
        mock_roles.patch_role.return_value = dummy_out
        with as_user():
            resp = test_client.patch(
                "/fab/v1/roles/roleA",
                json={"name": "roleA", "actions": []},
                params={"update_mask": "name,actions"},
            )
        assert resp.status_code == 200
        assert resp.json() == dummy_out.model_dump(by_alias=True)
        mock_roles.patch_role.assert_called_once_with(name="roleA", body=ANY, update_mask="name,actions")

    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_path_role_forbidden(
        self, mock_get_application_builder, mock_get_auth_manager, mock_roles, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = False
        mock_get_auth_manager.return_value = mgr
        with as_user():
            resp = test_client.patch("/fab/v1/roles/roleA", json={"name": "roleA", "actions": []})
        assert resp.status_code == 403
        mock_roles.patch_role.assert_not_called()

    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_patch_role_validation_404_not_found(
        self, mock_get_application_builder, mock_get_auth_manager, mock_roles, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        mock_roles.patch_role.side_effect = HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Role with name 'non_existent_role' does not exist.",
        )
        with as_user():
            resp = test_client.patch(
                "/fab/v1/roles/non_existent_role", json={"name": "non_existent_role", "actions": []}
            )
        assert resp.status_code == 404
        mock_roles.patch_role.assert_called_once_with(
            name="non_existent_role", body=ANY, update_mask=None
        )

    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_patch_role_validation_404_empty_name(
        self, mock_get_application_builder, mock_get_auth_manager, mock_roles, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        mock_roles.patch_role.side_effect = HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Role with name 'non_existent_role' does not exist.",
        )
        with as_user():
            resp = test_client.patch("/fab/v1/roles/", json={"name": "non_existent_role", "actions": []})
        assert resp.status_code == 404

    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_path_role_unknown_update_mask(
        self, mock_get_application_builder, mock_get_auth_manager, mock_roles, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        mock_roles.patch_role.side_effect = HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="unknown_field in update_mask is unknown",
        )
        with as_user():
            resp = test_client.patch(
                "/fab/v1/roles/roleA",
                json={"name": "roleA", "actions": []},
                params={"update_mask": "unknown_field"},
            )
        assert resp.status_code == 400
        mock_roles.patch_role.assert_called_once_with(name="roleA", body=ANY, update_mask="unknown_field")

    # GET /permissions
    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_get_permissions_success(
        self, mock_get_application_builder, mock_get_auth_manager, mock_permissions, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        dummy = PermissionCollectionResponse(
            permissions=[ActionResource(action=Action(name="can_read"), resource=Resource(name="DAG"))],
            total_entries=1,
        )
        mock_permissions.get_permissions.return_value = dummy
        with as_user():
            resp = test_client.get("/fab/v1/permissions")
        assert resp.status_code == 200
        assert resp.json() == dummy.model_dump(by_alias=True)
        mock_permissions.get_permissions.assert_called_once_with(limit=100, offset=0)

    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.roles.FABAuthManagerRoles")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.roles.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_get_permissions_forbidden(
        self, mock_get_application_builder, mock_get_auth_manager, mock_permissions, test_client, as_user
    ):
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = False
        mock_get_auth_manager.return_value = mgr
        with as_user():
            resp = test_client.get("/fab/v1/permissions")
        assert resp.status_code == 403
        mock_permissions.get_permissions.assert_not_called()
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/tests/unit/fab/auth_manager/api_fastapi/routes/test_roles.py",
"license": "Apache License 2.0",
"lines": 531,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/fab/tests/unit/fab/auth_manager/api_fastapi/services/test_roles.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import types
from unittest.mock import MagicMock, patch
import pytest
from fastapi import HTTPException
from sqlalchemy import column
from airflow.providers.fab.auth_manager.api_fastapi.datamodels.roles import (
Action,
ActionResource,
PermissionCollectionResponse,
Resource,
)
from airflow.providers.fab.auth_manager.api_fastapi.services.roles import (
FABAuthManagerRoles,
)
@pytest.fixture
def fab_auth_manager():
    """Bare mock standing in for the FAB auth manager."""
    mock_manager = MagicMock()
    return mock_manager
@pytest.fixture
def security_manager():
    """Mock security manager that resolves only a fixed set of known actions/resources."""
    known_actions = {"can_read", "can_edit"}
    known_resources = {"DAG"}
    manager = MagicMock()
    # Unknown names resolve to None, mirroring the real FAB security manager.
    manager.get_action.side_effect = lambda n: object() if n in known_actions else None
    manager.get_resource.side_effect = lambda n: object() if n in known_resources else None
    return manager
def _make_role_obj(name: str, perms: list[tuple[str, str]]):
perm_objs = [
types.SimpleNamespace(
action=types.SimpleNamespace(name=a),
resource=types.SimpleNamespace(name=r),
)
for (a, r) in perms
]
return types.SimpleNamespace(id=1, name=name, permissions=perm_objs)
class _FakeScalarCount:
def __init__(self, value: int):
self._value = value
def one(self) -> int:
return self._value
class _FakeScalarRoles:
def __init__(self, items):
self._items = items
self._unique_called = False
def unique(self):
self._unique_called = True
return self
def all(self):
return self._items
@patch("airflow.providers.fab.auth_manager.api_fastapi.services.roles.get_fab_auth_manager")
class TestRolesService:
    """Unit tests for FABAuthManagerRoles: create/get/patch/delete roles and list permissions."""

    def setup_method(self):
        # Request bodies reused across the create-role tests: one valid, one
        # with an unknown action name, one with an unknown resource name.
        self.body_ok = types.SimpleNamespace(
            name="roleA",
            permissions=[
                types.SimpleNamespace(
                    action=types.SimpleNamespace(name="can_read"),
                    resource=types.SimpleNamespace(name="DAG"),
                )
            ],
        )
        self.body_bad_action = types.SimpleNamespace(
            name="roleB",
            permissions=[
                types.SimpleNamespace(
                    action=types.SimpleNamespace(name="no_such_action"),
                    resource=types.SimpleNamespace(name="DAG"),
                )
            ],
        )
        self.body_bad_resource = types.SimpleNamespace(
            name="roleC",
            permissions=[
                types.SimpleNamespace(
                    action=types.SimpleNamespace(name="can_read"),
                    resource=types.SimpleNamespace(name="NOPE"),
                )
            ],
        )

    # POST /roles
    def test_create_role_success(self, get_fab_auth_manager, fab_auth_manager, security_manager):
        """A new role is created, synced via bulk_sync_roles, and returned with its permissions."""
        # find_role is consulted twice: the pre-create conflict check (None)
        # and the re-fetch of the freshly created role.
        security_manager.find_role.side_effect = [
            None,
            _make_role_obj("roleA", [("can_read", "DAG")]),
        ]
        fab_auth_manager.security_manager = security_manager
        get_fab_auth_manager.return_value = fab_auth_manager
        out = FABAuthManagerRoles.create_role(self.body_ok)
        assert out.name == "roleA"
        assert out.permissions
        assert out.permissions[0].action.name == "can_read"
        assert out.permissions[0].resource.name == "DAG"
        security_manager.bulk_sync_roles.assert_called_once_with(
            [{"role": "roleA", "perms": [("can_read", "DAG")]}]
        )

    def test_create_role_conflict(self, get_fab_auth_manager, fab_auth_manager, security_manager):
        """Creating a role whose name already exists yields HTTP 409."""
        security_manager.find_role.return_value = object()
        fab_auth_manager.security_manager = security_manager
        get_fab_auth_manager.return_value = fab_auth_manager
        with pytest.raises(HTTPException) as ex:
            FABAuthManagerRoles.create_role(self.body_ok)
        assert ex.value.status_code == 409

    def test_create_role_action_not_found(self, get_fab_auth_manager, fab_auth_manager, security_manager):
        """An unknown action name in the body yields HTTP 400 mentioning 'action'."""
        security_manager.find_role.return_value = None
        fab_auth_manager.security_manager = security_manager
        get_fab_auth_manager.return_value = fab_auth_manager
        with pytest.raises(HTTPException) as ex:
            FABAuthManagerRoles.create_role(self.body_bad_action)
        assert ex.value.status_code == 400
        assert "action" in ex.value.detail

    def test_create_role_resource_not_found(self, get_fab_auth_manager, fab_auth_manager, security_manager):
        """An unknown resource name in the body yields HTTP 400 mentioning 'resource'."""
        security_manager.find_role.return_value = None
        fab_auth_manager.security_manager = security_manager
        get_fab_auth_manager.return_value = fab_auth_manager
        with pytest.raises(HTTPException) as ex:
            FABAuthManagerRoles.create_role(self.body_bad_resource)
        assert ex.value.status_code == 400
        assert "resource" in ex.value.detail

    def test_create_role_unexpected_no_created(
        self, get_fab_auth_manager, fab_auth_manager, security_manager
    ):
        """If the role cannot be re-fetched after creation, the service fails with HTTP 500."""
        # Both lookups return None: no conflict, but the created role is also missing.
        security_manager.find_role.side_effect = [None, None]
        fab_auth_manager.security_manager = security_manager
        get_fab_auth_manager.return_value = fab_auth_manager
        with pytest.raises(HTTPException) as ex:
            FABAuthManagerRoles.create_role(self.body_ok)
        assert ex.value.status_code == 500

    # GET /roles
    @patch("airflow.providers.fab.auth_manager.api_fastapi.services.roles.build_ordering")
    def test_get_roles_happy_path(self, build_ordering, get_fab_auth_manager):
        """Roles are listed with a total count, applied ordering, and de-duplicated rows."""
        role1 = _make_role_obj("viewer", [("can_read", "DAG")])
        role2 = _make_role_obj("admin", [("can_read", "DAG")])
        fake_roles_result = _FakeScalarRoles([role1, role2])
        session = MagicMock()
        # First scalars() call returns the count result, the second the role rows.
        session.scalars.side_effect = [
            _FakeScalarCount(2),
            fake_roles_result,
        ]
        fab_auth_manager = MagicMock()
        fab_auth_manager.security_manager = MagicMock(session=session)
        get_fab_auth_manager.return_value = fab_auth_manager
        build_ordering.return_value = column("name").desc()
        out = FABAuthManagerRoles.get_roles(order_by="-name", limit=5, offset=3)
        assert out.total_entries == 2
        assert [r.name for r in out.roles] == ["viewer", "admin"]
        # The service must call .unique() on the roles result to drop join duplicates.
        assert fake_roles_result._unique_called is True
        build_ordering.assert_called_once()
        args, kwargs = build_ordering.call_args
        assert args[0] == "-name"
        assert set(kwargs["allowed"].keys()) == {"name", "role_id"}
        assert session.scalars.call_count == 2

    @patch("airflow.providers.fab.auth_manager.api_fastapi.services.roles.build_ordering")
    def test_get_roles_invalid_order_by_bubbles_400(self, build_ordering, get_fab_auth_manager):
        """A rejected order_by from build_ordering propagates as HTTP 400."""
        session = MagicMock()
        fab_auth_manager = MagicMock()
        fab_auth_manager.security_manager = MagicMock(session=session)
        get_fab_auth_manager.return_value = fab_auth_manager
        build_ordering.side_effect = HTTPException(status_code=400, detail="disallowed")
        with pytest.raises(HTTPException) as ex:
            FABAuthManagerRoles.get_roles(order_by="nope", limit=10, offset=0)
        assert ex.value.status_code == 400

    # DELETE /roles/{name}
    def test_delete_role_success(self, get_fab_auth_manager, fab_auth_manager, security_manager):
        """An existing role is deleted through the security manager."""
        security_manager.find_role.return_value = _make_role_obj("roleA", [])
        fab_auth_manager.security_manager = security_manager
        get_fab_auth_manager.return_value = fab_auth_manager
        FABAuthManagerRoles.delete_role(name="roleA")
        security_manager.delete_role.assert_called_once()

    def test_delete_role_not_found(self, get_fab_auth_manager, fab_auth_manager, security_manager):
        """Deleting a missing role yields HTTP 404."""
        security_manager.find_role.return_value = None
        fab_auth_manager.security_manager = security_manager
        get_fab_auth_manager.return_value = fab_auth_manager
        with pytest.raises(HTTPException) as ex:
            FABAuthManagerRoles.delete_role(name="roleA")
        assert ex.value.status_code == 404

    # GET /roles/{name}
    def test_get_role_success(self, get_fab_auth_manager, fab_auth_manager, security_manager):
        """An existing role is returned with its permissions."""
        security_manager.find_role.return_value = _make_role_obj("roleA", [("can_read", "DAG")])
        fab_auth_manager.security_manager = security_manager
        get_fab_auth_manager.return_value = fab_auth_manager
        out = FABAuthManagerRoles.get_role(name="roleA")
        assert out.name == "roleA"
        assert out.permissions
        assert out.permissions[0].action.name == "can_read"
        assert out.permissions[0].resource.name == "DAG"

    def test_get_role_not_found(self, get_fab_auth_manager, fab_auth_manager, security_manager):
        """Fetching a missing role yields HTTP 404."""
        security_manager.find_role.return_value = None
        fab_auth_manager.security_manager = security_manager
        get_fab_auth_manager.return_value = fab_auth_manager
        with pytest.raises(HTTPException) as ex:
            FABAuthManagerRoles.get_role(name="roleA")
        assert ex.value.status_code == 404

    # PATCH /roles/{name}
    def test_patch_role_success(self, get_fab_auth_manager, fab_auth_manager, security_manager):
        """Without an update mask, both the name and the permissions are replaced."""
        role = _make_role_obj("viewer", [("can_read", "DAG")])
        security_manager.find_role.return_value = role
        fab_auth_manager.security_manager = security_manager
        get_fab_auth_manager.return_value = fab_auth_manager
        body = types.SimpleNamespace(
            name="viewer",
            permissions=[
                types.SimpleNamespace(
                    action=types.SimpleNamespace(name="can_edit"),
                    resource=types.SimpleNamespace(name="DAG"),
                )
            ],
        )
        out = FABAuthManagerRoles.patch_role(body=body, name="viewer")
        assert out.name == "viewer"
        assert out.permissions
        assert out.permissions[0].action.name == "can_edit"
        assert out.permissions[0].resource.name == "DAG"

    def test_patch_role_rename_success(self, get_fab_auth_manager, fab_auth_manager, security_manager):
        """Patching with a different body name renames the role."""
        role = _make_role_obj("viewer", [("can_edit", "DAG")])
        security_manager.find_role.return_value = role
        fab_auth_manager.security_manager = security_manager
        get_fab_auth_manager.return_value = fab_auth_manager
        body = types.SimpleNamespace(
            name="editor",
            permissions=[
                types.SimpleNamespace(
                    action=types.SimpleNamespace(name="can_edit"),
                    resource=types.SimpleNamespace(name="DAG"),
                )
            ],
        )
        out = FABAuthManagerRoles.patch_role(body=body, name="viewer")
        assert out.name == "editor"
        assert out.permissions
        assert out.permissions[0].action.name == "can_edit"
        assert out.permissions[0].resource.name == "DAG"

    def test_patch_role_with_update_mask(self, get_fab_auth_manager, fab_auth_manager, security_manager):
        """update_mask='actions' updates permissions but leaves the name untouched."""
        role = _make_role_obj("viewer", [("can_read", "DAG")])
        security_manager.find_role.return_value = role
        fab_auth_manager.security_manager = security_manager
        get_fab_auth_manager.return_value = fab_auth_manager
        body = types.SimpleNamespace(
            name="viewer1",
            permissions=[
                types.SimpleNamespace(
                    action=types.SimpleNamespace(name="can_edit"),
                    resource=types.SimpleNamespace(name="DAG"),
                )
            ],
        )
        out = FABAuthManagerRoles.patch_role(
            body=body,
            name="viewer",
            update_mask="actions",
        )
        # Name stays "viewer" despite body.name == "viewer1".
        assert out.name == "viewer"
        assert out.permissions
        assert out.permissions[0].action.name == "can_edit"
        assert out.permissions[0].resource.name == "DAG"

    def test_patch_role_rename_with_update_mask(
        self, get_fab_auth_manager, fab_auth_manager, security_manager
    ):
        """update_mask='name' renames the role but leaves permissions untouched."""
        role = _make_role_obj("viewer", [("can_read", "DAG")])
        security_manager.find_role.return_value = role
        fab_auth_manager.security_manager = security_manager
        get_fab_auth_manager.return_value = fab_auth_manager
        body = types.SimpleNamespace(
            name="viewer1",
            permissions=[
                types.SimpleNamespace(
                    action=types.SimpleNamespace(name="can_edit"),
                    resource=types.SimpleNamespace(name="DAG"),
                )
            ],
        )
        out = FABAuthManagerRoles.patch_role(
            body=body,
            name="viewer",
            update_mask="name",
        )
        # Permissions keep the original can_read despite body carrying can_edit.
        assert out.name == "viewer1"
        assert out.permissions
        assert out.permissions[0].action.name == "can_read"
        assert out.permissions[0].resource.name == "DAG"

    def test_patch_role_not_found(self, get_fab_auth_manager, fab_auth_manager, security_manager):
        """Patching a missing role yields HTTP 404."""
        security_manager.find_role.return_value = None
        fab_auth_manager.security_manager = security_manager
        get_fab_auth_manager.return_value = fab_auth_manager
        body = types.SimpleNamespace(
            name="viewer",
            permissions=[
                types.SimpleNamespace(
                    action=types.SimpleNamespace(name="can_edit"),
                    resource=types.SimpleNamespace(name="DAG"),
                )
            ],
        )
        with pytest.raises(HTTPException) as ex:
            FABAuthManagerRoles.patch_role(body=body, name="viewer")
        assert ex.value.status_code == 404

    def test_get_permissions_success(self, get_fab_auth_manager):
        """Permissions are listed with their total count."""
        session = MagicMock()
        perm_obj = types.SimpleNamespace(
            action=types.SimpleNamespace(name="can_read"),
            resource=types.SimpleNamespace(name="DAG"),
        )
        # First scalars() call returns the count, second the permission rows.
        session.scalars.side_effect = [
            types.SimpleNamespace(one=lambda: 1),
            types.SimpleNamespace(all=lambda: [perm_obj]),
        ]
        fab_auth_manager = MagicMock()
        fab_auth_manager.security_manager = MagicMock(session=session)
        get_fab_auth_manager.return_value = fab_auth_manager
        out = FABAuthManagerRoles.get_permissions(limit=10, offset=0)
        assert isinstance(out, PermissionCollectionResponse)
        assert out.total_entries == 1
        assert len(out.permissions) == 1
        assert out.permissions[0] == ActionResource(
            action=Action(name="can_read"), resource=Resource(name="DAG")
        )

    def test_get_permissions_empty(self, get_fab_auth_manager):
        """An empty permission table yields an empty collection response."""
        session = MagicMock()
        session.scalars.side_effect = [
            types.SimpleNamespace(one=lambda: 0),
            types.SimpleNamespace(all=lambda: []),
        ]
        fab_auth_manager = MagicMock()
        fab_auth_manager.security_manager = MagicMock(session=session)
        get_fab_auth_manager.return_value = fab_auth_manager
        out = FABAuthManagerRoles.get_permissions(limit=10, offset=0)
        assert out.total_entries == 0
        assert out.permissions == []

    def test_get_permissions_with_multiple(self, get_fab_auth_manager):
        """Multiple permissions are returned in the order the query yields them."""
        session = MagicMock()
        perm_objs = [
            types.SimpleNamespace(
                action=types.SimpleNamespace(name="can_read"),
                resource=types.SimpleNamespace(name="DAG"),
            ),
            types.SimpleNamespace(
                action=types.SimpleNamespace(name="can_edit"),
                resource=types.SimpleNamespace(name="DAG"),
            ),
        ]
        session.scalars.side_effect = [
            types.SimpleNamespace(one=lambda: 2),
            types.SimpleNamespace(all=lambda: perm_objs),
        ]
        fab_auth_manager = MagicMock()
        fab_auth_manager.security_manager = MagicMock(session=session)
        get_fab_auth_manager.return_value = fab_auth_manager
        out = FABAuthManagerRoles.get_permissions(limit=10, offset=0)
        assert isinstance(out, PermissionCollectionResponse)
        assert out.total_entries == 2
        assert len(out.permissions) == 2
        assert out.permissions[0] == ActionResource(
            action=Action(name="can_read"), resource=Resource(name="DAG")
        )
        assert out.permissions[1] == ActionResource(
            action=Action(name="can_edit"), resource=Resource(name="DAG")
        )
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/tests/unit/fab/auth_manager/api_fastapi/services/test_roles.py",
"license": "Apache License 2.0",
"lines": 370,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/fab/tests/unit/fab/auth_manager/api_fastapi/test_security.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import MagicMock, patch
import pytest
from fastapi import HTTPException
from airflow.providers.fab.auth_manager.api_fastapi.security import requires_fab_custom_view
class TestSecurityDependency:
    """Tests for the requires_fab_custom_view FastAPI dependency factory."""

    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    def test_requires_fab_custom_view_allows_when_authorized(self, get_auth_manager):
        """An authorized user passes the check and the auth manager is queried once."""
        auth_manager = MagicMock()
        auth_manager.is_authorized_custom_view.return_value = True
        get_auth_manager.return_value = auth_manager

        checker = requires_fab_custom_view(method="POST", resource_name="Role")
        request_user = object()

        assert checker(user=request_user) is None
        auth_manager.is_authorized_custom_view.assert_called_once_with(
            method="POST", resource_name="Role", user=request_user
        )

    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    def test_requires_fab_custom_view_raises_403_when_unauthorized(self, get_auth_manager):
        """A denied authorization check surfaces as HTTP 403."""
        auth_manager = MagicMock()
        auth_manager.is_authorized_custom_view.return_value = False
        get_auth_manager.return_value = auth_manager

        checker = requires_fab_custom_view(method="DELETE", resource_name="Role")
        with pytest.raises(HTTPException) as ex:
            checker(user=object())
        assert ex.value.status_code == 403
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/tests/unit/fab/auth_manager/api_fastapi/test_security.py",
"license": "Apache License 2.0",
"lines": 40,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/utils/memray_utils.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from collections.abc import Callable
from enum import Enum
from functools import wraps
from typing import ParamSpec, TypeVar
import structlog
from airflow.configuration import AIRFLOW_HOME, conf
# Type variables for preserving function signatures
PS = ParamSpec("PS")
RT = TypeVar("RT")

# Module-level structlog logger named after this module.
log = structlog.get_logger(logger_name=__name__)
class MemrayTraceComponents(Enum):
    """Possible airflow components can apply memray trace."""

    # Member values double as the tokens read from the
    # [profiling] memray_trace_components option (via conf.getenumlist)
    # and as the "<component>_memory.bin" profile-file prefix.
    scheduler = "scheduler"
    dag_processor = "dag_processor"
    api = "api"
def enable_memray_trace(component: MemrayTraceComponents) -> Callable[[Callable[PS, RT]], Callable[PS, RT]]:
    """
    Conditionally track memory using memray based on configuration.

    The wrapped callable runs under a ``memray.Tracker`` writing to
    ``{AIRFLOW_HOME}/{component}_memory.bin`` when *component* is listed in the
    ``[profiling] memray_trace_components`` option; otherwise (or when memray
    is not installed) the callable runs unchanged.

    Args:
        component: Enum value of the component for configuration lookup
    """

    def decorator(func: Callable[PS, RT]) -> Callable[PS, RT]:
        @wraps(func)
        def wrapper(*args: PS.args, **kwargs: PS.kwargs) -> RT:
            # Read the config at call time so tests/operators can toggle it.
            _memray_trace_components = conf.getenumlist(
                "profiling", "memray_trace_components", MemrayTraceComponents
            )
            if component not in _memray_trace_components:
                return func(*args, **kwargs)
            # Guard ONLY the import. Previously the traced func call itself sat
            # inside this try-block, so an ImportError raised *by func* whose
            # message mentioned "memray" was swallowed and func was executed a
            # second time by the fallback path.
            try:
                import memray
            except ImportError as error:
                if "memray" in str(error):
                    # Silently fall back to running without tracking
                    log.warning(
                        "ImportError memray.Tracker: %s in %s, please check the memray is installed",
                        error.msg,
                        component.value,
                    )
                    return func(*args, **kwargs)
                raise error
            profile_path = f"{AIRFLOW_HOME}/{component.value}_memory.bin"
            with memray.Tracker(
                profile_path,
            ):
                log.info("Memray tracing enabled for %s. Output: %s", component.value, profile_path)
                return func(*args, **kwargs)

        return wrapper

    return decorator
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/utils/memray_utils.py",
"license": "Apache License 2.0",
"lines": 67,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/tests/unit/utils/test_memray_utils.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import MagicMock, Mock, patch
from airflow.configuration import AIRFLOW_HOME, conf
from airflow.utils.memray_utils import MemrayTraceComponents, enable_memray_trace
from tests_common.test_utils.config import conf_vars
class TestEnableMemrayTrackDecorator:
    """Test suite for enable_memray_trace decorator functionality."""

    def setup_method(self):
        # Target callable wrapped by the decorator in each test.
        self.mock_function = Mock(return_value="test_result")
        self.mock_function.__name__ = "mock_function"
        # Set up memray module mock
        self.mock_memray_module = MagicMock()
        self.mock_tracker = MagicMock()
        self.mock_memray_module.Tracker.return_value = self.mock_tracker
        # Configure tracker as context manager
        self.mock_tracker.__enter__ = Mock(return_value=self.mock_tracker)
        self.mock_tracker.__exit__ = Mock(return_value=None)
        # Start patching memray module
        self.memray_patcher = patch.dict("sys.modules", {"memray": self.mock_memray_module})
        self.memray_patcher.start()

    def teardown_method(self):
        # Undo the sys.modules patch started in setup_method.
        self.memray_patcher.stop()

    @conf_vars({("profiling", "memray_trace_components"): "api,dag_processor"})
    def test_memray_config(self):
        """The comma-separated config option parses into the matching enum members."""
        _memray_trace_components = conf.getenumlist(
            "profiling", "memray_trace_components", MemrayTraceComponents
        )
        assert _memray_trace_components == [
            MemrayTraceComponents.api,
            MemrayTraceComponents.dag_processor,
        ]

    def test_memray_not_used_when_default_trace_component(self):
        """
        Verify that memray is not imported or used when memray_trace_components is default (blank).
        """
        import builtins

        original_import = builtins.__import__
        import_attempts = []

        # Spy on __import__ so any attempt to import memray is recorded.
        def track_imports(name, *args, **kwargs):
            import_attempts.append(name)
            return original_import(name, *args, **kwargs)

        with patch("builtins.__import__", side_effect=track_imports):
            decorated_function = enable_memray_trace(MemrayTraceComponents.scheduler)(self.mock_function)
            result = decorated_function("arg1", kwarg="value")
        assert "memray" not in import_attempts, "memray should not be imported when tracking is disabled"
        self.mock_memray_module.Tracker.assert_not_called()
        self.mock_tracker.__enter__.assert_not_called()
        self.mock_tracker.__exit__.assert_not_called()
        self.mock_function.assert_called_once_with("arg1", kwarg="value")
        assert result == "test_result"

    @conf_vars({("profiling", "memray_trace_components"): "scheduler,dag_processor"})
    def test_memray_not_used_when_not_in_trace_component(self):
        """
        Verify that memray is not imported or used when the component is not in memray_trace_components.
        """
        import builtins

        original_import = builtins.__import__
        import_attempts = []

        # Spy on __import__ so any attempt to import memray is recorded.
        def track_imports(name, *args, **kwargs):
            import_attempts.append(name)
            return original_import(name, *args, **kwargs)

        with patch("builtins.__import__", side_effect=track_imports):
            # "api" is deliberately absent from the configured component list above.
            decorated_function = enable_memray_trace(MemrayTraceComponents.api)(self.mock_function)
            result = decorated_function("arg1", kwarg="value")
        assert "memray" not in import_attempts, "memray should not be imported when tracking is disabled"
        self.mock_memray_module.Tracker.assert_not_called()
        self.mock_tracker.__enter__.assert_not_called()
        self.mock_tracker.__exit__.assert_not_called()
        self.mock_function.assert_called_once_with("arg1", kwarg="value")
        assert result == "test_result"

    @conf_vars({("profiling", "memray_trace_components"): "scheduler,api,dag_processor"})
    def test_memray_tracker_activated_when_enabled(self):
        """
        Verify that memray.Tracker is properly used when tracking is enabled.
        """
        decorated_function = enable_memray_trace(MemrayTraceComponents.scheduler)(self.mock_function)
        result = decorated_function("arg1", "arg2", kwarg1="value1")
        # The tracker must be opened with the component-specific profile path,
        # entered, wrap the call, and be exited again.
        expected_profile_path = f"{AIRFLOW_HOME}/{MemrayTraceComponents.scheduler.value}_memory.bin"
        self.mock_memray_module.Tracker.assert_called_once_with(expected_profile_path)
        self.mock_tracker.__enter__.assert_called_once()
        self.mock_function.assert_called_once_with("arg1", "arg2", kwarg1="value1")
        self.mock_tracker.__exit__.assert_called_once()
        assert result == "test_result"

    @conf_vars({("profiling", "memray_trace_components"): "scheduler,api,dag_processor"})
    def test_function_metadata_preserved_after_decoration(self):
        """
        Verify that decorator preserves original function metadata.
        """

        def sample_function(a: int, b: str = "default") -> str:
            """Sample function with metadata."""
            return f"{a}-{b}"

        decorated_function = enable_memray_trace(MemrayTraceComponents.api)(sample_function)
        # functools.wraps should carry name, docstring, and annotations over.
        assert decorated_function.__name__ == "sample_function"
        assert decorated_function.__doc__ == "Sample function with metadata."
        if hasattr(sample_function, "__annotations__"):
            assert decorated_function.__annotations__ == sample_function.__annotations__
class TestEnableMemrayTrackErrorHandling:
    """Test suite for error handling in enable_memray_trace decorator."""

    def setup_method(self):
        self.mock_function = Mock(return_value="test_result")
        self.mock_function.__name__ = "mock_function"

    @conf_vars({("profiling", "memray_trace_components"): "scheduler,api,dag_processor"})
    def test_graceful_fallback_on_memray_import_error(self):
        """
        Verify graceful degradation when memray module is unavailable.
        """
        # Mapping memray to None in sys.modules makes "import memray" raise ImportError.
        with patch.dict("sys.modules", {"memray": None}):
            wrapped = enable_memray_trace(MemrayTraceComponents.dag_processor)(self.mock_function)
            outcome = wrapped("arg1")
        self.mock_function.assert_called_once_with("arg1")
        assert outcome == "test_result"
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/utils/test_memray_utils.py",
"license": "Apache License 2.0",
"lines": 128,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/http/src/airflow/providers/http/notifications/http.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from functools import cached_property
from typing import TYPE_CHECKING, Any
import aiohttp
from airflow.providers.common.compat.notifier import BaseNotifier
from airflow.providers.http.hooks.http import HttpAsyncHook, HttpHook
if TYPE_CHECKING:
from airflow.sdk.definitions.context import Context
class HttpNotifier(BaseNotifier):
    """
    HTTP Notifier.

    Sends HTTP requests to notify external systems, with both a synchronous
    and an asynchronous delivery path.

    :param http_conn_id: HTTP connection id that has the base URL and optional authentication credentials.
    :param endpoint: The endpoint to be called i.e. resource/v1/query?
    :param method: The HTTP method to use. Defaults to POST.
    :param data: Payload to be uploaded or request parameters
    :param json: JSON payload to be uploaded
    :param headers: Additional headers to be passed through as a dictionary
    :param extra_options: Additional options to be used when executing the request
    """

    template_fields = ("http_conn_id", "endpoint", "data", "json", "headers", "extra_options")

    def __init__(
        self,
        *,
        http_conn_id: str = HttpHook.default_conn_name,
        endpoint: str | None = None,
        method: str = "POST",
        data: dict[str, Any] | str | None = None,
        json: dict[str, Any] | str | None = None,
        headers: dict[str, Any] | None = None,
        extra_options: dict[str, Any] | None = None,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.http_conn_id = http_conn_id
        self.endpoint = endpoint
        self.method = method
        self.data = data
        self.json = json
        self.headers = headers
        # Normalise the default so hook calls always receive a dict.
        self.extra_options = extra_options or {}

    @cached_property
    def hook(self) -> HttpHook:
        """Synchronous HTTP hook bound to the configured connection."""
        return HttpHook(method=self.method, http_conn_id=self.http_conn_id)

    @cached_property
    def async_hook(self) -> HttpAsyncHook:
        """Asynchronous HTTP hook bound to the configured connection."""
        return HttpAsyncHook(method=self.method, http_conn_id=self.http_conn_id)

    def _request_kwargs(self) -> dict[str, Any]:
        """Collect the per-request parameters shared by both delivery paths."""
        return {
            "endpoint": self.endpoint,
            "data": self.data,
            "headers": self.headers,
            "extra_options": self.extra_options,
            "json": self.json,
        }

    def notify(self, context: Context) -> None:
        """Send HTTP notification (sync)."""
        resp = self.hook.run(**self._request_kwargs())
        self.log.debug("HTTP notification sent: %s %s", resp.status_code, resp.url)

    async def async_notify(self, context: Context) -> None:
        """Send HTTP notification (async)."""
        async with aiohttp.ClientSession() as session:
            resp = await self.async_hook.run(session=session, **self._request_kwargs())
            self.log.debug("HTTP notification sent (async): %s %s", resp.status, resp.url)
# Function-style alias so the notifier can also be imported as send_http_notification.
send_http_notification = HttpNotifier
| {
"repo_id": "apache/airflow",
"file_path": "providers/http/src/airflow/providers/http/notifications/http.py",
"license": "Apache License 2.0",
"lines": 88,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/http/tests/unit/http/notifications/test_http.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
import pytest
from airflow.providers.http.notifications.http import HttpNotifier, send_http_notification
class TestHttpNotifier:
    """Unit tests for HttpNotifier and its send_http_notification alias."""

    def test_class_and_notifier_are_same(self):
        # The functional alias must point at the class itself.
        assert send_http_notification is HttpNotifier

    @mock.patch("airflow.providers.http.notifications.http.HttpHook")
    def test_http_notifier(self, mock_http_hook):
        """Sync notify() forwards every request parameter to HttpHook.run."""
        notifier = HttpNotifier(
            http_conn_id="test_conn_id",
            endpoint="/testing",
            method="POST",
            json={"message": "testing"},
            headers={"Content-Type": "application/json"},
        )
        notifier.notify({})
        mock_http_hook.return_value.run.assert_called_once_with(
            endpoint="/testing",
            data=None,
            headers={"Content-Type": "application/json"},
            extra_options={},
            json={"message": "testing"},
        )
        # The hook itself is built from the notifier's method and connection id.
        mock_http_hook.assert_called_once_with(method="POST", http_conn_id="test_conn_id")

    @pytest.mark.asyncio
    @mock.patch("airflow.providers.http.notifications.http.HttpAsyncHook")
    @mock.patch("aiohttp.ClientSession")
    async def test_async_http_notifier(self, mock_session, mock_http_async_hook):
        """Async notify() runs inside an aiohttp session and forwards parameters."""
        mock_hook = mock_http_async_hook.return_value
        mock_hook.run = mock.AsyncMock()
        notifier = HttpNotifier(
            http_conn_id="test_conn_id",
            endpoint="/test",
            method="POST",
            json={"message": "test"},
        )
        await notifier.async_notify({})
        mock_hook.run.assert_called_once_with(
            # The session passed through is the one entered via "async with".
            session=mock_session.return_value.__aenter__.return_value,
            endpoint="/test",
            data=None,
            json={"message": "test"},
            headers=None,
            extra_options={},
        )

    @mock.patch("airflow.providers.http.notifications.http.HttpHook")
    def test_http_notifier_templated(self, mock_http_hook, create_dag_without_db):
        """Template fields render against the notifier context before the request is sent."""
        notifier = HttpNotifier(
            endpoint="/{{ dag.dag_id }}",
            json={"dag_id": "{{ dag.dag_id }}", "user": "{{ username }}"},
        )
        notifier(
            {
                "dag": create_dag_without_db("test_http_notification_templated"),
                "username": "test-user",
            }
        )
        mock_http_hook.return_value.run.assert_called_once_with(
            endpoint="/test_http_notification_templated",
            data=None,
            headers=None,
            extra_options={},
            json={"dag_id": "test_http_notification_templated", "user": "test-user"},
        )
| {
"repo_id": "apache/airflow",
"file_path": "providers/http/tests/unit/http/notifications/test_http.py",
"license": "Apache License 2.0",
"lines": 81,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_10_27.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from cadwyn import ResponseInfo, VersionChange, convert_response_to_previous_version_for
from airflow.api_fastapi.execution_api.datamodels.taskinstance import TIRunContext
class MakeDagRunConfNullable(VersionChange):
    """Make DagRun.conf field nullable to match database schema."""

    description = __doc__
    instructions_to_migrate_to_previous_version = ()

    @convert_response_to_previous_version_for(TIRunContext)  # type: ignore[arg-type]
    def ensure_conf_is_dict_in_dag_run(response: ResponseInfo) -> None:  # type: ignore[misc]
        """Ensure conf is always a dict (never None) in previous versions."""
        # Older API versions guarantee a non-null conf; substitute {} for None.
        dag_run = response.body["dag_run"] if "dag_run" in response.body else None
        if isinstance(dag_run, dict) and dag_run.get("conf") is None:
            dag_run["conf"] = {}
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_10_27.py",
"license": "Apache License 2.0",
"lines": 29,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/google/src/airflow/providers/google/cloud/hooks/gen_ai.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains a Google Cloud GenAI Generative Model hook."""
from __future__ import annotations
import enum
import time
from typing import TYPE_CHECKING, Any
from google import genai
from airflow.providers.google.common.hooks.base_google import (
PROVIDE_PROJECT_ID,
GoogleBaseAsyncHook,
GoogleBaseHook,
)
if TYPE_CHECKING:
from google.genai.pagers import Pager
from google.genai.types import (
BatchJob,
ContentListUnion,
ContentListUnionDict,
CountTokensConfigOrDict,
CountTokensResponse,
CreateBatchJobConfig,
CreateCachedContentConfigOrDict,
CreateTuningJobConfigOrDict,
DeleteFileResponse,
DeleteResourceJob,
EmbedContentConfigOrDict,
EmbedContentResponse,
File,
GenerateContentConfig,
ListBatchJobsConfig,
TuningDatasetOrDict,
TuningJob,
)
class BatchJobStatus(enum.Enum):
    """Possible states of batch job in Gemini Batch API."""

    # Terminal: job finished and produced results.
    SUCCEEDED = "JOB_STATE_SUCCEEDED"
    # Queued, not started yet.
    PENDING = "JOB_STATE_PENDING"
    # Terminal: job failed.
    FAILED = "JOB_STATE_FAILED"
    # Currently executing.
    RUNNING = "JOB_STATE_RUNNING"
    # Terminal: job was cancelled by the caller.
    CANCELLED = "JOB_STATE_CANCELLED"
    # Terminal: job exceeded its allowed lifetime before completing.
    EXPIRED = "JOB_STATE_EXPIRED"
class GenAIGenerativeModelHook(GoogleBaseHook):
    """Class for Google Cloud Generative AI Vertex AI hook."""

    def get_genai_client(self, project_id: str, location: str):
        """Build a genai client routed through Vertex AI for the given project and location."""
        return genai.Client(
            vertexai=True,
            project=project_id,
            location=location,
        )

    @GoogleBaseHook.fallback_to_default_project_id
    def embed_content(
        self,
        model: str,
        location: str,
        contents: ContentListUnion | ContentListUnionDict | list[str],
        config: EmbedContentConfigOrDict | None = None,
        project_id: str = PROVIDE_PROJECT_ID,
    ) -> EmbedContentResponse:
        """
        Generate embeddings for words, phrases, sentences, and code.

        :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
        :param location: Required. The ID of the Google Cloud location that the service belongs to.
        :param model: Required. The model to use.
        :param contents: Optional. The contents to use for embedding.
        :param config: Optional. Configuration for embeddings.
        """
        client = self.get_genai_client(project_id=project_id, location=location)
        resp = client.models.embed_content(model=model, contents=contents, config=config)
        return resp

    @GoogleBaseHook.fallback_to_default_project_id
    def generate_content(
        self,
        location: str,
        model: str,
        contents: ContentListUnionDict,
        generation_config: GenerateContentConfig | None = None,
        project_id: str = PROVIDE_PROJECT_ID,
    ) -> str:
        """
        Make an API request to generate content using a model.

        :param location: Required. The ID of the Google Cloud location that the service belongs to.
        :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
        :param model: Required. The model to use.
        :param contents: Required. The multi-part content of a message that a user or a program
            gives to the generative model, in order to elicit a specific response.
        :param generation_config: Optional. Generation configuration settings.
        :return: The text part of the model response (other parts are discarded).
        """
        client = self.get_genai_client(project_id=project_id, location=location)
        response = client.models.generate_content(
            model=model,
            contents=contents,
            config=generation_config,
        )
        return response.text

    @GoogleBaseHook.fallback_to_default_project_id
    def supervised_fine_tuning_train(
        self,
        source_model: str,
        location: str,
        training_dataset: TuningDatasetOrDict,
        tuning_job_config: CreateTuningJobConfigOrDict | dict[str, Any] | None = None,
        project_id: str = PROVIDE_PROJECT_ID,
    ) -> TuningJob:
        """
        Create a tuning job to adapt model behavior with a labeled dataset.

        Blocks until the tuning job leaves the pending/running states and
        returns the final job object.

        :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
        :param location: Required. The ID of the Google Cloud location that the service belongs to.
        :param source_model: Required. A pre-trained model optimized for performing natural
            language tasks such as classification, summarization, extraction, content
            creation, and ideation.
        :param training_dataset: Required. Cloud Storage URI of your training dataset. The dataset
            must be formatted as a JSONL file. For best results, provide at least 100 to 500 examples.
        :param tuning_job_config: Optional. Configuration of the Tuning job to be created.
        """
        client = self.get_genai_client(project_id=project_id, location=location)
        tuning_job = client.tunings.tune(
            base_model=source_model,
            training_dataset=training_dataset,
            config=tuning_job_config,
        )
        # Tuning runs asynchronously server-side; poll every 60s until the job
        # leaves a non-terminal state. NOTE(review): this blocks the calling
        # worker for the full duration of the tuning run — confirm acceptable.
        running = {"JOB_STATE_PENDING", "JOB_STATE_RUNNING"}
        while tuning_job.state in running:
            time.sleep(60)
            tuning_job = client.tunings.get(name=tuning_job.name)
        return tuning_job

    @GoogleBaseHook.fallback_to_default_project_id
    def count_tokens(
        self,
        location: str,
        model: str,
        contents: ContentListUnion | ContentListUnionDict,
        config: CountTokensConfigOrDict | None = None,
        project_id: str = PROVIDE_PROJECT_ID,
    ) -> CountTokensResponse:
        """
        Use Count Tokens API to calculate the number of input tokens before sending a request to Gemini API.

        :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
        :param location: Required. The ID of the Google Cloud location that the service belongs to.
        :param contents: Required. The multi-part content of a message that a user or a program
            gives to the generative model, in order to elicit a specific response.
        :param model: Required. Model,
            supporting prompts with text-only input, including natural language
            tasks, multi-turn text and code chat, and code generation. It can
            output text and code.
        :param config: Optional. Configuration for Count Tokens.
        """
        client = self.get_genai_client(project_id=project_id, location=location)
        response = client.models.count_tokens(
            model=model,
            contents=contents,
            config=config,
        )
        return response

    @GoogleBaseHook.fallback_to_default_project_id
    def create_cached_content(
        self,
        model: str,
        location: str,
        cached_content_config: CreateCachedContentConfigOrDict | None = None,
        project_id: str = PROVIDE_PROJECT_ID,
    ) -> str:
        """
        Create CachedContent to reduce the cost of requests containing repeat content.

        :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
        :param location: Required. The ID of the Google Cloud location that the service belongs to.
        :param model: Required. The name of the publisher model to use for cached content.
        :param cached_content_config: Optional. Configuration of the Cached Content.
        :return: The resource name of the created cached content.
        """
        client = self.get_genai_client(project_id=project_id, location=location)
        resp = client.caches.create(
            model=model,
            config=cached_content_config,
        )
        return resp.name
class GenAIGeminiAPIHook(GoogleBaseHook):
    """Class for Google Cloud Generative AI Gemini Developer API hook."""

    def __init__(self, gemini_api_key: str, **kwargs) -> None:
        super().__init__(**kwargs)
        self.gemini_api_key = gemini_api_key

    def get_genai_client(self):
        """Build a genai client that talks to the Gemini Developer API using an API key."""
        return genai.Client(
            api_key=self.gemini_api_key,
            vertexai=False,
        )

    def get_batch_job(
        self,
        job_name: str,
    ) -> BatchJob:
        """
        Get batch job using Gemini Batch API.

        :param job_name: Required. Batch job name.
        """
        client = self.get_genai_client()
        resp = client.batches.get(name=job_name)
        return resp

    def list_batch_jobs(
        self,
        list_batch_jobs_config: ListBatchJobsConfig | dict | None = None,
    ) -> Pager[BatchJob]:
        """
        Get list of batch jobs using Gemini Batch API.

        :param list_batch_jobs_config: Optional. Configuration of returned iterator.
        """
        client = self.get_genai_client()
        resp = client.batches.list(
            config=list_batch_jobs_config,
        )
        return resp

    def create_batch_job(
        self,
        model: str,
        source: list | str,
        create_batch_job_config: CreateBatchJobConfig | dict | None = None,
    ) -> BatchJob:
        """
        Create batch job using Gemini Batch API to process large-scale, non-urgent tasks.

        :param model: Required. Gemini model name to process requests.
        :param source: Required. Requests that will be sent to chosen model.
            Can be in format of Inline requests or file name.
        :param create_batch_job_config: Optional. Configuration parameters for batch job.
        """
        client = self.get_genai_client()
        resp = client.batches.create(
            model=model,
            src=source,
            config=create_batch_job_config,
        )
        return resp

    def delete_batch_job(
        self,
        job_name: str,
    ) -> DeleteResourceJob:
        """
        Delete batch job using Gemini Batch API.

        :param job_name: Required. Batch job name.
        """
        client = self.get_genai_client()
        resp = client.batches.delete(name=job_name)
        return resp

    def cancel_batch_job(
        self,
        job_name: str,
    ) -> None:
        """
        Cancel batch job using Gemini Batch API.

        :param job_name: Required. Batch job name.
        """
        client = self.get_genai_client()
        client.batches.cancel(
            name=job_name,
        )

    def create_embeddings(
        self,
        model: str,
        source: dict | str,
        create_embeddings_config: CreateBatchJobConfig | dict | None = None,
    ) -> BatchJob:
        """
        Create batch job for embeddings using Gemini Batch API to process large-scale, non-urgent tasks.

        :param model: Required. Gemini model name to process requests.
        :param source: Required. Requests that will be sent to chosen model.
            Can be in format of Inline requests or file name.
        :param create_embeddings_config: Optional. Configuration parameters for embeddings batch job.
        """
        client = self.get_genai_client()
        # A string source is interpreted as a Files API file name; anything else
        # is passed through as inline requests.
        input_type = "inlined_requests"
        if isinstance(source, str):
            input_type = "file_name"
        self.log.info("Using %s to create embeddings", input_type)
        resp = client.batches.create_embeddings(
            model=model,
            src={input_type: source},
            config=create_embeddings_config,
        )
        return resp

    def upload_file(self, path_to_file: str, upload_file_config: dict | None = None) -> File:
        """
        Upload file for batch job or embeddings batch job using Gemini Files API.

        :param path_to_file: Required. Path to file on local filesystem.
        :param upload_file_config: Optional. Configuration for file upload. When omitted,
            a default config is derived from the file name: the stem becomes
            ``display_name`` and the last extension becomes ``mime_type``.
        :raises ValueError: If no config is given and the file name has no extension.
        """
        client = self.get_genai_client()
        if upload_file_config is None:
            self.log.info("Default configuration will be used to upload file")
            try:
                # rsplit on the LAST dot so multi-dot names ("data.tar.gz") parse
                # cleanly instead of raising; an extension-less name still raises.
                file_name, file_type = path_to_file.split("/")[-1].rsplit(".", 1)
                # NOTE(review): file_type is a bare extension (e.g. "jsonl"), not a
                # full MIME type — confirm the Files API accepts this form.
                upload_file_config = {"display_name": file_name, "mime_type": file_type}
            except ValueError as exc:
                raise ValueError(
                    "Error during unpacking file name or mime type. Please check file path"
                ) from exc
        resp = client.files.upload(
            file=path_to_file,
            config=upload_file_config,
        )
        return resp

    def get_file(self, file_name: str) -> File:
        """
        Get file's metadata for batch job or embeddings batch job using Gemini Files API.

        :param file_name: Required. Name of the file in Gemini Files API.
        """
        client = self.get_genai_client()
        resp = client.files.get(name=file_name)
        return resp

    def download_file(self, file_name: str) -> bytes:
        """
        Download file for batch job or embeddings batch job using Gemini Files API.

        :param file_name: Required. Name of the file in Gemini Files API.
        """
        client = self.get_genai_client()
        resp = client.files.download(file=file_name)
        return resp

    def list_files(self) -> Pager[File]:
        """List files for stored in Gemini Files API."""
        client = self.get_genai_client()
        resp = client.files.list()
        return resp

    def delete_file(self, file_name: str) -> DeleteFileResponse:
        """
        Delete file from Gemini Files API storage.

        :param file_name: Required. Name of the file in Gemini Files API.
        """
        client = self.get_genai_client()
        resp = client.files.delete(name=file_name)
        return resp
class GenAIGeminiAPIAsyncHook(GoogleBaseAsyncHook):
    """Class for Google Cloud Generative AI Gemini Developer Async API hook."""

    sync_hook_class = GenAIGeminiAPIHook

    def __init__(self, gemini_api_key: str, **kwargs) -> None:
        super().__init__(**kwargs)
        self.gemini_api_key = gemini_api_key

    async def get_async_client(self):
        """Return the asynchronous facade of an API-key genai client (non-Vertex)."""
        client = genai.Client(api_key=self.gemini_api_key, vertexai=False)
        return client.aio

    async def create_batch_job(
        self,
        model: str,
        source: list | str,
        create_batch_job_config: CreateBatchJobConfig | dict | None = None,
    ) -> BatchJob:
        """
        Create batch job asynchronously using Gemini Batch API to process large-scale, non-urgent tasks.

        :param model: Required. Gemini model name to process requests.
        :param source: Required. Requests that will be sent to chosen model.
            Can be in format of Inline requests or file name.
        :param create_batch_job_config: Optional. Configuration parameters for batch job.
        """
        client = await self.get_async_client()
        return await client.batches.create(model=model, src=source, config=create_batch_job_config)

    async def get_batch_job(
        self,
        job_name: str,
    ) -> BatchJob:
        """
        Get batch job using Gemini Batch API asynchronously.

        :param job_name: Required. Batch job name.
        """
        client = await self.get_async_client()
        return await client.batches.get(name=job_name)

    async def create_embeddings_batch_job(
        self,
        model: str,
        source: dict | str,
        create_embeddings_config: CreateBatchJobConfig | dict | None = None,
    ) -> BatchJob:
        """
        Create batch job for embeddings asynchronously using Gemini Batch API to process large-scale, non-urgent tasks.

        :param model: Required. Gemini model name to process requests.
        :param source: Required. Requests that will be sent to chosen model.
            Can be in format of Inline requests or file name.
        :param create_embeddings_config: Optional. Configuration parameters for embeddings batch job.
        """
        client = await self.get_async_client()
        # String sources name a Files API file; everything else goes inline.
        input_type = "file_name" if isinstance(source, str) else "inlined_requests"
        self.log.info("Using %s to create embeddings", input_type)
        return await client.batches.create_embeddings(
            model=model,
            src={input_type: source},
            config=create_embeddings_config,
        )
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/src/airflow/providers/google/cloud/hooks/gen_ai.py",
"license": "Apache License 2.0",
"lines": 404,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/google/src/airflow/providers/google/cloud/operators/gen_ai.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google Gen AI operators."""
from __future__ import annotations
import os.path
import time
from collections.abc import Sequence
from typing import TYPE_CHECKING, Any
from google.genai.errors import ClientError
from google.genai.types import BatchJob
from airflow.configuration import conf
from airflow.exceptions import AirflowException
from airflow.providers.google.cloud.hooks.gen_ai import (
BatchJobStatus,
GenAIGeminiAPIHook,
GenAIGenerativeModelHook,
)
from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
from airflow.providers.google.cloud.triggers.gen_ai import (
GenAIGeminiCreateBatchJobTrigger,
GenAIGeminiCreateEmbeddingsBatchJobTrigger,
)
if TYPE_CHECKING:
from google.genai.types import (
ContentListUnion,
ContentListUnionDict,
CountTokensConfigOrDict,
CreateBatchJobConfig,
CreateCachedContentConfigOrDict,
CreateTuningJobConfigOrDict,
EmbedContentConfigOrDict,
GenerateContentConfig,
ListBatchJobsConfig,
TuningDatasetOrDict,
)
from airflow.providers.common.compat.sdk import Context
class GenAIGenerateEmbeddingsOperator(GoogleCloudBaseOperator):
    """
    Generate embeddings for words, phrases, sentences, and code via the Gemini AI Embeddings API.

    :param project_id: Required. The ID of the Google Cloud project the service belongs to (templated).
    :param location: Required. The ID of the Google Cloud location the service belongs to (templated).
    :param model: Required. The name of the model to use for content generation,
        which can be a text-only or multimodal model. For example, `gemini-pro` or
        `gemini-pro-vision`.
    :param contents: Optional. The contents to use for embedding.
    :param config: Optional. Configuration for embeddings.
    :param gcp_conn_id: Optional. The connection ID used to connect to Google Cloud.
    :param impersonation_chain: Optional. Service account to impersonate using short-term
        credentials, or a chained list of accounts required to obtain the access_token of
        the last account in the list, which will be impersonated in the request. A string
        value must grant the originating account the Service Account Token Creator IAM
        role; a sequence requires each identity to grant that role to the one directly
        preceding it, the first granting it to the originating account (templated).
    """

    template_fields = ("location", "project_id", "impersonation_chain", "contents", "model", "config")

    def __init__(
        self,
        *,
        project_id: str,
        location: str,
        model: str,
        contents: ContentListUnion | ContentListUnionDict | list[str],
        config: EmbedContentConfigOrDict | None = None,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.project_id = project_id
        self.location = location
        self.model = model
        self.contents = contents
        self.config = config

    def execute(self, context: Context):
        """Call the embeddings API, push the response to XCom, and return it."""
        self.hook = GenAIGenerativeModelHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Generating text embeddings...")
        embeddings = self.hook.embed_content(
            project_id=self.project_id,
            location=self.location,
            contents=self.contents,
            model=self.model,
            config=self.config,
        )
        self.log.info("Model response: %s", embeddings)
        context["ti"].xcom_push(key="model_response", value=embeddings)
        return embeddings
class GenAIGenerateContentOperator(GoogleCloudBaseOperator):
    """
    Generate a model response for the given contents. Input capabilities differ between models, including tuned models.

    :param project_id: Required. The ID of the Google Cloud project the service belongs to (templated).
    :param location: Required. The ID of the Google Cloud location the service belongs to (templated).
    :param model: Required. The name of the model to use for content generation,
        which can be a text-only or multimodal model. For example, `gemini-pro` or
        `gemini-pro-vision`.
    :param contents: Required. The multi-part content of a message that a user or a program
        gives to the generative model, in order to elicit a specific response.
    :param generation_config: Optional. Generation configuration settings.
    :param gcp_conn_id: The connection ID used to connect to Google Cloud.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or a chained list of accounts required to obtain the access_token of
        the last account in the list, which will be impersonated in the request. A string
        value must grant the originating account the Service Account Token Creator IAM
        role; a sequence requires each identity to grant that role to the one directly
        preceding it, the first granting it to the originating account (templated).
    """

    template_fields = (
        "generation_config",
        "location",
        "project_id",
        "impersonation_chain",
        "contents",
        "model",
    )

    def __init__(
        self,
        *,
        project_id: str,
        location: str,
        contents: ContentListUnionDict,
        model: str,
        generation_config: GenerateContentConfig | dict[str, Any] | None = None,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.project_id = project_id
        self.location = location
        self.model = model
        self.contents = contents
        self.generation_config = generation_config

    def execute(self, context: Context):
        """Request content generation, push the text response to XCom, and return it."""
        self.hook = GenAIGenerativeModelHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        generated_text = self.hook.generate_content(
            project_id=self.project_id,
            location=self.location,
            model=self.model,
            contents=self.contents,
            generation_config=self.generation_config,
        )
        self.log.info("Created Content: %s", generated_text)
        context["ti"].xcom_push(key="model_response", value=generated_text)
        return generated_text
class GenAISupervisedFineTuningTrainOperator(GoogleCloudBaseOperator):
    """
    Create a tuning job to adapt model behavior with a labeled dataset.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud location that the service belongs to.
    :param source_model: Required. A pre-trained model optimized for performing natural
        language tasks such as classification, summarization, extraction, content
        creation, and ideation.
    :param training_dataset: Required. Cloud Storage URI of your training dataset. The dataset
        must be formatted as a JSONL file. For best results, provide at least 100 to 500 examples.
    :param tuning_job_config: Optional. Configuration of the Tuning job to be created.
    :param gcp_conn_id: The connection ID used to connect to Google Cloud.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or a chained list of accounts required to obtain the access_token of
        the last account in the list, which will be impersonated in the request. A string
        value must grant the originating account the Service Account Token Creator IAM
        role; a sequence requires each identity to grant that role to the one directly
        preceding it, the first granting it to the originating account (templated).
    """

    template_fields = (
        "location",
        "project_id",
        "impersonation_chain",
        "training_dataset",
        "tuning_job_config",
        "source_model",
    )

    def __init__(
        self,
        *,
        project_id: str,
        location: str,
        source_model: str,
        training_dataset: TuningDatasetOrDict,
        tuning_job_config: CreateTuningJobConfigOrDict | dict[str, Any] | None = None,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.project_id = project_id
        self.location = location
        self.source_model = source_model
        self.training_dataset = training_dataset
        self.tuning_job_config = tuning_job_config

    def execute(self, context: Context):
        """Run the tuning job to completion and publish the tuned model identifiers."""
        self.hook = GenAIGenerativeModelHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        tuning_job = self.hook.supervised_fine_tuning_train(
            project_id=self.project_id,
            location=self.location,
            source_model=self.source_model,
            training_dataset=self.training_dataset,
            tuning_job_config=self.tuning_job_config,
        )
        # Hoist the tuned-model attributes once instead of repeating the access.
        model_name = tuning_job.tuned_model.model  # type: ignore[union-attr]
        endpoint_name = tuning_job.tuned_model.endpoint  # type: ignore[union-attr]
        self.log.info("Tuned Model Name: %s", model_name)
        self.log.info("Tuned Model EndpointName: %s", endpoint_name)
        ti = context["ti"]
        ti.xcom_push(key="tuned_model_name", value=model_name)
        ti.xcom_push(key="tuned_model_endpoint_name", value=endpoint_name)
        return {
            "tuned_model_name": model_name,
            "tuned_model_endpoint_name": endpoint_name,
        }
class GenAICountTokensOperator(GoogleCloudBaseOperator):
    """
    Use Count Tokens API to calculate the number of input tokens before sending a request to Gemini API.

    :param project_id: Required. The ID of the Google Cloud project the service belongs to (templated).
    :param location: Required. The ID of the Google Cloud location the service belongs to (templated).
    :param contents: Required. The multi-part content of a message that a user or a program
        gives to the generative model, in order to elicit a specific response.
    :param model: Required. Model, supporting prompts with text-only input,
        including natural language tasks, multi-turn text and code chat,
        and code generation. It can output text and code.
    :param config: Optional. Configuration for Count Tokens.
    :param gcp_conn_id: The connection ID used to connect to Google Cloud.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or a chained list of accounts required to obtain the access_token of
        the last account in the list, which will be impersonated in the request. A string
        value must grant the originating account the Service Account Token Creator IAM
        role; a sequence requires each identity to grant that role to the one directly
        preceding it, the first granting it to the originating account (templated).
    """

    template_fields = ("location", "project_id", "impersonation_chain", "contents", "model", "config")

    def __init__(
        self,
        *,
        project_id: str,
        location: str,
        contents: ContentListUnion | ContentListUnionDict,
        model: str,
        config: CountTokensConfigOrDict | None = None,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.project_id = project_id
        self.location = location
        self.contents = contents
        self.model = model
        self.config = config

    def execute(self, context: Context):
        """Count input tokens and push the total to XCom (no return value)."""
        self.hook = GenAIGenerativeModelHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        token_info = self.hook.count_tokens(
            project_id=self.project_id,
            location=self.location,
            contents=self.contents,
            model=self.model,
            config=self.config,
        )
        self.log.info("Total tokens: %s", token_info.total_tokens)
        context["ti"].xcom_push(key="total_tokens", value=token_info.total_tokens)
class GenAICreateCachedContentOperator(GoogleCloudBaseOperator):
    """
    Create CachedContent resource to reduce the cost of requests that contain repeat content with high input token counts.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud location that the service belongs to.
    :param model: Required. The name of the publisher model to use for cached content.
    :param cached_content_config: Optional. Configuration of the Cached Content.
    :param gcp_conn_id: The connection ID used to connect to Google Cloud.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or a chained list of accounts required to obtain the access_token of
        the last account in the list, which will be impersonated in the request. A string
        value must grant the originating account the Service Account Token Creator IAM
        role; a sequence requires each identity to grant that role to the one directly
        preceding it, the first granting it to the originating account (templated).
    """

    template_fields = ("location", "project_id", "impersonation_chain", "model", "cached_content_config")

    def __init__(
        self,
        *,
        project_id: str,
        location: str,
        model: str,
        cached_content_config: CreateCachedContentConfigOrDict | None = None,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.project_id = project_id
        self.location = location
        self.model = model
        self.cached_content_config = cached_content_config

    def execute(self, context: Context):
        """Create the cached content, push its resource name to XCom, and return it."""
        self.hook = GenAIGenerativeModelHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        cache_name = self.hook.create_cached_content(
            project_id=self.project_id,
            location=self.location,
            model=self.model,
            cached_content_config=self.cached_content_config,
        )
        self.log.info("Cached Content Name: %s", cache_name)
        context["ti"].xcom_push(key="cached_content", value=cache_name)
        return cache_name
class GenAIGeminiCreateBatchJobOperator(GoogleCloudBaseOperator):
    """
    Create Batch job using Gemini Batch API. Use to generate model response for several requests.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud location that the service belongs to.
    :param model: Required. The name of the publisher model to use for Batch job.
    :param gemini_api_key: Required. Key to interact with Gemini Batch API.
    :param input_source: Required. Source of requests, could be inline requests or file name.
    :param results_folder: Optional. Path to a folder on local machine where file with results will be saved.
    :param create_batch_job_config: Optional. Config for batch job creation.
    :param wait_until_complete: Optional. Await job completion.
    :param retrieve_result: Optional. Push the result to XCom. If the input_source is inline, this pushes
        the execution result. If a file name is specified, this pushes the output file path.
    :param polling_interval: Optional. The interval, in seconds, to poll the job status.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :param deferrable: Optional. Run operator in the deferrable mode.
    """

    template_fields = (
        "location",
        "project_id",
        "impersonation_chain",
        "model",
        "create_batch_job_config",
        "gemini_api_key",
        "input_source",
    )

    def __init__(
        self,
        *,
        project_id: str,
        location: str,
        model: str,
        input_source: list | str,
        gemini_api_key: str,
        create_batch_job_config: CreateBatchJobConfig | dict | None = None,
        results_folder: str | None = None,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        retrieve_result: bool = False,
        wait_until_complete: bool = False,
        polling_interval: int = 30,
        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.project_id = project_id
        self.location = location
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.model = model
        self.input_source = input_source
        self.create_batch_job_config = create_batch_job_config
        self.gemini_api_key = gemini_api_key
        self.retrieve_result = retrieve_result
        self.wait_until_complete = wait_until_complete
        self.polling_interval = polling_interval
        self.results_folder = results_folder
        self.deferrable = deferrable
        # Results only exist once the job has finished, so retrieval requires
        # either synchronous waiting or the deferrable flow.
        if self.retrieve_result and not (self.wait_until_complete or self.deferrable):
            raise AirflowException(
                "Retrieving results is possible only if wait_until_complete set to True or in deferrable mode"
            )
        if self.results_folder and not isinstance(self.input_source, str):
            raise AirflowException("results_folder works only when input_source is file name")
        if self.results_folder and not os.path.exists(os.path.abspath(self.results_folder)):
            raise AirflowException("path to results_folder does not exist, please provide correct path")

    def _wait_until_complete(self, job, polling_interval: int = 30):
        """Poll the batch job until it reaches a terminal state and return the refreshed job."""
        try:
            while True:
                job = self.hook.get_batch_job(job_name=job.name)
                if job.state.name == BatchJobStatus.SUCCEEDED.value:
                    self.log.info("Job execution completed")
                    break
                if job.state.name in [
                    BatchJobStatus.FAILED.value,
                    BatchJobStatus.EXPIRED.value,
                    BatchJobStatus.CANCELLED.value,
                ]:
                    self.log.error("Job execution was not completed!")
                    break
                self.log.info(
                    "Waiting for job execution, polling interval: %s seconds, current state: %s",
                    self.polling_interval,
                    job.state.name,
                )
                time.sleep(polling_interval)
        except Exception as e:
            # Chain the original error so the real cause is preserved in the traceback.
            raise AirflowException("Something went wrong during waiting of the batch job.") from e
        return job

    def _prepare_results_for_xcom(self, job):
        """Return inline responses as a list, or the local path of the downloaded results file."""
        results = []
        if job.dest and job.dest.inlined_responses:
            self.log.info("Results are inline")
            for inline_response in job.dest.inlined_responses:
                if inline_response.response:
                    # Accessing response, structure may vary.
                    try:
                        results.append(inline_response.response.text)
                    except AttributeError:
                        results.append(inline_response.response)
                elif inline_response.error:
                    self.log.warning("Error found in the inline result")
                    results.append(inline_response.error)
        elif job.dest and job.dest.file_name:
            file_content_bytes = self.hook.download_file(file_name=job.dest.file_name)
            file_content = file_content_bytes.decode("utf-8")
            file_name = job.display_name or job.name.replace("/", "-")
            path_to_file = os.path.abspath(f"{self.results_folder}/{file_name}.jsonl")
            with open(path_to_file, "w") as file_with_results:
                file_with_results.writelines(file_content.splitlines(True))
            results = path_to_file
        return results

    @property
    def hook(self):
        return GenAIGeminiAPIHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
            gemini_api_key=self.gemini_api_key,
        )

    def execute(self, context: Context):
        if self.deferrable:
            # Hand off to the trigger; execution resumes in execute_complete.
            self.defer(
                trigger=GenAIGeminiCreateBatchJobTrigger(
                    project_id=self.project_id,
                    location=self.location,
                    gcp_conn_id=self.gcp_conn_id,
                    impersonation_chain=self.impersonation_chain,
                    model=self.model,
                    input_source=self.input_source,
                    create_batch_job_config=self.create_batch_job_config,
                    gemini_api_key=self.gemini_api_key,
                    retrieve_result=self.retrieve_result,
                    polling_interval=self.polling_interval,
                    results_folder=self.results_folder,
                ),
                method_name="execute_complete",
            )
        try:
            job = self.hook.create_batch_job(
                model=self.model,
                source=self.input_source,
                create_batch_job_config=self.create_batch_job_config,
            )
        except Exception as e:
            # f-string, not %-args: extra positional args to an exception are never formatted.
            raise AirflowException(f"Something went wrong during creation of the batch job: {e}") from e
        self.log.info("Job with name %s was successfully created!", job.name)
        context["ti"].xcom_push(key="job_name", value=job.name)
        if self.wait_until_complete:
            job = self._wait_until_complete(job, self.polling_interval)
        if self.retrieve_result and job.error is None:
            job_results = self._prepare_results_for_xcom(job)
            context["ti"].xcom_push(key="job_results", value=job_results)
        return job.model_dump(mode="json")

    def execute_complete(self, context: Context, event: dict[str, Any]) -> dict[str, Any]:
        """Resume after the trigger fires: fail on error events, otherwise optionally push results."""
        if event["status"] == "error":
            self.log.info("status: %s, msg: %s", event["status"], event["message"])
            raise AirflowException(event["message"])
        job = self.hook.get_batch_job(event["job_name"])
        if self.retrieve_result and job.error is None:
            job_results = self._prepare_results_for_xcom(job)
            context["ti"].xcom_push(key="job_results", value=job_results)
        return job.model_dump(mode="json")
class GenAIGeminiGetBatchJobOperator(GoogleCloudBaseOperator):
    """
    Get Batch job using Gemini API.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud location that the service belongs to.
    :param gemini_api_key: Required. Key to interact with Gemini Batch API.
    :param job_name: Required. Name of the batch job.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields = ("location", "project_id", "impersonation_chain", "job_name", "gemini_api_key")

    def __init__(
        self,
        *,
        project_id: str,
        location: str,
        job_name: str,
        gemini_api_key: str,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.project_id = project_id
        self.location = location
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.job_name = job_name
        self.gemini_api_key = gemini_api_key

    def execute(self, context: Context):
        self.hook = GenAIGeminiAPIHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
            gemini_api_key=self.gemini_api_key,
        )
        try:
            job = self.hook.get_batch_job(job_name=self.job_name)
        except ValueError as e:
            # f-string, not %-args: extra positional args to an exception are never formatted.
            raise AirflowException(f"Job with name {self.job_name} not found") from e
        context["ti"].xcom_push(key="job_status", value=job.state)
        return job.model_dump(mode="json")
class GenAIGeminiListBatchJobsOperator(GoogleCloudBaseOperator):
    """
    Get list of Batch jobs metadata using Gemini API.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud location that the service belongs to.
    :param gemini_api_key: Required. Key to interact with Gemini Batch API.
    :param list_batch_jobs_config: Optional. Config passed through to the hook's list call.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields = (
        "location",
        "project_id",
        "impersonation_chain",
        "list_batch_jobs_config",
        "gemini_api_key",
    )

    def __init__(
        self,
        *,
        project_id: str,
        location: str,
        gemini_api_key: str,
        list_batch_jobs_config: ListBatchJobsConfig | dict | None = None,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.project_id = project_id
        self.location = location
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.list_batch_jobs_config = list_batch_jobs_config
        self.gemini_api_key = gemini_api_key

    def execute(self, context: Context):
        self.hook = GenAIGeminiAPIHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
            gemini_api_key=self.gemini_api_key,
        )
        jobs_list = self.hook.list_batch_jobs(list_batch_jobs_config=self.list_batch_jobs_config)
        job_names = []
        job_objs = []
        try:
            # Collect job names and serializable metadata; "dest" is excluded
            # from the dumped payload.
            for job in jobs_list:
                job_names.append(job.name)
                job_objs.append(job.model_dump(exclude={"dest"}))
        except RuntimeError:
            # NOTE(review): a RuntimeError mid-iteration is treated as the end
            # of the listing and only logged — confirm which pager/SDK error
            # this is meant to guard against.
            self.log.info("%s jobs found", len(job_names))
        context["ti"].xcom_push(key="job_names", value=job_names)
        return job_objs
class GenAIGeminiDeleteBatchJobOperator(GoogleCloudBaseOperator):
    """
    Queue a batch job for deletion using the Gemini API.

    The job will not be deleted immediately. After submitting it for deletion, it will still be available
    through GenAIGeminiListBatchJobsOperator or GenAIGeminiGetBatchJobOperator for some time.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud location that the service belongs to.
    :param gemini_api_key: Required. Key to interact with Gemini Batch API.
    :param job_name: Required. Name of the batch job.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields = ("location", "project_id", "impersonation_chain", "job_name", "gemini_api_key")

    def __init__(
        self,
        *,
        project_id: str,
        location: str,
        job_name: str,
        gemini_api_key: str,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.project_id = project_id
        self.location = location
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.job_name = job_name
        self.gemini_api_key = gemini_api_key

    def execute(self, context: Context):
        self.hook = GenAIGeminiAPIHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
            gemini_api_key=self.gemini_api_key,
        )
        try:
            delete_response = self.hook.delete_batch_job(job_name=self.job_name)
        except ValueError as e:
            # f-string, not %-args: extra positional args to an exception are never formatted.
            raise AirflowException(f"Job with name {self.job_name} was not found") from e
        self.log.info("Job with name %s was submitted for deletion.", self.job_name)
        if delete_response.error:
            raise AirflowException(
                f"Job with name {self.job_name} was not deleted due to error: {delete_response.error}"
            )
        return delete_response.model_dump()
class GenAIGeminiCancelBatchJobOperator(GoogleCloudBaseOperator):
    """
    Cancel Batch job using Gemini API.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud location that the service belongs to.
    :param gemini_api_key: Required. Key to interact with Gemini Batch API.
    :param job_name: Required. Name of the batch job.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields = ("location", "project_id", "impersonation_chain", "job_name", "gemini_api_key")

    def __init__(
        self,
        *,
        project_id: str,
        location: str,
        job_name: str,
        gemini_api_key: str,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.project_id = project_id
        self.location = location
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.job_name = job_name
        self.gemini_api_key = gemini_api_key

    def execute(self, context: Context):
        self.hook = GenAIGeminiAPIHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
            gemini_api_key=self.gemini_api_key,
        )
        self.log.info("Cancelling job with name %s ...", self.job_name)
        try:
            self.hook.cancel_batch_job(job_name=self.job_name)
        except ValueError as e:
            # f-string, not %-args: extra positional args to an exception are never formatted.
            raise AirflowException(f"Job with name {self.job_name} was not found") from e
        self.log.info("Job with name %s was successfully cancelled", self.job_name)
class GenAIGeminiCreateEmbeddingsBatchJobOperator(GoogleCloudBaseOperator):
    """
    Create embeddings Batch job using Gemini Batch API.

    Use to generate embeddings for words, phrases, sentences, and code for several requests.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud location that the service belongs to.
    :param model: Required. The name of the publisher model to use for Batch job.
    :param gemini_api_key: Required. Key to interact with Gemini Batch API.
    :param input_source: Required. Source of requests, could be inline requests or file name.
    :param results_folder: Optional. Path to a folder on local machine where file with results will be saved.
    :param create_embeddings_config: Optional. Config for batch job creation.
    :param wait_until_complete: Optional. Await job completion.
    :param retrieve_result: Optional. Push the result to XCom. If the input_source is inline, this pushes
        the execution result. If a file name is specified, this pushes the output file path.
    :param polling_interval: Optional. The interval, in seconds, to poll the job status.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :param deferrable: Optional. Run operator in the deferrable mode.
    """

    template_fields = (
        "location",
        "project_id",
        "impersonation_chain",
        "model",
        "create_embeddings_config",
        "gemini_api_key",
        "input_source",
    )

    def __init__(
        self,
        *,
        project_id: str,
        location: str,
        model: str,
        gemini_api_key: str,
        input_source: dict | str,
        results_folder: str | None = None,
        create_embeddings_config: CreateBatchJobConfig | dict | None = None,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        wait_until_complete: bool = False,
        retrieve_result: bool = False,
        polling_interval: int = 30,
        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.project_id = project_id
        self.location = location
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.model = model
        self.input_source = input_source
        self.create_embeddings_config = create_embeddings_config
        self.gemini_api_key = gemini_api_key
        self.wait_until_complete = wait_until_complete
        self.retrieve_result = retrieve_result
        self.polling_interval = polling_interval
        self.results_folder = results_folder
        self.deferrable = deferrable
        # Results only exist once the job has finished, so retrieval requires
        # either synchronous waiting or the deferrable flow.
        if self.retrieve_result and not (self.wait_until_complete or self.deferrable):
            raise AirflowException(
                "Retrieving results is possible only if wait_until_complete set to True or in deferrable mode"
            )
        if self.results_folder and not isinstance(self.input_source, str):
            raise AirflowException("results_folder works only when input_source is file name")
        if self.results_folder and not os.path.exists(os.path.abspath(self.results_folder)):
            raise AirflowException("path to results_folder does not exist, please provide correct path")

    def _wait_until_complete(self, job, polling_interval: int = 30):
        """Poll the batch job until it reaches a terminal state and return the refreshed job."""
        try:
            while True:
                job = self.hook.get_batch_job(job_name=job.name)
                if job.state.name == BatchJobStatus.SUCCEEDED.value:
                    self.log.info("Job execution completed")
                    break
                if job.state.name in [
                    BatchJobStatus.FAILED.value,
                    BatchJobStatus.EXPIRED.value,
                    BatchJobStatus.CANCELLED.value,
                ]:
                    self.log.error("Job execution was not completed!")
                    break
                self.log.info(
                    "Waiting for job execution, polling interval: %s seconds, current state: %s",
                    self.polling_interval,
                    job.state.name,
                )
                time.sleep(polling_interval)
        except Exception as e:
            # f-string, not %-args: extra positional args to an exception are never formatted.
            raise AirflowException(f"Something went wrong during waiting of the batch job: {e}") from e
        return job

    def _prepare_results_for_xcom(self, job):
        """Return inline embedding responses as a list, or the local path of the downloaded results file."""
        results = []
        if job.dest and job.dest.inlined_embed_content_responses:
            self.log.info("Results are inline")
            for inline_embed_response in job.dest.inlined_embed_content_responses:
                if inline_embed_response.response:
                    # Accessing response, structure may vary.
                    try:
                        results.append(dict(inline_embed_response.response.embedding))
                    except AttributeError:
                        results.append(inline_embed_response.response)
                elif inline_embed_response.error:
                    self.log.warning("Error found in the inline result")
                    results.append(inline_embed_response.error)
        elif job.dest and job.dest.file_name:
            file_content_bytes = self.hook.download_file(file_name=job.dest.file_name)
            file_content = file_content_bytes.decode("utf-8")
            file_name = job.display_name or job.name.replace("/", "-")
            path_to_file = os.path.abspath(f"{self.results_folder}/{file_name}.jsonl")
            with open(path_to_file, "w") as file_with_results:
                file_with_results.writelines(file_content.splitlines(True))
            results = path_to_file
        return results

    @property
    def hook(self):
        return GenAIGeminiAPIHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
            gemini_api_key=self.gemini_api_key,
        )

    def execute(self, context: Context):
        if self.deferrable:
            # Hand off to the trigger; execution resumes in execute_complete.
            self.defer(
                trigger=GenAIGeminiCreateEmbeddingsBatchJobTrigger(
                    project_id=self.project_id,
                    location=self.location,
                    gcp_conn_id=self.gcp_conn_id,
                    impersonation_chain=self.impersonation_chain,
                    model=self.model,
                    input_source=self.input_source,
                    create_embeddings_config=self.create_embeddings_config,
                    gemini_api_key=self.gemini_api_key,
                    retrieve_result=self.retrieve_result,
                    polling_interval=self.polling_interval,
                    results_folder=self.results_folder,
                ),
                method_name="execute_complete",
            )
        try:
            embeddings_job = self.hook.create_embeddings(
                model=self.model,
                source=self.input_source,
                create_embeddings_config=self.create_embeddings_config,
            )
        except Exception as e:
            # Chain the original error so the real cause is preserved in the traceback.
            raise AirflowException("Something went wrong during creation of the embeddings job.") from e
        self.log.info("Embeddings Job with name %s was successfully created!", embeddings_job.name)
        context["ti"].xcom_push(key="job_name", value=embeddings_job.name)
        if self.wait_until_complete:
            embeddings_job = self._wait_until_complete(embeddings_job, self.polling_interval)
        if self.retrieve_result and embeddings_job.error is None:
            job_results = self._prepare_results_for_xcom(embeddings_job)
            context["ti"].xcom_push(key="job_results", value=job_results)
        return embeddings_job.model_dump()

    def execute_complete(self, context: Context, event: dict[str, Any]) -> dict[str, Any]:
        """Resume after the trigger fires: fail on error events, otherwise optionally push results."""
        if event["status"] == "error":
            self.log.info("status: %s, msg: %s", event["status"], event["message"])
            raise AirflowException(event["message"])
        if self.retrieve_result and event["job"].get("error") is None:
            job_results = self._prepare_results_for_xcom(BatchJob(**event["job"]))
            context["ti"].xcom_push(key="job_results", value=job_results)
        return event["job"]
class GenAIGeminiUploadFileOperator(GoogleCloudBaseOperator):
    """
    Upload file to Gemini Files API.

    The Files API lets you store up to 20GB of files per project, with each file not exceeding 2GB in size.
    Supported types are audio files, images, videos, documents, and others. Files are stored for 48 hours.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud location that the service belongs to.
    :param gemini_api_key: Required. Key to interact with Gemini Batch API.
    :param file_path: Required. Path to file on your local machine.
    :param upload_file_config: Optional. Metadata configuration for file upload.
        Defaults to display name and mime type parsed from file_path.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields = (
        "location",
        "project_id",
        "impersonation_chain",
        "file_path",
        "gemini_api_key",
    )

    def __init__(
        self,
        *,
        project_id: str,
        location: str,
        file_path: str,
        gemini_api_key: str,
        upload_file_config: dict | None = None,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.project_id = project_id
        self.location = location
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.gemini_api_key = gemini_api_key
        self.file_path = file_path
        self.upload_file_config = upload_file_config

    def execute(self, context: Context):
        self.hook = GenAIGeminiAPIHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
            gemini_api_key=self.gemini_api_key,
        )
        # RuntimeError is intentionally left to propagate unchanged; the
        # no-op "except RuntimeError: raise" clause was removed.
        try:
            file = self.hook.upload_file(
                path_to_file=self.file_path, upload_file_config=self.upload_file_config
            )
        except ValueError as exc:
            raise AirflowException("Error during file upload! Check file name or mime type!") from exc
        except FileNotFoundError as exc:
            raise AirflowException("Provided file was not found!") from exc
        self.log.info("File with name %s successfully uploaded!", file.name)
        context["ti"].xcom_push(key="file_name", value=file.name)
        return file.model_dump()
class GenAIGeminiGetFileOperator(GoogleCloudBaseOperator):
    """
    Get file's metadata uploaded to Gemini Files API by using GenAIGeminiUploadFileOperator.

    The Files API lets you store up to 20GB of files per project, with each file not exceeding 2GB in size.
    Files are stored for 48 hours.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud location that the service belongs to.
    :param gemini_api_key: Required. Key to interact with Gemini Batch API.
    :param file_name: Required. File name in Gemini Files API to get
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields = (
        "location",
        "project_id",
        "impersonation_chain",
        "file_name",
        "gemini_api_key",
    )

    def __init__(
        self,
        *,
        project_id: str,
        location: str,
        file_name: str,
        gemini_api_key: str,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.project_id = project_id
        self.location = location
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.gemini_api_key = gemini_api_key
        self.file_name = file_name

    def execute(self, context: Context):
        self.hook = GenAIGeminiAPIHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
            gemini_api_key=self.gemini_api_key,
        )
        self.log.info("Looking for file with name: %s", self.file_name)
        try:
            file = self.hook.get_file(file_name=self.file_name)
        except ClientError as e:
            # f-string, not %-args: extra positional args to an exception are never formatted.
            raise AirflowException(f"File with name {self.file_name} not found") from e
        self.log.info("Found file with name: %s", file.name)
        context["ti"].xcom_push(key="file_uri", value=file.uri)
        return file.model_dump()
class GenAIGeminiListFilesOperator(GoogleCloudBaseOperator):
    """
    List files uploaded to Gemini Files API.

    The Files API lets you store up to 20GB of files per project, with each file not exceeding 2GB in size.
    Files are stored for 48 hours.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud location that the service belongs to.
    :param gemini_api_key: Required. Key to interact with Gemini Batch API.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields = (
        "location",
        "project_id",
        "impersonation_chain",
        "gemini_api_key",
    )

    def __init__(
        self,
        *,
        project_id: str,
        location: str,
        gemini_api_key: str,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.project_id = project_id
        self.location = location
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.gemini_api_key = gemini_api_key

    def execute(self, context: Context):
        self.hook = GenAIGeminiAPIHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
            gemini_api_key=self.gemini_api_key,
        )
        files = self.hook.list_files()
        if files:
            xcom_file_names = []
            xcom_files = []
            try:
                # Collect file names and serializable metadata for XCom.
                for file in files:
                    xcom_file_names.append(file.name)
                    xcom_files.append(file.model_dump())
            except RuntimeError:
                # NOTE(review): a RuntimeError mid-iteration is treated as the
                # end of the listing and only logged — confirm which pager/SDK
                # error this is meant to guard against.
                self.log.info("%s files found", len(xcom_files))
            context["ti"].xcom_push(key="file_names", value=xcom_file_names)
            return xcom_files
        # Empty listing: log and implicitly return None (nothing pushed to XCom).
        self.log.info("No files found")
class GenAIGeminiDeleteFileOperator(GoogleCloudBaseOperator):
    """
    Delete file uploaded to Gemini Files API.

    The Files API lets you store up to 20GB of files per project, with each file not exceeding 2GB in size.
    Files are stored for 48 hours.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud location that the service belongs to.
    :param gemini_api_key: Required. Key to interact with Gemini Batch API.
    :param file_name: Required. File name in Gemini Files API to delete.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields = (
        "location",
        "project_id",
        "impersonation_chain",
        "file_name",
        "gemini_api_key",
    )

    def __init__(
        self,
        *,
        project_id: str,
        location: str,
        file_name: str,
        gemini_api_key: str,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.project_id = project_id
        self.location = location
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.file_name = file_name
        self.gemini_api_key = gemini_api_key

    def execute(self, context: Context):
        self.hook = GenAIGeminiAPIHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
            gemini_api_key=self.gemini_api_key,
        )
        try:
            delete_response = self.hook.delete_file(file_name=self.file_name)
        except ClientError as e:
            # f-string, not %-args: extra positional args to an exception are never formatted.
            raise AirflowException(f"File {self.file_name} not found!") from e
        self.log.info("File %s was successfully deleted!", self.file_name)
        return delete_response.model_dump()
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/src/airflow/providers/google/cloud/operators/gen_ai.py",
"license": "Apache License 2.0",
"lines": 1146,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/google/tests/unit/google/cloud/hooks/test_gen_ai.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
from unittest.mock import AsyncMock
import pytest
from google.genai.types import (
Content,
CreateCachedContentConfig,
EmbedContentConfig,
GoogleSearch,
Part,
Tool,
)
from airflow.providers.google.cloud.hooks.gen_ai import (
GenAIGeminiAPIAsyncHook,
GenAIGeminiAPIHook,
GenAIGenerativeModelHook,
)
from unit.google.cloud.utils.base_gcp_mock import (
mock_base_gcp_hook_default_project_id,
)
# ---------------------------------------------------------------------------
# Shared fixtures for the GenAI hook unit tests.
# ---------------------------------------------------------------------------
# Connection / project identifiers.
TEST_GCP_CONN_ID: str = "test-gcp-conn-id"
GCP_PROJECT = "test-project"
GCP_LOCATION = "us-central1"
# Prompt / content fixtures passed to the generative-model endpoints.
TEST_PROMPT = "In 10 words or less, what is apache airflow?"
TEST_CONTENTS = [TEST_PROMPT]
TEST_LANGUAGE_PRETRAINED_MODEL = "textembedding-gecko"
# Generation parameters (feed into TEST_GENERATION_CONFIG below).
TEST_TEMPERATURE = 0.0
TEST_MAX_OUTPUT_TOKENS = 256
TEST_TOP_P = 0.8
TEST_TOP_K = 40
# NOTE(review): empty model name — presumably exercises the hook's default
# embedding model handling; confirm intent.
TEST_TEXT_EMBEDDING_MODEL = ""
TEST_TEXT_EMBEDDING_CONFIG = EmbedContentConfig(output_dimensionality=10)
TEST_MULTIMODAL_PRETRAINED_MODEL = "gemini-pro"
TEST_GENERATION_CONFIG = {
    "max_output_tokens": TEST_MAX_OUTPUT_TOKENS,
    "top_p": TEST_TOP_P,
    "temperature": TEST_TEMPERATURE,
}
TEST_TOOLS = [Tool(google_search=GoogleSearch())]
TEST_MULTIMODAL_VISION_MODEL = "gemini-pro-vision"
# Supervised fine-tuning fixtures.
SOURCE_MODEL = "gemini-1.0-pro-002"
TRAIN_DATASET = "gs://cloud-samples-data/ai-platform/generative_ai/sft_train_data.jsonl"
# Cached-content fixtures.
TEST_CACHED_MODEL = "gemini-1.5-pro-002"
TEST_CACHED_SYSTEM_INSTRUCTION = """
You are an expert researcher. You always stick to the facts in the sources provided, and never make up new facts.
Now look at these research papers, and answer the following questions.
"""
CACHED_CONTENT_CONFIG = CreateCachedContentConfig(
    contents=[
        Content(
            role="user",
            parts=[
                Part.from_uri(
                    file_uri="gs://cloud-samples-data/generative-ai/pdf/2312.11805v3.pdf",
                    mime_type="application/pdf",
                ),
                Part.from_uri(
                    file_uri="gs://cloud-samples-data/generative-ai/pdf/2403.05530.pdf",
                    mime_type="application/pdf",
                ),
            ],
        )
    ],
    system_instruction=TEST_CACHED_SYSTEM_INSTRUCTION,
    display_name="test-cache",
    ttl="3600s",
)
# Templates used to build mock.patch target strings for the hook modules.
BASE_STRING = "airflow.providers.google.common.hooks.base_google.{}"
GENERATIVE_MODEL_STRING = "airflow.providers.google.cloud.hooks.gen_ai.{}"
# Gemini API (API-key auth) fixtures: batch jobs and file management.
TEST_API_KEY = "test-api-key"
TEST_JOB_NAME = "batches/test-job-id"
TEST_MODEL = "models/gemini-2.5-flash"
TEST_BATCH_JOB_SOURCE_INLINE = [
    {"contents": [{"parts": [{"text": "Tell me a one-sentence joke."}], "role": "user"}]},
    {"contents": [{"parts": [{"text": "Why is the sky blue?"}], "role": "user"}]},
]
TEST_EMBEDDINGS_JOB_SOURCE_INLINE = {
    "contents": [{"parts": [{"text": "Why is the sky blue?"}], "role": "user"}]
}
TEST_SOURCE_FILE = "test-bucket/source.jsonl"
TEST_LOCAL_FILE_PATH = "/tmp/data/test_file.json"
TEST_FILE_NAME = "files/test-file-id"
# Mock constants for configuration objects
TEST_LIST_BATCH_JOBS_CONFIG = {"page_size": 10}
TEST_CREATE_BATCH_JOB_CONFIG = {"display_name": "test-job"}
TEST_UPLOAD_FILE_CONFIG = {"display_name": "custom_name", "mime_type": "text/plain"}
def assert_warning(msg: str, warnings):
    """Assert that at least one entry in *warnings* mentions *msg* in its text."""
    matched = any(msg in str(warning) for warning in warnings)
    assert matched
class TestGenAIGenerativeModelHookWithDefaultProjectId:
    """Unit tests for ``GenAIGenerativeModelHook`` using the default project id."""

    def dummy_get_credentials(self):
        """No-op credential getter; these tests never contact real GCP."""

    def setup_method(self):
        """Build the hook under test with the base-hook constructor patched out."""
        init_patch = mock.patch(
            BASE_STRING.format("GoogleBaseHook.__init__"), new=mock_base_gcp_hook_default_project_id
        )
        with init_patch:
            self.hook = GenAIGenerativeModelHook(gcp_conn_id=TEST_GCP_CONN_ID)
            self.hook.get_credentials = self.dummy_get_credentials

    @mock.patch(GENERATIVE_MODEL_STRING.format("GenAIGenerativeModelHook.get_genai_client"))
    def test_text_embedding_model_get_embeddings(self, get_client_mock) -> None:
        """embed_content must forward model, contents and config to the client."""
        genai_client = get_client_mock.return_value
        genai_client.models = mock.Mock()
        self.hook.embed_content(
            model=TEST_TEXT_EMBEDDING_MODEL,
            contents=TEST_CONTENTS,
            config=TEST_TEXT_EMBEDDING_CONFIG,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
        )
        genai_client.models.embed_content.assert_called_once_with(
            model=TEST_TEXT_EMBEDDING_MODEL,
            contents=TEST_CONTENTS,
            config=TEST_TEXT_EMBEDDING_CONFIG,
        )

    @mock.patch(GENERATIVE_MODEL_STRING.format("GenAIGenerativeModelHook.get_genai_client"))
    def test_generative_model_generate_content(self, get_client_mock) -> None:
        """generate_content must map generation_config onto the client's config kwarg."""
        genai_client = get_client_mock.return_value
        genai_client.models = mock.Mock()
        self.hook.generate_content(
            model=TEST_MULTIMODAL_PRETRAINED_MODEL,
            contents=TEST_CONTENTS,
            generation_config=TEST_GENERATION_CONFIG,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
        )
        genai_client.models.generate_content.assert_called_once_with(
            model=TEST_MULTIMODAL_PRETRAINED_MODEL,
            contents=TEST_CONTENTS,
            config=TEST_GENERATION_CONFIG,
        )

    @mock.patch(GENERATIVE_MODEL_STRING.format("GenAIGenerativeModelHook.get_genai_client"))
    def test_supervised_fine_tuning_train(self, get_client_mock) -> None:
        """supervised_fine_tuning_train must call tunings.tune with config=None."""
        genai_client = get_client_mock.return_value
        genai_client.models = mock.Mock()
        self.hook.supervised_fine_tuning_train(
            source_model=SOURCE_MODEL,
            training_dataset=TRAIN_DATASET,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
        )
        genai_client.tunings.tune.assert_called_once_with(
            base_model=SOURCE_MODEL,
            training_dataset=TRAIN_DATASET,
            config=None,
        )

    @mock.patch(GENERATIVE_MODEL_STRING.format("GenAIGenerativeModelHook.get_genai_client"))
    def test_count_tokens(self, get_client_mock) -> None:
        """count_tokens must forward model and contents with config=None."""
        genai_client = get_client_mock.return_value
        genai_client.models = mock.Mock()
        self.hook.count_tokens(
            model=TEST_MULTIMODAL_PRETRAINED_MODEL,
            contents=TEST_CONTENTS,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
        )
        genai_client.models.count_tokens.assert_called_once_with(
            model=TEST_MULTIMODAL_PRETRAINED_MODEL,
            contents=TEST_CONTENTS,
            config=None,
        )

    @mock.patch(GENERATIVE_MODEL_STRING.format("GenAIGenerativeModelHook.get_genai_client"))
    def test_create_cached_content(self, get_client_mock) -> None:
        """create_cached_content must hand the config object to caches.create."""
        genai_client = get_client_mock.return_value
        genai_client.models = mock.Mock()
        self.hook.create_cached_content(
            model=TEST_CACHED_MODEL,
            cached_content_config=CACHED_CONTENT_CONFIG,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
        )
        genai_client.caches.create.assert_called_once_with(
            model=TEST_CACHED_MODEL,
            config=CACHED_CONTENT_CONFIG,
        )
class TestGenAIGeminiAPIHook:
    """Unit tests for GenAIGeminiAPIHook (API-key auth, batch jobs and files)."""
    def setup_method(self):
        # Patch the base-hook constructor so no real GCP connection is resolved.
        with mock.patch(
            BASE_STRING.format("GoogleBaseHook.__init__"), new=mock_base_gcp_hook_default_project_id
        ):
            self.hook = GenAIGeminiAPIHook(gemini_api_key=TEST_API_KEY)
    @mock.patch("google.genai.Client")
    def test_get_genai_client(self, mock_client):
        """Test client initialization with correct parameters."""
        self.hook.get_genai_client()
        mock_client.assert_called_once_with(
            api_key=TEST_API_KEY,
            vertexai=False,
        )
    @mock.patch(GENERATIVE_MODEL_STRING.format("GenAIGeminiAPIHook.get_genai_client"))
    def test_get_batch_job(self, mock_get_client):
        """get_batch_job should call client.batches.get with the job name."""
        client_mock = mock_get_client.return_value
        client_mock.batches = mock.Mock()
        self.hook.get_batch_job(job_name=TEST_JOB_NAME)
        client_mock.batches.get.assert_called_once_with(name=TEST_JOB_NAME)
    @mock.patch(GENERATIVE_MODEL_STRING.format("GenAIGeminiAPIHook.get_genai_client"))
    def test_list_batch_jobs(self, mock_get_client):
        """list_batch_jobs should pass its config through to client.batches.list."""
        client_mock = mock_get_client.return_value
        client_mock.batches = mock.Mock()
        self.hook.list_batch_jobs(list_batch_jobs_config=TEST_LIST_BATCH_JOBS_CONFIG)
        client_mock.batches.list.assert_called_once_with(config=TEST_LIST_BATCH_JOBS_CONFIG)
    @mock.patch(GENERATIVE_MODEL_STRING.format("GenAIGeminiAPIHook.get_genai_client"))
    def test_create_batch_job(self, mock_get_client):
        """create_batch_job should map source onto the client's ``src`` kwarg."""
        client_mock = mock_get_client.return_value
        client_mock.batches = mock.Mock()
        self.hook.create_batch_job(
            model=TEST_MODEL,
            source=TEST_BATCH_JOB_SOURCE_INLINE,
            create_batch_job_config=TEST_CREATE_BATCH_JOB_CONFIG,
        )
        client_mock.batches.create.assert_called_once_with(
            model=TEST_MODEL, src=TEST_BATCH_JOB_SOURCE_INLINE, config=TEST_CREATE_BATCH_JOB_CONFIG
        )
    @mock.patch(GENERATIVE_MODEL_STRING.format("GenAIGeminiAPIHook.get_genai_client"))
    def test_delete_batch_job(self, mock_get_client):
        """delete_batch_job should call client.batches.delete with the job name."""
        client_mock = mock_get_client.return_value
        client_mock.batches = mock.Mock()
        self.hook.delete_batch_job(job_name=TEST_JOB_NAME)
        client_mock.batches.delete.assert_called_once_with(name=TEST_JOB_NAME)
    @mock.patch(GENERATIVE_MODEL_STRING.format("GenAIGeminiAPIHook.get_genai_client"))
    def test_cancel_batch_job(self, mock_get_client):
        """cancel_batch_job should call client.batches.cancel with the job name."""
        client_mock = mock_get_client.return_value
        client_mock.batches = mock.Mock()
        self.hook.cancel_batch_job(job_name=TEST_JOB_NAME)
        client_mock.batches.cancel.assert_called_once_with(name=TEST_JOB_NAME)
    @mock.patch(GENERATIVE_MODEL_STRING.format("GenAIGeminiAPIHook.get_genai_client"))
    def test_create_embeddings_with_inline_source(self, mock_get_client):
        """Test create_embeddings when source is a dict (inline)."""
        client_mock = mock_get_client.return_value
        client_mock.batches = mock.Mock()
        self.hook.create_embeddings(
            model=TEST_MODEL,
            source=TEST_EMBEDDINGS_JOB_SOURCE_INLINE,
            create_embeddings_config=TEST_CREATE_BATCH_JOB_CONFIG,
        )
        # A dict source must be wrapped in an "inlined_requests" payload.
        client_mock.batches.create_embeddings.assert_called_once_with(
            model=TEST_MODEL,
            src={"inlined_requests": TEST_EMBEDDINGS_JOB_SOURCE_INLINE},
            config=TEST_CREATE_BATCH_JOB_CONFIG,
        )
    @mock.patch(GENERATIVE_MODEL_STRING.format("GenAIGeminiAPIHook.get_genai_client"))
    def test_create_embeddings_with_file_source(self, mock_get_client):
        """Test create_embeddings when source is a string (file name)."""
        client_mock = mock_get_client.return_value
        client_mock.batches = mock.Mock()
        # Test with str (File name)
        source_file = "/bucket/file.jsonl"
        self.hook.create_embeddings(
            model=TEST_MODEL, source=source_file, create_embeddings_config=TEST_CREATE_BATCH_JOB_CONFIG
        )
        # A string source must be wrapped in a "file_name" payload.
        client_mock.batches.create_embeddings.assert_called_once_with(
            model=TEST_MODEL, src={"file_name": source_file}, config=TEST_CREATE_BATCH_JOB_CONFIG
        )
    @mock.patch(GENERATIVE_MODEL_STRING.format("GenAIGeminiAPIHook.get_genai_client"))
    def test_upload_file_with_provided_config(self, mock_get_client):
        """Test upload_file when explicit config is provided."""
        client_mock = mock_get_client.return_value
        client_mock.files = mock.Mock()
        self.hook.upload_file(path_to_file=TEST_LOCAL_FILE_PATH, upload_file_config=TEST_UPLOAD_FILE_CONFIG)
        client_mock.files.upload.assert_called_once_with(
            file=TEST_LOCAL_FILE_PATH, config=TEST_UPLOAD_FILE_CONFIG
        )
    @mock.patch(GENERATIVE_MODEL_STRING.format("GenAIGeminiAPIHook.get_genai_client"))
    def test_upload_file_default_config_generation(self, mock_get_client):
        """Test that upload_file generates correct config from filename if config is None."""
        client_mock = mock_get_client.return_value
        client_mock.files = mock.Mock()
        # Path: /tmp/data/test_file.json -> name: test_file, type: json
        self.hook.upload_file(path_to_file=TEST_LOCAL_FILE_PATH, upload_file_config=None)
        # NOTE(review): "json" is the file extension, not a MIME type — this
        # mirrors the hook's current default-config behavior; confirm intent.
        expected_config = {"display_name": "test_file", "mime_type": "json"}
        client_mock.files.upload.assert_called_once_with(file=TEST_LOCAL_FILE_PATH, config=expected_config)
    @mock.patch(GENERATIVE_MODEL_STRING.format("GenAIGeminiAPIHook.get_genai_client"))
    def test_get_file(self, mock_get_client):
        """get_file should call client.files.get with the file name."""
        client_mock = mock_get_client.return_value
        client_mock.files = mock.Mock()
        self.hook.get_file(file_name=TEST_FILE_NAME)
        client_mock.files.get.assert_called_once_with(name=TEST_FILE_NAME)
    @mock.patch(GENERATIVE_MODEL_STRING.format("GenAIGeminiAPIHook.get_genai_client"))
    def test_download_file(self, mock_get_client):
        """download_file should call client.files.download with ``file=``."""
        client_mock = mock_get_client.return_value
        client_mock.files = mock.Mock()
        self.hook.download_file(file_name=TEST_FILE_NAME)
        client_mock.files.download.assert_called_once_with(file=TEST_FILE_NAME)
    @mock.patch(GENERATIVE_MODEL_STRING.format("GenAIGeminiAPIHook.get_genai_client"))
    def test_list_files(self, mock_get_client):
        """list_files should delegate to client.files.list."""
        client_mock = mock_get_client.return_value
        client_mock.files = mock.Mock()
        self.hook.list_files()
        client_mock.files.list.assert_called_once()
    @mock.patch(GENERATIVE_MODEL_STRING.format("GenAIGeminiAPIHook.get_genai_client"))
    def test_delete_file(self, mock_get_client):
        """delete_file should call client.files.delete with the file name."""
        client_mock = mock_get_client.return_value
        client_mock.files = mock.Mock()
        self.hook.delete_file(file_name=TEST_FILE_NAME)
        client_mock.files.delete.assert_called_once_with(name=TEST_FILE_NAME)
def mock_init(*args, **kwargs):
    """No-op stand-in for ``GoogleBaseAsyncHook.__init__`` in the async-hook tests."""
    return None
class TestGenAIGeminiAPIAsyncHook:
    """Unit tests for the async Gemini API hook."""
    def setup_method(self, method):
        # Patch the async base-hook constructor so no credentials are resolved.
        with mock.patch(BASE_STRING.format("GoogleBaseAsyncHook.__init__"), new=mock_init):
            self.hook = GenAIGeminiAPIAsyncHook(gemini_api_key=TEST_API_KEY)
    @pytest.mark.asyncio
    @mock.patch(GENERATIVE_MODEL_STRING.format("GenAIGeminiAPIAsyncHook.get_async_client"))
    async def test_get_job(self, mock_client):
        """get_batch_job should await batches.get with the job name."""
        mock_async_client = AsyncMock()
        mock_client.return_value = mock_async_client
        await self.hook.get_batch_job(
            job_name=TEST_JOB_NAME,
        )
        mock_client.assert_called_once()
        mock_async_client.batches.get.assert_called_once_with(
            name=TEST_JOB_NAME,
        )
    @pytest.mark.asyncio
    @mock.patch(GENERATIVE_MODEL_STRING.format("GenAIGeminiAPIAsyncHook.get_async_client"))
    async def test_create_batch_job(self, mock_client):
        """create_batch_job should await batches.create, mapping source onto ``src``."""
        mock_async_client = AsyncMock()
        mock_client.return_value = mock_async_client
        await self.hook.create_batch_job(
            model=TEST_MODEL,
            source=TEST_BATCH_JOB_SOURCE_INLINE,
            create_batch_job_config=TEST_CREATE_BATCH_JOB_CONFIG,
        )
        mock_client.assert_called_once()
        mock_async_client.batches.create.assert_called_once_with(
            model=TEST_MODEL,
            src=TEST_BATCH_JOB_SOURCE_INLINE,
            config=TEST_CREATE_BATCH_JOB_CONFIG,
        )
    @pytest.mark.asyncio
    @mock.patch(GENERATIVE_MODEL_STRING.format("GenAIGeminiAPIAsyncHook.get_async_client"))
    async def test_create_embeddings_job(self, mock_client):
        """create_embeddings_batch_job should wrap a dict source in ``inlined_requests``."""
        mock_async_client = AsyncMock()
        mock_client.return_value = mock_async_client
        await self.hook.create_embeddings_batch_job(
            model=TEST_MODEL,
            source=TEST_EMBEDDINGS_JOB_SOURCE_INLINE,
            create_embeddings_config=TEST_CREATE_BATCH_JOB_CONFIG,
        )
        mock_client.assert_called_once()
        mock_async_client.batches.create_embeddings.assert_called_once_with(
            model=TEST_MODEL,
            src={"inlined_requests": TEST_EMBEDDINGS_JOB_SOURCE_INLINE},
            config=TEST_CREATE_BATCH_JOB_CONFIG,
        )
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/tests/unit/google/cloud/hooks/test_gen_ai.py",
"license": "Apache License 2.0",
"lines": 360,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/google/tests/unit/google/cloud/operators/test_gen_ai.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
import pytest
from google.genai.errors import ClientError
from google.genai.types import (
Content,
CreateCachedContentConfig,
GenerateContentConfig,
GoogleSearch,
Part,
Tool,
TuningDataset,
)
from airflow.exceptions import AirflowException
from airflow.providers.google.cloud.operators.gen_ai import (
GenAICountTokensOperator,
GenAICreateCachedContentOperator,
GenAIGeminiCancelBatchJobOperator,
GenAIGeminiCreateBatchJobOperator,
GenAIGeminiCreateEmbeddingsBatchJobOperator,
GenAIGeminiDeleteBatchJobOperator,
GenAIGeminiDeleteFileOperator,
GenAIGeminiGetBatchJobOperator,
GenAIGeminiGetFileOperator,
GenAIGeminiListBatchJobsOperator,
GenAIGeminiListFilesOperator,
GenAIGeminiUploadFileOperator,
GenAIGenerateContentOperator,
GenAIGenerateEmbeddingsOperator,
GenAISupervisedFineTuningTrainOperator,
)
# ---------------------------------------------------------------------------
# Shared fixtures for the GenAI operator unit tests.
# ---------------------------------------------------------------------------
# Template for mock.patch targets in the operators module.
GEN_AI_PATH = "airflow.providers.google.cloud.operators.gen_ai.{}"
TASK_ID = "test_task_id"
GCP_PROJECT = "test-project"
GCP_LOCATION = "test-location"
GCP_CONN_ID = "test-conn"
IMPERSONATION_CHAIN = ["ACCOUNT_1", "ACCOUNT_2", "ACCOUNT_3"]
# Cached-content fixtures.
CACHED_SYSTEM_INSTRUCTION = """
You are an expert researcher. You always stick to the facts in the sources provided, and never make up new facts.
Now look at these research papers, and answer the following questions.
"""
CACHED_CONTENT_CONFIG = CreateCachedContentConfig(
    contents=[
        Content(
            role="user",
            parts=[
                Part.from_uri(
                    file_uri="gs://cloud-samples-data/generative-ai/pdf/2312.11805v3.pdf",
                    mime_type="application/pdf",
                ),
                Part.from_uri(
                    file_uri="gs://cloud-samples-data/generative-ai/pdf/2403.05530.pdf",
                    mime_type="application/pdf",
                ),
            ],
        )
    ],
    system_instruction=CACHED_SYSTEM_INSTRUCTION,
    display_name="test-cache",
    ttl="3600s",
)
# Model / prompt fixtures.
EMBEDDING_MODEL = "textembedding-gecko"
GEMINI_MODEL = "gemini-pro"
CONTENTS = ["In 10 words or less, what is Apache Airflow?"]
CONTENT_GENERATION_CONFIG = GenerateContentConfig(
    max_output_tokens=256,
    top_p=0.95,
    temperature=0.0,
    tools=[Tool(google_search=GoogleSearch())],
)
# Fine-tuning fixtures.
TUNING_JOB_CONFIG = TuningDataset(
    gcs_uri="gs://cloud-samples-data/ai-platform/generative_ai/gemini-1_5/text/sft_train_data.jsonl",
)
TUNING_TRAINING_DATASET = "gs://cloud-samples-data/ai-platform/generative_ai/sft_train_data.jsonl"
GENERATE_FROM_CACHED_MODEL_CONFIG = {
    "cached_content": "cached_name",
}
# Gemini API batch-job fixtures.
TEST_BATCH_JOB_INLINED_REQUESTS = [
    {"contents": [{"parts": [{"text": "Tell me a one-sentence joke."}], "role": "user"}]},
    {"contents": [{"parts": [{"text": "Why is the sky blue?"}], "role": "user"}]},
]
TEST_EMBEDDINGS_JOB_INLINED_REQUESTS = {
    "contents": [{"parts": [{"text": "Why is the sky blue?"}], "role": "user"}]
}
TEST_GEMINI_API_KEY = "test-key"
TEST_GEMINI_MODEL = "test-gemini-model"
TEST_BATCH_JOB_NAME = "test-name"
TEST_FILE_NAME = "test-file"
TEST_FILE_PATH = "test/path/to/file"
# Canned model_dump() payload returned by mocked batch-job objects.
TEST_BATCH_JOB_RESPONSE = {
    "src": None,
    "dest": "test-batch-job-destination",
    "name": "test-name",
    "error": None,
    "model": "test-model",
    "state": "JOB_STATE_SUCCEEDED",
    "end_time": "test-end-datetime",
    "start_time": None,
    "create_time": "test-create-datetime",
    "update_time": "test-update-datetime",
    "display_name": "test-display-name",
    "completion_stats": None,
}
def assert_warning(msg: str, warnings):
    """Fail unless at least one entry in *warnings* contains *msg* in its text."""
    for entry in warnings:
        if msg in str(entry):
            return
    raise AssertionError
class TestGenAIGenerateEmbeddingsOperator:
    """Unit tests for ``GenAIGenerateEmbeddingsOperator``."""

    @mock.patch(GEN_AI_PATH.format("GenAIGenerativeModelHook"))
    def test_execute(self, hook_mock):
        """execute() must construct the hook and delegate to embed_content."""
        operator = GenAIGenerateEmbeddingsOperator(
            task_id=TASK_ID,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            model=EMBEDDING_MODEL,
            contents=CONTENTS,
        )
        operator.execute(context={"ti": mock.MagicMock()})
        hook_mock.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        hook_mock.return_value.embed_content.assert_called_once_with(
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            contents=CONTENTS,
            model=EMBEDDING_MODEL,
            config=None,
        )
class TestGenAIGenerateContentOperator:
    """Unit tests for GenAIGenerateContentOperator."""
    @mock.patch(GEN_AI_PATH.format("GenAIGenerativeModelHook"))
    def test_execute(self, mock_hook):
        """execute() should build the hook and delegate to generate_content."""
        op = GenAIGenerateContentOperator(
            task_id=TASK_ID,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            contents=CONTENTS,
            generation_config=CONTENT_GENERATION_CONFIG,
            model=GEMINI_MODEL,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        op.execute(context={"ti": mock.MagicMock()})
        mock_hook.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        mock_hook.return_value.generate_content.assert_called_once_with(
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            contents=CONTENTS,
            generation_config=CONTENT_GENERATION_CONFIG,
            model=GEMINI_MODEL,
        )
class TestGenAISupervisedFineTuningTrainOperator:
    """Unit tests for GenAISupervisedFineTuningTrainOperator."""
    @mock.patch(GEN_AI_PATH.format("GenAIGenerativeModelHook"))
    def test_execute(
        self,
        mock_hook,
    ):
        """execute() should forward all tuning parameters to the hook."""
        op = GenAISupervisedFineTuningTrainOperator(
            task_id=TASK_ID,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            source_model=GEMINI_MODEL,
            training_dataset=TUNING_TRAINING_DATASET,
            tuning_job_config=TUNING_JOB_CONFIG,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        op.execute(context={"ti": mock.MagicMock()})
        mock_hook.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        mock_hook.return_value.supervised_fine_tuning_train.assert_called_once_with(
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            source_model=GEMINI_MODEL,
            training_dataset=TUNING_TRAINING_DATASET,
            tuning_job_config=TUNING_JOB_CONFIG,
        )
class TestGenAICountTokensOperator:
    """Unit tests for GenAICountTokensOperator."""
    @mock.patch(GEN_AI_PATH.format("GenAIGenerativeModelHook"))
    def test_execute(self, mock_hook):
        """execute() should delegate to the hook's count_tokens with config=None."""
        op = GenAICountTokensOperator(
            task_id=TASK_ID,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            contents=CONTENTS,
            model=GEMINI_MODEL,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        op.execute(context={"ti": mock.MagicMock()})
        mock_hook.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        mock_hook.return_value.count_tokens.assert_called_once_with(
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            contents=CONTENTS,
            model=GEMINI_MODEL,
            config=None,
        )
class TestGenAICreateCachedContentOperator:
    """Unit tests for GenAICreateCachedContentOperator."""
    @mock.patch(GEN_AI_PATH.format("GenAIGenerativeModelHook"))
    def test_execute(self, mock_hook):
        """execute() should delegate the cached-content config to the hook."""
        op = GenAICreateCachedContentOperator(
            task_id=TASK_ID,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            model=GEMINI_MODEL,
            cached_content_config=CACHED_CONTENT_CONFIG,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        op.execute(context={"ti": mock.MagicMock()})
        mock_hook.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        mock_hook.return_value.create_cached_content.assert_called_once_with(
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            model=GEMINI_MODEL,
            cached_content_config=CACHED_CONTENT_CONFIG,
        )
class TestGenAIGenerateFromCachedContentOperator:
    """Tests generating content against a cached model via GenAIGenerateContentOperator."""
    @mock.patch(GEN_AI_PATH.format("GenAIGenerativeModelHook"))
    def test_execute(self, mock_hook):
        """execute() should pass the cached-content generation config through unchanged."""
        op = GenAIGenerateContentOperator(
            task_id=TASK_ID,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            model=GEMINI_MODEL,
            contents=CONTENTS,
            generation_config=GENERATE_FROM_CACHED_MODEL_CONFIG,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        op.execute(context={"ti": mock.MagicMock()})
        mock_hook.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        mock_hook.return_value.generate_content.assert_called_once_with(
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            model=GEMINI_MODEL,
            contents=CONTENTS,
            generation_config=GENERATE_FROM_CACHED_MODEL_CONFIG,
        )
class TestGenAIGeminiCreateBatchJobOperator:
    """Unit tests for GenAIGeminiCreateBatchJobOperator (constructor validation,
    sync/deferred execution and error propagation)."""
    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute(self, mock_hook):
        """execute() should build the hook with the API key and create the job."""
        op = GenAIGeminiCreateBatchJobOperator(
            task_id=TASK_ID,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            model=TEST_GEMINI_MODEL,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            input_source=TEST_BATCH_JOB_INLINED_REQUESTS,
            gemini_api_key=TEST_GEMINI_API_KEY,
            wait_until_complete=False,
            retrieve_result=False,
        )
        op.execute(context={"ti": mock.MagicMock()})
        mock_hook.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        mock_hook.return_value.create_batch_job.assert_called_once_with(
            source=TEST_BATCH_JOB_INLINED_REQUESTS,
            model=TEST_GEMINI_MODEL,
            create_batch_job_config=None,
        )
    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute_return_value(self, mock_hook):
        """execute() should return the job serialized via model_dump(mode="json")."""
        expected_return = TEST_BATCH_JOB_RESPONSE
        mock_job = mock.MagicMock()
        mock_job.model_dump.return_value = expected_return
        mock_hook.return_value.create_batch_job.return_value = mock_job
        op = GenAIGeminiCreateBatchJobOperator(
            task_id=TASK_ID,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            model=TEST_GEMINI_MODEL,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            input_source=TEST_BATCH_JOB_INLINED_REQUESTS,
            gemini_api_key=TEST_GEMINI_API_KEY,
            deferrable=False,
            wait_until_complete=False,
        )
        result = op.execute(context={"ti": mock.MagicMock()})
        assert result == expected_return
        mock_job.model_dump.assert_called_once_with(mode="json")
    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute_complete_return_value(self, mock_hook):
        """execute_complete() should refetch the job by name and serialize it."""
        expected_return = TEST_BATCH_JOB_RESPONSE
        event = {"status": "success", "job_name": "test-name"}
        mock_job = mock.MagicMock()
        mock_job.model_dump.return_value = expected_return
        mock_hook.return_value.get_batch_job.return_value = mock_job
        op = GenAIGeminiCreateBatchJobOperator(
            task_id=TASK_ID,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            model=TEST_GEMINI_MODEL,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            input_source=TEST_BATCH_JOB_INLINED_REQUESTS,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        result = op.execute_complete(context={"ti": mock.MagicMock()}, event=event)
        assert result == expected_return
        mock_hook.return_value.get_batch_job.assert_called_once_with("test-name")
        mock_job.model_dump.assert_called_once_with(mode="json")
    def test_init_retrieve_result_and_not_wait_until_complete_raises_airflow_exception(self):
        """retrieve_result=True without wait_until_complete must be rejected at init."""
        with pytest.raises(AirflowException):
            GenAIGeminiCreateBatchJobOperator(
                task_id=TASK_ID,
                project_id=GCP_PROJECT,
                location=GCP_LOCATION,
                model=TEST_GEMINI_MODEL,
                gcp_conn_id=GCP_CONN_ID,
                impersonation_chain=IMPERSONATION_CHAIN,
                input_source=TEST_BATCH_JOB_INLINED_REQUESTS,
                gemini_api_key=TEST_GEMINI_API_KEY,
                wait_until_complete=False,
                retrieve_result=True,
            )
    def test_init_input_source_not_string_raises_airflow_exception(self):
        """results_folder with a non-string (inline) input_source must be rejected."""
        with pytest.raises(AirflowException):
            GenAIGeminiCreateBatchJobOperator(
                task_id=TASK_ID,
                project_id=GCP_PROJECT,
                location=GCP_LOCATION,
                model=TEST_GEMINI_MODEL,
                gcp_conn_id=GCP_CONN_ID,
                impersonation_chain=IMPERSONATION_CHAIN,
                input_source=TEST_BATCH_JOB_INLINED_REQUESTS,
                gemini_api_key=TEST_GEMINI_API_KEY,
                wait_until_complete=False,
                results_folder=TEST_FILE_PATH,
            )
    def test_init_results_folder_not_exists_raises_airflow_exception(self):
        """A results_folder pointing to a non-existent path must be rejected."""
        with pytest.raises(AirflowException):
            GenAIGeminiCreateBatchJobOperator(
                task_id=TASK_ID,
                project_id=GCP_PROJECT,
                location=GCP_LOCATION,
                model=TEST_GEMINI_MODEL,
                gcp_conn_id=GCP_CONN_ID,
                impersonation_chain=IMPERSONATION_CHAIN,
                input_source=TEST_FILE_NAME,
                gemini_api_key=TEST_GEMINI_API_KEY,
                wait_until_complete=False,
                results_folder=TEST_FILE_PATH,
            )
    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test__wait_until_complete_exception_raises_airflow_exception(self, mock_hook):
        """_wait_until_complete should wrap hook errors in AirflowException."""
        op = GenAIGeminiCreateBatchJobOperator(
            task_id=TASK_ID,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            model=TEST_GEMINI_MODEL,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            input_source=TEST_BATCH_JOB_INLINED_REQUESTS,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        mock_hook.return_value.get_batch_job.side_effect = Exception()
        with pytest.raises(AirflowException):
            op._wait_until_complete(job=mock.MagicMock())
    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute_exception_error_raises_airflow_exception(self, mock_hook):
        """execute() should wrap create_batch_job errors in AirflowException."""
        op = GenAIGeminiCreateBatchJobOperator(
            task_id=TASK_ID,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            model=TEST_GEMINI_MODEL,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            input_source=TEST_BATCH_JOB_INLINED_REQUESTS,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        mock_hook.return_value.create_batch_job.side_effect = Exception()
        with pytest.raises(AirflowException):
            op.execute(context={"ti": mock.MagicMock()})
        # The hook must still have been constructed and called before failing.
        mock_hook.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        mock_hook.return_value.create_batch_job.assert_called_once_with(
            source=TEST_BATCH_JOB_INLINED_REQUESTS,
            model=TEST_GEMINI_MODEL,
            create_batch_job_config=None,
        )
    def test_execute_complete_error_status_raises_airflow_exception(self):
        """execute_complete() should raise when the trigger event reports an error."""
        op = GenAIGeminiCreateBatchJobOperator(
            task_id=TASK_ID,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            model=TEST_GEMINI_MODEL,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            input_source=TEST_BATCH_JOB_INLINED_REQUESTS,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        event = {"status": "error", "message": "test-message"}
        with pytest.raises(AirflowException):
            op.execute_complete(context={"ti": mock.MagicMock()}, event=event)
class TestGenAIGeminiGetBatchJobOperator:
    """Unit tests for GenAIGeminiGetBatchJobOperator."""
    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute(self, mock_hook):
        """execute() should build the hook and fetch the job by name."""
        op = GenAIGeminiGetBatchJobOperator(
            task_id=TASK_ID,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            job_name=TEST_BATCH_JOB_NAME,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        op.execute(context={"ti": mock.MagicMock()})
        mock_hook.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        mock_hook.return_value.get_batch_job.assert_called_once_with(
            job_name=TEST_BATCH_JOB_NAME,
        )
    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute_return_value(self, mock_hook):
        """execute() should return the job serialized via model_dump(mode="json")."""
        expected_return = TEST_BATCH_JOB_RESPONSE
        mock_job = mock.MagicMock()
        mock_job.model_dump.return_value = expected_return
        mock_hook.return_value.get_batch_job.return_value = mock_job
        op = GenAIGeminiGetBatchJobOperator(
            task_id=TASK_ID,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            job_name=TEST_BATCH_JOB_NAME,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        result = op.execute(context={"ti": mock.MagicMock()})
        assert result == expected_return
        mock_job.model_dump.assert_called_once_with(mode="json")
    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute_value_error_raises_airflow_exception(self, mock_hook):
        """execute() should convert a hook ValueError into AirflowException."""
        op = GenAIGeminiGetBatchJobOperator(
            task_id=TASK_ID,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            job_name=TEST_BATCH_JOB_NAME,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        mock_hook.return_value.get_batch_job.side_effect = ValueError()
        with pytest.raises(AirflowException):
            op.execute(context={"ti": mock.MagicMock()})
        # The hook must still have been constructed and called before failing.
        mock_hook.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        mock_hook.return_value.get_batch_job.assert_called_once_with(
            job_name=TEST_BATCH_JOB_NAME,
        )
class TestGenAIGeminiListBatchJobsOperator:
    """Unit tests for ``GenAIGeminiListBatchJobsOperator``."""

    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute(self, hook_mock):
        """execute() must create the hook with the API key and list jobs with no config."""
        operator = GenAIGeminiListBatchJobsOperator(
            task_id=TASK_ID,
            gemini_api_key=TEST_GEMINI_API_KEY,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
        )
        operator.execute(context={"ti": mock.MagicMock()})
        hook_mock.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        hook_mock.return_value.list_batch_jobs.assert_called_once_with(list_batch_jobs_config=None)
class TestGenAIGeminiDeleteBatchJobOperator:
    """Unit tests for GenAIGeminiDeleteBatchJobOperator."""
    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute(self, mock_hook):
        """execute() should delete the job when the response carries no error."""
        mock_hook.return_value.delete_batch_job.return_value = mock.MagicMock(error=False)
        op = GenAIGeminiDeleteBatchJobOperator(
            task_id=TASK_ID,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            job_name=TEST_BATCH_JOB_NAME,
            gemini_api_key=TEST_GEMINI_API_KEY,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        op.execute(context={"ti": mock.MagicMock()})
        mock_hook.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        mock_hook.return_value.delete_batch_job.assert_called_once_with(
            job_name=TEST_BATCH_JOB_NAME,
        )
    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute_value_error_raises_airflow_exception(self, mock_hook):
        """execute() should convert a hook ValueError into AirflowException."""
        op = GenAIGeminiDeleteBatchJobOperator(
            task_id=TASK_ID,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            job_name=TEST_BATCH_JOB_NAME,
            gemini_api_key=TEST_GEMINI_API_KEY,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        mock_hook.return_value.delete_batch_job.side_effect = ValueError()
        with pytest.raises(AirflowException):
            op.execute(context={"ti": mock.MagicMock()})
        # The hook must still have been constructed and called before failing.
        mock_hook.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        mock_hook.return_value.delete_batch_job.assert_called_once_with(
            job_name=TEST_BATCH_JOB_NAME,
        )
    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute_job_error_raises_airflow_exception(self, mock_hook):
        """execute() should raise when the delete response reports an error."""
        op = GenAIGeminiDeleteBatchJobOperator(
            task_id=TASK_ID,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            job_name=TEST_BATCH_JOB_NAME,
            gemini_api_key=TEST_GEMINI_API_KEY,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        mock_hook.return_value.delete_batch_job.return_value = mock.MagicMock(error="Test error")
        with pytest.raises(AirflowException):
            op.execute(context={"ti": mock.MagicMock()})
        mock_hook.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        mock_hook.return_value.delete_batch_job.assert_called_once_with(
            job_name=TEST_BATCH_JOB_NAME,
        )
class TestGenAIGeminiCancelBatchJobOperator:
    """Tests for GenAIGeminiCancelBatchJobOperator hook wiring and error translation."""

    @staticmethod
    def _build_operator():
        """Construct the operator under test with the standard fixtures."""
        return GenAIGeminiCancelBatchJobOperator(
            task_id=TASK_ID,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            job_name=TEST_BATCH_JOB_NAME,
            gemini_api_key=TEST_GEMINI_API_KEY,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )

    @staticmethod
    def _assert_hook_usage(mock_hook):
        """Verify the hook was instantiated once and cancel_batch_job called once."""
        mock_hook.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        mock_hook.return_value.cancel_batch_job.assert_called_once_with(
            job_name=TEST_BATCH_JOB_NAME,
        )

    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute(self, mock_hook):
        self._build_operator().execute(context={"ti": mock.MagicMock()})
        self._assert_hook_usage(mock_hook)

    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute_value_error_raises_airflow_exception(self, mock_hook):
        # A ValueError from the hook is expected to surface as AirflowException.
        mock_hook.return_value.cancel_batch_job.side_effect = ValueError()
        with pytest.raises(AirflowException):
            self._build_operator().execute(context={"ti": mock.MagicMock()})
        self._assert_hook_usage(mock_hook)
class TestGenAIGeminiCreateEmbeddingsBatchJobOperator:
    """Tests for GenAIGeminiCreateEmbeddingsBatchJobOperator construction and execution."""

    @staticmethod
    def _make_operator(**overrides):
        """Build the operator under test; keyword overrides extend/replace the defaults."""
        params = {
            "task_id": TASK_ID,
            "project_id": GCP_PROJECT,
            "location": GCP_LOCATION,
            "input_source": TEST_EMBEDDINGS_JOB_INLINED_REQUESTS,
            "model": EMBEDDING_MODEL,
            "gemini_api_key": TEST_GEMINI_API_KEY,
            "gcp_conn_id": GCP_CONN_ID,
            "impersonation_chain": IMPERSONATION_CHAIN,
        }
        params.update(overrides)
        return GenAIGeminiCreateEmbeddingsBatchJobOperator(**params)

    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute(self, mock_hook):
        operator = self._make_operator(wait_until_complete=False)
        operator.execute(context={"ti": mock.MagicMock()})
        mock_hook.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        mock_hook.return_value.create_embeddings.assert_called_once_with(
            source=TEST_EMBEDDINGS_JOB_INLINED_REQUESTS,
            model=EMBEDDING_MODEL,
            create_embeddings_config=None,
        )

    def test_init_retrieve_result_and_not_wait_until_complete_raises_airflow_exception(self):
        # retrieve_result=True is incompatible with wait_until_complete=False at init time.
        with pytest.raises(AirflowException):
            self._make_operator(wait_until_complete=False, retrieve_result=True)

    def test_init_input_source_not_string_raises_airflow_exception(self):
        # NOTE(review): per the test name, a results_folder combined with a non-string
        # (inlined-requests) input source is presumably rejected at init — confirm
        # against the operator's validation logic.
        with pytest.raises(AirflowException):
            self._make_operator(wait_until_complete=False, results_folder=TEST_FILE_PATH)

    def test_init_results_folder_not_exists_raises_airflow_exception(self):
        with pytest.raises(AirflowException):
            self._make_operator(
                input_source=TEST_FILE_NAME,
                wait_until_complete=False,
                results_folder=TEST_FILE_PATH,
            )

    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test__wait_until_complete_exception_raises_airflow_exception(self, mock_hook):
        operator = self._make_operator()
        mock_hook.return_value.get_batch_job.side_effect = Exception()
        with pytest.raises(AirflowException):
            operator._wait_until_complete(job=mock.MagicMock())

    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute_exception_error_raises_airflow_exception(self, mock_hook):
        operator = self._make_operator(wait_until_complete=False)
        mock_hook.return_value.create_embeddings.side_effect = Exception()
        with pytest.raises(AirflowException):
            operator.execute(context={"ti": mock.MagicMock()})
        mock_hook.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        mock_hook.return_value.create_embeddings.assert_called_once_with(
            source=TEST_EMBEDDINGS_JOB_INLINED_REQUESTS,
            model=EMBEDDING_MODEL,
            create_embeddings_config=None,
        )

    def test_execute_complete_error_status_raises_airflow_exception(self):
        operator = self._make_operator()
        event = {"status": "error", "message": "test-message"}
        with pytest.raises(AirflowException):
            operator.execute_complete(context={"ti": mock.MagicMock()}, event=event)
class TestGenAIGeminiGetFileOperator:
    """Tests for GenAIGeminiGetFileOperator hook wiring and error translation."""

    @staticmethod
    def _build_operator():
        """Construct the operator under test with the standard fixtures."""
        return GenAIGeminiGetFileOperator(
            task_id=TASK_ID,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            gemini_api_key=TEST_GEMINI_API_KEY,
            file_name=TEST_FILE_NAME,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )

    @staticmethod
    def _assert_hook_usage(mock_hook):
        """Verify the hook was instantiated once and get_file called once."""
        mock_hook.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        mock_hook.return_value.get_file.assert_called_once_with(
            file_name=TEST_FILE_NAME,
        )

    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute(self, mock_hook):
        self._build_operator().execute(context={"ti": mock.MagicMock()})
        self._assert_hook_usage(mock_hook)

    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute_client_error_raises_airflow_exception(self, mock_hook):
        # ClientError.__new__ builds a bare instance, bypassing the constructor's
        # required API-response arguments.
        mock_hook.return_value.get_file.side_effect = ClientError.__new__(ClientError)
        with pytest.raises(AirflowException):
            self._build_operator().execute(context={"ti": mock.MagicMock()})
        self._assert_hook_usage(mock_hook)
class TestGenAIGeminiUploadFileOperator:
    """Tests for GenAIGeminiUploadFileOperator hook wiring and error translation."""

    @staticmethod
    def _build_operator():
        """Construct the operator under test with the standard fixtures."""
        return GenAIGeminiUploadFileOperator(
            task_id=TASK_ID,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            file_path=TEST_FILE_PATH,
            gemini_api_key=TEST_GEMINI_API_KEY,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )

    @staticmethod
    def _assert_hook_usage(mock_hook):
        """Verify the hook was instantiated once and upload_file called once."""
        mock_hook.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        mock_hook.return_value.upload_file.assert_called_once_with(
            path_to_file=TEST_FILE_PATH,
            upload_file_config=None,
        )

    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute(self, mock_hook):
        self._build_operator().execute(context={"ti": mock.MagicMock()})
        self._assert_hook_usage(mock_hook)

    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute_runtime_error_raises_runtime_error(self, mock_hook):
        # RuntimeError is expected to propagate unchanged, not be wrapped.
        mock_hook.return_value.upload_file.side_effect = RuntimeError()
        with pytest.raises(RuntimeError):
            self._build_operator().execute(context={"ti": mock.MagicMock()})
        self._assert_hook_usage(mock_hook)

    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute_value_error_raises_airflow_exception(self, mock_hook):
        mock_hook.return_value.upload_file.side_effect = ValueError()
        with pytest.raises(AirflowException):
            self._build_operator().execute(context={"ti": mock.MagicMock()})
        self._assert_hook_usage(mock_hook)

    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute_file_not_found_error_raises_airflow_exception(self, mock_hook):
        mock_hook.return_value.upload_file.side_effect = FileNotFoundError()
        with pytest.raises(AirflowException):
            self._build_operator().execute(context={"ti": mock.MagicMock()})
        self._assert_hook_usage(mock_hook)
class TestGenAIGeminiListFilesOperator:
    """Tests for GenAIGeminiListFilesOperator hook wiring."""

    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute(self, mock_hook):
        """Listing files should instantiate the hook once and call list_files with no args."""
        init_kwargs = {
            "task_id": TASK_ID,
            "project_id": GCP_PROJECT,
            "location": GCP_LOCATION,
            "gemini_api_key": TEST_GEMINI_API_KEY,
            "gcp_conn_id": GCP_CONN_ID,
            "impersonation_chain": IMPERSONATION_CHAIN,
        }
        operator = GenAIGeminiListFilesOperator(**init_kwargs)
        operator.execute(context={"ti": mock.MagicMock()})
        mock_hook.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        mock_hook.return_value.list_files.assert_called_once_with()
class TestGenAIGeminiDeleteFileOperator:
    """Tests for GenAIGeminiDeleteFileOperator hook wiring and error translation."""

    @staticmethod
    def _build_operator():
        """Construct the operator under test with the standard fixtures."""
        return GenAIGeminiDeleteFileOperator(
            task_id=TASK_ID,
            project_id=GCP_PROJECT,
            location=GCP_LOCATION,
            file_name=TEST_FILE_NAME,
            gemini_api_key=TEST_GEMINI_API_KEY,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )

    @staticmethod
    def _assert_hook_usage(mock_hook):
        """Verify the hook was instantiated once and delete_file called once."""
        mock_hook.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            gemini_api_key=TEST_GEMINI_API_KEY,
        )
        mock_hook.return_value.delete_file.assert_called_once_with(
            file_name=TEST_FILE_NAME,
        )

    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute(self, mock_hook):
        self._build_operator().execute(context={"ti": mock.MagicMock()})
        self._assert_hook_usage(mock_hook)

    @mock.patch(GEN_AI_PATH.format("GenAIGeminiAPIHook"))
    def test_execute_client_error_raises_airflow_exception(self, mock_hook):
        # ClientError.__new__ builds a bare instance, bypassing the constructor's
        # required API-response arguments.
        mock_hook.return_value.delete_file.side_effect = ClientError.__new__(ClientError)
        with pytest.raises(AirflowException):
            self._build_operator().execute(context={"ti": mock.MagicMock()})
        self._assert_hook_usage(mock_hook)
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/tests/unit/google/cloud/operators/test_gen_ai.py",
"license": "Apache License 2.0",
"lines": 939,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/utils/container.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Helper functions for inspecting and interacting with containers in a Kubernetes Pod."""
from __future__ import annotations
from contextlib import suppress
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from kubernetes.client.models.v1_container_status import V1ContainerStatus
from kubernetes.client.models.v1_pod import V1Pod
def get_container_status(pod: V1Pod, container_name: str) -> V1ContainerStatus | None:
    """
    Return the status entry named ``container_name`` from ``pod``, or None.

    Regular container statuses are listed before init-container statuses, so a
    regular container wins if both carry the same name. Returns None when the
    pod, its status, or a matching entry is absent.
    """
    if not (pod and pod.status):
        return None
    candidates = list(pod.status.container_statuses or [])
    candidates.extend(pod.status.init_container_statuses or [])
    # First entry whose name matches wins; None if nothing matches.
    return next((status for status in candidates if status.name == container_name), None)
def container_is_running(pod: V1Pod, container_name: str) -> bool:
    """
    Report whether ``container_name`` inside ``pod`` is currently running.

    False when the container has no status entry or its ``running`` state is unset.
    """
    status = get_container_status(pod, container_name)
    return bool(status) and status.state.running is not None
def container_is_completed(pod: V1Pod, container_name: str) -> bool:
    """
    Report whether ``container_name`` inside ``pod`` has completed.

    False when the container has no status entry or its ``terminated`` state is unset.
    """
    status = get_container_status(pod, container_name)
    return bool(status) and status.state.terminated is not None
def container_is_succeeded(pod: V1Pod, container_name: str) -> bool:
    """
    Report whether ``container_name`` inside ``pod`` completed successfully.

    Success means the container has a ``terminated`` state with exit code 0;
    False in every other case, including a missing status entry.
    """
    status = get_container_status(pod, container_name)
    if not status:
        return False
    terminated = status.state.terminated
    return terminated is not None and terminated.exit_code == 0
def container_is_wait(pod: V1Pod, container_name: str) -> bool:
    """
    Report whether ``container_name`` inside ``pod`` is in the waiting state.

    False when the container has no status entry or its ``waiting`` state is unset.
    """
    status = get_container_status(pod, container_name)
    return bool(status) and status.state.waiting is not None
def container_is_terminated(pod: V1Pod, container_name: str) -> bool:
    """
    Report whether ``container_name`` inside ``pod`` has a terminated state.

    Note that only regular container statuses are consulted here, not
    init-container statuses.
    """
    if not (pod and pod.status and pod.status.container_statuses):
        return False
    status = next(
        (entry for entry in pod.status.container_statuses if entry.name == container_name),
        None,
    )
    if not status:
        return False
    return status.state.terminated is not None
def get_container_termination_message(pod: V1Pod, container_name: str):
    """
    Return the termination message of ``container_name`` in ``pod``, or None.

    Any gap along the path — no status, no status list, no matching container,
    or a container that has not terminated — yields None instead of raising.
    """
    try:
        statuses = pod.status.container_statuses
        match = next((entry for entry in statuses if entry.name == container_name), None)
        if match is None:
            return None
        return match.state.terminated.message
    except (AttributeError, TypeError):
        # Mirrors the tolerant lookup: missing attributes or a None status
        # list simply mean there is no message to report.
        return None
| {
"repo_id": "apache/airflow",
"file_path": "providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/utils/container.py",
"license": "Apache License 2.0",
"lines": 93,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.