sample_id stringlengths 21 196 | text stringlengths 105 936k | metadata dict | category stringclasses 6
values |
|---|---|---|---|
apache/airflow:providers/apache/kafka/src/airflow/providers/apache/kafka/triggers/msg_queue.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from collections.abc import Sequence
from typing import Any
from airflow.providers.common.messaging.triggers.msg_queue import MessageQueueTrigger
class KafkaMessageQueueTrigger(MessageQueueTrigger):
    """
    Kafka-specific trigger built on top of the common ``MessageQueueTrigger``.

    It wires the unified messaging framework to the ``KafkaMessageQueueProvider``
    by forcing the ``kafka`` scheme and forwarding all Kafka-specific options.

    :param topics: The topic (or topic regex) that should be searched for messages
    :param kafka_config_id: The Kafka connection configuration ID, defaults to "kafka_default"
    :param apply_function: the location of the function to apply to messages for determination of matching
        criteria. (In python dot notation as a string)
    :param apply_function_args: A set of arguments to apply to the callable, defaults to None
    :param apply_function_kwargs: A set of key word arguments to apply to the callable, defaults to None
    :param poll_timeout: How long the Kafka client should wait before returning from a poll request to
        Kafka (seconds), defaults to 1
    :param poll_interval: How long the trigger should sleep after reaching the end of the Kafka log
        (seconds), defaults to 5
    """

    def __init__(
        self,
        *,
        topics: Sequence[str],
        kafka_config_id: str = "kafka_default",
        apply_function: str,
        apply_function_args: list[Any] | None = None,
        apply_function_kwargs: dict[Any, Any] | None = None,
        poll_timeout: float = 1,
        poll_interval: float = 5,
        **kwargs: Any,
    ) -> None:
        # Normalize the optional containers so the underlying trigger always
        # receives concrete (possibly empty) collections instead of None.
        fn_args = apply_function_args if apply_function_args else []
        fn_kwargs = apply_function_kwargs if apply_function_kwargs else {}
        super().__init__(
            scheme="kafka",  # routes the message to the Kafka queue provider
            topics=topics,
            kafka_config_id=kafka_config_id,
            apply_function=apply_function,
            apply_function_args=fn_args,
            apply_function_kwargs=fn_kwargs,
            poll_timeout=poll_timeout,
            poll_interval=poll_interval,
            **kwargs,
        )
| {
"repo_id": "apache/airflow",
"file_path": "providers/apache/kafka/src/airflow/providers/apache/kafka/triggers/msg_queue.py",
"license": "Apache License 2.0",
"lines": 62,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/apache/kafka/tests/system/apache/kafka/example_dag_kafka_message_queue_trigger.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
# [START howto_trigger_message_queue]
from airflow.providers.apache.kafka.triggers.msg_queue import KafkaMessageQueueTrigger
from airflow.providers.standard.operators.empty import EmptyOperator
from airflow.sdk import DAG, Asset, AssetWatcher
def apply_function(message):
    """Decode a Kafka message's JSON payload, log it, and accept the message."""
    payload = json.loads(message.value())
    print(f"Value in message is {payload}")
    return True
# Define a trigger that listens to an Apache Kafka message queue
trigger = KafkaMessageQueueTrigger(
    # Topic(s) to watch for incoming messages.
    topics=["test"],
    # Dotted-path reference to the filter callable defined above in this module.
    apply_function="example_dag_kafka_message_queue_trigger.apply_function",
    kafka_config_id="kafka_default",
    apply_function_args=None,
    apply_function_kwargs=None,
    # Kafka poll timeout (seconds) and sleep between polls (seconds).
    poll_timeout=1,
    poll_interval=5,
)
# Define an asset that watches for messages on the queue
asset = Asset("kafka_queue_asset_1", watchers=[AssetWatcher(name="kafka_watcher_1", trigger=trigger)])
# Scheduling the DAG on the asset means a matching Kafka message triggers a run.
with DAG(dag_id="example_kafka_watcher_1", schedule=[asset]) as dag:
    EmptyOperator(task_id="task")
# [END howto_trigger_message_queue]
from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/apache/kafka/tests/system/apache/kafka/example_dag_kafka_message_queue_trigger.py",
"license": "Apache License 2.0",
"lines": 44,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/apache/kafka/tests/unit/apache/kafka/triggers/test_msg_queue.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import asyncio
import json
import pytest
from airflow.models import Connection
from airflow.providers.apache.kafka.hooks.consume import KafkaConsumerHook
from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS, get_base_airflow_version_tuple
if AIRFLOW_V_3_0_PLUS:
from airflow.providers.apache.kafka.triggers.msg_queue import KafkaMessageQueueTrigger
from airflow.providers.common.messaging.triggers.msg_queue import MessageQueueTrigger
def apply_function_false(message):
    """Filter callable that rejects every message."""
    return False
def apply_function_true(message):
    """Filter callable that accepts every message."""
    return True
class MockedMessage:
    """Minimal stand-in for a consumed Kafka message."""

    # NOTE(review): methods intentionally take bare *args with no explicit
    # ``self`` so they work whether invoked bound or unbound (the sibling
    # MockedConsumer is patched in as the class itself, not an instance).
    def __init__(*args, **kwargs):
        pass

    def error(*args, **kwargs):
        # Always report "no error", mirroring a successfully delivered message.
        return False
class MockedConsumer:
    """Minimal stand-in for a Kafka consumer.

    NOTE(review): the tests patch ``KafkaConsumerHook.get_consumer`` with this
    *class* (not an instance), so ``poll``/``commit`` are called unbound —
    hence the bare ``*args`` signatures without ``self``.
    """

    def __init__(*args, **kwargs) -> None:
        pass

    def poll(*args, **kwargs):
        # Every poll yields a fresh error-free mocked message.
        return MockedMessage()

    def commit(*args, **kwargs):
        # Pretend the offset commit always succeeds.
        return True
class TestMessageQueueTrigger:
    """Unit tests for ``KafkaMessageQueueTrigger`` serialization and run loop."""

    @pytest.fixture(autouse=True)
    def setup_connections(self, create_connection_without_db):
        """Register the Kafka connections these tests rely on (no DB writes)."""
        create_connection_without_db(
            Connection(
                conn_id="kafka_d",
                conn_type="kafka",
                extra=json.dumps(
                    {"socket.timeout.ms": 10, "bootstrap.servers": "localhost:9092", "group.id": "test_group"}
                ),
            )
        )
        # Second connection with multiple bootstrap servers; kept for tests
        # that exercise multi-broker configs.
        create_connection_without_db(
            Connection(
                conn_id="kafka_multi_bootstraps",
                conn_type="kafka",
                extra=json.dumps(
                    {
                        "socket.timeout.ms": 10,
                        "bootstrap.servers": "localhost:9091,localhost:9092,localhost:9093",
                        "group.id": "test_groups",
                    }
                ),
            )
        )

    @pytest.mark.skipif(not AIRFLOW_V_3_0_PLUS, reason="Requires Airflow 3.0.+")
    def test_trigger_serialization(self):
        """serialize() must resolve to AwaitMessageTrigger with all kwargs intact."""
        if get_base_airflow_version_tuple() < (3, 0, 1):
            pytest.skip("This test is only for Airflow 3.0.1+")
        trigger = KafkaMessageQueueTrigger(
            kafka_config_id="kafka_d",
            apply_function="test.noop",
            topics=["noop"],
            apply_function_args=[1, 2],
            apply_function_kwargs={"one": 1, "two": 2},
            poll_timeout=10,
            poll_interval=5,
        )
        assert isinstance(trigger, KafkaMessageQueueTrigger)
        assert isinstance(trigger, MessageQueueTrigger)
        classpath, kwargs = trigger.serialize()
        # Serialization delegates to the underlying Kafka AwaitMessageTrigger.
        assert classpath == "airflow.providers.apache.kafka.triggers.await_message.AwaitMessageTrigger"
        assert kwargs == {
            "kafka_config_id": "kafka_d",
            "apply_function": "test.noop",
            "topics": ["noop"],
            "apply_function_args": [1, 2],
            "apply_function_kwargs": {"one": 1, "two": 2},
            "poll_timeout": 10,
            "poll_interval": 5,
        }

    @pytest.mark.skipif(not AIRFLOW_V_3_0_PLUS, reason="Requires Airflow 3.0.+")
    @pytest.mark.asyncio
    async def test_trigger_run_good(self, mocker):
        """With an always-True apply function the run loop yields promptly."""
        if get_base_airflow_version_tuple() < (3, 0, 1):
            pytest.skip("This test is only for Airflow 3.0.1+")
        # The hook hands back the MockedConsumer *class*; its unbound
        # poll/commit still work thanks to the bare *args signatures.
        mocker.patch.object(KafkaConsumerHook, "get_consumer", return_value=MockedConsumer)
        trigger = KafkaMessageQueueTrigger(
            kafka_config_id="kafka_d",
            apply_function="unit.apache.kafka.triggers.test_msg_queue.apply_function_true",
            topics=["noop"],
            poll_timeout=0.0001,
            poll_interval=5,
        )
        # One second is enough for the first matching message to be emitted.
        task = asyncio.create_task(trigger.run().__anext__())
        await asyncio.sleep(1.0)
        assert task.done() is True

    @pytest.mark.skipif(not AIRFLOW_V_3_0_PLUS, reason="Requires Airflow 3.0.+")
    @pytest.mark.asyncio
    async def test_trigger_run_bad(self, mocker):
        """With an always-False apply function the run loop keeps polling."""
        mocker.patch.object(KafkaConsumerHook, "get_consumer", return_value=MockedConsumer)
        trigger = KafkaMessageQueueTrigger(
            kafka_config_id="kafka_d",
            apply_function="unit.apache.kafka.triggers.test_msg_queue.apply_function_false",
            topics=["noop"],
            poll_timeout=0.0001,
            poll_interval=5,
        )
        # Since no message ever matches, the task must still be pending.
        task = asyncio.create_task(trigger.run().__anext__())
        await asyncio.sleep(1.0)
        assert task.done() is False
| {
"repo_id": "apache/airflow",
"file_path": "providers/apache/kafka/tests/unit/apache/kafka/triggers/test_msg_queue.py",
"license": "Apache License 2.0",
"lines": 123,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/google/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_ray.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example Airflow DAG for creating GKE cluster with Ray enabled.
"""
from __future__ import annotations
import os
from datetime import datetime
from airflow.models.dag import DAG
from airflow.providers.google.cloud.operators.kubernetes_engine import (
GKECreateClusterOperator,
GKEDeleteClusterOperator,
)
try:
from airflow.sdk import TriggerRule
except ImportError:
# Compatibility for Airflow < 3.1
from airflow.utils.trigger_rule import TriggerRule # type: ignore[no-redef,attr-defined]
from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
# Unique suffix for system-test resources; "default" when not set by CI.
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "kubernetes_engine_ray"
GCP_PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
GCP_LOCATION = "europe-west1"
# GKE cluster names may not contain underscores, so they are hyphenated.
CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
# Drop the env suffix when the combined name would be too long (33+ chars).
CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else CLUSTER_NAME_FULL
# [START howto_operator_gcp_gke_create_cluster_definition_with_ray]
CLUSTER = {
    "name": CLUSTER_NAME,
    "node_pools": [
        {
            "name": f"{CLUSTER_NAME}-node",
            "initial_node_count": 1,
        },
    ],
    "autopilot": {"enabled": True},
    # Enables the Ray operator add-on on the cluster.
    "addons_config": {
        "ray_operator_config": {"enabled": True},
    },
}
# [END howto_operator_gcp_gke_create_cluster_definition_with_ray]
with DAG(
    DAG_ID,
    schedule="@once",  # Override to match your needs
    start_date=datetime(2021, 1, 1),
    catchup=False,
    tags=["example"],
) as dag:
    create_cluster = GKECreateClusterOperator(
        task_id="create_cluster",
        project_id=GCP_PROJECT_ID,
        location=GCP_LOCATION,
        body=CLUSTER,
    )
    delete_cluster = GKEDeleteClusterOperator(
        task_id="delete_cluster",
        cluster_name=CLUSTER_NAME,
        project_id=GCP_PROJECT_ID,
        location=GCP_LOCATION,
    )
    # Teardown must run even when upstream tasks fail, so the test cluster
    # is always cleaned up.
    delete_cluster.trigger_rule = TriggerRule.ALL_DONE
    create_cluster >> delete_cluster

    from tests_common.test_utils.watcher import watcher

    # This test needs watcher in order to properly mark success/failure
    # when "teardown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_ray.py",
"license": "Apache License 2.0",
"lines": 84,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/google/tests/unit/google/cloud/links/test_base_link.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any
from unittest import mock
import pytest
from airflow.providers.common.compat.sdk import XCom
from airflow.providers.google.cloud.links.base import BaseGoogleLink
from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
if AIRFLOW_V_3_0_PLUS:
from airflow.sdk.execution_time.comms import XComResult
TEST_LOCATION = "test-location"
TEST_CLUSTER_ID = "test-cluster-id"
TEST_PROJECT_ID = "test-project-id"
EXPECTED_GOOGLE_LINK_KEY = "google_link_for_test"
EXPECTED_GOOGLE_LINK_NAME = "Google Link for Test"
EXPECTED_GOOGLE_LINK_FORMAT = "/services/locations/{location}/clusters/{cluster_id}?project={project_id}"
EXPECTED_GOOGLE_LINK = "https://console.cloud.google.com" + EXPECTED_GOOGLE_LINK_FORMAT.format(
location=TEST_LOCATION, cluster_id=TEST_CLUSTER_ID, project_id=TEST_PROJECT_ID
)
class GoogleLink(BaseGoogleLink):
    """Concrete link subclass used as the fixture for these tests."""

    # Class attributes mirror the EXPECTED_* constants so the assertions
    # below can compare against a single source of truth.
    key = EXPECTED_GOOGLE_LINK_KEY
    name = EXPECTED_GOOGLE_LINK_NAME
    format_str = EXPECTED_GOOGLE_LINK_FORMAT
class TestBaseGoogleLink:
    """Direct tests of BaseGoogleLink class attributes and persist()."""

    def test_class_attributes(self):
        assert GoogleLink.key == EXPECTED_GOOGLE_LINK_KEY
        assert GoogleLink.name == EXPECTED_GOOGLE_LINK_NAME
        assert GoogleLink.format_str == EXPECTED_GOOGLE_LINK_FORMAT

    def test_persist(self):
        mock_context = mock.MagicMock()
        if AIRFLOW_V_3_0_PLUS:
            GoogleLink.persist(
                context=mock_context,
                location=TEST_LOCATION,
                cluster_id=TEST_CLUSTER_ID,
                project_id=TEST_PROJECT_ID,
            )
            # Airflow 3: persist pushes the link params to XCom via the TI.
            mock_context["ti"].xcom_push.assert_called_once_with(
                key=EXPECTED_GOOGLE_LINK_KEY,
                value={
                    "location": TEST_LOCATION,
                    "cluster_id": TEST_CLUSTER_ID,
                    "project_id": TEST_PROJECT_ID,
                },
            )
        else:
            # Airflow 2 path: no assertion — only checks the call succeeds.
            GoogleLink.persist(
                context=mock_context,
                location=TEST_LOCATION,
                cluster_id=TEST_CLUSTER_ID,
                project_id=TEST_PROJECT_ID,
            )
class MyOperator(GoogleCloudBaseOperator):
    """Minimal operator exposing GoogleLink for the get_link tests below."""

    operator_extra_links = (GoogleLink(),)

    def __init__(self, project_id: str, location: str, cluster_id: str, **kwargs):
        super().__init__(**kwargs)
        self.project_id = project_id
        self.location = location
        self.cluster_id = cluster_id

    @property
    def extra_links_params(self) -> dict[str, Any]:
        # Values GoogleLink.persist() picks up to fill format_str.
        return {
            "project_id": self.project_id,
            "cluster_id": self.cluster_id,
            "location": self.location,
        }

    def execute(self, context) -> Any:
        # Only persists the link; returns None.
        GoogleLink.persist(context=context)
class TestOperatorWithBaseGoogleLink:
    """End-to-end checks of BaseGoogleLink.get_link through an operator."""

    @pytest.mark.db_test
    def test_get_link(self, dag_maker, create_task_instance_of_operator, session, mock_supervisor_comms):
        """get_link formats the URL from the link params stored in XCom."""
        expected_url = EXPECTED_GOOGLE_LINK
        link = GoogleLink()
        ti = create_task_instance_of_operator(
            MyOperator,
            dag_id="test_link_dag",
            task_id="test_link_task",
            location=TEST_LOCATION,
            cluster_id=TEST_CLUSTER_ID,
            project_id=TEST_PROJECT_ID,
        )
        task = dag_maker.dag.get_task(ti.task_id)
        if AIRFLOW_V_3_0_PLUS and mock_supervisor_comms:
            # Airflow 3: XCom values are fetched over supervisor comms, so
            # the canned XComResult supplies the stored link params.
            mock_supervisor_comms.send.return_value = XComResult(
                key="key",
                value={
                    "cluster_id": task.cluster_id,
                    "location": task.location,
                    "project_id": task.project_id,
                },
            )
        actual_url = link.get_link(operator=task, ti_key=ti.key)
        assert actual_url == expected_url

    @pytest.mark.db_test
    @mock.patch.object(XCom, "get_value")
    def test_get_link_uses_xcom_url_and_skips_get_config(
        self,
        mock_get_value,
        dag_maker,
        create_task_instance_of_operator,
        session,
    ):
        """An http(s) URL stored in XCom is returned as-is; get_config is never consulted."""
        xcom_url = "https://console.cloud.google.com/some/service?project=test-proj"
        mock_get_value.return_value = xcom_url
        link = GoogleLink()
        ti = create_task_instance_of_operator(
            MyOperator,
            dag_id="test_link_dag",
            task_id="test_link_task",
            location=TEST_LOCATION,
            cluster_id=TEST_CLUSTER_ID,
            project_id=TEST_PROJECT_ID,
        )
        with mock.patch.object(GoogleLink, "get_config", autospec=True) as m_get_config:
            actual_url = link.get_link(operator=dag_maker.dag.get_task(ti.task_id), ti_key=ti.key)
        assert actual_url == xcom_url
        m_get_config.assert_not_called()

    @pytest.mark.db_test
    @mock.patch.object(XCom, "get_value")
    def test_get_link_falls_back_to_get_config_when_xcom_not_http(
        self,
        mock_get_value,
        dag_maker,
        create_task_instance_of_operator,
        session,
    ):
        """A non-http XCom value forces the get_config + _format_link fallback path."""
        mock_get_value.return_value = "gs://bucket/path"
        link = GoogleLink()
        ti = create_task_instance_of_operator(
            MyOperator,
            dag_id="test_link_dag",
            task_id="test_link_task",
            location=TEST_LOCATION,
            cluster_id=TEST_CLUSTER_ID,
            project_id=TEST_PROJECT_ID,
        )
        task = dag_maker.dag.get_task(ti.task_id)
        expected_formatted = "https://console.cloud.google.com/expected/link?project=test-proj"
        with (
            mock.patch.object(
                GoogleLink,
                "get_config",
                return_value={
                    "project_id": task.project_id,
                    "location": task.location,
                    "cluster_id": task.cluster_id,
                },
            ) as m_get_config,
            mock.patch.object(GoogleLink, "_format_link", return_value=expected_formatted) as m_fmt,
        ):
            actual_url = link.get_link(operator=task, ti_key=ti.key)
        assert actual_url == expected_formatted
        m_get_config.assert_called_once()
        m_fmt.assert_called_once()
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/tests/unit/google/cloud/links/test_base_link.py",
"license": "Apache License 2.0",
"lines": 171,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-ctl/src/airflowctl/ctl/commands/version_command.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import rich
from airflowctl import __version__ as airflowctl_version
from airflowctl.api.client import NEW_API_CLIENT, ClientKind, provide_api_client
@provide_api_client(kind=ClientKind.CLI)
def version_info(arg, api_client=NEW_API_CLIENT):
    """Print version information.

    Always reports the local ``airflowctl`` version; when ``--remote`` is
    passed, additionally fetches the API server's version and merges it into
    the printed dict.

    :param arg: Parsed CLI arguments; ``arg.remote`` toggles the remote lookup.
    :param api_client: API client injected by ``provide_api_client``.
    """
    version_dict = {"airflowctl_version": airflowctl_version}
    if arg.remote:
        # Merge the server's version payload (e.g. version, git_version).
        version_response = api_client.version.get()
        version_dict.update(version_response.model_dump())
    # Single print covers both branches (the original duplicated it in
    # both arms of the if/else).
    rich.print(version_dict)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-ctl/src/airflowctl/ctl/commands/version_command.py",
"license": "Apache License 2.0",
"lines": 30,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-ctl/tests/airflow_ctl/ctl/commands/test_version_command.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from contextlib import redirect_stdout
from io import StringIO
from unittest import mock
import pytest
from airflowctl.api.client import Client
from airflowctl.ctl import cli_parser
from airflowctl.ctl.commands.version_command import version_info
@pytest.fixture
def mock_client():
    """Build a MagicMock API client whose ``version.get()`` returns canned data.

    NOTE(review): the ``mock.patch`` context exits as soon as the fixture
    returns, so ``get_client`` is only patched during fixture setup; the tests
    rely on passing the mock client explicitly via ``api_client=`` instead.
    """
    with mock.patch("airflowctl.api.client.get_client") as mock_get_client:
        client = mock.MagicMock(spec=Client)
        mock_get_client.return_value.__enter__.return_value = client
        # Canned payload returned by api_client.version.get().model_dump().
        client.version.get.return_value.model_dump.return_value = {
            "version": "3.1.0",
            "git_version": None,
            "airflowctl_version": "0.1.0",
        }
        return client
class TestVersionCommand:
    """Test the version command."""

    # Shared argparse parser used to build CLI namespaces for each test.
    parser = cli_parser.get_parser()

    def test_ctl_version_remote(self, mock_client):
        """With --remote, both local and remote version fields are printed."""
        with redirect_stdout(StringIO()) as stdout:
            version_info(self.parser.parse_args(["version", "--remote"]), api_client=mock_client)
        assert "version" in stdout.getvalue()
        assert "git_version" in stdout.getvalue()
        assert "airflowctl_version" in stdout.getvalue()

    def test_ctl_version_only_local_version(self, mock_client):
        """Without --remote, only the local airflowctl version is printed."""
        with redirect_stdout(StringIO()) as stdout:
            version_info(self.parser.parse_args(["version"]), api_client=mock_client)
        output = stdout.getvalue()
        assert "airflowctl_version" in output
| {
"repo_id": "apache/airflow",
"file_path": "airflow-ctl/tests/airflow_ctl/ctl/commands/test_version_command.py",
"license": "Apache License 2.0",
"lines": 51,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:task-sdk/tests/task_sdk/execution_time/test_comms.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import threading
import uuid
from socket import socketpair
import msgspec
import pytest
from airflow.sdk import timezone
from airflow.sdk.execution_time.comms import BundleInfo, MaskSecret, StartupDetails, _ResponseFrame
from airflow.sdk.execution_time.task_runner import CommsDecoder
class TestCommsModels:
    """Test Pydantic models used in task communication for proper validation."""

    @pytest.mark.parametrize(
        "object_to_mask",
        [
            {
                "key_path": "/files/airflow-breeze-config/keys2/keys.json",
                "scope": "https://www.googleapis.com/auth/cloud-platform",
                "project": "project_id",
                "num_retries": 6,
            },
            ["iter1", "iter2", {"key": "value"}],
            "string",
            {
                "key1": "value1",
            },
        ],
    )
    def test_mask_secret_with_objects(self, object_to_mask):
        # MaskSecret must accept arbitrary JSON-like values and keep them intact.
        mask_secret_object = MaskSecret(value=object_to_mask, name="test_secret")
        assert mask_secret_object.value == object_to_mask

    def test_mask_secret_with_list(self):
        example_dict = ["test"]
        mask_secret_object = MaskSecret(value=example_dict, name="test_secret")
        assert mask_secret_object.value == example_dict

    def test_mask_secret_with_iterable(self):
        # NOTE(review): duplicates test_mask_secret_with_list verbatim.
        example_dict = ["test"]
        mask_secret_object = MaskSecret(value=example_dict, name="test_secret")
        assert mask_secret_object.value == example_dict
class TestCommsDecoder:
    """Test the communication between the subprocess and the "supervisor"."""

    @pytest.mark.usefixtures("disable_capturing")
    def test_recv_StartupDetails(self):
        """A framed msgpack StartupDetails payload decodes into the model."""
        r, w = socketpair()
        msg = {
            "type": "StartupDetails",
            "ti": {
                "id": uuid.UUID("4d828a62-a417-4936-a7a6-2b3fabacecab"),
                "task_id": "a",
                "try_number": 1,
                "run_id": "b",
                "dag_id": "c",
                "dag_version_id": uuid.UUID("4d828a62-a417-4936-a7a6-2b3fabacecab"),
            },
            "ti_context": {
                "dag_run": {
                    "dag_id": "c",
                    "run_id": "b",
                    "logical_date": "2024-12-01T01:00:00Z",
                    "data_interval_start": "2024-12-01T00:00:00Z",
                    "data_interval_end": "2024-12-01T01:00:00Z",
                    "start_date": "2024-12-01T01:00:00Z",
                    "run_after": "2024-12-01T01:00:00Z",
                    "end_date": None,
                    "run_type": "manual",
                    "state": "success",
                    "conf": None,
                    "consumed_asset_events": [],
                },
                "max_tries": 0,
                "should_retry": False,
                "variables": None,
                "connections": None,
            },
            "file": "/dev/null",
            "start_date": "2024-12-01T01:00:00Z",
            "dag_rel_path": "/dev/null",
            "bundle_info": {"name": "any-name", "version": "any-version"},
            "sentry_integration": "",
        }
        # Wire format: 4-byte big-endian length prefix followed by a
        # msgpack-encoded _ResponseFrame.  (NOTE: ``bytes`` shadows the builtin.)
        bytes = msgspec.msgpack.encode(_ResponseFrame(0, msg, None))
        w.sendall(len(bytes).to_bytes(4, byteorder="big") + bytes)
        decoder = CommsDecoder(socket=r, log=None)
        msg = decoder._get_response()
        assert isinstance(msg, StartupDetails)
        assert msg.ti.id == uuid.UUID("4d828a62-a417-4936-a7a6-2b3fabacecab")
        assert msg.ti.task_id == "a"
        assert msg.ti.dag_id == "c"
        assert msg.dag_rel_path == "/dev/null"
        assert msg.bundle_info == BundleInfo(name="any-name", version="any-version")
        assert msg.start_date == timezone.datetime(2024, 12, 1, 1)

    def test_huge_payload(self):
        """A 10 MB XCom payload round-trips through the framed socket intact."""
        r, w = socketpair()
        msg = {
            "type": "XComResult",
            "key": "a",
            "value": ("a" * 10 * 1024 * 1024) + "b",  # A 10mb xcom value
        }
        w.settimeout(1.0)
        bytes = msgspec.msgpack.encode(_ResponseFrame(0, msg, None))
        # Since `sendall` blocks, we need to do the send in another thread, so we can perform the read here
        t = threading.Thread(target=w.sendall, args=(len(bytes).to_bytes(4, byteorder="big") + bytes,))
        t.start()
        decoder = CommsDecoder(socket=r, log=None)
        try:
            msg = decoder._get_response()
        finally:
            # Always reap the sender thread, even if decoding raised.
            t.join(2)
        assert msg is not None
        # It actually failed to read at all for large values, but lets just make sure we get it all
        assert len(msg.value) == 10 * 1024 * 1024 + 1
        assert msg.value[-1] == "b"
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/tests/task_sdk/execution_time/test_comms.py",
"license": "Apache License 2.0",
"lines": 127,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-ctl/src/airflowctl/ctl/commands/config_command.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import sys
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, NamedTuple
import rich
from airflowctl.api.client import NEW_API_CLIENT, ClientKind, provide_api_client
if TYPE_CHECKING:
from airflowctl.api.datamodels.generated import Config
class ConfigParameter(NamedTuple):
    """Represents a configuration parameter."""

    # Config-file section the option lives in (e.g. "core").
    section: str
    # Option name within that section.
    option: str
@dataclass
class ConfigChange:
"""
Class representing the configuration changes in Airflow 3.0.
:param config: The configuration parameter being changed.
:param default_change: If the change is a default value change.
:param old_default: The old default value (valid only if default_change is True).
:param new_default: The new default value for the configuration parameter.
:param suggestion: A suggestion for replacing or handling the removed configuration.
:param renamed_to: The new section and option if the configuration is renamed.
:param was_deprecated: If the config is removed, whether the old config was deprecated.
:param was_removed: If the config is removed.
:param is_invalid_if: If the current config value is invalid in the future.
:param breaking: Mark if this change is known to be breaking and causing errors/ warnings / deprecations.
:param remove_if_equals: For removal rules, remove the option only if its current value equals this value.
"""
config: ConfigParameter
default_change: bool = False
old_default: str | bool | int | float | None = None
new_default: str | bool | int | float | None = None
suggestion: str = ""
renamed_to: ConfigParameter | None = None
was_deprecated: bool = True
was_removed: bool = True
is_invalid_if: Any = None
breaking: bool = False
remove_if_equals: str | bool | int | float | None = None
def message(self, api_client=NEW_API_CLIENT) -> str | None:
"""Generate a message for this configuration change."""
if self.default_change:
value = self._get_option_value(api_client.configs.list())
if value != self.new_default:
return (
f"Changed default value of `{self.config.option}` in `{self.config.section}` "
f"from `{self.old_default}` to `{self.new_default}`."
)
if self.renamed_to:
if self.config.section != self.renamed_to.section:
return (
f"`{self.config.option}` configuration parameter moved from `{self.config.section}` section to "
f"`{self.renamed_to.section}` section as `{self.renamed_to.option}`."
)
return (
f"`{self.config.option}` configuration parameter renamed to `{self.renamed_to.option}` "
f"in the `{self.config.section}` section."
)
if self.was_removed and not self.remove_if_equals:
return (
f"Removed{' deprecated' if self.was_deprecated else ''} `{self.config.option}` configuration parameter "
f"from `{self.config.section}` section."
f"{self.suggestion}"
)
if self.is_invalid_if is not None:
value = self._get_option_value(api_client.configs.list())
if value == self.is_invalid_if:
return (
f"Invalid value `{self.is_invalid_if}` set for `{self.config.option}` configuration parameter "
f"in `{self.config.section}` section. {self.suggestion}"
)
return None
def _get_option_value(self, config_resp: Config) -> str | None:
    """
    Look up the current value of this change's option in a fetched config.

    :param config_resp: Configuration payload returned by the API.
    :return: The option's value coerced to ``str``, or ``None`` if the
        section/option pair is not present.
    """
    for section in config_resp.sections:
        if section.name != self.config.section:
            continue
        for option in section.options:
            if option.key == self.config.option:
                raw = option.value
                return raw if isinstance(raw, str) else str(raw)
    return None
# Catalogue of configuration parameters that changed between Airflow 2.x and
# Airflow 3.0, grouped by the section the option lived in under 2.x. The
# `lint` command walks this list against the live configuration.
CONFIGS_CHANGES = [
    # admin
    ConfigChange(
        config=ConfigParameter("admin", "hide_sensitive_variable_fields"),
        renamed_to=ConfigParameter("core", "hide_sensitive_var_conn_fields"),
    ),
    ConfigChange(
        config=ConfigParameter("admin", "sensitive_variable_fields"),
        renamed_to=ConfigParameter("core", "sensitive_var_conn_names"),
    ),
    # core
    ConfigChange(
        config=ConfigParameter("core", "executor"),
        default_change=True,
        old_default="SequentialExecutor",
        new_default="LocalExecutor",
        was_removed=False,
        breaking=True,
    ),
    ConfigChange(
        config=ConfigParameter("core", "hostname"),
        was_removed=True,
        remove_if_equals=":",
    ),
    ConfigChange(
        config=ConfigParameter("core", "check_slas"),
        suggestion="The SLA feature is removed in Airflow 3.0, to be replaced with Airflow Alerts in future",
    ),
    ConfigChange(
        config=ConfigParameter("core", "strict_dataset_uri_validation"),
        suggestion="Dataset URI with a defined scheme will now always be validated strictly, "
        "raising a hard error on validation failure.",
    ),
    ConfigChange(
        config=ConfigParameter("core", "dag_default_view"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("core", "dag_orientation"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("core", "dataset_manager_class"),
        renamed_to=ConfigParameter("core", "asset_manager_class"),
    ),
    ConfigChange(
        config=ConfigParameter("core", "dataset_manager_kwargs"),
        renamed_to=ConfigParameter("core", "asset_manager_kwargs"),
    ),
    ConfigChange(
        config=ConfigParameter("core", "worker_precheck"),
        renamed_to=ConfigParameter("celery", "worker_precheck"),
    ),
    ConfigChange(
        config=ConfigParameter("core", "non_pooled_task_slot_count"),
        renamed_to=ConfigParameter("core", "default_pool_task_slot_count"),
    ),
    ConfigChange(
        config=ConfigParameter("core", "dag_concurrency"),
        renamed_to=ConfigParameter("core", "max_active_tasks_per_dag"),
    ),
    ConfigChange(
        config=ConfigParameter("core", "sql_alchemy_conn"),
        renamed_to=ConfigParameter("database", "sql_alchemy_conn"),
    ),
    ConfigChange(
        config=ConfigParameter("core", "sql_engine_encoding"),
        renamed_to=ConfigParameter("database", "sql_engine_encoding"),
    ),
    ConfigChange(
        config=ConfigParameter("core", "sql_engine_collation_for_ids"),
        renamed_to=ConfigParameter("database", "sql_engine_collation_for_ids"),
    ),
    ConfigChange(
        config=ConfigParameter("core", "sql_alchemy_pool_enabled"),
        renamed_to=ConfigParameter("database", "sql_alchemy_pool_enabled"),
    ),
    ConfigChange(
        config=ConfigParameter("core", "sql_alchemy_pool_size"),
        renamed_to=ConfigParameter("database", "sql_alchemy_pool_size"),
    ),
    ConfigChange(
        config=ConfigParameter("core", "sql_alchemy_max_overflow"),
        renamed_to=ConfigParameter("database", "sql_alchemy_max_overflow"),
    ),
    ConfigChange(
        config=ConfigParameter("core", "sql_alchemy_pool_recycle"),
        renamed_to=ConfigParameter("database", "sql_alchemy_pool_recycle"),
    ),
    ConfigChange(
        config=ConfigParameter("core", "sql_alchemy_pool_pre_ping"),
        renamed_to=ConfigParameter("database", "sql_alchemy_pool_pre_ping"),
    ),
    ConfigChange(
        config=ConfigParameter("core", "sql_alchemy_schema"),
        renamed_to=ConfigParameter("database", "sql_alchemy_schema"),
    ),
    ConfigChange(
        config=ConfigParameter("core", "sql_alchemy_connect_args"),
        renamed_to=ConfigParameter("database", "sql_alchemy_connect_args"),
    ),
    ConfigChange(
        config=ConfigParameter("core", "load_default_connections"),
        renamed_to=ConfigParameter("database", "load_default_connections"),
    ),
    ConfigChange(
        config=ConfigParameter("core", "max_db_retries"),
        renamed_to=ConfigParameter("database", "max_db_retries"),
    ),
    ConfigChange(config=ConfigParameter("core", "task_runner")),
    ConfigChange(config=ConfigParameter("core", "enable_xcom_pickling")),
    ConfigChange(
        config=ConfigParameter("core", "dag_file_processor_timeout"),
        renamed_to=ConfigParameter("dag_processor", "dag_file_processor_timeout"),
    ),
    ConfigChange(
        config=ConfigParameter("core", "dag_processor_manager_log_location"),
    ),
    ConfigChange(
        config=ConfigParameter("core", "log_processor_filename_template"),
    ),
    ConfigChange(
        config=ConfigParameter("core", "parallelism"),
        was_removed=False,
        is_invalid_if="0",
        suggestion="Please set the `parallelism` configuration parameter to a value greater than 0.",
    ),
    # api
    ConfigChange(
        config=ConfigParameter("api", "access_control_allow_origin"),
        renamed_to=ConfigParameter("api", "access_control_allow_origins"),
    ),
    ConfigChange(
        config=ConfigParameter("api", "auth_backend"),
        renamed_to=ConfigParameter("fab", "auth_backends"),
    ),
    ConfigChange(
        config=ConfigParameter("api", "auth_backends"),
        renamed_to=ConfigParameter("fab", "auth_backends"),
    ),
    # logging
    ConfigChange(
        config=ConfigParameter("logging", "enable_task_context_logger"),
        suggestion="Remove TaskContextLogger: Replaced by the Log table for better handling of task log "
        "messages outside the execution context.",
    ),
    ConfigChange(
        config=ConfigParameter("logging", "dag_processor_manager_log_location"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("logging", "dag_processor_manager_log_stdout"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("logging", "log_processor_filename_template"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("logging", "log_filename_template"),
        was_removed=True,
        remove_if_equals="{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts }}/{{ try_number }}.log",
        breaking=True,
    ),
    ConfigChange(
        config=ConfigParameter("logging", "log_filename_template"),
        was_removed=True,
        remove_if_equals="dag_id={{ ti.dag_id }}/run_id={{ ti.run_id }}/task_id={{ ti.task_id }}/{% if ti.map_index >= 0 %}map_index={{ ti.map_index }}/{% endif %}attempt={{ try_number }}.log",
        breaking=True,
    ),
    # metrics
    ConfigChange(
        config=ConfigParameter("metrics", "metrics_use_pattern_match"),
    ),
    ConfigChange(
        config=ConfigParameter("metrics", "timer_unit_consistency"),
        suggestion="In Airflow 3.0, the `timer_unit_consistency` setting in the `metrics` section is "
        "removed as it is now the default behaviour. This is done to standardize all timer and "
        "timing metrics to milliseconds across all metric loggers",
    ),
    ConfigChange(
        config=ConfigParameter("metrics", "statsd_allow_list"),
        renamed_to=ConfigParameter("metrics", "metrics_allow_list"),
    ),
    ConfigChange(
        config=ConfigParameter("metrics", "statsd_block_list"),
        renamed_to=ConfigParameter("metrics", "metrics_block_list"),
    ),
    # traces
    ConfigChange(
        config=ConfigParameter("traces", "otel_task_log_event"),
    ),
    # operators
    ConfigChange(
        config=ConfigParameter("operators", "allow_illegal_arguments"),
    ),
    # webserver
    ConfigChange(
        config=ConfigParameter("webserver", "allow_raw_html_descriptions"),
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "cookie_samesite"),
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "update_fab_perms"),
        renamed_to=ConfigParameter("fab", "update_fab_perms"),
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "auth_rate_limited"),
        renamed_to=ConfigParameter("fab", "auth_rate_limited"),
    ),
    ConfigChange(
        config=ConfigParameter("webserver", option="auth_rate_limit"),
        renamed_to=ConfigParameter("fab", "auth_rate_limit"),
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "config_file"),
        renamed_to=ConfigParameter("fab", "config_file"),
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "session_backend"),
        renamed_to=ConfigParameter("fab", "session_backend"),
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "session_lifetime_days"),
        renamed_to=ConfigParameter("fab", "session_lifetime_minutes"),
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "force_log_out_after"),
        renamed_to=ConfigParameter("fab", "session_lifetime_minutes"),
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "session_lifetime_minutes"),
        renamed_to=ConfigParameter("fab", "session_lifetime_minutes"),
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "base_url"),
        renamed_to=ConfigParameter("api", "base_url"),
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "web_server_host"),
        renamed_to=ConfigParameter("api", "host"),
        breaking=True,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "web_server_port"),
        renamed_to=ConfigParameter("api", "port"),
        breaking=True,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "workers"),
        renamed_to=ConfigParameter("api", "workers"),
        breaking=True,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "web_server_worker_timeout"),
        renamed_to=ConfigParameter("api", "worker_timeout"),
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "web_server_ssl_cert"),
        renamed_to=ConfigParameter("api", "ssl_cert"),
        breaking=True,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "web_server_ssl_key"),
        renamed_to=ConfigParameter("api", "ssl_key"),
        breaking=True,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "access_logfile"),
        renamed_to=ConfigParameter("api", "access_logfile"),
        breaking=True,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "error_logfile"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "access_logformat"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "web_server_master_timeout"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "worker_refresh_batch_size"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "worker_refresh_interval"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "reload_on_plugin_change"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "worker_class"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "expose_stacktrace"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "log_fetch_delay_sec"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "log_auto_tailing_offset"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "log_animation_speed"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "default_dag_run_display_number"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "enable_proxy_fix"),
        renamed_to=ConfigParameter("fab", "enable_proxy_fix"),
        breaking=True,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "proxy_fix_x_for"),
        renamed_to=ConfigParameter("fab", "proxy_fix_x_for"),
        breaking=True,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "proxy_fix_x_proto"),
        renamed_to=ConfigParameter("fab", "proxy_fix_x_proto"),
        breaking=True,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "proxy_fix_x_host"),
        renamed_to=ConfigParameter("fab", "proxy_fix_x_host"),
        breaking=True,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "proxy_fix_x_port"),
        renamed_to=ConfigParameter("fab", "proxy_fix_x_port"),
        breaking=True,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "proxy_fix_x_prefix"),
        renamed_to=ConfigParameter("fab", "proxy_fix_x_prefix"),
        breaking=True,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "expose_config"),
        renamed_to=ConfigParameter("api", "expose_config"),
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "cookie_secure"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "analytics_tool"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "analytics_id"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "analytics_url"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "show_recent_stats_for_completed_runs"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "run_internal_api"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "caching_hash_method"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "show_trigger_form_if_no_params"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "num_recent_configurations_for_trigger"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "allowed_payload_size"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "max_form_memory_size"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "max_form_parts"),
        was_deprecated=False,
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "default_ui_timezone"),
        was_deprecated=False,
    ),
    # policy
    ConfigChange(
        config=ConfigParameter("policy", "airflow_local_settings"),
        renamed_to=ConfigParameter("policy", "task_policy"),
    ),
    ConfigChange(
        config=ConfigParameter("webserver", "navbar_logo_text_color"),
        was_deprecated=False,
    ),
    # scheduler
    ConfigChange(
        config=ConfigParameter("scheduler", "dependency_detector"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "allow_trigger_in_future"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "catchup_by_default"),
        default_change=True,
        old_default="True",
        was_removed=False,
        new_default="False",
        suggestion="In Airflow 3.0 the default value for `catchup_by_default` is set to `False`. "
        "This means that DAGs without explicit definition of the `catchup` parameter will not "
        "catchup by default. "
        "If your DAGs rely on catchup behavior, not explicitly defined in the DAG definition, "
        "set this configuration parameter to `True` in the `scheduler` section of your `airflow.cfg` "
        "to enable the behavior from Airflow 2.x.",
        breaking=True,
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "create_cron_data_intervals"),
        default_change=True,
        old_default="True",
        new_default="False",
        was_removed=False,
        breaking=True,
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "create_delta_data_intervals"),
        default_change=True,
        old_default="True",
        new_default="False",
        was_removed=False,
        breaking=True,
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "processor_poll_interval"),
        renamed_to=ConfigParameter("scheduler", "scheduler_idle_sleep_time"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "deactivate_stale_dags_interval"),
        renamed_to=ConfigParameter("scheduler", "parsing_cleanup_interval"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "statsd_on"), renamed_to=ConfigParameter("metrics", "statsd_on")
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "max_threads"),
        renamed_to=ConfigParameter("dag_processor", "parsing_processes"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "statsd_host"),
        renamed_to=ConfigParameter("metrics", "statsd_host"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "statsd_port"),
        renamed_to=ConfigParameter("metrics", "statsd_port"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "statsd_prefix"),
        renamed_to=ConfigParameter("metrics", "statsd_prefix"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "statsd_allow_list"),
        renamed_to=ConfigParameter("metrics", "statsd_allow_list"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "stat_name_handler"),
        renamed_to=ConfigParameter("metrics", "stat_name_handler"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "statsd_datadog_enabled"),
        renamed_to=ConfigParameter("metrics", "statsd_datadog_enabled"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "statsd_datadog_tags"),
        renamed_to=ConfigParameter("metrics", "statsd_datadog_tags"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "statsd_datadog_metrics_tags"),
        renamed_to=ConfigParameter("metrics", "statsd_datadog_metrics_tags"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "statsd_custom_client_path"),
        renamed_to=ConfigParameter("metrics", "statsd_custom_client_path"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "parsing_processes"),
        renamed_to=ConfigParameter("dag_processor", "parsing_processes"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "file_parsing_sort_mode"),
        renamed_to=ConfigParameter("dag_processor", "file_parsing_sort_mode"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "max_callbacks_per_loop"),
        renamed_to=ConfigParameter("dag_processor", "max_callbacks_per_loop"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "min_file_process_interval"),
        renamed_to=ConfigParameter("dag_processor", "min_file_process_interval"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "stale_dag_threshold"),
        renamed_to=ConfigParameter("dag_processor", "stale_dag_threshold"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "print_stats_interval"),
        renamed_to=ConfigParameter("dag_processor", "print_stats_interval"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "dag_dir_list_interval"),
        renamed_to=ConfigParameter("dag_processor", "refresh_interval"),
        breaking=True,
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "local_task_job_heartbeat_sec"),
        renamed_to=ConfigParameter("scheduler", "task_instance_heartbeat_sec"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "scheduler_zombie_task_threshold"),
        renamed_to=ConfigParameter("scheduler", "task_instance_heartbeat_timeout"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "zombie_detection_interval"),
        renamed_to=ConfigParameter("scheduler", "task_instance_heartbeat_timeout_detection_interval"),
    ),
    ConfigChange(
        config=ConfigParameter("scheduler", "child_process_log_directory"),
        renamed_to=ConfigParameter("logging", "dag_processor_child_process_log_directory"),
    ),
    # celery
    ConfigChange(
        config=ConfigParameter("celery", "stalled_task_timeout"),
        renamed_to=ConfigParameter("scheduler", "task_queued_timeout"),
    ),
    ConfigChange(
        config=ConfigParameter("celery", "default_queue"),
        renamed_to=ConfigParameter("operators", "default_queue"),
    ),
    ConfigChange(
        config=ConfigParameter("celery", "task_adoption_timeout"),
        renamed_to=ConfigParameter("scheduler", "task_queued_timeout"),
    ),
    # kubernetes_executor
    ConfigChange(
        config=ConfigParameter("kubernetes_executor", "worker_pods_pending_timeout"),
        renamed_to=ConfigParameter("scheduler", "task_queued_timeout"),
    ),
    ConfigChange(
        config=ConfigParameter("kubernetes_executor", "worker_pods_pending_timeout_check_interval"),
        renamed_to=ConfigParameter("scheduler", "task_queued_timeout_check_interval"),
    ),
    # smtp
    ConfigChange(
        config=ConfigParameter("smtp", "smtp_user"),
        suggestion="Please use the SMTP connection (`smtp_default`).",
    ),
    ConfigChange(
        config=ConfigParameter("smtp", "smtp_password"),
        suggestion="Please use the SMTP connection (`smtp_default`).",
    ),
    # database
    ConfigChange(
        config=ConfigParameter("database", "load_default_connections"),
    ),
    # triggerer
    ConfigChange(
        config=ConfigParameter("triggerer", "default_capacity"),
        renamed_to=ConfigParameter("triggerer", "capacity"),
        breaking=True,
    ),
    # email
    ConfigChange(
        config=ConfigParameter("email", "email_backend"),
        was_removed=True,
        remove_if_equals="airflow.contrib.utils.sendgrid.send_email",
    ),
    # elasticsearch
    ConfigChange(
        config=ConfigParameter("elasticsearch", "log_id_template"),
        was_removed=True,
        remove_if_equals="{dag_id}-{task_id}-{logical_date}-{try_number}",
        breaking=True,
    ),
]
@provide_api_client(kind=ClientKind.CLI)
def lint(args, api_client=NEW_API_CLIENT) -> None:
    """
    Lint the airflow.cfg file for removed or renamed configurations.

    This function scans the Airflow configuration file for parameters that are removed or renamed in
    Airflow 3.0. It provides suggestions for alternative parameters or settings where applicable.

    CLI Arguments:
        --section: str (optional)
            The specific section of the configuration to lint.
            Example: --section core

        --option: str (optional)
            The specific option within a section to lint.
            Example: --option check_slas

        --ignore-section: str (optional)
            A section to ignore during linting.
            Example: --ignore-section webserver

        --ignore-option: str (optional)
            An option to ignore during linting.
            Example: --ignore-option smtp_user

        --verbose: flag (optional)
            Enables detailed output, including the list of ignored sections and options.
            Example: --verbose

    Examples:
        1. Lint all sections and options:
            airflowctl config lint

        2. Lint a specific section:
            airflowctl config lint --section core,webserver

        3. Lint specific sections and options:
            airflowctl config lint --section smtp --option smtp_user

        4. Ignore a section:
            airflowctl config lint --ignore-section webserver,api

        5. Ignore options:
            airflowctl config lint --ignore-option smtp_user,session_lifetime_days

        6. Enable verbose output:
            airflowctl config lint --verbose

    :param args: The CLI arguments for linting configurations.
    :param api_client: API client used to fetch the live configuration.
    """
    lint_issues = []
    # Missing CLI filters default to "no filtering" (empty list).
    section_to_check_if_provided = args.section or []
    option_to_check_if_provided = args.option or []
    ignore_sections = args.ignore_section or []
    ignore_options = args.ignore_option or []
    try:
        all_configs = api_client.configs.list()
        for configuration in CONFIGS_CHANGES:
            # Apply include filters first, then ignore filters.
            if (
                section_to_check_if_provided
                and configuration.config.section not in section_to_check_if_provided
            ):
                continue
            if option_to_check_if_provided and configuration.config.option not in option_to_check_if_provided:
                continue
            if (
                configuration.config.section in ignore_sections
                or configuration.config.option in ignore_options
            ):
                continue
            # Only report changes for options that are actually present in the
            # user's configuration.
            target_section = next(
                (section for section in all_configs.sections if section.name == configuration.config.section),
                None,
            )
            if target_section:
                target_option = next(
                    (
                        option
                        for option in target_section.options
                        if option.key == configuration.config.option
                    ),
                    None,
                )
                if target_option:
                    # Compute the message exactly once: message() may issue an
                    # additional API call internally, so calling it twice (once
                    # for the None-check, once for the append) doubled the
                    # round-trips per matched option.
                    issue_message = configuration.message(api_client=api_client)
                    if issue_message is not None:
                        lint_issues.append(issue_message)
        if lint_issues:
            rich.print("[red]Found issues in your airflow.cfg:[/red]")
            for issue in lint_issues:
                rich.print(f"  - [yellow]{issue}[/yellow]")
            if args.verbose:
                rich.print("\n[blue]Detailed Information:[/blue]")
                rich.print(f"Ignored sections: [green]{', '.join(ignore_sections)}[/green]")
                rich.print(f"Ignored options: [green]{', '.join(ignore_options)}[/green]")
            rich.print("\n[red]Please update your configuration file accordingly.[/red]")
        else:
            rich.print("[green]No issues found in your airflow.cfg. It is ready for Airflow 3![/green]")
    except Exception as e:
        rich.print(f"[red]Lint configs failed: {e}")
        sys.exit(1)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-ctl/src/airflowctl/ctl/commands/config_command.py",
"license": "Apache License 2.0",
"lines": 787,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-ctl/tests/airflow_ctl/ctl/commands/test_config_command.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from unittest.mock import patch
from airflowctl.api.client import ClientKind
from airflowctl.api.datamodels.generated import Config, ConfigOption, ConfigSection
from airflowctl.ctl import cli_parser
from airflowctl.ctl.commands import config_command
from airflowctl.ctl.commands.config_command import ConfigChange, ConfigParameter
class TestCliConfigCommands:
    """Tests for the `airflowctl config` CLI commands (`lint` and `list`)."""

    parser = cli_parser.get_parser()

    @patch("rich.print")
    def test_lint_no_issues(self, mock_rich_print, api_client_maker):
        """A config with no known-changed options reports success."""
        response_config = Config(
            sections=[
                ConfigSection(
                    name="test_section",
                    options=[
                        ConfigOption(
                            key="test_key",
                            value="test_value",
                        )
                    ],
                )
            ]
        )
        api_client = api_client_maker(
            path="/api/v2/config",
            response_json=response_config.model_dump(),
            expected_http_status_code=200,
            kind=ClientKind.CLI,
        )
        config_command.lint(
            self.parser.parse_args(["config", "lint"]),
            api_client=api_client,
        )
        calls = [call[0][0] for call in mock_rich_print.call_args_list]
        assert "[green]No issues found in your airflow.cfg. It is ready for Airflow 3![/green]" in calls[0]

    @patch("airflowctl.api.client.Credentials.load")
    @patch("rich.print")
    @patch.dict(os.environ, {"AIRFLOW_CLI_TOKEN": "TEST_TOKEN"})
    @patch.dict(os.environ, {"AIRFLOW_CLI_ENVIRONMENT": "TEST_CONFIG"})
    @patch(
        "airflowctl.ctl.commands.config_command.CONFIGS_CHANGES",
        [
            ConfigChange(
                config=ConfigParameter("test_section", "test_option"),
                default_change=True,
                old_default="old_default",
                new_default="new_default",
            ),
        ],
    )
    def test_lint_detects_default_changed_configs(self, mock_rich_print, api_client_maker):
        """An option still set to the old default is reported as a default change."""
        response_config = Config(
            sections=[
                ConfigSection(
                    name="test_section",
                    options=[
                        ConfigOption(
                            key="test_option",
                            value="old_default",
                        )
                    ],
                )
            ]
        )
        api_client = api_client_maker(
            path="/api/v2/config",
            response_json=response_config.model_dump(),
            expected_http_status_code=200,
            kind=ClientKind.CLI,
        )
        api_client.configs.list.return_value = response_config
        config_command.lint(
            self.parser.parse_args(["config", "lint"]),
            api_client=api_client,
        )
        calls = [call[0][0] for call in mock_rich_print.call_args_list]
        assert "[red]Found issues in your airflow.cfg:[/red]" in calls[0]
        assert (
            "  - [yellow]Changed default value of `test_option` in `test_section` from `old_default` to `new_default`.[/yellow]"
            in calls[1]
        )

    @patch("airflowctl.api.client.Credentials.load")
    @patch.dict(os.environ, {"AIRFLOW_CLI_TOKEN": "TEST_TOKEN"})
    @patch.dict(os.environ, {"AIRFLOW_CLI_ENVIRONMENT": "TEST_CONFIG"})
    @patch("rich.print")
    @patch(
        "airflowctl.ctl.commands.config_command.CONFIGS_CHANGES",
        [
            ConfigChange(
                config=ConfigParameter("test_section", "test_option"),
                was_removed=True,
            ),
        ],
    )
    def test_lint_detects_removed_configs(self, mock_rich_print, api_client_maker):
        """An option marked as removed is reported as such."""
        response_config = Config(
            sections=[
                ConfigSection(
                    name="test_section",
                    options=[
                        ConfigOption(
                            key="test_option",
                            value="test_value",
                        )
                    ],
                )
            ]
        )
        api_client = api_client_maker(
            path="/api/v2/config",
            response_json=response_config.model_dump(),
            expected_http_status_code=200,
            kind=ClientKind.CLI,
        )
        api_client.configs.list.return_value = response_config
        config_command.lint(
            self.parser.parse_args(["config", "lint"]),
            api_client=api_client,
        )
        calls = [call[0][0] for call in mock_rich_print.call_args_list]
        assert "[red]Found issues in your airflow.cfg:[/red]" in calls[0]
        assert (
            "- [yellow]Removed deprecated `test_option` configuration parameter from `test_section` section.[/yellow]"
            in calls[1]
        )

    @patch("airflowctl.api.client.Credentials.load")
    @patch.dict(os.environ, {"AIRFLOW_CLI_TOKEN": "TEST_TOKEN"})
    @patch.dict(os.environ, {"AIRFLOW_CLI_ENVIRONMENT": "TEST_CONFIG"})
    @patch("rich.print")
    @patch(
        "airflowctl.ctl.commands.config_command.CONFIGS_CHANGES",
        [
            ConfigChange(
                config=ConfigParameter("test_section_1", "test_option"),
                renamed_to=ConfigParameter("test_section_2", "test_option"),
            ),
        ],
    )
    def test_lint_detects_renamed_configs_different_section(self, mock_rich_print, api_client_maker):
        """An option moved to a different section is reported as moved."""
        response_config = Config(
            sections=[
                ConfigSection(
                    name="test_section_1",
                    options=[
                        ConfigOption(
                            key="test_option",
                            value="test_value",
                        )
                    ],
                ),
                ConfigSection(
                    name="test_section_2",
                    options=[
                        ConfigOption(
                            key="test_option",
                            value="test_value",
                        )
                    ],
                ),
            ]
        )
        api_client = api_client_maker(
            path="/api/v2/config",
            response_json=response_config.model_dump(),
            expected_http_status_code=200,
            kind=ClientKind.CLI,
        )
        api_client.configs.list.return_value = response_config
        config_command.lint(
            self.parser.parse_args(["config", "lint"]),
            api_client=api_client,
        )
        calls = [call[0][0] for call in mock_rich_print.call_args_list]
        assert "[red]Found issues in your airflow.cfg:[/red]" in calls[0]
        assert (
            "- [yellow]`test_option` configuration parameter moved from `test_section_1` section to `test_section_2` section as `test_option`.[/yellow]"
            in calls[1]
        )

    @patch("airflowctl.api.client.Credentials.load")
    @patch.dict(os.environ, {"AIRFLOW_CLI_TOKEN": "TEST_TOKEN"})
    @patch.dict(os.environ, {"AIRFLOW_CLI_ENVIRONMENT": "TEST_CONFIG"})
    @patch("rich.print")
    @patch(
        "airflowctl.ctl.commands.config_command.CONFIGS_CHANGES",
        [
            ConfigChange(
                config=ConfigParameter("test_section", "test_option_1"),
                renamed_to=ConfigParameter("test_section", "test_option_2"),
            ),
        ],
    )
    def test_lint_detects_renamed_configs_same_section(self, mock_rich_print, api_client_maker):
        """An option renamed within the same section is reported as renamed."""
        response_config = Config(
            sections=[
                ConfigSection(
                    name="test_section",
                    options=[
                        ConfigOption(
                            key="test_option_1",
                            value="test_value_1",
                        ),
                        ConfigOption(
                            key="test_option_2",
                            value="test_value_2",
                        ),
                    ],
                )
            ]
        )
        api_client = api_client_maker(
            path="/api/v2/config",
            response_json=response_config.model_dump(),
            expected_http_status_code=200,
            kind=ClientKind.CLI,
        )
        api_client.configs.list.return_value = response_config
        config_command.lint(
            self.parser.parse_args(["config", "lint"]),
            api_client=api_client,
        )
        calls = [call[0][0] for call in mock_rich_print.call_args_list]
        assert "[red]Found issues in your airflow.cfg:[/red]" in calls[0]
        assert (
            "- [yellow]`test_option_1` configuration parameter renamed to `test_option_2` in the `test_section` section.[/yellow]"
            in calls[1]
        )

    @patch("airflowctl.api.client.Credentials.load")
    @patch.dict(os.environ, {"AIRFLOW_CLI_TOKEN": "TEST_TOKEN"})
    @patch.dict(os.environ, {"AIRFLOW_CLI_ENVIRONMENT": "TEST_CONFIG"})
    @patch("rich.print")
    @patch(
        "airflowctl.ctl.commands.config_command.CONFIGS_CHANGES",
        [
            ConfigChange(
                config=ConfigParameter("test_section", "test_option"),
                was_removed=False,
                is_invalid_if="0",
                suggestion="Please set the `test_option` configuration parameter to a value greater than 0.",
            ),
        ],
    )
    def test_lint_detects_invalid_values(self, mock_rich_print, api_client_maker):
        """An option holding a known-invalid value is reported with the suggestion."""
        response_config = Config(
            sections=[
                ConfigSection(
                    name="test_section",
                    options=[
                        ConfigOption(
                            key="test_option",
                            value="0",
                        )
                    ],
                )
            ]
        )
        api_client = api_client_maker(
            path="/api/v2/config",
            response_json=response_config.model_dump(),
            expected_http_status_code=200,
            kind=ClientKind.CLI,
        )
        api_client.configs.list.return_value = response_config
        config_command.lint(
            self.parser.parse_args(["config", "lint"]),
            api_client=api_client,
        )
        calls = [call[0][0] for call in mock_rich_print.call_args_list]
        assert "[red]Found issues in your airflow.cfg:[/red]" in calls[0]
        assert (
            "- [yellow]Invalid value `0` set for `test_option` configuration parameter in `test_section` section. "
            "Please set the `test_option` configuration parameter to a value greater than 0.[/yellow]"
            in calls[1]
        )

    @patch("airflowctl.api.client.Credentials.load")
    @patch.dict(os.environ, {"AIRFLOW_CLI_TOKEN": "TEST_TOKEN"})
    @patch.dict(os.environ, {"AIRFLOW_CLI_ENVIRONMENT": "TEST_CONFIG"})
    @patch("rich.print")
    @patch(
        "airflowctl.ctl.commands.config_command.CONFIGS_CHANGES",
        [
            ConfigChange(
                config=ConfigParameter("test_section", "test_option"),
                suggestion="This is a test suggestion.",
            ),
        ],
    )
    def test_lint_detects_configs_with_suggestions(self, mock_rich_print, api_client_maker):
        """A removal message carries along any configured suggestion text."""
        response_config = Config(
            sections=[
                ConfigSection(
                    name="test_section",
                    options=[
                        ConfigOption(
                            key="test_option",
                            value="test_value",
                        )
                    ],
                )
            ]
        )
        api_client = api_client_maker(
            path="/api/v2/config",
            response_json=response_config.model_dump(),
            expected_http_status_code=200,
            kind=ClientKind.CLI,
        )
        api_client.configs.list.return_value = response_config
        config_command.lint(
            self.parser.parse_args(["config", "lint"]),
            api_client=api_client,
        )
        calls = [call[0][0] for call in mock_rich_print.call_args_list]
        assert "[red]Found issues in your airflow.cfg:[/red]" in calls[0]
        assert "This is a test suggestion." in calls[1]

    @patch("airflowctl.api.client.Credentials.load")
    @patch.dict(os.environ, {"AIRFLOW_CLI_TOKEN": "TEST_TOKEN"})
    @patch.dict(os.environ, {"AIRFLOW_CLI_ENVIRONMENT": "TEST_CONFIG"})
    @patch("rich.print")
    def test_config_list_masking_preservation(
        self, mock_rich_print, _mock_credentials, api_client_maker, capsys
    ):
        """
        Verify that sensitive values masked by the API (like '< hidden >') are preserved
        and displayed correctly by the CLI list command.
        """
        response_config = Config(
            sections=[
                ConfigSection(
                    name="core",
                    options=[
                        ConfigOption(key="parallelism", value="32"),
                        ConfigOption(key="fernet_key", value="< hidden >"),
                    ],
                ),
                ConfigSection(
                    name="database",
                    options=[
                        ConfigOption(key="sql_alchemy_conn", value="< hidden >"),
                    ],
                ),
            ]
        )
        api_client = api_client_maker(
            path="/api/v2/config",
            response_json=response_config.model_dump(),
            expected_http_status_code=200,
            kind=ClientKind.CLI,
        )
        args = self.parser.parse_args(["config", "list"])
        args.func(
            args,
            api_client=api_client,
        )
        # Output is printed to stdout by AirflowConsole (using rich)
        captured = capsys.readouterr()
        output_str = captured.out
        # Check output contains masked values
        assert "fernet_key" in output_str
        assert "< hidden >" in output_str
        assert "sql_alchemy_conn" in output_str
| {
"repo_id": "apache/airflow",
"file_path": "airflow-ctl/tests/airflow_ctl/ctl/commands/test_config_command.py",
"license": "Apache License 2.0",
"lines": 378,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/keycloak/src/airflow/providers/keycloak/auth_manager/cli/commands.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import logging
from enum import Enum
from typing import get_args
from keycloak import KeycloakAdmin, KeycloakError
from keycloak.exceptions import KeycloakGetError, KeycloakPostError, raise_error_from_response
from airflow.api_fastapi.auth.managers.base_auth_manager import ResourceMethod
from airflow.api_fastapi.common.types import MenuItem
from airflow.providers.common.compat.sdk import conf
from airflow.providers.keycloak.auth_manager.cli.utils import dry_run_message_wrap, dry_run_preview
from airflow.providers.keycloak.auth_manager.constants import (
CONF_CLIENT_ID_KEY,
CONF_REALM_KEY,
CONF_SECTION_NAME,
CONF_SERVER_URL_KEY,
)
from airflow.providers.keycloak.auth_manager.resources import KeycloakResource
from airflow.utils import cli as cli_utils
from airflow.utils.providers_configuration_loader import providers_configuration_loaded
try:
from airflow.api_fastapi.auth.managers.base_auth_manager import ExtendedResourceMethod
except ImportError:
# Fallback for older Airflow versions where ExtendedResourceMethod doesn't exist
from airflow.api_fastapi.auth.managers.base_auth_manager import (
ResourceMethod as ExtendedResourceMethod, # type: ignore[assignment]
)
log = logging.getLogger(__name__)

# Resources that get a per-team instance named "<resource>:<team>" when the
# multi-team commands are used.
TEAM_SCOPED_RESOURCE_NAMES = {
    KeycloakResource.CONNECTION.value,
    KeycloakResource.DAG.value,
    KeycloakResource.POOL.value,
    KeycloakResource.TEAM.value,
    KeycloakResource.VARIABLE.value,
}
# Resources that exist exactly once, independent of teams.
GLOBAL_SCOPED_RESOURCE_NAMES = {
    KeycloakResource.ASSET.value,
    KeycloakResource.ASSET_ALIAS.value,
    KeycloakResource.CONFIGURATION.value,
}
# Menu items every team role may see.
TEAM_MENU_ITEMS = {
    MenuItem.DAGS,
    MenuItem.ASSETS,
    MenuItem.DOCS,
}
# Menu items team admins may see (superset of TEAM_MENU_ITEMS).
TEAM_ADMIN_MENU_ITEMS = TEAM_MENU_ITEMS | {
    MenuItem.CONNECTIONS,
    MenuItem.POOLS,
    MenuItem.VARIABLES,
    MenuItem.XCOMS,
}
# Keycloak role names used to build team-scoped (group + role) policies.
TEAM_ROLE_NAMES = ("Viewer", "User", "Op", "Admin")
SUPER_ADMIN_ROLE_NAME = "SuperAdmin"
def _get_resource_methods() -> list[str]:
    """
    Return the resource method names as plain strings.

    Handles both Airflow <3.2, where ``ResourceMethod`` is a ``Literal``
    type, and Airflow >=3.2, where it is an ``Enum``.
    """
    is_enum = isinstance(ResourceMethod, type) and issubclass(ResourceMethod, Enum)
    if is_enum:
        return [member.value for member in ResourceMethod]
    return [*get_args(ResourceMethod)]
def _get_extended_resource_methods() -> list[str]:
    """
    Return the extended resource method names as plain strings.

    Handles both Airflow <3.2, where ``ExtendedResourceMethod`` is a
    ``Literal`` type, and Airflow >=3.2, where it is an ``Enum``.
    """
    is_enum = isinstance(ExtendedResourceMethod, type) and issubclass(ExtendedResourceMethod, Enum)
    if is_enum:
        return [member.value for member in ExtendedResourceMethod]
    return [*get_args(ExtendedResourceMethod)]
@cli_utils.action_cli
@providers_configuration_loaded
@dry_run_message_wrap
def create_scopes_command(args):
    """Create Keycloak auth manager scopes in Keycloak."""
    # Authenticate and resolve the UUID of the configured resource-server client.
    client = _get_client(args)
    client_uuid = _get_client_uuid(args)
    _create_scopes(client, client_uuid, _dry_run=args.dry_run)
@cli_utils.action_cli
@providers_configuration_loaded
@dry_run_message_wrap
def create_resources_command(args):
    """Create Keycloak auth manager resources in Keycloak."""
    client = _get_client(args)
    client_uuid = _get_client_uuid(args)
    # --teams is only honored when core.multi_team is enabled.
    teams = _parse_teams(args.teams)
    _ensure_multi_team_enabled(teams=teams, command_name="create-resources")
    _create_resources(client, client_uuid, teams=teams, _dry_run=args.dry_run)
@cli_utils.action_cli
@providers_configuration_loaded
@dry_run_message_wrap
def create_permissions_command(args):
    """Create Keycloak auth manager permissions in Keycloak."""
    client = _get_client(args)
    client_uuid = _get_client_uuid(args)
    teams = _parse_teams(args.teams)
    _ensure_multi_team_enabled(teams=teams, command_name="create-permissions")
    if teams:
        # Role policies are only needed for team-scoped (group+role) authorization.
        for role_name in TEAM_ROLE_NAMES:
            _ensure_role_policy(client, client_uuid, role_name, _dry_run=args.dry_run)
        _ensure_role_policy(client, client_uuid, SUPER_ADMIN_ROLE_NAME, _dry_run=args.dry_run)
    _create_permissions(client, client_uuid, teams=teams, _dry_run=args.dry_run)
@cli_utils.action_cli
@providers_configuration_loaded
@dry_run_message_wrap
def create_all_command(args):
    """Create all Keycloak auth manager entities in Keycloak."""
    client = _get_client(args)
    client_uuid = _get_client_uuid(args)
    teams = _parse_teams(args.teams)
    _ensure_multi_team_enabled(teams=teams, command_name="create-all")
    # Order matters: scopes must exist before resources reference them, and
    # resources/policies must exist before permissions are created.
    _create_scopes(client, client_uuid, _dry_run=args.dry_run)
    _create_resources(client, client_uuid, teams=teams, _dry_run=args.dry_run)
    _create_group_membership_mapper(client, client_uuid, _dry_run=args.dry_run)
    if teams:
        # Role policies are only needed for team-scoped (group+role) authorization.
        for role_name in TEAM_ROLE_NAMES:
            _ensure_role_policy(client, client_uuid, role_name, _dry_run=args.dry_run)
        _ensure_role_policy(client, client_uuid, SUPER_ADMIN_ROLE_NAME, _dry_run=args.dry_run)
    _create_permissions(client, client_uuid, teams=teams, _dry_run=args.dry_run)
def _get_client(args):
    """Build a ``KeycloakAdmin`` client from CLI args and provider configuration.

    Server URL and realm come from the provider config section; the admin
    credentials, admin realm, and admin client id come from the CLI arguments.
    """
    server_url = conf.get(CONF_SECTION_NAME, CONF_SERVER_URL_KEY)
    realm = conf.get(CONF_SECTION_NAME, CONF_REALM_KEY)
    return KeycloakAdmin(
        server_url=server_url,
        username=args.username,
        password=args.password,
        realm_name=realm,
        user_realm_name=args.user_realm,
        client_id=args.client_id,
        verify=True,
    )
def _get_client_uuid(args):
    """Return the Keycloak UUID of the client configured for the auth manager."""
    admin = _get_client(args)
    configured_id = conf.get(CONF_SECTION_NAME, CONF_CLIENT_ID_KEY)
    # Keycloak identifies clients internally by UUID, not by their clientId.
    found = next((entry for entry in admin.get_clients() if entry["clientId"] == configured_id), None)
    if found is None:
        raise ValueError(f"Client with ID='{configured_id}' not found in realm '{admin.realm_name}'")
    return found["id"]
def _create_group_membership_mapper(
    client: KeycloakAdmin, client_uuid: str, *, _dry_run: bool = False
) -> None:
    """Ensure the client has a group-membership protocol mapper named 'groups'.

    The mapper puts the user's group memberships into the access token under
    the 'groups' claim. If any equivalent mapper already exists, do nothing.
    """
    realm = client.connection.realm_name
    url = f"admin/realms/{realm}/clients/{client_uuid}/protocol-mappers/models"
    data_raw = client.connection.raw_get(url)
    mappers = json.loads(data_raw.text)
    for mapper in mappers:
        # Already present under the expected name.
        if mapper.get("name") == "groups":
            return
        # Or present under a different name but emitting the same claim.
        if mapper.get("protocolMapper") == "oidc-group-membership-mapper":
            if mapper.get("config", {}).get("claim.name") == "groups":
                return
    if _dry_run:
        print("Would create protocol mapper 'groups'.")
        return
    payload = {
        "name": "groups",
        "protocol": "openid-connect",
        "protocolMapper": "oidc-group-membership-mapper",
        "consentRequired": False,
        "config": {
            "full.path": "false",
            "id.token.claim": "false",
            "access.token.claim": "true",
            "userinfo.token.claim": "false",
            "claim.name": "groups",
            "jsonType.label": "String",
        },
    }
    # NOTE(review): `max=-1` is forwarded as an extra kwarg to raw_post —
    # presumably to disable python-keycloak pagination; confirm against the
    # python-keycloak connection API.
    data_raw = client.connection.raw_post(url, data=json.dumps(payload), max=-1)
    raise_error_from_response(data_raw, KeycloakPostError, expected_codes=[201])
def _get_scopes_to_create() -> list[dict]:
    """Return one payload per authorization scope the auth manager needs."""
    # All resource methods, plus the MENU and LIST pseudo-scopes.
    names = [*_get_resource_methods(), "MENU", "LIST"]
    return [{"name": name} for name in names]
def _parse_teams(teams: str | None) -> list[str]:
if not teams:
return []
return [team.strip() for team in teams.split(",") if team.strip()]
def _ensure_multi_team_enabled(*, teams: list[str], command_name: str) -> None:
if not teams:
return
if not conf.getboolean("core", "multi_team", fallback=False):
raise SystemExit(f"{command_name} requires core.multi_team=True when --teams is used.")
def _preview_scopes(*args, **kwargs):
    """Print the scopes that ``_create_scopes`` would create (dry-run preview)."""
    print("Scopes to be created:")
    for entry in _get_scopes_to_create():
        print(f" - {entry['name']}")
    print()
@dry_run_preview(_preview_scopes)
def _create_scopes(client: KeycloakAdmin, client_uuid: str, *, _dry_run: bool = False):
    """Create Keycloak scopes.

    The ``dry_run_preview`` decorator diverts to ``_preview_scopes`` when
    ``_dry_run`` is true; otherwise each scope payload is posted to Keycloak.
    """
    scopes = _get_scopes_to_create()
    for scope in scopes:
        client.create_client_authz_scopes(client_id=client_uuid, payload=scope)
    print("Scopes created successfully.")
def _get_resources_to_create(
    client: KeycloakAdmin,
    client_uuid: str,
    teams: list[str],
) -> tuple[list[tuple[str, list[dict]]], list[tuple[str, list[dict]]]]:
    """
    Get the list of resources to be created.

    Returns a tuple of (standard_resources, menu_resources).
    Each is a list of tuples (resource_name, scopes_list).
    """
    all_scopes = client.get_client_authz_scopes(client_uuid)
    # CRUD + LIST scopes are attached to standard resources; menu-item
    # resources only carry the MENU scope.
    scopes = [
        {"id": scope["id"], "name": scope["name"]}
        for scope in all_scopes
        if scope["name"] in ["GET", "POST", "PUT", "DELETE", "LIST"]
    ]
    menu_scopes = [
        {"id": scope["id"], "name": scope["name"]} for scope in all_scopes if scope["name"] == "MENU"
    ]
    standard_resources = [(resource.value, scopes) for resource in KeycloakResource]
    if teams:
        # Add one "<resource>:<team>" entry per team-scoped resource per team,
        # skipping any name already present.
        existing = {resource_name for resource_name, _ in standard_resources}
        for team in teams:
            for resource_name in TEAM_SCOPED_RESOURCE_NAMES:
                name = f"{resource_name}:{team}"
                if name in existing:
                    continue
                standard_resources.append((name, scopes))
                existing.add(name)
    menu_resources = [(item.value, menu_scopes) for item in MenuItem]
    return standard_resources, menu_resources
def _preview_resources(client: KeycloakAdmin, client_uuid: str, teams: list[str]):
    """Print the resources that ``_create_resources`` would create (dry-run preview)."""
    standard_resources, menu_resources = _get_resources_to_create(client, client_uuid, teams=teams)

    def _describe(entries):
        # One line per resource, listing the scope names it carries.
        for name, entry_scopes in entries:
            scope_names = ", ".join(scope["name"] for scope in entry_scopes)
            print(f" - {name} (scopes: {scope_names})")

    print("Resources to be created:")
    if standard_resources:
        _describe(standard_resources)
    print("\nMenu item resources to be created:")
    _describe(menu_resources)
    print()
@dry_run_preview(_preview_resources)
def _create_resources(client: KeycloakAdmin, client_uuid: str, *, teams: list[str], _dry_run: bool = False):
    """Create Keycloak resources (standard resources first, then menu items)."""
    standard_resources, menu_resources = _get_resources_to_create(client, client_uuid, teams=teams)
    # Both groups are created the same way; keep standard-before-menu order.
    for resource_name, resource_scopes in standard_resources + menu_resources:
        client.create_client_authz_resource(
            client_id=client_uuid,
            payload={
                "name": resource_name,
                "scopes": resource_scopes,
            },
            skip_exists=True,
        )
    print("Resources created successfully.")
def _get_permissions_to_create(
    client: KeycloakAdmin,
    client_uuid: str,
    teams: list[str],
    *,
    include_global_admin: bool = True,
) -> list[dict]:
    """
    Get the actual permissions to be created with filtered scopes/resources.

    Returns a list of permission descriptors with actual filtered data:
    each has "name" and "type", plus "scope_ids"/"scope_names" for
    scope-based entries and "resource_ids"/"resource_names" where resources
    are constrained.
    """
    if not teams:
        # Single-tenant layout: classic ReadOnly/Admin plus User/Op
        # resource restrictions.
        perm_configs = [
            {
                "name": "ReadOnly",
                "type": "scope-based",
                "scope_names": ["GET", "MENU", "LIST"],
            },
            {
                "name": "Admin",
                "type": "scope-based",
                "scope_names": _get_extended_resource_methods() + ["LIST"],
            },
            {
                "name": "User",
                "type": "resource-based",
                "resources": [KeycloakResource.DAG.value, KeycloakResource.ASSET.value],
            },
            {
                "name": "Op",
                "type": "resource-based",
                "resources": [
                    KeycloakResource.CONNECTION.value,
                    KeycloakResource.POOL.value,
                    KeycloakResource.VARIABLE.value,
                    KeycloakResource.BACKFILL.value,
                ],
            },
        ]
    else:
        # Multi-team layout: one Admin/ReadOnly/User/Op permission per team,
        # each limited to that team's "<resource>:<team>" resources.
        perm_configs = []
        for team in teams:
            perm_configs.extend(
                [
                    {
                        "name": f"Admin-{team}",
                        "type": "scope-based",
                        "scope_names": _get_extended_resource_methods() + ["LIST"],
                        "resources": [f"{resource}:{team}" for resource in TEAM_SCOPED_RESOURCE_NAMES],
                    },
                    {
                        "name": f"ReadOnly-{team}",
                        "type": "scope-based",
                        "scope_names": ["GET", "LIST"],
                        "resources": [
                            f"{KeycloakResource.DAG.value}:{team}",
                            f"{KeycloakResource.TEAM.value}:{team}",
                        ],
                    },
                    {
                        "name": f"User-{team}",
                        "type": "resource-based",
                        "resources": [
                            f"{KeycloakResource.DAG.value}:{team}",
                        ],
                    },
                    {
                        "name": f"Op-{team}",
                        "type": "resource-based",
                        "resources": [
                            f"{KeycloakResource.CONNECTION.value}:{team}",
                            f"{KeycloakResource.POOL.value}:{team}",
                            f"{KeycloakResource.VARIABLE.value}:{team}",
                        ],
                    },
                ]
            )
        if include_global_admin:
            # The global Admin permission spans every team's scoped resources.
            perm_configs.append(
                {
                    "name": "Admin",
                    "type": "scope-based",
                    "scope_names": _get_extended_resource_methods() + ["LIST"],
                    "resources": [
                        f"{resource}:{team}" for team in teams for resource in TEAM_SCOPED_RESOURCE_NAMES
                    ],
                }
            )
        # LIST access over the base resource names, shared by all roles.
        perm_configs.append(
            {
                "name": "GlobalList",
                "type": "scope-based",
                "scope_names": ["LIST"],
                "resources": list(TEAM_SCOPED_RESOURCE_NAMES) + list(GLOBAL_SCOPED_RESOURCE_NAMES),
            }
        )
    perm_configs.append(
        {
            "name": "ViewAccess",
            "type": "scope-based",
            "scope_names": ["GET"],
            "resources": [KeycloakResource.VIEW.value],
        }
    )
    perm_configs.append(
        {
            "name": "MenuAccess",
            "type": "scope-based",
            "scope_names": ["MENU"],
            "resources": [item.value for item in MenuItem],
        }
    )
    all_scopes = client.get_client_authz_scopes(client_uuid)
    all_resources = client.get_client_authz_resources(client_uuid)
    result = []
    for config in perm_configs:
        perm = {"name": config["name"], "type": config["type"]}
        if config["type"] == "scope-based":
            # Filter to get actual scope IDs that exist and match
            filtered_scope_ids = [s["id"] for s in all_scopes if s["name"] in config["scope_names"]]
            filtered_scope_names = [s["name"] for s in all_scopes if s["name"] in config["scope_names"]]
            perm["scope_ids"] = filtered_scope_ids
            perm["scope_names"] = filtered_scope_names
            if "resources" in config:
                filtered_resource_ids = [r["_id"] for r in all_resources if r["name"] in config["resources"]]
                filtered_resource_names = [
                    r["name"] for r in all_resources if r["name"] in config["resources"]
                ]
                perm["resource_ids"] = filtered_resource_ids
                perm["resource_names"] = filtered_resource_names
        else:  # resource-based
            # Filter to get actual resource IDs that exist and match
            filtered_resource_ids = [r["_id"] for r in all_resources if r["name"] in config["resources"]]
            filtered_resource_names = [r["name"] for r in all_resources if r["name"] in config["resources"]]
            perm["resource_ids"] = filtered_resource_ids
            perm["resource_names"] = filtered_resource_names
        result.append(perm)
    return result
def _preview_permissions(client: KeycloakAdmin, client_uuid: str, teams: list[str]):
    """Print the permissions that ``_create_permissions`` would create (dry-run preview)."""
    print("Permissions to be created:")
    for perm in _get_permissions_to_create(client, client_uuid, teams=teams):
        name = perm["name"]
        if perm["type"] != "scope-based":
            # resource-based permission
            resources = ", ".join(perm["resource_names"])
            print(f" - {name} (type: resource-based, resources: {resources})")
            continue
        scopes = ", ".join(perm["scope_names"])
        resources = ", ".join(perm.get("resource_names", []))
        suffix = f", resources: {resources}" if resources else ""
        print(f" - {name} (type: scope-based, scopes: {scopes}{suffix})")
    print()
@dry_run_preview(_preview_permissions)
def _create_permissions(
    client: KeycloakAdmin,
    client_uuid: str,
    *,
    teams: list[str],
    include_global_admin: bool = True,
    _dry_run: bool = False,
):
    """Create Keycloak permissions.

    Each descriptor from ``_get_permissions_to_create`` is materialized as
    either a scope-based or a resource-based permission.
    """
    permissions = _get_permissions_to_create(
        client, client_uuid, teams=teams, include_global_admin=include_global_admin
    )
    for perm in permissions:
        if perm["type"] == "scope-based":
            _create_scope_based_permission(
                client, client_uuid, perm["name"], perm["scope_ids"], perm.get("resource_ids", [])
            )
        else:  # resource-based
            _create_resource_based_permission(client, client_uuid, perm["name"], perm["resource_ids"])
    print("Permissions created successfully.")
def _create_scope_based_permission(
client: KeycloakAdmin,
client_uuid: str,
name: str,
scope_ids: list[str],
resource_ids: list[str] | None = None,
decision_strategy: str = "UNANIMOUS",
):
payload = {
"name": name,
"type": "scope",
"logic": "POSITIVE",
"decisionStrategy": decision_strategy,
"scopes": scope_ids,
}
if resource_ids:
payload["resources"] = resource_ids
try:
client.create_client_authz_scope_permission(
client_id=client_uuid,
payload=payload,
)
except KeycloakError as e:
if e.response_body:
error = json.loads(e.response_body.decode("utf-8"))
if error.get("error_description") == "Conflicting policy":
print(f"Policy creation skipped. {error.get('error')}")
def _create_resource_based_permission(
    client: KeycloakAdmin, client_uuid: str, name: str, resource_ids: list[str]
):
    """Create a resource-based permission covering ``resource_ids``.

    NOTE(review): the payload declares ``"type": "scope"`` even though this is
    posted through ``create_client_authz_resource_based_permission`` (the
    resource-permission endpoint), where ``"resource"`` would be the usual
    type — confirm this is intentional against the Keycloak authorization
    services API.
    """
    payload = {
        "name": name,
        "type": "scope",
        "logic": "POSITIVE",
        "decisionStrategy": "UNANIMOUS",
        "resources": resource_ids,
    }
    client.create_client_authz_resource_based_permission(
        client_id=client_uuid,
        payload=payload,
        skip_exists=True,
    )
def _ensure_scope_permission(
client: KeycloakAdmin,
client_uuid: str,
*,
name: str,
scope_names: list[str],
resource_names: list[str],
decision_strategy: str = "UNANIMOUS",
_dry_run: bool = False,
) -> None:
if _dry_run:
print(f"Would create scope permission '{name}'.")
return
permissions = client.get_client_authz_permissions(client_uuid)
if any(perm.get("name") == name for perm in permissions):
return
scopes = client.get_client_authz_scopes(client_uuid)
resources = client.get_client_authz_resources(client_uuid)
scope_ids = [s["id"] for s in scopes if s["name"] in scope_names]
resource_ids = [r["_id"] for r in resources if r["name"] in resource_names]
_create_scope_based_permission(
client,
client_uuid,
name,
scope_ids,
resource_ids,
decision_strategy=decision_strategy,
)
def _update_admin_permission_resources(
    client: KeycloakAdmin, client_uuid: str, *, _dry_run: bool = False
) -> None:
    """Re-point the global 'Admin' permission at all team-scoped and global resources."""
    if _dry_run:
        print("Would update permission 'Admin' with team-scoped and global resources.")
        return
    permissions = client.get_client_authz_permissions(client_uuid)
    match = next((perm for perm in permissions if perm.get("name") == "Admin"), None)
    if not match:
        # No global Admin permission exists (e.g. created with
        # include_global_admin=False) — nothing to update.
        return
    permission_id = match["id"]
    scopes = client.get_client_authz_scopes(client_uuid)
    resources = client.get_client_authz_resources(client_uuid)
    scope_names = _get_extended_resource_methods() + ["LIST"]
    scope_ids = [s["id"] for s in scopes if s["name"] in scope_names]
    # Every "<resource>:<team>" instance plus the global resources.
    resource_ids = [
        r["_id"]
        for r in resources
        if any(r["name"].startswith(f"{resource}:") for resource in TEAM_SCOPED_RESOURCE_NAMES)
        or r["name"] in GLOBAL_SCOPED_RESOURCE_NAMES
    ]
    # Keep the policies already attached to the permission.
    policy_ids = _get_permission_policy_ids(client, client_uuid, permission_id)
    payload = {
        "id": permission_id,
        "name": "Admin",
        "type": "scope",
        "logic": "POSITIVE",
        "decisionStrategy": "UNANIMOUS",
        "scopes": scope_ids,
        "resources": resource_ids,
        "policies": policy_ids,
    }
    client.update_client_authz_scope_permission(
        payload=payload, client_id=client_uuid, scope_id=permission_id
    )
@cli_utils.action_cli
@providers_configuration_loaded
@dry_run_message_wrap
def create_team_command(args):
    """Create team resources, permissions, and Keycloak group."""
    client = _get_client(args)
    client_uuid = _get_client_uuid(args)
    team = args.team
    _ensure_multi_team_enabled(teams=[team], command_name="create-team")
    # Resources and permissions first (no global Admin here — it is updated
    # at the end instead), then the group and its policies, then wire the
    # policies into the permissions.
    _create_resources(client, client_uuid, teams=[team], _dry_run=args.dry_run)
    _create_group_membership_mapper(client, client_uuid, _dry_run=args.dry_run)
    _create_permissions(client, client_uuid, teams=[team], include_global_admin=False, _dry_run=args.dry_run)
    _ensure_group(client, team, _dry_run=args.dry_run)
    _ensure_team_policies(client, client_uuid, team, _dry_run=args.dry_run)
    _attach_team_permissions(client, client_uuid, team, _dry_run=args.dry_run)
    _attach_team_menu_permissions(client, client_uuid, team, _dry_run=args.dry_run)
    _attach_superadmin_permissions(client, client_uuid, team, _dry_run=args.dry_run)
    _update_admin_permission_resources(client, client_uuid, _dry_run=args.dry_run)
@cli_utils.action_cli
@providers_configuration_loaded
@dry_run_message_wrap
def add_user_to_team_command(args):
    """Add a user to a Keycloak team group."""
    client = _get_client(args)
    team = args.team
    username = args.target_username
    _ensure_multi_team_enabled(teams=[team], command_name="add-user-to-team")
    # getattr fallback: this command may be invoked without a --dry-run flag.
    dry_run = getattr(args, "dry_run", False)
    _ensure_group(client, team, _dry_run=dry_run)
    _add_user_to_group(client, username=username, team=team, _dry_run=dry_run)
def _ensure_team_policies(
    client: KeycloakAdmin, client_uuid: str, team: str, *, _dry_run: bool = False
) -> None:
    """Create the team's group policy plus one aggregate policy per team role."""
    _ensure_group_policy(
        client,
        client_uuid,
        team,
        _dry_run=_dry_run,
    )
    for role_name in TEAM_ROLE_NAMES:
        # Each aggregate policy requires team-group membership AND the role.
        policy_refs = [
            (_team_group_policy_name(team), "group"),
            (_role_policy_name(role_name), "role"),
        ]
        _ensure_aggregate_policy(
            client,
            client_uuid,
            _team_role_policy_name(team, role_name),
            policy_refs,
            _dry_run=_dry_run,
        )
def _attach_team_permissions(
    client: KeycloakAdmin, client_uuid: str, team: str, *, _dry_run: bool = False
) -> None:
    """Wire the team's role policies into the team-scoped permissions."""
    team_dag_resources = [
        f"{KeycloakResource.DAG.value}:{team}",
    ]
    team_readable_resources = [
        f"{KeycloakResource.DAG.value}:{team}",
        f"{KeycloakResource.TEAM.value}:{team}",
    ]
    team_scoped_resources = [f"{resource}:{team}" for resource in sorted(TEAM_SCOPED_RESOURCE_NAMES)]
    # Viewer gets read access to the team's readable resources.
    _attach_policy_to_scope_permission(
        client,
        client_uuid,
        permission_name=f"ReadOnly-{team}",
        policy_name=_team_role_policy_name(team, "Viewer"),
        scope_names=["GET", "LIST"],
        resource_names=team_readable_resources,
        _dry_run=_dry_run,
    )
    # Every higher role also gets the read-only grant.
    for role_name in ("User", "Op", "Admin"):
        _attach_policy_to_scope_permission(
            client,
            client_uuid,
            permission_name=f"ReadOnly-{team}",
            policy_name=_team_role_policy_name(team, role_name),
            scope_names=["GET", "LIST"],
            resource_names=team_readable_resources,
            _dry_run=_dry_run,
        )
    # Team Admin gets full methods on all team-scoped resources.
    _attach_policy_to_scope_permission(
        client,
        client_uuid,
        permission_name=f"Admin-{team}",
        policy_name=_team_role_policy_name(team, "Admin"),
        scope_names=_get_extended_resource_methods() + ["LIST"],
        resource_names=team_scoped_resources,
        _dry_run=_dry_run,
    )
    _attach_policy_to_resource_permission(
        client,
        client_uuid,
        permission_name=f"User-{team}",
        policy_name=_team_role_policy_name(team, "User"),
        resource_names=team_dag_resources,
        _dry_run=_dry_run,
    )
    _attach_policy_to_resource_permission(
        client,
        client_uuid,
        permission_name=f"Op-{team}",
        policy_name=_team_role_policy_name(team, "Op"),
        resource_names=[
            f"{KeycloakResource.CONNECTION.value}:{team}",
            f"{KeycloakResource.POOL.value}:{team}",
            f"{KeycloakResource.VARIABLE.value}:{team}",
        ],
        _dry_run=_dry_run,
    )
    # Every team role may reach the custom views; AFFIRMATIVE so any single
    # matching policy grants access.
    for role_name in TEAM_ROLE_NAMES:
        _attach_policy_to_scope_permission(
            client,
            client_uuid,
            permission_name="ViewAccess",
            policy_name=_team_role_policy_name(team, role_name),
            scope_names=["GET"],
            resource_names=[KeycloakResource.VIEW.value],
            decision_strategy="AFFIRMATIVE",
            _dry_run=_dry_run,
        )
    _attach_global_list_permissions(client, client_uuid, _dry_run=_dry_run)
def _attach_global_list_permissions(
    client: KeycloakAdmin, client_uuid: str, *, _dry_run: bool = False
) -> None:
    """Grant LIST over every base resource to all role policies (incl. SuperAdmin)."""
    resource_names = [*TEAM_SCOPED_RESOURCE_NAMES, *GLOBAL_SCOPED_RESOURCE_NAMES]
    for role_name in (*TEAM_ROLE_NAMES, SUPER_ADMIN_ROLE_NAME):
        _attach_policy_to_scope_permission(
            client,
            client_uuid,
            permission_name="GlobalList",
            policy_name=_role_policy_name(role_name),
            scope_names=["LIST"],
            resource_names=resource_names,
            decision_strategy="AFFIRMATIVE",
            _dry_run=_dry_run,
        )
def _attach_team_menu_permissions(
    client: KeycloakAdmin, client_uuid: str, team: str, *, _dry_run: bool = False
) -> None:
    """Create and wire per-team menu permissions (regular and admin variants)."""
    menu_permission_name = f"MenuAccess-{team}"
    menu_admin_permission_name = f"MenuAccess-Admin-{team}"
    # sorted() for a deterministic resource order in the payloads.
    team_menu_resources = [item.value for item in sorted(TEAM_MENU_ITEMS, key=lambda item: item.value)]
    team_admin_menu_resources = [
        item.value for item in sorted(TEAM_ADMIN_MENU_ITEMS, key=lambda item: item.value)
    ]
    _ensure_scope_permission(
        client,
        client_uuid,
        name=menu_permission_name,
        scope_names=["MENU"],
        resource_names=team_menu_resources,
        decision_strategy="AFFIRMATIVE",
        _dry_run=_dry_run,
    )
    _ensure_scope_permission(
        client,
        client_uuid,
        name=menu_admin_permission_name,
        scope_names=["MENU"],
        resource_names=team_admin_menu_resources,
        decision_strategy="AFFIRMATIVE",
        _dry_run=_dry_run,
    )
    # Every team role may see the base menu items…
    for role_name in TEAM_ROLE_NAMES:
        _attach_policy_to_scope_permission(
            client,
            client_uuid,
            permission_name=menu_permission_name,
            policy_name=_team_role_policy_name(team, role_name),
            scope_names=["MENU"],
            resource_names=team_menu_resources,
            decision_strategy="AFFIRMATIVE",
            _dry_run=_dry_run,
        )
    # …while only the team Admin role gets the admin menu items.
    _attach_policy_to_scope_permission(
        client,
        client_uuid,
        permission_name=menu_admin_permission_name,
        policy_name=_team_role_policy_name(team, "Admin"),
        scope_names=["MENU"],
        resource_names=team_admin_menu_resources,
        decision_strategy="AFFIRMATIVE",
        _dry_run=_dry_run,
    )
def _attach_superadmin_permissions(
    client: KeycloakAdmin, client_uuid: str, team: str, *, _dry_run: bool = False
) -> None:
    """Give the SuperAdmin role policy full access over the team's resources,
    views, and all menu items."""
    team_scoped_resources = [f"{resource}:{team}" for resource in sorted(TEAM_SCOPED_RESOURCE_NAMES)]
    _attach_policy_to_scope_permission(
        client,
        client_uuid,
        permission_name="Admin",
        policy_name=_role_policy_name(SUPER_ADMIN_ROLE_NAME),
        scope_names=_get_extended_resource_methods() + ["LIST"],
        resource_names=team_scoped_resources,
        _dry_run=_dry_run,
    )
    _attach_policy_to_scope_permission(
        client,
        client_uuid,
        permission_name="ViewAccess",
        policy_name=_role_policy_name(SUPER_ADMIN_ROLE_NAME),
        scope_names=["GET"],
        resource_names=[KeycloakResource.VIEW.value],
        decision_strategy="AFFIRMATIVE",
        _dry_run=_dry_run,
    )
    _attach_policy_to_scope_permission(
        client,
        client_uuid,
        permission_name="MenuAccess",
        policy_name=_role_policy_name(SUPER_ADMIN_ROLE_NAME),
        scope_names=["MENU"],
        resource_names=[item.value for item in sorted(MenuItem, key=lambda item: item.value)],
        decision_strategy="AFFIRMATIVE",
        _dry_run=_dry_run,
    )
def _team_group_name(team: str) -> str:
    # The Keycloak group carries the team name verbatim.
    return team
def _team_group_policy_name(team: str) -> str:
return f"Allow-Team-{team}"
def _role_policy_name(role_name: str) -> str:
return f"Allow-{role_name}"
def _team_role_policy_name(team: str, role_name: str) -> str:
return f"Allow-{role_name}-{team}"
def _ensure_group(client: KeycloakAdmin, team: str, *, _dry_run: bool = False) -> dict | None:
    """Return the Keycloak group for ``team``, creating it when missing.

    Returns ``None`` only in dry-run mode when the group does not exist yet.
    """
    group_name = _team_group_name(team)
    group_path = f"/{group_name}"
    try:
        group = client.get_group_by_path(group_path)
        if group:
            return group
    except KeycloakError:
        # Lookup failed (e.g. group not found) — fall through to creation.
        pass
    if _dry_run:
        print(f"Would create group '{group_name}'.")
        return None
    group_id = client.create_group(payload={"name": group_name}, skip_exists=True)
    if not group_id:
        # skip_exists returned no ID: the group already existed, fetch it.
        group = client.get_group_by_path(group_path)
        return group
    return {"id": group_id, "name": group_name, "path": group_path}
def _ensure_group_policy(
    client: KeycloakAdmin, client_uuid: str, team: str, *, _dry_run: bool = False
) -> None:
    """Create the group policy for ``team`` unless it already exists.

    :raises ValueError: if the team's Keycloak group does not exist.
    """
    group_name = _team_group_name(team)
    group_path = f"/{group_name}"
    policy_name = _team_group_policy_name(team)
    try:
        group = client.get_group_by_path(group_path)
    except KeycloakError:
        group = None
    if not group:
        raise ValueError(f"Group '{group_name}' not found.")
    if _get_policy_id(client, client_uuid, policy_name, policy_type="group"):
        # Policy already exists.
        return
    payload = {
        "name": policy_name,
        "type": "group",
        "logic": "POSITIVE",
        "decisionStrategy": "UNANIMOUS",
        "groups": [{"id": group["id"], "path": group_path}],
    }
    if _dry_run:
        print(f"Would create group policy '{policy_name}'.")
        return
    # python-keycloak has no helper for group policies, so post to the raw
    # policy endpoint.
    url = _policy_url(client, client_uuid, policy_type="group")
    data_raw = client.connection.raw_post(url, data=json.dumps(payload), max=-1, permission=False)
    try:
        raise_error_from_response(data_raw, KeycloakPostError, expected_codes=[201])
    except KeycloakPostError as exc:
        # Tolerate a concurrent creation of the same policy; re-raise anything else.
        if exc.response_body:
            error = json.loads(exc.response_body.decode("utf-8"))
            if "Conflicting policy" in error.get("error_description", ""):
                return
        raise
def _policy_url(client: KeycloakAdmin, client_uuid: str, *, policy_type: str | None = None) -> str:
realm = client.connection.realm_name
if policy_type:
return f"admin/realms/{realm}/clients/{client_uuid}/authz/resource-server/policy/{policy_type}"
return f"admin/realms/{realm}/clients/{client_uuid}/authz/resource-server/policy"
def _get_policy_id(
    client: KeycloakAdmin, client_uuid: str, policy_name: str, *, policy_type: str | None = None
) -> str | None:
    """Return the ID of the policy named ``policy_name``, or ``None`` if absent."""
    response = client.connection.raw_get(_policy_url(client, client_uuid, policy_type=policy_type))
    for policy in json.loads(response.text):
        if policy.get("name") == policy_name:
            return policy.get("id")
    return None
def _get_role_id(client: KeycloakAdmin, client_uuid: str, role_name: str) -> str:
try:
role = client.get_realm_role(role_name)
return role["id"]
except KeycloakGetError:
role = client.get_client_role(client_id=client_uuid, role_name=role_name)
return role["id"]
def _ensure_role_policy(
    client: KeycloakAdmin, client_uuid: str, role_name: str, *, _dry_run: bool = False
) -> None:
    """Create the role policy for ``role_name`` unless it already exists."""
    policy_name = _role_policy_name(role_name)
    if _get_policy_id(client, client_uuid, policy_name, policy_type="role"):
        # Policy already exists.
        return
    role_id = _get_role_id(client, client_uuid, role_name)
    payload = {
        "name": policy_name,
        "type": "role",
        "logic": "POSITIVE",
        "decisionStrategy": "UNANIMOUS",
        "roles": [{"id": role_id}],
    }
    if _dry_run:
        print(f"Would create role policy '{policy_name}'.")
        return
    try:
        client.create_client_authz_role_based_policy(client_id=client_uuid, payload=payload, skip_exists=True)
    except KeycloakError as exc:
        # Tolerate a concurrent creation of the same policy; re-raise anything else.
        if exc.response_body:
            error = json.loads(exc.response_body.decode("utf-8"))
            if "Conflicting policy" in error.get("error_description", ""):
                return
        raise
def _ensure_aggregate_policy(
    client: KeycloakAdmin,
    client_uuid: str,
    policy_name: str,
    policy_refs: list[tuple[str, str | None]],
    *,
    _dry_run: bool = False,
) -> None:
    """Create an aggregate (ALL-of) policy over ``policy_refs`` unless present.

    ``policy_refs`` is a list of (policy_name, policy_type) pairs; each
    referenced policy must already exist.

    :raises ValueError: if any referenced policy cannot be found.
    """
    if _get_policy_id(client, client_uuid, policy_name, policy_type="aggregate"):
        return
    policy_ids = []
    for name, policy_type in policy_refs:
        # Aggregate policy enforces group+role; missing inputs should fail fast.
        policy_id = _get_policy_id(client, client_uuid, name, policy_type=policy_type)
        if not policy_id:
            policy_label = f"{policy_type} policy '{name}'" if policy_type else f"policy '{name}'"
            raise ValueError(f"{policy_label} not found.")
        policy_ids.append(policy_id)
    payload = {
        "name": policy_name,
        "type": "aggregate",
        "logic": "POSITIVE",
        "decisionStrategy": "UNANIMOUS",
        "policies": policy_ids,
    }
    if _dry_run:
        print(f"Would create aggregate policy '{policy_name}'.")
        return
    # python-keycloak has no helper for aggregate policies, so post raw.
    url = _policy_url(client, client_uuid, policy_type="aggregate")
    data_raw = client.connection.raw_post(url, data=json.dumps(payload), max=-1, permission=False)
    try:
        raise_error_from_response(data_raw, KeycloakPostError, expected_codes=[201])
    except KeycloakPostError as exc:
        # Tolerate a concurrent creation of the same policy; re-raise anything else.
        if exc.response_body:
            error = json.loads(exc.response_body.decode("utf-8"))
            if "Conflicting policy" in error.get("error_description", ""):
                return
        raise
def _attach_policy_to_scope_permission(
    client: KeycloakAdmin,
    client_uuid: str,
    *,
    permission_name: str,
    policy_name: str,
    scope_names: list[str],
    resource_names: list[str],
    decision_strategy: str = "UNANIMOUS",
    _dry_run: bool = False,
) -> None:
    """Attach an authorization policy to an existing scope-based permission.

    The permission is rewritten with the given scopes/resources and the union of its
    currently associated policies plus ``policy_name``.

    :raises ValueError: If the permission or the policy cannot be found.
    """
    if _dry_run:
        print(f"Would attach policy '{policy_name}' to permission '{permission_name}'.")
        return
    permission = next(
        (p for p in client.get_client_authz_permissions(client_uuid) if p.get("name") == permission_name),
        None,
    )
    if not permission:
        raise ValueError(f"Permission '{permission_name}' not found.")
    permission_id = permission["id"]
    policy_id = _get_policy_id(client, client_uuid, policy_name)
    if not policy_id:
        raise ValueError(f"Policy '{policy_name}' not found.")
    # Keep the already-attached policies, de-duplicated while preserving order.
    attached = _get_permission_policy_ids(client, client_uuid, permission_id)
    merged_policy_ids = list(dict.fromkeys([*attached, policy_id]))
    scope_ids = [
        scope["id"] for scope in client.get_client_authz_scopes(client_uuid) if scope["name"] in scope_names
    ]
    resource_ids = [
        res["_id"] for res in client.get_client_authz_resources(client_uuid) if res["name"] in resource_names
    ]
    client.update_client_authz_scope_permission(
        payload={
            "id": permission_id,
            "name": permission_name,
            "type": "scope",
            "logic": "POSITIVE",
            "decisionStrategy": decision_strategy,
            "scopes": scope_ids,
            "resources": resource_ids,
            "policies": merged_policy_ids,
        },
        client_id=client_uuid,
        scope_id=permission_id,
    )
def _get_permission_policy_ids(client: KeycloakAdmin, client_uuid: str, permission_id: str) -> list[str]:
realm = client.connection.realm_name
url = (
f"admin/realms/{realm}/clients/{client_uuid}/authz/resource-server/permission/scope/"
f"{permission_id}/associatedPolicies"
)
data_raw = client.connection.raw_get(url)
policies = json.loads(data_raw.text)
return [policy.get("id") for policy in policies if policy.get("id")]
def _attach_policy_to_resource_permission(
    client: KeycloakAdmin,
    client_uuid: str,
    *,
    permission_name: str,
    policy_name: str,
    resource_names: list[str],
    decision_strategy: str = "UNANIMOUS",
    _dry_run: bool = False,
) -> None:
    """Attach an authorization policy to an existing resource-based permission.

    The permission is rewritten with the given resources (scopes cleared) and the union
    of its currently associated policies plus ``policy_name``.

    :raises ValueError: If the permission or the policy cannot be found.
    """
    if _dry_run:
        print(f"Would attach policy '{policy_name}' to permission '{permission_name}'.")
        return
    permission = next(
        (p for p in client.get_client_authz_permissions(client_uuid) if p.get("name") == permission_name),
        None,
    )
    if not permission:
        raise ValueError(f"Permission '{permission_name}' not found.")
    permission_id = permission["id"]
    policy_id = _get_policy_id(client, client_uuid, policy_name)
    if not policy_id:
        raise ValueError(f"Policy '{policy_name}' not found.")
    # Keep the already-attached policies, de-duplicated while preserving order.
    attached = _get_resource_permission_policy_ids(client, client_uuid, permission_id)
    merged_policy_ids = list(dict.fromkeys([*attached, policy_id]))
    resource_ids = [
        res["_id"] for res in client.get_client_authz_resources(client_uuid) if res["name"] in resource_names
    ]
    client.update_client_authz_resource_permission(
        payload={
            "id": permission_id,
            "name": permission_name,
            "type": "resource",
            "logic": "POSITIVE",
            "decisionStrategy": decision_strategy,
            "resources": resource_ids,
            "scopes": [],
            "policies": merged_policy_ids,
        },
        client_id=client_uuid,
        resource_id=permission_id,
    )
def _get_resource_permission_policy_ids(
client: KeycloakAdmin, client_uuid: str, permission_id: str
) -> list[str]:
realm = client.connection.realm_name
url = (
f"admin/realms/{realm}/clients/{client_uuid}/authz/resource-server/permission/resource/"
f"{permission_id}/associatedPolicies"
)
data_raw = client.connection.raw_get(url)
policies = json.loads(data_raw.text)
return [policy.get("id") for policy in policies if policy.get("id")]
def _add_user_to_group(client: KeycloakAdmin, *, username: str, team: str, _dry_run: bool = False) -> None:
    """Add an existing Keycloak user to the group backing a team (idempotent).

    :raises ValueError: If the team group or the user does not exist.
    """
    group_name = _team_group_name(team)
    group = client.get_group_by_path(f"/{group_name}")
    if not group:
        raise ValueError(f"Group '{group_name}' not found.")
    # get_users may return partial matches for the query; keep the exact username only.
    candidates = client.get_users(query={"username": username})
    user = next((candidate for candidate in candidates if candidate.get("username") == username), None)
    if not user:
        raise ValueError(f"User '{username}' not found.")
    member_group_ids = {g.get("id") for g in client.get_user_groups(user_id=user["id"])}
    if group["id"] in member_group_ids:
        print(f"User '{username}' is already in group '{group_name}'.")
        return
    if _dry_run:
        print(f"Would add user '{username}' to group '{group_name}'.")
        return
    client.group_user_add(user_id=user["id"], group_id=group["id"])
| {
"repo_id": "apache/airflow",
"file_path": "providers/keycloak/src/airflow/providers/keycloak/auth_manager/cli/commands.py",
"license": "Apache License 2.0",
"lines": 1040,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/keycloak/src/airflow/providers/keycloak/auth_manager/constants.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Configuration keys
from __future__ import annotations
# Airflow configuration section under which all Keycloak auth manager options live.
CONF_SECTION_NAME = "keycloak_auth_manager"
# Option keys within that section: Keycloak client credentials, realm, and server URL.
CONF_CLIENT_ID_KEY = "client_id"
CONF_CLIENT_SECRET_KEY = "client_secret"
CONF_REALM_KEY = "realm"
CONF_SERVER_URL_KEY = "server_url"
# Tuning options (pool size / retry count, per the option names) for requests to Keycloak.
CONF_REQUESTS_POOL_SIZE_KEY = "requests_pool_size"
CONF_REQUESTS_RETRIES_KEY = "requests_retries"
| {
"repo_id": "apache/airflow",
"file_path": "providers/keycloak/src/airflow/providers/keycloak/auth_manager/constants.py",
"license": "Apache License 2.0",
"lines": 25,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/keycloak/tests/unit/keycloak/auth_manager/cli/test_commands.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import importlib
from unittest.mock import Mock, call, patch
import pytest
from airflow.api_fastapi.common.types import MenuItem
from airflow.cli import cli_parser
from airflow.providers.keycloak.auth_manager.cli.commands import (
TEAM_SCOPED_RESOURCE_NAMES,
_get_extended_resource_methods,
_get_resource_methods,
add_user_to_team_command,
create_all_command,
create_permissions_command,
create_resources_command,
create_scopes_command,
create_team_command,
)
from airflow.providers.keycloak.auth_manager.resources import KeycloakResource
from tests_common.test_utils.config import conf_vars
from tests_common.test_utils.version_compat import AIRFLOW_V_3_2_PLUS
@pytest.mark.db_test
class TestCommands:
    """Unit tests for the Keycloak auth manager CLI commands.

    The Keycloak admin client is always mocked; tests assert on the payloads the
    commands send to it rather than on any real Keycloak state.
    """
    @pytest.fixture(autouse=True)
    def setup_parser(self):
        """Build the Airflow CLI parser with the Keycloak auth manager commands registered."""
        if AIRFLOW_V_3_2_PLUS:
            importlib.reload(cli_parser)
            self.arg_parser = cli_parser.get_parser()
        else:
            # NOTE(review): pre-3.2 these commands only appear when the Keycloak auth
            # manager is configured before the parser module is reloaded.
            with conf_vars(
                {
                    (
                        "core",
                        "auth_manager",
                    ): "airflow.providers.keycloak.auth_manager.keycloak_auth_manager.KeycloakAuthManager",
                }
            ):
                importlib.reload(cli_parser)
                self.arg_parser = cli_parser.get_parser()
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._get_client")
    def test_create_scopes(self, mock_get_client):
        """create-scopes creates one authz scope per resource method on the configured client."""
        client = Mock()
        mock_get_client.return_value = client
        client.get_clients.return_value = [
            {"id": "dummy-id", "clientId": "dummy-client"},
            {"id": "test-id", "clientId": "test_client_id"},
        ]
        params = [
            "keycloak-auth-manager",
            "create-scopes",
            "--username",
            "test",
            "--password",
            "test",
        ]
        with conf_vars(
            {
                ("keycloak_auth_manager", "client_id"): "test_client_id",
            }
        ):
            create_scopes_command(self.arg_parser.parse_args(params))
        client.get_clients.assert_called_once_with()
        scopes = [{"name": method} for method in _get_resource_methods()]
        calls = [call(client_id="test-id", payload=scope) for scope in scopes]
        client.create_client_authz_scopes.assert_has_calls(calls)
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._get_client")
    def test_create_scopes_with_client_not_found(self, mock_get_client):
        """create-scopes raises a clear error when the configured client ID is not in the realm."""
        client = Mock()
        mock_get_client.return_value = client
        client.get_clients.return_value = [
            {"id": "dummy-id", "clientId": "dummy-client"},
        ]
        params = [
            "keycloak-auth-manager",
            "create-scopes",
            "--username",
            "test",
            "--password",
            "test",
        ]
        with conf_vars(
            {
                ("keycloak_auth_manager", "client_id"): "test_client_id",
            }
        ):
            with pytest.raises(ValueError, match="Client with ID='test_client_id' not found in realm"):
                create_scopes_command(self.arg_parser.parse_args(params))
        client.get_clients.assert_called_once_with()
        client.create_client_authz_scopes.assert_not_called()
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._get_client")
    def test_create_resources(self, mock_get_client):
        """create-resources creates one resource per KeycloakResource and per MenuItem."""
        client = Mock()
        mock_get_client.return_value = client
        scopes = [{"id": "1", "name": "GET"}, {"id": "2", "name": "MENU"}]
        client.get_clients.return_value = [
            {"id": "dummy-id", "clientId": "dummy-client"},
            {"id": "test-id", "clientId": "test_client_id"},
        ]
        client.get_client_authz_scopes.return_value = scopes
        params = [
            "keycloak-auth-manager",
            "create-resources",
            "--username",
            "test",
            "--password",
            "test",
        ]
        with conf_vars(
            {
                ("keycloak_auth_manager", "client_id"): "test_client_id",
                ("core", "multi_team"): "True",
            }
        ):
            create_resources_command(self.arg_parser.parse_args(params))
        client.get_clients.assert_called_once_with()
        client.get_client_authz_scopes.assert_called_once_with("test-id")
        calls = []
        for resource in KeycloakResource:
            calls.append(
                call(
                    client_id="test-id",
                    payload={
                        "name": resource.value,
                        "scopes": [{"id": "1", "name": "GET"}],
                    },
                    skip_exists=True,
                )
            )
        client.create_client_authz_resource.assert_has_calls(calls)
        calls = []
        for item in MenuItem:
            calls.append(
                call(
                    client_id="test-id",
                    payload={
                        "name": item.value,
                        "scopes": [{"id": "2", "name": "MENU"}],
                    },
                    skip_exists=True,
                )
            )
        client.create_client_authz_resource.assert_has_calls(calls)
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._get_client")
    def test_create_resources_with_teams(self, mock_get_client):
        """--teams additionally creates one '<Resource>:<team>' resource per team-scoped name."""
        client = Mock()
        mock_get_client.return_value = client
        scopes = [{"id": "1", "name": "GET"}, {"id": "2", "name": "MENU"}]
        client.get_clients.return_value = [
            {"id": "dummy-id", "clientId": "dummy-client"},
            {"id": "test-id", "clientId": "test_client_id"},
        ]
        client.get_client_authz_scopes.return_value = scopes
        params = [
            "keycloak-auth-manager",
            "create-resources",
            "--username",
            "test",
            "--password",
            "test",
            "--teams",
            "team-a",
        ]
        with conf_vars(
            {
                ("keycloak_auth_manager", "client_id"): "test_client_id",
                ("core", "multi_team"): "True",
            }
        ):
            create_resources_command(self.arg_parser.parse_args(params))
        expected_team_resources = {f"{name}:team-a" for name in TEAM_SCOPED_RESOURCE_NAMES}
        created_resource_names = {
            call.kwargs["payload"]["name"]
            for call in client.create_client_authz_resource.mock_calls
            if "payload" in call.kwargs
        }
        assert expected_team_resources.issubset(created_resource_names)
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._get_client")
    def test_create_permissions(
        self,
        mock_get_client,
    ):
        """create-permissions creates the scope-based and resource-based permissions."""
        client = Mock()
        mock_get_client.return_value = client
        scopes = [{"id": "1", "name": "GET"}, {"id": "2", "name": "MENU"}, {"id": "3", "name": "LIST"}]
        resources = [
            {"_id": "r1", "name": "Dag"},
            {"_id": "r2", "name": "Asset"},
            {"_id": "r3", "name": "Connection"},
        ]
        client.get_clients.return_value = [
            {"id": "dummy-id", "clientId": "dummy-client"},
            {"id": "test-id", "clientId": "test_client_id"},
        ]
        client.get_client_authz_scopes.return_value = scopes
        client.get_client_authz_resources.return_value = resources
        client.connection = Mock()
        client.connection.raw_get = Mock(return_value=Mock(text="[]"))
        client.connection.realm_name = "test-realm"
        client.get_realm_role.return_value = {"id": "role-id"}
        params = [
            "keycloak-auth-manager",
            "create-permissions",
            "--username",
            "test",
            "--password",
            "test",
        ]
        with conf_vars(
            {
                ("keycloak_auth_manager", "client_id"): "test_client_id",
                ("core", "multi_team"): "True",
            }
        ):
            create_permissions_command(self.arg_parser.parse_args(params))
        client.get_clients.assert_called_once_with()
        client.get_client_authz_scopes.assert_called_once_with("test-id")
        client.get_client_authz_resources.assert_called_once_with("test-id")
        # Verify scope-based permissions are created with correct payloads
        scope_calls = [
            call(
                client_id="test-id",
                payload={
                    "name": "ReadOnly",
                    "type": "scope",
                    "logic": "POSITIVE",
                    "decisionStrategy": "UNANIMOUS",
                    "scopes": ["1", "2", "3"],  # GET, MENU, LIST
                },
            ),
            call(
                client_id="test-id",
                payload={
                    "name": "Admin",
                    "type": "scope",
                    "logic": "POSITIVE",
                    "decisionStrategy": "UNANIMOUS",
                    "scopes": ["1", "2", "3"],  # GET, MENU, LIST (only these exist in mock)
                },
            ),
        ]
        client.create_client_authz_scope_permission.assert_has_calls(scope_calls, any_order=True)
        # Verify resource-based permissions are created with correct payloads
        resource_calls = [
            call(
                client_id="test-id",
                payload={
                    "name": "User",
                    "type": "scope",
                    "logic": "POSITIVE",
                    "decisionStrategy": "UNANIMOUS",
                    "resources": ["r1", "r2"],  # Dag, Asset
                },
                skip_exists=True,
            ),
            call(
                client_id="test-id",
                payload={
                    "name": "Op",
                    "type": "scope",
                    "logic": "POSITIVE",
                    "decisionStrategy": "UNANIMOUS",
                    "resources": ["r3"],  # Connection
                },
                skip_exists=True,
            ),
        ]
        client.create_client_authz_resource_based_permission.assert_has_calls(resource_calls, any_order=True)
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._get_client")
    def test_create_permissions_with_teams(self, mock_get_client):
        """--teams adds per-team permissions plus global List/View permissions."""
        client = Mock()
        mock_get_client.return_value = client
        scopes = [
            {"id": "1", "name": "GET"},
            {"id": "2", "name": "MENU"},
            {"id": "3", "name": "LIST"},
        ]
        resources = [
            {"_id": "r1", "name": "Dag:team-a"},
            {"_id": "r2", "name": "Connection:team-a"},
            {"_id": "r3", "name": "Pool:team-a"},
            {"_id": "r4", "name": "Variable:team-a"},
            {"_id": "r5", "name": "View"},
            {"_id": "r6", "name": "Dag"},
            {"_id": "r7", "name": "Connection"},
            {"_id": "r8", "name": "Pool"},
            {"_id": "r9", "name": "Variable"},
            {"_id": "r10", "name": "Asset"},
            {"_id": "r11", "name": "AssetAlias"},
            {"_id": "r12", "name": "Configuration"},
        ]
        client.get_clients.return_value = [
            {"id": "dummy-id", "clientId": "dummy-client"},
            {"id": "test-id", "clientId": "test_client_id"},
        ]
        client.get_client_authz_scopes.return_value = scopes
        client.get_client_authz_resources.return_value = resources
        client.connection = Mock()
        client.connection.raw_get = Mock(return_value=Mock(text="[]"))
        client.connection.realm_name = "test-realm"
        client.get_realm_role.return_value = {"id": "role-id"}
        params = [
            "keycloak-auth-manager",
            "create-permissions",
            "--username",
            "test",
            "--password",
            "test",
            "--teams",
            "team-a",
        ]
        with conf_vars(
            {
                ("keycloak_auth_manager", "client_id"): "test_client_id",
                ("core", "multi_team"): "True",
            }
        ):
            create_permissions_command(self.arg_parser.parse_args(params))
        client.create_client_authz_scope_permission.assert_any_call(
            client_id="test-id",
            payload={
                "name": "Admin-team-a",
                "type": "scope",
                "logic": "POSITIVE",
                "decisionStrategy": "UNANIMOUS",
                "scopes": ["1", "2", "3"],
                "resources": ["r1", "r2", "r3", "r4"],
            },
        )
        client.create_client_authz_scope_permission.assert_any_call(
            client_id="test-id",
            payload={
                "name": "GlobalList",
                "type": "scope",
                "logic": "POSITIVE",
                "decisionStrategy": "UNANIMOUS",
                "scopes": ["3"],
                "resources": ["r6", "r7", "r8", "r9", "r10", "r11", "r12"],
            },
        )
        client.create_client_authz_scope_permission.assert_any_call(
            client_id="test-id",
            payload={
                "name": "ViewAccess",
                "type": "scope",
                "logic": "POSITIVE",
                "decisionStrategy": "UNANIMOUS",
                "scopes": ["1"],
                "resources": ["r5"],
            },
        )
        client.create_client_authz_scope_permission.assert_any_call(
            client_id="test-id",
            payload={
                "name": "ReadOnly-team-a",
                "type": "scope",
                "logic": "POSITIVE",
                "decisionStrategy": "UNANIMOUS",
                "scopes": ["1", "3"],
                "resources": ["r1"],
            },
        )
        client.create_client_authz_resource_based_permission.assert_any_call(
            client_id="test-id",
            payload={
                "name": "User-team-a",
                "type": "scope",
                "logic": "POSITIVE",
                "decisionStrategy": "UNANIMOUS",
                "resources": ["r1"],
            },
            skip_exists=True,
        )
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._update_admin_permission_resources")
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._ensure_scope_permission")
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._attach_policy_to_resource_permission")
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._attach_policy_to_scope_permission")
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._ensure_aggregate_policy")
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._ensure_group_policy")
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._create_permissions")
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._create_resources")
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._ensure_group")
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._get_client")
    def test_create_team_command(
        self,
        mock_get_client,
        mock_ensure_group,
        mock_create_resources,
        mock_create_permissions,
        mock_ensure_group_policy,
        mock_ensure_aggregate_policy,
        mock_attach_policy,
        mock_attach_resource_policy,
        mock_ensure_scope_permission,
        mock_update_admin_permission_resources,
    ):
        """create-team wires group, resources, permissions and policy attachments for the team."""
        client = Mock()
        mock_get_client.return_value = client
        client.get_clients.return_value = [
            {"id": "test-id", "clientId": "test_client_id"},
        ]
        client.connection = Mock()
        client.connection.raw_get = Mock(return_value=Mock(text="[]"))
        client.connection.raw_post = Mock(
            return_value=Mock(status_code=201, json=Mock(return_value={"message": ""}), text="{}")
        )
        client.connection.realm_name = "test-realm"
        params = [
            "keycloak-auth-manager",
            "create-team",
            "team-a",
            "--username",
            "test",
            "--password",
            "test",
        ]
        with conf_vars(
            {
                ("keycloak_auth_manager", "client_id"): "test_client_id",
                ("core", "multi_team"): "True",
            }
        ):
            create_team_command(self.arg_parser.parse_args(params))
        mock_ensure_group.assert_called_once_with(client, "team-a", _dry_run=False)
        mock_create_resources.assert_called_once_with(client, "test-id", teams=["team-a"], _dry_run=False)
        mock_create_permissions.assert_called_once_with(
            client, "test-id", teams=["team-a"], include_global_admin=False, _dry_run=False
        )
        mock_update_admin_permission_resources.assert_called_once_with(client, "test-id", _dry_run=False)
        mock_ensure_group_policy.assert_called_once_with(client, "test-id", "team-a", _dry_run=False)
        # One aggregate policy per team role: Viewer, User, Op, Admin.
        assert mock_ensure_aggregate_policy.call_count == 4
        mock_attach_policy.assert_any_call(
            client,
            "test-id",
            permission_name="ReadOnly-team-a",
            policy_name="Allow-Viewer-team-a",
            scope_names=["GET", "LIST"],
            resource_names=["Dag:team-a", "Team:team-a"],
            _dry_run=False,
        )
        mock_attach_policy.assert_any_call(
            client,
            "test-id",
            permission_name="Admin-team-a",
            policy_name="Allow-Admin-team-a",
            scope_names=_get_extended_resource_methods() + ["LIST"],
            resource_names=[
                "Connection:team-a",
                "Dag:team-a",
                "Pool:team-a",
                "Team:team-a",
                "Variable:team-a",
            ],
            _dry_run=False,
        )
        mock_attach_resource_policy.assert_any_call(
            client,
            "test-id",
            permission_name="User-team-a",
            policy_name="Allow-User-team-a",
            resource_names=["Dag:team-a"],
            _dry_run=False,
        )
        mock_attach_resource_policy.assert_any_call(
            client,
            "test-id",
            permission_name="Op-team-a",
            policy_name="Allow-Op-team-a",
            resource_names=[
                "Connection:team-a",
                "Pool:team-a",
                "Variable:team-a",
            ],
            _dry_run=False,
        )
        mock_attach_policy.assert_any_call(
            client,
            "test-id",
            permission_name="MenuAccess-team-a",
            policy_name="Allow-Viewer-team-a",
            scope_names=["MENU"],
            resource_names=["Assets", "Dags", "Docs"],
            decision_strategy="AFFIRMATIVE",
            _dry_run=False,
        )
        mock_attach_policy.assert_any_call(
            client,
            "test-id",
            permission_name="MenuAccess-Admin-team-a",
            policy_name="Allow-Admin-team-a",
            scope_names=["MENU"],
            resource_names=["Assets", "Connections", "Dags", "Docs", "Pools", "Variables", "XComs"],
            decision_strategy="AFFIRMATIVE",
            _dry_run=False,
        )
        mock_attach_policy.assert_any_call(
            client,
            "test-id",
            permission_name="Admin",
            policy_name="Allow-SuperAdmin",
            scope_names=_get_extended_resource_methods() + ["LIST"],
            resource_names=[
                "Connection:team-a",
                "Dag:team-a",
                "Pool:team-a",
                "Team:team-a",
                "Variable:team-a",
            ],
            _dry_run=False,
        )
        mock_attach_policy.assert_any_call(
            client,
            "test-id",
            permission_name="MenuAccess",
            policy_name="Allow-SuperAdmin",
            scope_names=["MENU"],
            resource_names=[item.value for item in sorted(MenuItem, key=lambda item: item.value)],
            decision_strategy="AFFIRMATIVE",
            _dry_run=False,
        )
        mock_attach_policy.assert_any_call(
            client,
            "test-id",
            permission_name="ViewAccess",
            policy_name="Allow-Viewer-team-a",
            scope_names=["GET"],
            resource_names=["View"],
            decision_strategy="AFFIRMATIVE",
            _dry_run=False,
        )
        mock_ensure_scope_permission.assert_any_call(
            client,
            "test-id",
            name="MenuAccess-team-a",
            scope_names=["MENU"],
            resource_names=["Assets", "Dags", "Docs"],
            decision_strategy="AFFIRMATIVE",
            _dry_run=False,
        )
        mock_ensure_scope_permission.assert_any_call(
            client,
            "test-id",
            name="MenuAccess-Admin-team-a",
            scope_names=["MENU"],
            resource_names=["Assets", "Connections", "Dags", "Docs", "Pools", "Variables", "XComs"],
            decision_strategy="AFFIRMATIVE",
            _dry_run=False,
        )
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._add_user_to_group")
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._ensure_group")
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._get_client")
    def test_add_user_to_team_command(self, mock_get_client, mock_ensure_group, mock_add_user):
        """add-user-to-team ensures the team group exists and adds the user to it."""
        client = Mock()
        mock_get_client.return_value = client
        client.get_clients.return_value = [
            {"id": "test-id", "clientId": "test_client_id"},
        ]
        params = [
            "keycloak-auth-manager",
            "add-user-to-team",
            "user-a",
            "team-a",
            "--username",
            "admin",
            "--password",
            "admin",
        ]
        with conf_vars(
            {
                ("keycloak_auth_manager", "client_id"): "test_client_id",
                ("core", "multi_team"): "True",
            }
        ):
            add_user_to_team_command(self.arg_parser.parse_args(params))
        mock_ensure_group.assert_called_once_with(client, "team-a", _dry_run=False)
        mock_add_user.assert_called_once_with(client, username="user-a", team="team-a", _dry_run=False)
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._create_permissions")
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._create_resources")
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._create_scopes")
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._get_client")
    def test_create_all(
        self,
        mock_get_client,
        mock_create_scopes,
        mock_create_resources,
        mock_create_permissions,
    ):
        """create-all runs scope, resource and permission creation for the configured client."""
        client = Mock()
        mock_get_client.return_value = client
        scopes = [{"id": "1", "name": "GET"}]
        client.get_clients.return_value = [
            {"id": "dummy-id", "clientId": "dummy-client"},
            {"id": "test-id", "clientId": "test_client_id"},
        ]
        client.get_client_authz_scopes.return_value = scopes
        client.connection = Mock()
        client.connection.raw_get = Mock(return_value=Mock(text="[]"))
        client.connection.raw_post = Mock(
            return_value=Mock(status_code=201, json=Mock(return_value={"message": ""}), text="{}")
        )
        client.connection.realm_name = "test-realm"
        params = [
            "keycloak-auth-manager",
            "create-all",
            "--username",
            "test",
            "--password",
            "test",
        ]
        with conf_vars(
            {
                ("keycloak_auth_manager", "client_id"): "test_client_id",
            }
        ):
            create_all_command(self.arg_parser.parse_args(params))
        client.get_clients.assert_called_once_with()
        mock_create_scopes.assert_called_once_with(client, "test-id", _dry_run=False)
        mock_create_resources.assert_called_once_with(client, "test-id", teams=[], _dry_run=False)
        mock_create_permissions.assert_called_once_with(client, "test-id", teams=[], _dry_run=False)
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._get_client")
    def test_create_scopes_dry_run(self, mock_get_client):
        """--dry-run prevents create-scopes from issuing any write call."""
        client = Mock()
        mock_get_client.return_value = client
        client.get_clients.return_value = [
            {"id": "dummy-id", "clientId": "dummy-client"},
            {"id": "test-id", "clientId": "test_client_id"},
        ]
        params = [
            "keycloak-auth-manager",
            "create-scopes",
            "--username",
            "test",
            "--password",
            "test",
            "--dry-run",
        ]
        with conf_vars(
            {
                ("keycloak_auth_manager", "client_id"): "test_client_id",
            }
        ):
            create_scopes_command(self.arg_parser.parse_args(params))
        client.get_clients.assert_called_once_with()
        # In dry-run mode, no scopes should be created
        client.create_client_authz_scopes.assert_not_called()
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._get_client")
    def test_create_resources_dry_run(self, mock_get_client):
        """--dry-run prevents create-resources from issuing any write call."""
        client = Mock()
        mock_get_client.return_value = client
        scopes = [{"id": "1", "name": "GET"}, {"id": "2", "name": "MENU"}]
        client.get_clients.return_value = [
            {"id": "dummy-id", "clientId": "dummy-client"},
            {"id": "test-id", "clientId": "test_client_id"},
        ]
        client.get_client_authz_scopes.return_value = scopes
        params = [
            "keycloak-auth-manager",
            "create-resources",
            "--username",
            "test",
            "--password",
            "test",
            "--dry-run",
        ]
        with conf_vars(
            {
                ("keycloak_auth_manager", "client_id"): "test_client_id",
            }
        ):
            create_resources_command(self.arg_parser.parse_args(params))
        client.get_clients.assert_called_once_with()
        client.get_client_authz_scopes.assert_called_once_with("test-id")
        # In dry-run mode, no resources should be created
        client.create_client_authz_resource.assert_not_called()
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._get_client")
    def test_create_permissions_dry_run(self, mock_get_client):
        """--dry-run prevents create-permissions from issuing any write call."""
        client = Mock()
        mock_get_client.return_value = client
        scopes = [{"id": "1", "name": "GET"}, {"id": "2", "name": "MENU"}, {"id": "3", "name": "LIST"}]
        resources = [
            {"_id": "r1", "name": "Dag"},
            {"_id": "r2", "name": "Asset"},
        ]
        client.get_clients.return_value = [
            {"id": "dummy-id", "clientId": "dummy-client"},
            {"id": "test-id", "clientId": "test_client_id"},
        ]
        client.get_client_authz_scopes.return_value = scopes
        client.get_client_authz_resources.return_value = resources
        params = [
            "keycloak-auth-manager",
            "create-permissions",
            "--username",
            "test",
            "--password",
            "test",
            "--dry-run",
        ]
        with conf_vars(
            {
                ("keycloak_auth_manager", "client_id"): "test_client_id",
            }
        ):
            create_permissions_command(self.arg_parser.parse_args(params))
        client.get_clients.assert_called_once_with()
        client.get_client_authz_scopes.assert_called_once_with("test-id")
        client.get_client_authz_resources.assert_called_once_with("test-id")
        # In dry-run mode, no permissions should be created
        client.create_client_authz_scope_permission.assert_not_called()
        client.create_client_authz_resource_based_permission.assert_not_called()
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._create_permissions")
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._create_resources")
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._create_scopes")
    @patch("airflow.providers.keycloak.auth_manager.cli.commands._get_client")
    def test_create_all_dry_run(
        self,
        mock_get_client,
        mock_create_scopes,
        mock_create_resources,
        mock_create_permissions,
    ):
        """create-all --dry-run forwards _dry_run=True to every helper."""
        client = Mock()
        mock_get_client.return_value = client
        client.get_clients.return_value = [
            {"id": "dummy-id", "clientId": "dummy-client"},
            {"id": "test-id", "clientId": "test_client_id"},
        ]
        client.connection = Mock()
        client.connection.raw_get = Mock(return_value=Mock(text="[]"))
        client.connection.raw_post = Mock(
            return_value=Mock(status_code=201, json=Mock(return_value={"message": ""}), text="{}")
        )
        client.connection.realm_name = "test-realm"
        params = [
            "keycloak-auth-manager",
            "create-all",
            "--username",
            "test",
            "--password",
            "test",
            "--dry-run",
        ]
        with conf_vars(
            {
                ("keycloak_auth_manager", "client_id"): "test_client_id",
            }
        ):
            create_all_command(self.arg_parser.parse_args(params))
        client.get_clients.assert_called_once_with()
        # In dry-run mode, all helper functions should be called with dry_run=True
        mock_create_scopes.assert_called_once_with(client, "test-id", _dry_run=True)
        mock_create_resources.assert_called_once_with(client, "test-id", teams=[], _dry_run=True)
        mock_create_permissions.assert_called_once_with(client, "test-id", teams=[], _dry_run=True)
| {
"repo_id": "apache/airflow",
"file_path": "providers/keycloak/tests/unit/keycloak/auth_manager/cli/test_commands.py",
"license": "Apache License 2.0",
"lines": 767,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/keycloak/tests/unit/keycloak/auth_manager/test_constants.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.providers.keycloak.auth_manager.constants import (
CONF_CLIENT_ID_KEY,
CONF_CLIENT_SECRET_KEY,
CONF_REALM_KEY,
CONF_SECTION_NAME,
CONF_SERVER_URL_KEY,
)
class TestKeycloakAuthManagerConstants:
    """Pin the public configuration key names so accidental renames are caught."""

    def test_conf_section_name(self):
        expected = "keycloak_auth_manager"
        assert CONF_SECTION_NAME == expected

    def test_conf_client_id_key(self):
        expected = "client_id"
        assert CONF_CLIENT_ID_KEY == expected

    def test_conf_client_secret_key(self):
        expected = "client_secret"
        assert CONF_CLIENT_SECRET_KEY == expected

    def test_conf_realm_key(self):
        expected = "realm"
        assert CONF_REALM_KEY == expected

    def test_conf_server_url_key(self):
        expected = "server_url"
        assert CONF_SERVER_URL_KEY == expected
| {
"repo_id": "apache/airflow",
"file_path": "providers/keycloak/tests/unit/keycloak/auth_manager/test_constants.py",
"license": "Apache License 2.0",
"lines": 35,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:scripts/in_container/run_check_imports_in_providers.py | #!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import os.path
import subprocess
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.resolve()))
from in_container_utils import console, get_provider_base_dir_from_path, get_provider_id_from_path
def check_imports() -> bool:
    """Check that provider files only import ``version_compat`` from their own provider.

    Builds the module dependency graph with ``ruff analyze graph``, then, for every
    provider file passed on the command line, verifies that any imported
    ``version_compat.py`` lives in a directory that is an ancestor of the importing
    file (i.e. the provider's own copy).

    :return: True when at least one invalid import was found, False otherwise.
    """
    errors_found = False
    cmd = [
        "ruff",
        "analyze",
        "graph",
        "--python",
        sys.executable,
    ]
    console.print("Cmd", cmd)
    # ruff prints the whole dependency graph as JSON: {importing_file: [imported_files]}
    import_tree_str = subprocess.check_output(cmd)
    import_tree = json.loads(import_tree_str)
    # Print the full graph so CI logs show what ruff discovered (useful when
    # debugging strange dependencies and checking whether ruff got them right).
    console.print("Dependencies discovered by ruff:")
    console.print(import_tree)
    for importing_file in sys.argv[1:]:
        if not importing_file.startswith("providers/"):
            console.print(f"[yellow]Skipping non-provider file: {importing_file}")
            continue
        importing_file_path = Path(importing_file)
        console.print(importing_file_path)
        # Files with no imports (or not seen by ruff) have no entry in the graph.
        imported_files_array = import_tree.get(importing_file, None)
        if imported_files_array is None:
            continue
        imported_file_paths = [Path(file) for file in imported_files_array]
        for imported_file_path in imported_file_paths:
            if imported_file_path.name == "version_compat.py":
                # Note - this will check also imports from other places - not only from providers
                # Which means that import from tests_common, and airflow will be also banned
                # The import is valid only when version_compat.py's directory is an
                # ancestor of the importing file - then their common path equals that directory.
                common_path = os.path.commonpath([importing_file, imported_file_path.as_posix()])
                imported_file_parent_dir = imported_file_path.parent.as_posix()
                if common_path != imported_file_parent_dir:
                    provider_id = get_provider_id_from_path(importing_file_path)
                    provider_dir = get_provider_base_dir_from_path(importing_file_path)
                    console.print(
                        f"\n[red]Invalid import of `version_compat` module in provider {provider_id} in:\n"
                    )
                    console.print(f"[yellow]{importing_file_path}")
                    console.print(
                        f"\n[bright_blue]The AIRFLOW_V_X_Y_PLUS import should be "
                        f"from the {provider_id} provider root directory ({provider_dir}), but it is currently from:"
                    )
                    console.print(f"\n[yellow]{imported_file_path}\n")
                    console.print(
                        f"1. Copy `version_compat`.py to `{provider_dir}/version_compat.py` if not there.\n"
                        f"2. Import the version constants you need as:\n\n"
                        f"[yellow]from airflow.providers.{provider_id}.version_compat import ...[/]\n"
                        f"\n"
                    )
                    errors_found = True
    return errors_found
if __name__ == "__main__":
    # Guard so importing this module (e.g. from tests or tooling) does not
    # spawn the ruff subprocess or call sys.exit(); the check only runs when
    # the script is executed directly, as pre-commit/CI does.
    if check_imports():
        console.print("\n[red]Errors found in imports![/]\n")
        sys.exit(1)
    else:
        console.print("\n[green]All version_compat imports are correct![/]\n")
| {
"repo_id": "apache/airflow",
"file_path": "scripts/in_container/run_check_imports_in_providers.py",
"license": "Apache License 2.0",
"lines": 81,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/docs/conf.py | # Disable Flake8 because of all the sphinx imports
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
import sys
from pathlib import Path
from docs.utils.conf_constants import (
AIRFLOW_FAVICON_PATH,
SPHINX_DESIGN_STATIC_PATH,
get_html_context,
get_html_sidebars,
get_html_theme_options,
)
import airflow.sdk
# --- Paths ---------------------------------------------------------------
CONF_DIR = Path(__file__).parent.absolute()
# Make the shared sphinx extensions (devel-common) and the Task SDK sources importable.
# NOTE(review): `import airflow.sdk` happens at the top of this file, before the
# `src` path insert below - presumably the SDK is already installed in the docs
# build environment; confirm before relying on the path insert alone.
sys.path.insert(0, str(CONF_DIR.parent.parent.joinpath("devel-common", "src", "sphinx_exts").resolve()))
sys.path.insert(0, str(CONF_DIR.parent.joinpath("src").resolve()))

# --- Project information -------------------------------------------------
PACKAGE_NAME = "task-sdk"
os.environ["AIRFLOW_PACKAGE_NAME"] = PACKAGE_NAME
PACKAGE_VERSION = airflow.sdk.__version__
project = "Apache Airflow Task SDK"
# The short X.Y version of the project being documented.
version = PACKAGE_VERSION
# The full version, including alpha/beta/rc tags.
release = PACKAGE_VERSION
language = "en"
locale_dirs: list[str] = []

# --- Sphinx extensions ---------------------------------------------------
extensions = [
    "sphinx.ext.autodoc",
    "autoapi.extension",
    "sphinx.ext.intersphinx",
    "exampleinclude",
    "sphinxcontrib.spelling",
]

# --- AutoAPI configuration -----------------------------------------------
autoapi_dirs = [CONF_DIR.joinpath("..", "src").resolve()]
autoapi_root = "api"
# Internal/implementation packages excluded from the public API docs.
autoapi_ignore = [
    "*/airflow/sdk/execution_time",
    "*/airflow/sdk/api",
    "*/_internal*",
]
autoapi_options = [
    "undoc-members",
    "members",
    "imported-members",
]
autoapi_add_toctree_entry = False
autoapi_generate_api_docs = False
autodoc_typehints = "description"
# Prefer pyi over py files if both are found
autoapi_file_patterns = ["*.pyi", "*.py"]

# --- HTML output ---------------------------------------------------------
html_theme = "sphinx_airflow_theme"
html_title = "Apache Airflow Task SDK Documentation"
html_short_title = "Task SDK"
html_favicon = AIRFLOW_FAVICON_PATH.as_posix()
html_static_path = [SPHINX_DESIGN_STATIC_PATH.as_posix()]
html_css_files = ["custom.css"]
html_sidebars = get_html_sidebars(PACKAGE_VERSION)
html_theme_options = get_html_theme_options()
conf_py_path = "/task-sdk/docs/"
html_context = get_html_context(conf_py_path)
html_use_index = True
html_show_copyright = False

# --- Substitutions injected into every rst document ----------------------
global_substitutions = {
    "experimental": "This is an :ref:`experimental feature <experimental>`.",
}
rst_epilog = "\n".join(f".. |{key}| replace:: {replace}" for key, replace in global_substitutions.items())

# --- Intersphinx: cross-link into the main Airflow docs ------------------
intersphinx_resolve_self = "airflow"
intersphinx_mapping = {
    "airflow": ("https://airflow.apache.org/docs/apache-airflow/stable/", None),
}

# Suppress known warnings
suppress_warnings = [
    "autoapi.python_import_resolution",
    "autodoc",
]
exampleinclude_sourceroot = str(CONF_DIR.joinpath("..").resolve())

# --- Spelling checks -----------------------------------------------------
spelling_show_suggestions = False
spelling_word_list_filename = [
    str(CONF_DIR.parent.parent.joinpath("docs", "spelling_wordlist.txt").resolve())
]
spelling_ignore_importable_modules = True
spelling_ignore_contributor_names = True
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/docs/conf.py",
"license": "Apache License 2.0",
"lines": 99,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/keycloak/src/airflow/providers/keycloak/auth_manager/resources.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from enum import Enum
class KeycloakResource(Enum):
    """Enum of Keycloak resources.

    Each member's value is the resource name as registered in Keycloak; the
    string values are part of the external contract with the Keycloak server,
    so they must not be renamed casually.
    """

    ASSET = "Asset"
    ASSET_ALIAS = "AssetAlias"
    BACKFILL = "Backfill"
    CONFIGURATION = "Configuration"
    CONNECTION = "Connection"
    CUSTOM = "Custom"
    DAG = "Dag"
    MENU = "Menu"
    POOL = "Pool"
    TEAM = "Team"
    VARIABLE = "Variable"
    VIEW = "View"
| {
"repo_id": "apache/airflow",
"file_path": "providers/keycloak/src/airflow/providers/keycloak/auth_manager/resources.py",
"license": "Apache License 2.0",
"lines": 32,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/keycloak/tests/unit/keycloak/auth_manager/test_resources.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.providers.keycloak.auth_manager.resources import KeycloakResource
class TestKeycloakResource:
    """Tests for the ``KeycloakResource`` enum.

    The string values are part of the contract with the Keycloak server, so
    every member is pinned - previously only ``ASSET`` was covered.
    """

    def test_asset(self):
        assert KeycloakResource.ASSET.value == "Asset"

    def test_all_members_and_values(self):
        # Exhaustive check: catches renamed values AND added/removed members.
        expected = {
            "ASSET": "Asset",
            "ASSET_ALIAS": "AssetAlias",
            "BACKFILL": "Backfill",
            "CONFIGURATION": "Configuration",
            "CONNECTION": "Connection",
            "CUSTOM": "Custom",
            "DAG": "Dag",
            "MENU": "Menu",
            "POOL": "Pool",
            "TEAM": "Team",
            "VARIABLE": "Variable",
            "VIEW": "View",
        }
        assert {member.name: member.value for member in KeycloakResource} == expected
| {
"repo_id": "apache/airflow",
"file_path": "providers/keycloak/tests/unit/keycloak/auth_manager/test_resources.py",
"license": "Apache License 2.0",
"lines": 21,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/openlineage/tests/system/openlineage/example_openlineage_base_complex_dag.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Complex DAG without schedule, with multiple operators, task groups, dependencies etc.
It checks:
- required keys
- field formats and types
- number of task events (one start, one complete)
- if EmptyOperator will emit OL events with callback or outlet
- if EmptyOperator without modification will not emit OL events
- if CustomOperator without Extractor will emit OL events
- task groups serialization without dependencies
- additional task configuration attrs (owner, max_active_tis_per_dag etc.)
"""
from __future__ import annotations
from datetime import datetime, timedelta
from typing import Any
from airflow import DAG
from airflow.models import Variable
from airflow.providers.common.compat.assets import Asset
from airflow.providers.common.compat.sdk import BaseOperator
from airflow.providers.standard.operators.bash import BashOperator
from airflow.providers.standard.operators.empty import EmptyOperator
from airflow.providers.standard.operators.python import PythonOperator
try:
from airflow.sdk import TaskGroup
except ImportError:
from airflow.utils.task_group import TaskGroup # type: ignore[no-redef]
from system.openlineage.expected_events import AIRFLOW_VERSION, get_expected_event_file_path
from system.openlineage.operator import OpenLineageTestOperator
def check_events_number_func():
    """Fail if any OpenLineage event was recorded for ``task_0``.

    ``task_0`` is a plain EmptyOperator without callbacks or outlets, so no
    start/complete/fail event should have been stored in Variables.
    """
    for kind in ("start", "complete", "fail"):
        key = f"openlineage_base_complex_dag.task_0.event.{kind}"
        try:
            Variable.get(key=key, deserialize_json=True)
        except Exception:
            # Missing Variable means no event was emitted - that is the expected outcome.
            continue
        raise ValueError("Expected no events for task `task_0`.")
def do_nothing():
    """No-op callable used as the PythonOperator target; always returns None."""
    return None
class SomeCustomOperator(BashOperator):
    """BashOperator variant that carries extra attributes.

    The attributes set in ``__init__`` exist solely so the OpenLineage system
    test can verify that they are included in the emitted task event.
    """

    def __init__(self, **kwargs):
        self.external_dag_id = "external_dag_id"
        self.external_task_id = "external_task_id"
        self.deferrable = True
        super().__init__(**kwargs)
class CustomMappedOperator(BaseOperator):
    """Minimal custom operator (no Extractor) used with ``.partial().expand()``.

    Stores the mapped ``value`` and returns it incremented by one on execute.
    """

    def __init__(self, value, **kwargs):
        super().__init__(**kwargs)
        self.value = value

    def execute(self, context):
        result = self.value + 1
        return result
DAG_ID = "openlineage_base_complex_dag"

# DAG-level attributes (description, owner_links, tags with quotes, default_args)
# are deliberately exotic to exercise OpenLineage serialization of DAG metadata.
with DAG(
    dag_id=DAG_ID,
    start_date=datetime(2021, 1, 1),
    schedule=None,
    catchup=False,
    description="OpenLineage complex DAG description",
    owner_links={"airflow": "https://airflow.apache.org/"},
    tags=["first", "second@", "with'quote", 'z"e'],
    default_args={"retries": 0},
) as dag:
    # task_0 will not emit any events, but the owner will be picked up and added to DAG
    task_0 = EmptyOperator(task_id="task_0", owner='owner"1')

    # Dots in the task_id and a quoted owner check escaping in OL events.
    task_1 = BashOperator(
        task_id="task_1.id.with.dots",
        bash_command="exit 0;",
        owner="owner'2",
        execution_timeout=timedelta(seconds=456),
        doc_rst="RST doc",
    )

    # Only the highest-priority doc field should be serialized; the rest are skipped.
    task_2 = PythonOperator(
        task_id="task_2",
        python_callable=do_nothing,
        inlets=[Asset(uri="s3://bucket2/dir2/file2.txt"), Asset(uri="s3://bucket2/dir2/file3.txt")],
        max_retry_delay=42,
        doc="text doc",
        doc_md="should be skipped",
        doc_json="should be skipped",
        doc_yaml="should be skipped",
        doc_rst="should be skipped",
    )

    # EmptyOperator WITH an outlet - should emit OL events (unlike task_0).
    task_3 = EmptyOperator(
        task_id="task_3",
        outlets=[Asset(uri="s3://bucket/dir/file.txt")],
        doc_md="MD doc",
        doc_json="should be skipped",
        doc_yaml="should be skipped",
        doc_rst="should be skipped",
    )

    task_4 = SomeCustomOperator(
        task_id="task_4",
        bash_command="exit 0;",
        owner="owner3",
        max_active_tis_per_dag=7,
        max_active_tis_per_dagrun=2,
        doc_json="JSON doc",
        doc_yaml="should be skipped",
        doc_rst="should be skipped",
    )

    # Nested task groups check group serialization (without dependencies).
    with TaskGroup("section_1", prefix_group_id=True) as tg:
        task_5 = CustomMappedOperator.partial(task_id="task_5", doc_md="md doc").expand(value=[1])

    with TaskGroup("section_2", parent_group=tg, tooltip="group_tooltip") as tg2:
        # sla only exists on Airflow 2; skip the kwarg entirely on Airflow 3.
        add_args: dict[str, Any] = {"sla": timedelta(seconds=123)} if AIRFLOW_VERSION.major == 2 else {}
        # EmptyOperator WITH a callback - should emit OL events (unlike task_0).
        task_6 = EmptyOperator(
            task_id="task_6",
            on_success_callback=lambda x: print(1),
            doc_yaml="YAML doc",
            doc_rst="should be skipped",
            **add_args,
        )

    with TaskGroup("section_3", parent_group=tg2):
        task_7 = PythonOperator(task_id="task_7", python_callable=lambda: 1)

    check_events_number = PythonOperator(
        task_id="check_events_number", python_callable=check_events_number_func
    )
    check_events = OpenLineageTestOperator(
        task_id="check_events", file_path=get_expected_event_file_path(DAG_ID)
    )

    # Fan-out/fan-in dependencies; both verification tasks run last.
    task_1 >> [task_2, task_7] >> check_events_number
    task_2 >> task_3 >> [task_4, task_5] >> task_6 >> check_events_number
    check_events_number >> check_events

from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/tests/system/openlineage/example_openlineage_base_complex_dag.py",
"license": "Apache License 2.0",
"lines": 141,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/openlineage/tests/system/openlineage/example_openlineage_base_simple_dag.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Simple DAG without schedule and extra args, with one operator, to verify OpenLineage event integrity.
It checks:
- required keys
- field formats and types
- number of task events (one start, one complete)
"""
from __future__ import annotations
from datetime import datetime
from airflow import DAG
from airflow.providers.standard.operators.python import PythonOperator
from system.openlineage.expected_events import get_expected_event_file_path
from system.openlineage.operator import OpenLineageTestOperator
try:
from airflow.sdk import chain
except ImportError:
from airflow.models.baseoperator import chain # type: ignore[no-redef]
def do_nothing():
    """Intentionally empty callable for the PythonOperator under test."""
DAG_ID = "openlineage_base_simple_dag"

# Minimal DAG: no schedule, no extra args - the OL event should contain only
# the required fields, which check_events validates against the expected file.
with DAG(
    dag_id=DAG_ID,
    start_date=datetime(2021, 1, 1),
    schedule=None,
    catchup=False,
    default_args={"retries": 0},
) as dag:
    do_nothing_task = PythonOperator(task_id="do_nothing_task", python_callable=do_nothing)
    check_events = OpenLineageTestOperator(
        task_id="check_events", file_path=get_expected_event_file_path(DAG_ID)
    )

    chain(do_nothing_task, check_events)

from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/tests/system/openlineage/example_openlineage_base_simple_dag.py",
"license": "Apache License 2.0",
"lines": 51,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/openlineage/tests/system/openlineage/example_openlineage_mapped_simple_dag.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Simple DAG with mapped task.
It checks:
- task's `mapped` attribute
- taskInstance's `map_index` attribute
- number of OL events emitted for mapped task
"""
from __future__ import annotations
from datetime import datetime
from airflow import DAG
try:
from airflow.sdk import task
except ImportError:
# Airflow 2 path
from airflow.decorators import task # type: ignore[attr-defined,no-redef]
from airflow.models import Variable
from airflow.providers.standard.operators.python import PythonOperator
from system.openlineage.expected_events import get_expected_event_file_path
from system.openlineage.operator import OpenLineageTestOperator
def check_events_number_func():
    """Verify the mapped task `add_one` emitted exactly two START and two COMPLETE events.

    One event pair is expected per map index (the task expands over two values).
    """
    for event_type in ("start", "complete"):
        key = f"openlineage_mapped_simple_dag.add_one.event.{event_type}"
        events = Variable.get(key=key, deserialize_json=True)
        count = len(events)
        if count != 2:
            raise ValueError(
                f"Expected exactly 2 {event_type.upper()} events for task `add_one`, got {count}"
            )
DAG_ID = "openlineage_mapped_simple_dag"

with DAG(
    dag_id=DAG_ID,
    start_date=datetime(2021, 1, 1),
    schedule=None,
    catchup=False,
    default_args={"retries": 0},
) as dag:

    @task(max_active_tis_per_dagrun=1)  # Execute sequentially to not overwrite each other OL events
    def add_one(x: int):
        # Mapped over x=[1, 2] below -> two task instances, map_index 0 and 1.
        return x + 1

    @task
    def sum_it(values):
        # Consumes the mapped results (lazy XComArg); prints the aggregate.
        total = sum(values)
        print(f"Total was {total}")

    added_values = add_one.expand(x=[1, 2])
    check_events_number = PythonOperator(
        task_id="check_events_number", python_callable=check_events_number_func
    )
    # Duplicate start/complete events for add_one are expected (one per map index),
    # so they are explicitly allowed via the regex.
    check_events = OpenLineageTestOperator(
        task_id="check_events",
        file_path=get_expected_event_file_path(DAG_ID),
        allow_duplicate_events_regex="openlineage_mapped_simple_dag.add_one.event.(start|complete)",
    )

    sum_it(added_values) >> check_events_number >> check_events

from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/tests/system/openlineage/example_openlineage_mapped_simple_dag.py",
"license": "Apache License 2.0",
"lines": 72,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/openlineage/tests/system/openlineage/example_openlineage_schedule_asset_or_time_dag.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Simple DAG with asset or time schedule.
It checks:
- schedule serialization
"""
from __future__ import annotations
from datetime import datetime
from airflow import DAG
from airflow.providers.common.compat.assets import Asset
from airflow.providers.standard.operators.bash import BashOperator
from airflow.timetables.trigger import CronTriggerTimetable
from system.openlineage.expected_events import AIRFLOW_VERSION, get_expected_event_file_path
from system.openlineage.operator import OpenLineageTestOperator
DAG_ID = "openlineage_schedule_asset_or_time_dag"

# The same combined asset+cron timetable has different names/locations on
# Airflow 3 (AssetOrTimeSchedule) vs Airflow 2 (DatasetOrTimeSchedule).
# The cron expression "21 13 29 2 4" (Feb 29th AND Thursday) is effectively
# never triggered, so only serialization is exercised.
if AIRFLOW_VERSION.major == 3:
    from airflow.timetables.assets import AssetOrTimeSchedule

    schedule = AssetOrTimeSchedule(
        timetable=CronTriggerTimetable("21 13 29 2 4", timezone="UTC"),
        assets=(
            (Asset(uri="s3://bucket/file.txt", extra={"a": 1}) | Asset(uri="s3://bucket2/file.txt"))  # type: ignore[arg-type]
            & (Asset(uri="s3://bucket3/file.txt") | Asset(uri="s3://bucket4/file.txt", extra={"b": 2}))
        ),
    )
else:
    from airflow.timetables.datasets import DatasetOrTimeSchedule

    schedule = DatasetOrTimeSchedule(  # type: ignore[assignment] # re-defining schedule, but it's for different AF
        timetable=CronTriggerTimetable("21 13 29 2 4", timezone="UTC"),
        datasets=(
            (Asset(uri="s3://bucket/file.txt", extra={"a": 1}) | Asset(uri="s3://bucket2/file.txt"))
            & (Asset(uri="s3://bucket3/file.txt") | Asset(uri="s3://bucket4/file.txt", extra={"b": 2}))
        ),
    )

with DAG(
    dag_id=DAG_ID,
    start_date=datetime(2025, 5, 1),
    schedule=schedule,
    catchup=False,
    default_args={"retries": 0},
) as dag:
    do_nothing_task = BashOperator(task_id="do_nothing_task", bash_command="sleep 10;")
    check_events = OpenLineageTestOperator(
        task_id="check_events", file_path=get_expected_event_file_path(DAG_ID)
    )

    do_nothing_task >> check_events

from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/tests/system/openlineage/example_openlineage_schedule_asset_or_time_dag.py",
"license": "Apache License 2.0",
"lines": 63,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/openlineage/tests/system/openlineage/example_openlineage_schedule_cron_dag.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Simple DAG with cron schedule.
It checks:
- schedule serialization
"""
from __future__ import annotations
from datetime import datetime
from airflow import DAG
from airflow.providers.standard.operators.bash import BashOperator
from system.openlineage.expected_events import get_expected_event_file_path
from system.openlineage.operator import OpenLineageTestOperator
DAG_ID = "openlineage_schedule_cron_dag"

with DAG(
    dag_id=DAG_ID,
    start_date=datetime(2021, 1, 1),
    schedule="21 13 29 2 4",  # Unlikely to ever get triggered by itself, February 29th and Thursday
    catchup=False,
    default_args={"retries": 0},
) as dag:
    do_nothing_task = BashOperator(task_id="do_nothing_task", bash_command="sleep 10;")
    # Validates how the cron schedule is serialized in the emitted OL events.
    check_events = OpenLineageTestOperator(
        task_id="check_events", file_path=get_expected_event_file_path(DAG_ID)
    )

    do_nothing_task >> check_events

from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/tests/system/openlineage/example_openlineage_schedule_cron_dag.py",
"license": "Apache License 2.0",
"lines": 43,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/openlineage/tests/system/openlineage/example_openlineage_schedule_list_complex_assets_dag.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Simple DAG with multiple asset logical condition in list schedule for Airflow 3.
Use of list will result in the whole condition being wrapped with additional `asset_all`.
It checks:
- schedule serialization
"""
from __future__ import annotations
from datetime import datetime
from airflow import DAG
from airflow.providers.common.compat.assets import Asset
from airflow.providers.standard.operators.bash import BashOperator
from system.openlineage.expected_events import AIRFLOW_VERSION, get_expected_event_file_path
from system.openlineage.operator import OpenLineageTestOperator
# A logical asset condition wrapped in a list gets wrapped with an additional
# `asset_all` on Airflow 3; on Airflow 2 this construct breaks DAG processing,
# so the schedule is disabled there and the check is effectively skipped.
if AIRFLOW_VERSION.major == 3:
    schedule = [
        (Asset(uri="s3://bucket/file.txt", extra={"a": 1}) | Asset(uri="s3://bucket2/file.txt"))
        & (Asset(uri="s3://bucket3/file.txt") | Asset(uri="s3://bucket4/file.txt", extra={"b": 2}))
    ]
else:
    # Logical Asset condition wrapped in list breaks DAG processing in Airflow 2 - check is skipped
    schedule = None  # type: ignore[assignment]

DAG_ID = "openlineage_schedule_list_complex_assets_dag"

with DAG(
    dag_id=DAG_ID,
    start_date=datetime(2021, 1, 1),
    schedule=schedule,
    catchup=False,
    default_args={"retries": 0},
) as dag:
    do_nothing_task = BashOperator(task_id="do_nothing_task", bash_command="sleep 10;")
    check_events = OpenLineageTestOperator(
        task_id="check_events", file_path=get_expected_event_file_path(DAG_ID)
    )

    do_nothing_task >> check_events

from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/tests/system/openlineage/example_openlineage_schedule_list_complex_assets_dag.py",
"license": "Apache License 2.0",
"lines": 53,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/openlineage/tests/system/openlineage/example_openlineage_schedule_list_multiple_assets_dag.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Simple DAG with multiple asset in list schedule.
It checks:
- schedule serialization
"""
from __future__ import annotations
from datetime import datetime
from airflow import DAG
from airflow.providers.common.compat.assets import Asset
from airflow.providers.standard.operators.bash import BashOperator
from system.openlineage.expected_events import get_expected_event_file_path
from system.openlineage.operator import OpenLineageTestOperator
DAG_ID = "openlineage_schedule_list_multiple_assets_dag"

# Two plain assets in a list schedule - checks serialization of multi-asset schedules.
with DAG(
    dag_id=DAG_ID,
    start_date=datetime(2021, 1, 1),
    schedule=[Asset(uri="s3://bucket/file.txt", extra={"a": 1}), Asset(uri="s3://bucket2/file.txt")],
    catchup=False,
    default_args={"retries": 0},
) as dag:
    do_nothing_task = BashOperator(task_id="do_nothing_task", bash_command="sleep 10;")
    check_events = OpenLineageTestOperator(
        task_id="check_events", file_path=get_expected_event_file_path(DAG_ID)
    )

    do_nothing_task >> check_events

from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/tests/system/openlineage/example_openlineage_schedule_list_multiple_assets_dag.py",
"license": "Apache License 2.0",
"lines": 44,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/openlineage/tests/system/openlineage/example_openlineage_schedule_list_single_asset_dag.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Simple DAG with single asset in list schedule.
It checks:
- schedule serialization
"""
from __future__ import annotations
from datetime import datetime
from airflow import DAG
from airflow.providers.common.compat.assets import Asset
from airflow.providers.standard.operators.bash import BashOperator
from system.openlineage.expected_events import get_expected_event_file_path
from system.openlineage.operator import OpenLineageTestOperator
DAG_ID = "openlineage_schedule_list_single_asset_dag"

with DAG(
    dag_id=DAG_ID,
    start_date=datetime(2021, 1, 1),
    # Single-element asset-list schedule; the test verifies this schedule is
    # serialized correctly in the emitted events (see module docstring).
    schedule=[Asset(uri="s3://bucket/file.txt", extra={"some_extra": 123})],
    catchup=False,
    default_args={"retries": 0},
) as dag:
    # Placeholder work so the run produces task-level OpenLineage events.
    do_nothing_task = BashOperator(task_id="do_nothing_task", bash_command="sleep 10;")
    # Compares emitted OpenLineage events against the expected-events file for this DAG id.
    check_events = OpenLineageTestOperator(
        task_id="check_events", file_path=get_expected_event_file_path(DAG_ID)
    )
    do_nothing_task >> check_events

from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/tests/system/openlineage/example_openlineage_schedule_list_single_asset_dag.py",
"license": "Apache License 2.0",
"lines": 44,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/openlineage/tests/system/openlineage/example_openlineage_schedule_multiple_assets_dag.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Simple DAG with multiple assets with logical conditions in schedule.
It checks:
- schedule serialization
"""
from __future__ import annotations
from datetime import datetime
from airflow import DAG
from airflow.providers.common.compat.assets import Asset
from airflow.providers.standard.operators.bash import BashOperator
from system.openlineage.expected_events import get_expected_event_file_path
from system.openlineage.operator import OpenLineageTestOperator
DAG_ID = "openlineage_schedule_multiple_assets_dag"

with DAG(
    dag_id=DAG_ID,
    start_date=datetime(2021, 1, 1),
    # Logical asset condition: (bucket OR bucket2) AND (bucket3 OR bucket4).
    # The test verifies this composite schedule serializes correctly.
    schedule=(
        (Asset(uri="s3://bucket/file.txt", extra={"a": 1}) | Asset(uri="s3://bucket2/file.txt"))
        & (Asset(uri="s3://bucket3/file.txt") | Asset(uri="s3://bucket4/file.txt", extra={"b": 2}))
    ),
    catchup=False,
    default_args={"retries": 0},
) as dag:
    # Placeholder work so the run produces task-level OpenLineage events.
    do_nothing_task = BashOperator(task_id="do_nothing_task", bash_command="sleep 10;")
    # Compares emitted OpenLineage events against the expected-events file for this DAG id.
    check_events = OpenLineageTestOperator(
        task_id="check_events", file_path=get_expected_event_file_path(DAG_ID)
    )
    do_nothing_task >> check_events

from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/tests/system/openlineage/example_openlineage_schedule_multiple_assets_dag.py",
"license": "Apache License 2.0",
"lines": 47,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/openlineage/tests/system/openlineage/example_openlineage_schedule_single_asset_dag.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Simple DAG with single asset direct schedule.
It checks:
- schedule serialization
"""
from __future__ import annotations
from datetime import datetime
from airflow import DAG
from airflow.providers.common.compat.assets import Asset
from airflow.providers.standard.operators.bash import BashOperator
from system.openlineage.expected_events import get_expected_event_file_path
from system.openlineage.operator import OpenLineageTestOperator
DAG_ID = "openlineage_schedule_single_asset_dag"

with DAG(
    dag_id=DAG_ID,
    start_date=datetime(2021, 1, 1),
    # A single Asset passed directly (not wrapped in a list); the test verifies
    # this schedule form serializes correctly.
    schedule=Asset(uri="s3://bucket/file.txt", extra={"some_extra": 123}),
    catchup=False,
    default_args={"retries": 0},
) as dag:
    # Placeholder work so the run produces task-level OpenLineage events.
    do_nothing_task = BashOperator(task_id="do_nothing_task", bash_command="sleep 10;")
    # Compares emitted OpenLineage events against the expected-events file for this DAG id.
    check_events = OpenLineageTestOperator(
        task_id="check_events", file_path=get_expected_event_file_path(DAG_ID)
    )
    do_nothing_task >> check_events

from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/tests/system/openlineage/example_openlineage_schedule_single_asset_dag.py",
"license": "Apache License 2.0",
"lines": 44,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/openlineage/tests/system/openlineage/example_openlineage_schedule_timetable_dag.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Simple DAG with timetable schedule.
It checks:
- schedule serialization
"""
from __future__ import annotations
from datetime import datetime
import pendulum
from airflow import DAG
from airflow.providers.standard.operators.bash import BashOperator
from airflow.timetables.events import EventsTimetable
from system.openlineage.expected_events import get_expected_event_file_path
from system.openlineage.operator import OpenLineageTestOperator
DAG_ID = "openlineage_schedule_timetable_dag"

with DAG(
    dag_id=DAG_ID,
    start_date=datetime(2021, 1, 1),
    # Far-future event dates (year 2095) so the timetable never fires on its
    # own; the test only cares that this schedule serializes correctly.
    schedule=EventsTimetable(  # Unlikely to ever get triggered by itself
        event_dates=[
            pendulum.datetime(2095, 3, 3, 8, 27, tz="America/Chicago"),
            pendulum.datetime(2095, 3, 17, 8, 27, tz="America/Chicago"),
            pendulum.datetime(2095, 3, 22, 20, 50, tz="America/Chicago"),
        ],
        description="My Team's Baseball Games",
    ),
    catchup=False,
    default_args={"retries": 0},
) as dag:
    # Placeholder work so the run produces task-level OpenLineage events.
    do_nothing_task = BashOperator(task_id="do_nothing_task", bash_command="sleep 10;")
    # Compares emitted OpenLineage events against the expected-events file for this DAG id.
    check_events = OpenLineageTestOperator(
        task_id="check_events", file_path=get_expected_event_file_path(DAG_ID)
    )
    do_nothing_task >> check_events

from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/tests/system/openlineage/example_openlineage_schedule_timetable_dag.py",
"license": "Apache License 2.0",
"lines": 52,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/openlineage/tests/system/openlineage/example_openlineage_task_groups_dag.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Simple DAG with task group dependency
It checks:
- task's `task_group` attr
"""
from __future__ import annotations
from datetime import datetime
from airflow import DAG
from airflow.providers.standard.operators.python import PythonOperator
try:
from airflow.sdk import TaskGroup
except ImportError:
from airflow.utils.task_group import TaskGroup # type: ignore[no-redef]
from system.openlineage.expected_events import get_expected_event_file_path
from system.openlineage.operator import OpenLineageTestOperator
DAG_ID = "openlineage_task_groups_dag"

with DAG(
    dag_id=DAG_ID,
    start_date=datetime(2021, 1, 1),
    schedule=None,
    catchup=False,
    default_args={"retries": 0},
) as dag:
    task_0 = PythonOperator(task_id="task_0", python_callable=lambda: print("task 0"))
    # Tasks placed in task groups; the test checks each task's ``task_group``
    # attribute in the emitted events (see module docstring).
    with TaskGroup("tg1") as tg1:
        task_1 = PythonOperator(task_id="task_1", python_callable=lambda: print("task 1"))
    with TaskGroup("tg2") as tg2:
        task_2 = PythonOperator(task_id="task_2", python_callable=lambda: print("task 2"))
    check_events = OpenLineageTestOperator(
        task_id="check_events", file_path=get_expected_event_file_path(DAG_ID)
    )
    task_0 >> tg1 >> tg2 >> check_events

from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/tests/system/openlineage/example_openlineage_task_groups_dag.py",
"license": "Apache License 2.0",
"lines": 51,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/openlineage/tests/system/openlineage/example_openlineage_trigger_dag.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Simple DAG that triggers another simple DAG.
It checks:
- task's trigger_dag_id
- DAGRun START and COMPLETE events, for the triggered DAG
- propagation of OL parent and root info from DAGRun conf
"""
from __future__ import annotations
from datetime import datetime
from airflow import DAG
from airflow.providers.standard.operators.bash import BashOperator
from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunOperator
from system.openlineage.expected_events import get_expected_event_file_path
from system.openlineage.operator import OpenLineageTestOperator
DAG_ID = "openlineage_trigger_dag"

with DAG(
    dag_id=DAG_ID,
    start_date=datetime(2021, 1, 1),
    schedule=None,
    catchup=False,
    default_args={"retries": 0},
) as dag:
    # Trigger the child DAG below and block until it completes, so the child's
    # OpenLineage events exist before check_events runs.
    trigger_dagrun = TriggerDagRunOperator(
        task_id="trigger_dagrun",
        trigger_dag_id="openlineage_trigger_dag_child__notrigger",
        # Timestamped run id so repeated executions don't collide.
        trigger_run_id=f"openlineage_trigger_dag_triggering_child_{datetime.now().isoformat()}",
        wait_for_completion=True,
        # The ``openlineage`` conf block carries parent/root lineage identifiers
        # that the child run is expected to propagate (see module docstring).
        conf={
            "some_config": "value1",
            "openlineage": {
                "parentRunId": "3bb703d1-09c1-4a42-8da5-35a0b3216072",
                "parentJobNamespace": "prod_biz",
                "parentJobName": "get_files",
                "rootParentRunId": "9d3b14f7-de91-40b6-aeef-e887e2c7673e",
                "rootParentJobNamespace": "prod_analytics",
                "rootParentJobName": "generate_report_sales_e2e",
            },
        },
        poke_interval=10,
    )
    # Compares emitted OpenLineage events against the expected-events file for this DAG id.
    check_events = OpenLineageTestOperator(
        task_id="check_events", file_path=get_expected_event_file_path(DAG_ID)
    )
    trigger_dagrun >> check_events

# Child DAG run only via the trigger task above (schedule=None).
# NOTE(review): tags and docs include quotes/special characters — presumably to
# exercise serialization edge cases; confirm against the expected-events file.
with DAG(
    dag_id="openlineage_trigger_dag_child__notrigger",
    start_date=datetime(2021, 1, 1),
    schedule=None,
    catchup=False,
    tags=["first", "second@", "with'quote", 'z"e'],
    doc_md="MD DAG doc",
    description="DAG description",
    default_args={"retries": 0},
) as child_dag:
    do_nothing_task = BashOperator(task_id="do_nothing_task", bash_command="sleep 10;")

from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/tests/system/openlineage/example_openlineage_trigger_dag.py",
"license": "Apache License 2.0",
"lines": 74,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:task-sdk/src/airflow/sdk/definitions/deadline.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from datetime import datetime, timedelta
from typing import TYPE_CHECKING
from airflow.models.deadline import DeadlineReferenceType, ReferenceModels
from airflow.sdk.definitions.callback import AsyncCallback, Callback, SyncCallback
if TYPE_CHECKING:
from collections.abc import Callable
from typing import TypeAlias
logger = logging.getLogger(__name__)
DeadlineReferenceTypes: TypeAlias = tuple[type[ReferenceModels.BaseDeadlineReference], ...]
class DeadlineAlert:
    """Store Deadline values needed to calculate the need-by timestamp and the callback information."""

    def __init__(
        self,
        reference: DeadlineReferenceType,
        interval: timedelta,
        callback: Callback,
    ):
        # How the base timestamp is resolved (e.g. DagRun logical date).
        self.reference = reference
        # Offset added to the resolved reference time to produce the deadline.
        self.interval = interval
        # Only the two concrete callback flavors are supported today.
        if not isinstance(callback, (AsyncCallback, SyncCallback)):
            raise ValueError(f"Callbacks of type {type(callback).__name__} are not currently supported")
        self.callback = callback

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, DeadlineAlert):
            return NotImplemented
        # References are compared by type, not by value.
        # NOTE(review): ``isinstance(self.reference, type(other.reference))`` is
        # asymmetric when references subclass each other — confirm intended.
        return (
            isinstance(self.reference, type(other.reference))
            and self.interval == other.interval
            and self.callback == other.callback
        )

    def __hash__(self) -> int:
        # Hash on the reference's type name to stay consistent with __eq__,
        # which also ignores reference instance state.
        return hash(
            (
                type(self.reference).__name__,
                self.interval,
                self.callback,
            )
        )
class DeadlineReference:
    """
    The public interface class for all DeadlineReference options.

    This class provides a unified interface for working with Deadlines, supporting both
    calculated deadlines (which fetch values from the database) and fixed deadlines
    (which return a predefined datetime).

    ------
    Usage:
    ------

    1. Example deadline references:

    .. code-block:: python

        fixed = DeadlineReference.FIXED_DATETIME(datetime(2025, 5, 4))
        logical = DeadlineReference.DAGRUN_LOGICAL_DATE
        queued = DeadlineReference.DAGRUN_QUEUED_AT

    2. Using in a DAG:

    .. code-block:: python

        DAG(
            dag_id="dag_with_deadline",
            deadline=DeadlineAlert(
                reference=DeadlineReference.DAGRUN_LOGICAL_DATE,
                interval=timedelta(hours=1),
                callback=hello_callback,
            ),
        )

    3. Evaluating deadlines will ignore unexpected parameters:

    .. code-block:: python

        # For deadlines requiring parameters:
        deadline = DeadlineReference.DAGRUN_LOGICAL_DATE
        deadline.evaluate_with(dag_id=dag.dag_id)

        # For deadlines with no required parameters:
        deadline = DeadlineReference.FIXED_DATETIME(datetime(2025, 5, 4))
        deadline.evaluate_with()
    """

    class TYPES:
        """Collection of DeadlineReference types for type checking."""

        # Deadlines that should be created when the DagRun is created.
        DAGRUN_CREATED: DeadlineReferenceTypes = (
            ReferenceModels.DagRunLogicalDateDeadline,
            ReferenceModels.FixedDatetimeDeadline,
            ReferenceModels.AverageRuntimeDeadline,
        )
        # Deadlines that should be created when the DagRun is queued.
        DAGRUN_QUEUED: DeadlineReferenceTypes = (ReferenceModels.DagRunQueuedAtDeadline,)
        # All DagRun-related deadline types.
        DAGRUN: DeadlineReferenceTypes = DAGRUN_CREATED + DAGRUN_QUEUED

    # Class-body import binds ReferenceModels as a class attribute, making it
    # reachable as ``cls.ReferenceModels`` in the classmethods below.
    from airflow.models.deadline import ReferenceModels

    DAGRUN_LOGICAL_DATE: DeadlineReferenceType = ReferenceModels.DagRunLogicalDateDeadline()
    DAGRUN_QUEUED_AT: DeadlineReferenceType = ReferenceModels.DagRunQueuedAtDeadline()

    @classmethod
    def AVERAGE_RUNTIME(cls, max_runs: int = 0, min_runs: int | None = None) -> DeadlineReferenceType:
        # ``max_runs == 0`` selects the model's DEFAULT_LIMIT; an omitted
        # ``min_runs`` falls back to ``max_runs``.
        if max_runs == 0:
            max_runs = cls.ReferenceModels.AverageRuntimeDeadline.DEFAULT_LIMIT
        if min_runs is None:
            min_runs = max_runs
        return cls.ReferenceModels.AverageRuntimeDeadline(max_runs, min_runs)

    @classmethod
    def FIXED_DATETIME(cls, datetime: datetime) -> DeadlineReferenceType:
        # The parameter shadows the module-level ``datetime`` class; kept as-is
        # since the name is part of the public keyword interface.
        return cls.ReferenceModels.FixedDatetimeDeadline(datetime)

    # TODO: Remove this once other deadline types exist.
    # This is a temporary reference type used only in tests to verify that
    # dag.has_dagrun_deadline() returns false if the dag has a non-dagrun deadline type.
    # It should be replaced with a real non-dagrun deadline type when one is available.
    _TEMPORARY_TEST_REFERENCE = type(
        "TemporaryTestDeadlineForTypeChecking",
        (DeadlineReferenceType,),
        {"_evaluate_with": lambda self, **kwargs: datetime.now()},
    )()

    @classmethod
    def register_custom_reference(
        cls,
        reference_class: type[ReferenceModels.BaseDeadlineReference],
        deadline_reference_type: DeadlineReferenceTypes | None = None,
    ) -> type[ReferenceModels.BaseDeadlineReference]:
        """
        Register a custom deadline reference class.

        :param reference_class: The custom reference class inheriting from BaseDeadlineReference
        :param deadline_reference_type: A DeadlineReference.TYPES for when the deadline should be evaluated ("DAGRUN_CREATED",
            "DAGRUN_QUEUED", etc.); defaults to DeadlineReference.TYPES.DAGRUN_CREATED
        :raises ValueError: If the class is not a BaseDeadlineReference subclass
            or the deadline_reference_type is not a recognized TYPES tuple.
        """
        from airflow.models.deadline import ReferenceModels

        # Default to DAGRUN_CREATED if no deadline_reference_type specified
        if deadline_reference_type is None:
            deadline_reference_type = cls.TYPES.DAGRUN_CREATED
        # Validate the reference class inherits from BaseDeadlineReference
        if not issubclass(reference_class, ReferenceModels.BaseDeadlineReference):
            raise ValueError(f"{reference_class.__name__} must inherit from BaseDeadlineReference")
        # Register the new reference with ReferenceModels and DeadlineReference for discoverability
        setattr(ReferenceModels, reference_class.__name__, reference_class)
        setattr(cls, reference_class.__name__, reference_class())
        logger.info("Registered DeadlineReference %s", reference_class.__name__)
        # Add to appropriate deadline_reference_type classification. The ``is``
        # checks mean callers must pass one of the exact TYPES tuples — an
        # equal-but-distinct tuple raises below.
        if deadline_reference_type is cls.TYPES.DAGRUN_CREATED:
            cls.TYPES.DAGRUN_CREATED = cls.TYPES.DAGRUN_CREATED + (reference_class,)
        elif deadline_reference_type is cls.TYPES.DAGRUN_QUEUED:
            cls.TYPES.DAGRUN_QUEUED = cls.TYPES.DAGRUN_QUEUED + (reference_class,)
        else:
            raise ValueError(
                f"Invalid deadline reference type {deadline_reference_type}; "
                "must be a valid DeadlineReference.TYPES option."
            )
        # Refresh the combined DAGRUN tuple
        cls.TYPES.DAGRUN = cls.TYPES.DAGRUN_CREATED + cls.TYPES.DAGRUN_QUEUED
        return reference_class
def deadline_reference(
    deadline_reference_type: DeadlineReferenceTypes | None = None,
) -> Callable[[type[ReferenceModels.BaseDeadlineReference]], type[ReferenceModels.BaseDeadlineReference]]:
    """
    Decorate a class to register a custom deadline reference.

    Usage:

        @deadline_reference()
        class MyCustomReference(ReferenceModels.BaseDeadlineReference):
            # By default, evaluate_with will be called when a new dagrun is created.
            def _evaluate_with(self, *, session: Session, **kwargs) -> datetime:
                # Put your business logic here
                return some_datetime

        @deadline_reference(DeadlineReference.TYPES.DAGRUN_QUEUED)
        class MyQueuedRef(ReferenceModels.BaseDeadlineReference):
            # Optionally, you can specify when you want it calculated by providing a DeadlineReference.TYPES
            def _evaluate_with(self, *, session: Session, **kwargs) -> datetime:
                # Put your business logic here
                return some_datetime
    """

    def _register(
        candidate: type[ReferenceModels.BaseDeadlineReference],
    ) -> type[ReferenceModels.BaseDeadlineReference]:
        # All bookkeeping lives in the public classmethod; the decorated class
        # is handed back unchanged so it still works as a normal class.
        DeadlineReference.register_custom_reference(candidate, deadline_reference_type)
        return candidate

    return _register
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/definitions/deadline.py",
"license": "Apache License 2.0",
"lines": 184,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/tests/task_sdk/definitions/test_deadline.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime, timedelta
import pytest
from task_sdk.definitions.test_callback import TEST_CALLBACK_KWARGS, TEST_CALLBACK_PATH, UNIMPORTABLE_DOT_PATH
from airflow.sdk.definitions.callback import AsyncCallback, SyncCallback
from airflow.sdk.definitions.deadline import DeadlineAlert, DeadlineReference
# Shared fixtures for the deadline tests below.
DAG_ID = "dag_id_1"
RUN_ID = 1
DEFAULT_DATE = datetime(2025, 6, 26)

# One parametrize entry per public DeadlineReference flavor.
# NOTE(review): AVERAGE_RUNTIME is passed as the (uncalled) classmethod while
# FIXED_DATETIME is invoked — confirm the asymmetry is intentional.
REFERENCE_TYPES = [
    pytest.param(DeadlineReference.DAGRUN_LOGICAL_DATE, id="logical_date"),
    pytest.param(DeadlineReference.DAGRUN_QUEUED_AT, id="queued_at"),
    pytest.param(DeadlineReference.FIXED_DATETIME(DEFAULT_DATE), id="fixed_deadline"),
    pytest.param(DeadlineReference.AVERAGE_RUNTIME, id="average_runtime"),
]

# Default callback used wherever a specific callback doesn't matter.
TEST_DEADLINE_CALLBACK = AsyncCallback(TEST_CALLBACK_PATH, kwargs=TEST_CALLBACK_KWARGS)
class TestDeadlineAlert:
    """Tests for DeadlineAlert equality, hashing, and callback validation."""

    @pytest.mark.parametrize(
        ("test_alert", "should_equal"),
        [
            pytest.param(
                DeadlineAlert(
                    reference=DeadlineReference.DAGRUN_QUEUED_AT,
                    interval=timedelta(hours=1),
                    callback=TEST_DEADLINE_CALLBACK,
                ),
                True,
                id="same_alert",
            ),
            pytest.param(
                DeadlineAlert(
                    reference=DeadlineReference.DAGRUN_LOGICAL_DATE,
                    interval=timedelta(hours=1),
                    callback=TEST_DEADLINE_CALLBACK,
                ),
                False,
                id="different_reference",
            ),
            pytest.param(
                DeadlineAlert(
                    reference=DeadlineReference.DAGRUN_QUEUED_AT,
                    interval=timedelta(hours=2),
                    callback=TEST_DEADLINE_CALLBACK,
                ),
                False,
                id="different_interval",
            ),
            pytest.param(
                DeadlineAlert(
                    reference=DeadlineReference.DAGRUN_QUEUED_AT,
                    interval=timedelta(hours=1),
                    callback=AsyncCallback(UNIMPORTABLE_DOT_PATH, kwargs=TEST_CALLBACK_KWARGS),
                ),
                False,
                id="different_callback",
            ),
            pytest.param(
                DeadlineAlert(
                    reference=DeadlineReference.DAGRUN_QUEUED_AT,
                    interval=timedelta(hours=1),
                    callback=AsyncCallback(TEST_CALLBACK_PATH, kwargs={"arg2": "value2"}),
                ),
                False,
                id="different_kwargs",
            ),
            pytest.param("not a DeadlineAlert", False, id="non_deadline_alert"),
        ],
    )
    def test_deadline_alert_equality(self, test_alert, should_equal):
        # Each parametrized alert varies exactly one attribute from this base;
        # only the identical alert should compare equal.
        base_alert = DeadlineAlert(
            reference=DeadlineReference.DAGRUN_QUEUED_AT,
            interval=timedelta(hours=1),
            callback=TEST_DEADLINE_CALLBACK,
        )
        assert (base_alert == test_alert) == should_equal

    def test_deadline_alert_hash(self):
        # Two alerts built from the same values must hash identically.
        std_interval = timedelta(hours=1)
        std_callback = TEST_CALLBACK_PATH
        std_kwargs = TEST_CALLBACK_KWARGS
        alert1 = DeadlineAlert(
            reference=DeadlineReference.DAGRUN_QUEUED_AT,
            interval=std_interval,
            callback=AsyncCallback(std_callback, kwargs=std_kwargs),
        )
        alert2 = DeadlineAlert(
            reference=DeadlineReference.DAGRUN_QUEUED_AT,
            interval=std_interval,
            callback=AsyncCallback(std_callback, kwargs=std_kwargs),
        )
        assert hash(alert1) == hash(alert1)  # hashing is stable
        assert hash(alert1) == hash(alert2)  # equal values hash equally

    def test_deadline_alert_in_set(self):
        # Equal alerts must deduplicate when stored in a set.
        std_interval = timedelta(hours=1)
        std_callback = TEST_CALLBACK_PATH
        std_kwargs = TEST_CALLBACK_KWARGS
        alert1 = DeadlineAlert(
            reference=DeadlineReference.DAGRUN_QUEUED_AT,
            interval=std_interval,
            callback=AsyncCallback(std_callback, kwargs=std_kwargs),
        )
        alert2 = DeadlineAlert(
            reference=DeadlineReference.DAGRUN_QUEUED_AT,
            interval=std_interval,
            callback=AsyncCallback(std_callback, kwargs=std_kwargs),
        )
        alert_set = {alert1, alert2}
        assert len(alert_set) == 1

    @pytest.mark.parametrize(
        ("callback_class"),
        [
            pytest.param(AsyncCallback, id="async_callback"),
            pytest.param(SyncCallback, id="sync_callback"),
        ],
    )
    def test_deadline_alert_accepts_all_callbacks(self, callback_class):
        # Both supported callback flavors must be accepted as-is.
        alert = DeadlineAlert(
            reference=DeadlineReference.DAGRUN_QUEUED_AT,
            interval=timedelta(hours=1),
            callback=callback_class(TEST_CALLBACK_PATH),
        )
        assert alert.callback is not None
        assert isinstance(alert.callback, callback_class)

    def test_deadline_alert_rejects_invalid_callback(self):
        """Test that DeadlineAlert rejects non-callback types."""
        with pytest.raises(ValueError, match="Callbacks of type str are not currently supported"):
            DeadlineAlert(
                reference=DeadlineReference.DAGRUN_QUEUED_AT,
                interval=timedelta(hours=1),
                callback="not_a_callback",  # type: ignore
            )
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/tests/task_sdk/definitions/test_deadline.py",
"license": "Apache License 2.0",
"lines": 146,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/cli/commands/db_manager_command.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow import settings
from airflow._shared.module_loading import import_string
from airflow.cli.commands.db_command import run_db_downgrade_command, run_db_migrate_command
from airflow.configuration import conf
from airflow.utils import cli as cli_utils
from airflow.utils.providers_configuration_loader import providers_configuration_loaded
def _get_db_manager(classpath: str):
    """
    Import and return the external DB manager class named by *classpath*.

    :param classpath: Dotted import path of the manager class; must be listed
        in the ``[database] external_db_managers`` configuration option.
    :raises SystemExit: If the classpath is not present in the configuration.
    """
    # Normalize once so the membership check and the import see the same
    # string. (Previously only the import stripped whitespace, so a padded
    # classpath could fail the lookup yet would have imported fine, or vice versa.)
    classpath = classpath.strip()
    managers = conf.getlist("database", "external_db_managers")
    if classpath not in managers:
        raise SystemExit(f"DB manager {classpath} not found in configuration.")
    return import_string(classpath)
@providers_configuration_loaded
def resetdb(args):
    """Reset the metadata database."""
    manager_cls = _get_db_manager(args.import_path)
    # Skip the interactive prompt when --yes was given; otherwise require an
    # explicit "y"/"Y" answer before dropping anything.
    confirmed = args.yes
    if not confirmed:
        answer = input("This will drop existing tables if they exist. Proceed? (y/n)")
        confirmed = answer.upper() == "Y"
    if not confirmed:
        raise SystemExit("Cancelled")
    manager_cls(settings.Session()).resetdb(skip_init=args.skip_init)
@cli_utils.action_cli(check_db=False)
@providers_configuration_loaded
def migratedb(args):
    """Migrates the metadata database."""
    manager_cls = _get_db_manager(args.import_path)
    manager = manager_cls(settings.Session())
    # Delegate option parsing (revisions, sql-only, etc.) to the shared helper.
    run_db_migrate_command(args, manager.upgradedb, revision_heads_map=manager_cls.revision_heads_map)
@cli_utils.action_cli(check_db=False)
@providers_configuration_loaded
def downgrade(args):
    """Downgrades the metadata database."""
    manager_cls = _get_db_manager(args.import_path)
    manager = manager_cls(settings.Session())
    # Delegate option parsing (revisions, sql-only, etc.) to the shared helper.
    run_db_downgrade_command(args, manager.downgrade, revision_heads_map=manager_cls.revision_heads_map)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/cli/commands/db_manager_command.py",
"license": "Apache License 2.0",
"lines": 52,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/tests/unit/cli/commands/test_db_manager_command.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
import pytest
from airflow.cli import cli_parser
from airflow.cli.commands import db_manager_command
from airflow.utils.db_manager import BaseDBManager
from tests_common.test_utils.config import conf_vars
pytestmark = pytest.mark.db_test
class FakeDBManager(BaseDBManager):
    """Test double for an external DB manager: records instances and mocks DB ops."""

    metadata = mock.MagicMock()
    migration_dir = "migrations"
    alembic_file = "alembic.ini"
    version_table_name = "alembic_version_ext"
    revision_heads_map = {}

    # Test controls
    raise_on_init = False  # when True, constructing the manager fails the test
    instances: list[FakeDBManager] = []  # every instance created during a test
    last_instance: FakeDBManager | None = None  # most recently created instance

    def __init__(self, session):
        # Lets tests assert the CLI never instantiates the manager after a
        # cancelled confirmation prompt.
        if self.raise_on_init:
            raise AssertionError("Should not instantiate manager when cancelled")
        super().__init__(session)
        # Per-instance mocks so each DB operation's call args can be asserted.
        self._resetdb_mock = mock.MagicMock(name="resetdb")
        self._upgradedb_mock = mock.MagicMock(name="upgradedb")
        self._downgrade_mock = mock.MagicMock(name="downgrade")
        FakeDBManager.instances.append(self)
        FakeDBManager.last_instance = self

    def resetdb(self, skip_init=False):
        # Forward to the mock, preserving keyword form for assert_called_with.
        return self._resetdb_mock(skip_init=skip_init)

    def upgradedb(self, to_revision=None, from_revision=None, show_sql_only=False):
        # Forward to the mock, preserving keyword form for assert_called_with.
        return self._upgradedb_mock(
            to_revision=to_revision, from_revision=from_revision, show_sql_only=show_sql_only
        )

    def downgrade(self, to_revision, from_revision=None, show_sql_only=False):
        # Forward to the mock, preserving keyword form for assert_called_with.
        return self._downgrade_mock(
            to_revision=to_revision, from_revision=from_revision, show_sql_only=show_sql_only
        )
@pytest.fixture(autouse=True)
def _reset_fake_db_manager():
    """Restore FakeDBManager class-level state before every test in this module."""
    FakeDBManager.revision_heads_map = {}
    FakeDBManager.raise_on_init = False
    FakeDBManager.instances = []
    FakeDBManager.last_instance = None
class TestCliDbManager:
    """CLI-level tests for the ``airflow db-manager`` sub-commands.

    Each test patches ``_get_db_manager`` to return :class:`FakeDBManager`, drives
    the command through the real Airflow argument parser, and asserts on the mocks
    recorded by the fake manager instance.
    """

    @classmethod
    def setup_class(cls):
        # Build the full Airflow CLI parser once for all tests in this class.
        cls.parser = cli_parser.get_parser()

    @mock.patch("airflow.cli.commands.db_manager_command.settings.Session", autospec=True)
    @mock.patch("airflow.cli.commands.db_manager_command._get_db_manager")
    def test_cli_resetdb_yes_calls_reset(self, mock_get_db_manager, mock_session):
        """``reset --yes`` instantiates the manager once and calls ``resetdb``."""
        manager_name = "path.to.FakeDBManager"
        mock_get_db_manager.return_value = FakeDBManager
        args = self.parser.parse_args(["db-manager", "reset", manager_name, "--yes"])
        db_manager_command.resetdb(args)
        mock_get_db_manager.assert_called_once_with(manager_name)
        assert len(FakeDBManager.instances) == 1
        FakeDBManager.last_instance._resetdb_mock.assert_called_once_with(skip_init=False)

    @mock.patch("airflow.cli.commands.db_manager_command.settings.Session", autospec=True)
    @mock.patch("airflow.cli.commands.db_manager_command._get_db_manager")
    def test_cli_resetdb_skip_init(self, mock_get_db_manager, mock_session):
        """``reset --yes --skip-init`` forwards ``skip_init=True`` to the manager."""
        manager_name = "path.to.FakeDBManager"
        mock_get_db_manager.return_value = FakeDBManager
        args = self.parser.parse_args(["db-manager", "reset", manager_name, "--yes", "--skip-init"])
        db_manager_command.resetdb(args)
        mock_get_db_manager.assert_called_once_with(manager_name)
        assert len(FakeDBManager.instances) == 1
        FakeDBManager.last_instance._resetdb_mock.assert_called_once_with(skip_init=True)

    @mock.patch("airflow.cli.commands.db_manager_command.input")
    @mock.patch("airflow.cli.commands.db_manager_command._get_db_manager")
    def test_cli_resetdb_prompt_yes(self, mock_get_db_manager, mock_input):
        """Without ``--yes``, answering "Y" at the prompt proceeds with the reset."""
        mock_input.return_value = "Y"
        manager_name = "path.to.FakeDBManager"
        mock_get_db_manager.return_value = FakeDBManager
        args = self.parser.parse_args(["db-manager", "reset", manager_name])
        db_manager_command.resetdb(args)
        assert len(FakeDBManager.instances) == 1
        FakeDBManager.last_instance._resetdb_mock.assert_called_once_with(skip_init=False)

    @mock.patch("airflow.cli.commands.db_manager_command.input")
    @mock.patch("airflow.cli.commands.db_manager_command._get_db_manager")
    def test_cli_resetdb_prompt_cancel(self, mock_get_db_manager, mock_input):
        """Answering "n" at the prompt exits and never instantiates the manager."""
        mock_input.return_value = "n"
        manager_name = "path.to.FakeDBManager"
        # raise_on_init makes FakeDBManager.__init__ fail loudly if the command
        # instantiates the manager despite the cancellation.
        FakeDBManager.raise_on_init = True
        mock_get_db_manager.return_value = FakeDBManager
        args = self.parser.parse_args(["db-manager", "reset", manager_name])
        with pytest.raises(SystemExit, match="Cancelled"):
            db_manager_command.resetdb(args)
        assert FakeDBManager.instances == []

    @mock.patch("airflow.cli.commands.db_manager_command.settings.Session", autospec=True)
    @mock.patch("airflow.cli.commands.db_manager_command._get_db_manager")
    @mock.patch("airflow.cli.commands.db_manager_command.run_db_migrate_command")
    def test_cli_migrate_db(self, mock_run_db_migrate_cmd, mock_get_db_manager, mock_session):
        """``migrate`` passes the instance's ``upgradedb`` and heads map to the runner."""
        manager_name = "path.to.FakeDBManager"
        FakeDBManager.revision_heads_map = {"2.10.0": "22ed7efa9da2"}
        mock_get_db_manager.return_value = FakeDBManager
        args = self.parser.parse_args(["db-manager", "migrate", manager_name])
        db_manager_command.migratedb(args)
        mock_get_db_manager.assert_called_once_with(manager_name)
        assert len(FakeDBManager.instances) == 1
        # Validate run_db_migrate_command was called with the instance's upgradedb and correct heads map
        called_args, called_kwargs = mock_run_db_migrate_cmd.call_args
        assert called_args[0] is args
        # Verify the bound method refers to the instance's upgradedb implementation
        assert called_args[1].__self__ is FakeDBManager.last_instance
        assert called_args[1].__func__ is FakeDBManager.upgradedb
        assert called_kwargs["revision_heads_map"] == {"2.10.0": "22ed7efa9da2"}

    @mock.patch("airflow.cli.commands.db_manager_command.settings.Session", autospec=True)
    @mock.patch("airflow.cli.commands.db_manager_command._get_db_manager")
    def test_cli_migrate_db_calls_upgradedb_with_args(self, mock_get_db_manager, mock_session):
        """``migrate`` forwards revision/SQL-only flags to ``upgradedb``."""
        manager_name = "path.to.FakeDBManager"
        mock_get_db_manager.return_value = FakeDBManager
        args = self.parser.parse_args(
            [
                "db-manager",
                "migrate",
                manager_name,
                "--to-revision",
                "abc",
                "--from-revision",
                "def",
                "--show-sql-only",
            ]
        )
        db_manager_command.migratedb(args)
        assert FakeDBManager.last_instance is not None
        FakeDBManager.last_instance._upgradedb_mock.assert_called_once_with(
            to_revision="abc", from_revision="def", show_sql_only=True
        )

    @mock.patch("airflow.cli.commands.db_manager_command.settings.Session", autospec=True)
    @mock.patch("airflow.cli.commands.db_manager_command._get_db_manager")
    def test_cli_downgrade_db_calls_downgrade_with_args(self, mock_get_db_manager, mock_session):
        """``downgrade`` forwards revision/SQL-only flags to ``downgrade``."""
        manager_name = "path.to.FakeDBManager"
        mock_get_db_manager.return_value = FakeDBManager
        args = self.parser.parse_args(
            [
                "db-manager",
                "downgrade",
                manager_name,
                "--to-revision",
                "abc",
                "--from-revision",
                "def",
                "--show-sql-only",
            ]
        )
        db_manager_command.downgrade(args)
        assert FakeDBManager.last_instance is not None
        FakeDBManager.last_instance._downgrade_mock.assert_called_once_with(
            to_revision="abc", from_revision="def", show_sql_only=True
        )

    @mock.patch("airflow.cli.commands.db_manager_command.settings.Session", autospec=True)
    @mock.patch("airflow.cli.commands.db_manager_command._get_db_manager")
    @mock.patch("airflow.cli.commands.db_manager_command.run_db_downgrade_command")
    def test_cli_downgrade_db(self, mock_run_db_downgrade_cmd, mock_get_db_manager, mock_session):
        """``downgrade`` passes the instance's ``downgrade`` and heads map to the runner."""
        manager_name = "path.to.FakeDBManager"
        FakeDBManager.revision_heads_map = {"2.10.0": "22ed7efa9da2"}
        mock_get_db_manager.return_value = FakeDBManager
        args = self.parser.parse_args(["db-manager", "downgrade", manager_name])
        db_manager_command.downgrade(args)
        mock_get_db_manager.assert_called_once_with(manager_name)
        assert len(FakeDBManager.instances) == 1
        called_args, called_kwargs = mock_run_db_downgrade_cmd.call_args
        assert called_args[0] is args
        # Verify the bound method refers to the instance's downgrade implementation
        assert called_args[1].__self__ is FakeDBManager.last_instance
        assert called_args[1].__func__ is FakeDBManager.downgrade
        assert called_kwargs["revision_heads_map"] == {"2.10.0": "22ed7efa9da2"}

    @conf_vars({("database", "external_db_managers"): "path.to.manager.TestDBManager"})
    @mock.patch("airflow.cli.commands.db_manager_command.import_string")
    def test_get_db_manager(self, mock_import_string):
        """A manager listed in ``external_db_managers`` is imported and returned."""
        manager_name = "path.to.manager.TestDBManager"
        db_manager = db_manager_command._get_db_manager(manager_name)
        mock_import_string.assert_called_once_with("path.to.manager.TestDBManager")
        assert db_manager is not None

    @conf_vars({("database", "external_db_managers"): "path.to.manager.TestDBManager"})
    @mock.patch("airflow.cli.commands.db_manager_command.import_string")
    def test_get_db_manager_raises(self, mock_import_string):
        """A manager not present in ``external_db_managers`` exits with an error."""
        manager_name = "NonExistentDBManager"
        with pytest.raises(SystemExit):
            db_manager_command._get_db_manager(manager_name)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/cli/commands/test_db_manager_command.py",
"license": "Apache License 2.0",
"lines": 197,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/tests/unit/api_fastapi/execution_api/versions/v2025_04_28/test_xcom.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow.models.xcom import XComModel
from airflow.providers.standard.operators.empty import EmptyOperator
pytestmark = pytest.mark.db_test
class TestXComsGetEndpoint:
    """Tests for the Execution API XCom GET endpoint's ``offset`` handling."""

    @pytest.mark.parametrize(
        ("offset", "expected_status", "expected_json"),
        [
            pytest.param(
                -4,
                404,
                {
                    "detail": {
                        "reason": "not_found",
                        "message": (
                            "XCom with key='xcom_1' offset=-4 not found "
                            "for task 'task' in DAG run 'runid' of 'dag'"
                        ),
                    },
                },
                id="-4",
            ),
            pytest.param(-3, 200, {"key": "xcom_1", "value": "f"}, id="-3"),
            pytest.param(-2, 200, {"key": "xcom_1", "value": "o"}, id="-2"),
            pytest.param(-1, 200, {"key": "xcom_1", "value": "b"}, id="-1"),
            pytest.param(0, 200, {"key": "xcom_1", "value": "f"}, id="0"),
            pytest.param(1, 200, {"key": "xcom_1", "value": "o"}, id="1"),
            pytest.param(2, 200, {"key": "xcom_1", "value": "b"}, id="2"),
            pytest.param(
                3,
                404,
                {
                    "detail": {
                        "reason": "not_found",
                        "message": (
                            "XCom with key='xcom_1' offset=3 not found "
                            "for task 'task' in DAG run 'runid' of 'dag'"
                        ),
                    },
                },
                id="3",
            ),
        ],
    )
    def test_xcom_get_with_offset(
        self,
        client,
        dag_maker,
        session,
        offset,
        expected_status,
        expected_json,
    ):
        """Fetch a single mapped-task XCom by positive or negative offset.

        Three XCom rows ("f", "o", "b") are stored for map indexes 0, 2 and 3;
        the ``None`` at map index 1 is deliberately skipped, so offsets address
        only the stored rows. Offsets outside the stored range must yield a 404
        with a structured error payload.
        """
        xcom_values = ["f", None, "o", "b"]

        class MyOperator(EmptyOperator):
            # One expandable argument so each list item maps to one task instance.
            def __init__(self, *, x, **kwargs):
                super().__init__(**kwargs)
                self.x = x

        with dag_maker(dag_id="dag"):
            MyOperator.partial(task_id="task").expand(x=xcom_values)
        dag_run = dag_maker.create_dagrun(run_id="runid")
        tis = {ti.map_index: ti for ti in dag_run.task_instances}
        for map_index, db_value in enumerate(xcom_values):
            if db_value is None:  # We don't put None to XCom.
                continue
            ti = tis[map_index]
            x = XComModel(
                key="xcom_1",
                value=db_value,
                dag_run_id=ti.dag_run.id,
                run_id=ti.run_id,
                task_id=ti.task_id,
                dag_id=ti.dag_id,
                map_index=map_index,
            )
            session.add(x)
        session.commit()
        response = client.get(f"/execution/xcoms/dag/runid/task/xcom_1?offset={offset}")
        assert response.status_code == expected_status
        assert response.json() == expected_json
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/api_fastapi/execution_api/versions/v2025_04_28/test_xcom.py",
"license": "Apache License 2.0",
"lines": 97,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/alibaba/src/airflow/providers/alibaba/cloud/exceptions.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
class MaxComputeConfigurationException(Exception):
    """Error signaling that the MaxCompute project or endpoint configuration is missing or invalid."""
| {
"repo_id": "apache/airflow",
"file_path": "providers/alibaba/src/airflow/providers/alibaba/cloud/exceptions.py",
"license": "Apache License 2.0",
"lines": 19,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/alibaba/src/airflow/providers/alibaba/cloud/hooks/base_alibaba.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any, NamedTuple
from airflow.providers.common.compat.sdk import BaseHook
class AccessKeyCredentials(NamedTuple):
    """
    Immutable pair of Alibaba Cloud Access Key credentials.

    :param access_key_id: The Access Key ID used to authenticate with Alibaba Cloud.
    :param access_key_secret: The Access Key Secret used to authenticate with Alibaba Cloud.
    """

    access_key_id: str
    access_key_secret: str
class AlibabaBaseHook(BaseHook):
    """
    A base hook for Alibaba Cloud-related hooks.

    This hook provides a common interface for authenticating using Alibaba Cloud
    credentials. Supports Access Key-based authentication.

    :param alibabacloud_conn_id: The connection ID to use when fetching connection info.
    """

    conn_name_attr = "alibabacloud_conn_id"
    default_conn_name = "alibabacloud_default"
    conn_type = "alibaba_cloud"
    hook_name = "Alibaba Cloud"

    def __init__(
        self,
        alibabacloud_conn_id: str = "alibabacloud_default",
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        # ``conn_name_attr`` advertises "alibabacloud_conn_id" as the attribute
        # holding the connection id, so that attribute must actually exist on the
        # instance (Airflow reads it via getattr). Previously only the
        # ``alibaba_cloud_conn_id`` spelling was set; keep both for backward
        # compatibility with existing callers.
        self.alibabacloud_conn_id = alibabacloud_conn_id
        self.alibaba_cloud_conn_id = alibabacloud_conn_id
        # Connection "extra" JSON, decoded once at construction time.
        self.extras: dict = self.get_connection(self.alibaba_cloud_conn_id).extra_dejson

    @classmethod
    def get_connection_form_widgets(cls) -> dict[str, Any]:
        """Return connection widgets to add to connection form."""
        from flask_appbuilder.fieldwidgets import BS3PasswordFieldWidget
        from flask_babel import lazy_gettext
        from wtforms import PasswordField

        return {
            "access_key_id": PasswordField(lazy_gettext("Access Key ID"), widget=BS3PasswordFieldWidget()),
            "access_key_secret": PasswordField(
                lazy_gettext("Access Key Secret"), widget=BS3PasswordFieldWidget()
            ),
        }

    @classmethod
    def get_ui_field_behaviour(cls) -> dict[str, Any]:
        """Return custom field behaviour."""
        return super().get_ui_field_behaviour()

    def _get_field(self, field_name: str, default: Any = None) -> Any:
        """Fetch *field_name* from the connection extras, falling back to *default* when absent or None."""
        value = self.extras.get(field_name)
        return value if value is not None else default

    def get_access_key_credential(self) -> AccessKeyCredentials:
        """
        Fetch Access Key credentials for authentication.

        :return: AccessKeyCredentials object containing access_key_id and access_key_secret.
        :raises ValueError: if the access key id or secret is missing from the
            connection extras.
        """
        access_key_id = self._get_field("access_key_id", None)
        access_key_secret = self._get_field("access_key_secret", None)
        if not access_key_id:
            raise ValueError("No access_key_id is specified.")
        if not access_key_secret:
            raise ValueError("No access_key_secret is specified.")
        return AccessKeyCredentials(access_key_id, access_key_secret)
| {
"repo_id": "apache/airflow",
"file_path": "providers/alibaba/src/airflow/providers/alibaba/cloud/hooks/base_alibaba.py",
"license": "Apache License 2.0",
"lines": 78,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/alibaba/src/airflow/providers/alibaba/cloud/hooks/maxcompute.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import functools
from collections.abc import Callable
from typing import TYPE_CHECKING, Any, TypeVar
from odps import ODPS
from airflow.providers.alibaba.cloud.exceptions import MaxComputeConfigurationException
from airflow.providers.alibaba.cloud.hooks.base_alibaba import AlibabaBaseHook
if TYPE_CHECKING:
from odps.models import Instance
RT = TypeVar("RT")
def fallback_to_default_project_endpoint(func: "Callable[..., RT]") -> "Callable[..., RT]":
    """
    Decorator supplying default MaxCompute ``project``/``endpoint`` keyword arguments.

    If ``project`` or ``endpoint`` is omitted or passed as ``None``, the value is
    taken from the attribute of the same name on the hook instance. If the value
    is still empty after the fallback, a configuration error is raised.

    :param func: function to wrap
    :return: result of the function call
    """

    @functools.wraps(func)
    def inner_wrapper(self, **kwargs) -> "RT":
        for arg_name in ("project", "endpoint"):
            supplied = kwargs.get(arg_name)
            # Only substitute the hook-level default when the caller gave nothing.
            kwargs[arg_name] = getattr(self, arg_name) if supplied is None else supplied
            if not kwargs[arg_name]:
                raise MaxComputeConfigurationException(
                    f'"{arg_name}" must be passed either as '
                    "keyword parameter or as extra "
                    "in the MaxCompute connection definition. Both are not set!"
                )
        return func(self, **kwargs)

    return inner_wrapper
class MaxComputeHook(AlibabaBaseHook):
    """
    Interact with Alibaba MaxCompute (previously known as ODPS).

    :param maxcompute_conn_id: The connection ID to use when fetching connection info.
    """

    conn_name_attr = "maxcompute_conn_id"
    default_conn_name = "maxcompute_default"
    conn_type = "maxcompute"
    hook_name = "MaxCompute"

    def __init__(self, maxcompute_conn_id: str = "maxcompute_default", **kwargs) -> None:
        self.maxcompute_conn_id = maxcompute_conn_id
        # Reuse the base hook's credential handling by mapping the MaxCompute
        # connection id onto the generic Alibaba Cloud connection id.
        super().__init__(alibabacloud_conn_id=maxcompute_conn_id, **kwargs)

    @classmethod
    def get_connection_form_widgets(cls) -> dict[str, Any]:
        """Return connection widgets to add to connection form."""
        from flask_appbuilder.fieldwidgets import BS3TextFieldWidget
        from flask_babel import lazy_gettext
        from wtforms import StringField

        # Extend the base widgets (access key id/secret) with MaxCompute-specific fields.
        connection_form_widgets = super().get_connection_form_widgets()
        connection_form_widgets["project"] = StringField(
            lazy_gettext("Project"),
            widget=BS3TextFieldWidget(),
        )
        connection_form_widgets["endpoint"] = StringField(
            lazy_gettext("Endpoint"),
            widget=BS3TextFieldWidget(),
        )
        return connection_form_widgets

    @classmethod
    def get_ui_field_behaviour(cls) -> dict[str, Any]:
        """Return custom field behaviour."""
        return {
            "hidden_fields": ["host", "schema", "login", "password", "port", "extra"],
            "relabeling": {},
        }

    @property
    def project(self) -> str:
        """
        Returns the default project from the connection extras.

        :return: ID of the project
        """
        return self._get_field("project")

    @property
    def endpoint(self) -> str:
        """
        Returns the default MaxCompute Endpoint from the connection extras.

        :return: Endpoint of the MaxCompute project
        """
        return self._get_field("endpoint")

    @fallback_to_default_project_endpoint
    def get_client(self, *, project: str, endpoint: str) -> ODPS:
        """
        Get an authenticated MaxCompute ODPS Client.

        :param project: Project name the client acts on behalf of.
        :param endpoint: MaxCompute service endpoint to connect to.
        :return: An authenticated ``ODPS`` client.
        """
        creds = self.get_access_key_credential()
        return ODPS(
            creds.access_key_id,
            creds.access_key_secret,
            project=project,
            endpoint=endpoint,
        )

    @fallback_to_default_project_endpoint
    def run_sql(
        self,
        *,
        sql: str,
        project: str | None = None,
        endpoint: str | None = None,
        priority: int | None = None,
        running_cluster: str | None = None,
        hints: dict[str, Any] | None = None,
        aliases: dict[str, str] | None = None,
        default_schema: str | None = None,
        quota_name: str | None = None,
    ) -> Instance:
        """
        Run a given SQL statement in MaxCompute.

        The method will submit your SQL statement to MaxCompute
        and return the corresponding task Instance object.

        .. seealso:: https://pyodps.readthedocs.io/en/latest/base-sql.html#execute-sql

        :param sql: The SQL statement to run.
        :param project: The project ID to use.
        :param endpoint: The endpoint to use.
        :param priority: The priority of the SQL statement ranges from 0 to 9,
            applicable to projects with the job priority feature enabled.
            Takes precedence over the `odps.instance.priority` setting from `hints`.
            Defaults to 9.
            See https://www.alibabacloud.com/help/en/maxcompute/user-guide/job-priority
            for details.
        :param running_cluster: The cluster to run the SQL statement on.
        :param hints: Hints for setting runtime parameters. See
            https://pyodps.readthedocs.io/en/latest/base-sql.html#id4 and
            https://www.alibabacloud.com/help/en/maxcompute/user-guide/flag-parameters
            for details.
        :param aliases: Aliases for the SQL statement.
        :param default_schema: The default schema to use.
        :param quota_name: The quota name to use.
            Defaults to project default quota if not specified.
        :return: The MaxCompute task instance.
        """
        client = self.get_client(project=project, endpoint=endpoint)
        # An explicit ``priority`` argument wins; otherwise fall back to the
        # "odps.instance.priority" value when hints were supplied.
        if priority is None and hints is not None:
            priority = hints.get("odps.instance.priority")
        return client.run_sql(
            sql=sql,
            priority=priority,
            running_cluster=running_cluster,
            hints=hints,
            aliases=aliases,
            default_schema=default_schema,
            quota_name=quota_name,
        )

    @fallback_to_default_project_endpoint
    def get_instance(
        self,
        *,
        instance_id: str,
        project: str | None = None,
        endpoint: str | None = None,
    ) -> Instance:
        """
        Get a MaxCompute task instance.

        .. seealso:: https://pyodps.readthedocs.io/en/latest/base-instances.html#instances

        :param instance_id: The ID of the instance to get.
        :param project: The project ID to use.
        :param endpoint: The endpoint to use.
        :return: The MaxCompute task instance.
        :raises ValueError: If the instance does not exist.
        """
        client = self.get_client(project=project, endpoint=endpoint)
        return client.get_instance(id_=instance_id, project=project)

    @fallback_to_default_project_endpoint
    def stop_instance(
        self,
        *,
        instance_id: str,
        project: str | None = None,
        endpoint: str | None = None,
    ) -> None:
        """
        Stop a MaxCompute task instance.

        :param instance_id: The ID of the instance to stop.
        :param project: The project ID to use.
        :param endpoint: The endpoint to use.
        """
        client = self.get_client(project=project, endpoint=endpoint)
        try:
            client.stop_instance(id_=instance_id, project=project)
            self.log.info("Instance %s stop requested.", instance_id)
        except Exception:
            # Log the full traceback, then propagate so callers can react.
            self.log.exception("Failed to stop instance %s.", instance_id)
            raise
| {
"repo_id": "apache/airflow",
"file_path": "providers/alibaba/src/airflow/providers/alibaba/cloud/hooks/maxcompute.py",
"license": "Apache License 2.0",
"lines": 203,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/alibaba/src/airflow/providers/alibaba/cloud/links/maxcompute.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.providers.common.compat.sdk import BaseOperatorLink, XCom
if TYPE_CHECKING:
from airflow.providers.common.compat.sdk import BaseOperator, TaskInstanceKey
from airflow.sdk import Context
class MaxComputeLogViewLink(BaseOperatorLink):
    """Operator extra link pointing at the MaxCompute Log View page for a task run."""

    name = "MaxCompute Log View"
    key = "maxcompute_log_view"

    def get_link(
        self,
        operator: BaseOperator,
        *,
        ti_key: TaskInstanceKey,
    ) -> str:
        # An empty string tells the UI there is no link to render.
        return XCom.get_value(key=self.key, ti_key=ti_key) or ""

    @staticmethod
    def persist(
        context: Context,
        log_view_url: str,
    ):
        """
        Store the log view URL in XCom so ``get_link`` can retrieve it later.

        :param context: The context of the task instance.
        :param log_view_url: The log view URL to persist.
        """
        context["task_instance"].xcom_push(
            key=MaxComputeLogViewLink.key,
            value=log_view_url,
        )
| {
"repo_id": "apache/airflow",
"file_path": "providers/alibaba/src/airflow/providers/alibaba/cloud/links/maxcompute.py",
"license": "Apache License 2.0",
"lines": 50,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/alibaba/src/airflow/providers/alibaba/cloud/operators/maxcompute.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Alibaba Cloud MaxCompute operators."""
from __future__ import annotations
from collections.abc import Sequence
from typing import TYPE_CHECKING
from airflow.providers.alibaba.cloud.hooks.maxcompute import MaxComputeHook
from airflow.providers.alibaba.cloud.links.maxcompute import MaxComputeLogViewLink
from airflow.providers.common.compat.sdk import BaseOperator
if TYPE_CHECKING:
from odps.models import Instance
from airflow.sdk import Context
class MaxComputeSQLOperator(BaseOperator):
    """
    Executes an SQL statement in MaxCompute.

    Waits for the SQL task instance to complete and returns instance id.

    :param sql: The SQL statement to run.
    :param project: The project ID to use.
    :param endpoint: The endpoint to use.
    :param priority: The priority of the SQL statement ranges from 0 to 9,
        applicable to projects with the job priority feature enabled.
        Takes precedence over the `odps.instance.priority` setting from `hints`.
        Defaults to 9.
        See https://www.alibabacloud.com/help/en/maxcompute/user-guide/job-priority
        for details.
    :param running_cluster: The cluster to run the SQL statement on.
    :param hints: Hints for setting runtime parameters. See
        https://pyodps.readthedocs.io/en/latest/base-sql.html#id4 and
        https://www.alibabacloud.com/help/en/maxcompute/user-guide/flag-parameters
        for details.
    :param aliases: Aliases for the SQL statement.
    :param default_schema: The default schema to use.
    :param quota_name: The quota name to use.
        Defaults to project default quota if not specified.
    :param maxcompute_conn_id: The connection ID to use. Defaults to
        `maxcompute_default` if not specified.
    :param cancel_on_kill: Flag which indicates whether to stop running instance
        or not when task is killed. Default is True.
    """

    template_fields: Sequence[str] = (
        "sql",
        "project",
        "endpoint",
        "priority",
        "running_cluster",
        "hints",
        "aliases",
        "default_schema",
        "quota_name",
        "maxcompute_conn_id",
    )
    template_ext: Sequence[str] = (".sql",)
    template_fields_renderers = {"sql": "sql"}
    operator_extra_links = (MaxComputeLogViewLink(),)

    def __init__(
        self,
        *,
        sql: str,
        project: str | None = None,
        endpoint: str | None = None,
        priority: int | None = None,
        running_cluster: str | None = None,
        hints: dict[str, str] | None = None,
        aliases: dict[str, str] | None = None,
        default_schema: str | None = None,
        quota_name: str | None = None,
        maxcompute_conn_id: str = "maxcompute_default",
        cancel_on_kill: bool = True,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.sql = sql
        self.project = project
        self.endpoint = endpoint
        self.priority = priority
        self.running_cluster = running_cluster
        self.hints = hints
        self.aliases = aliases
        self.default_schema = default_schema
        self.quota_name = quota_name
        self.maxcompute_conn_id = maxcompute_conn_id
        self.cancel_on_kill = cancel_on_kill
        # Populated in execute(); kept on the operator so on_kill() can reach them.
        self.hook: MaxComputeHook | None = None
        self.instance: Instance | None = None

    def execute(self, context: Context) -> str:
        """
        Submit the SQL statement, wait for completion, and return the instance id.

        The Log View URL is persisted to XCom before waiting so the link is
        available in the UI while the statement is still running.
        """
        self.hook = MaxComputeHook(maxcompute_conn_id=self.maxcompute_conn_id)
        self.instance = self.hook.run_sql(
            sql=self.sql,
            project=self.project,
            endpoint=self.endpoint,
            priority=self.priority,
            running_cluster=self.running_cluster,
            hints=self.hints,
            aliases=self.aliases,
            default_schema=self.default_schema,
            quota_name=self.quota_name,
        )
        MaxComputeLogViewLink.persist(context=context, log_view_url=self.instance.get_logview_address())
        self.instance.wait_for_success()
        return self.instance.id

    def on_kill(self) -> None:
        """Stop the running MaxCompute instance on task kill, if ``cancel_on_kill`` is set."""
        instance_id = self.instance.id if self.instance else None
        # Only attempt a stop when an instance was actually started and the
        # operator is configured to cancel on kill.
        if instance_id and self.hook and self.cancel_on_kill:
            self.hook.stop_instance(
                instance_id=instance_id,
                project=self.project,
                endpoint=self.endpoint,
            )
        else:
            self.log.info("Skipping to stop instance: %s:%s.%s", self.project, self.endpoint, instance_id)
| {
"repo_id": "apache/airflow",
"file_path": "providers/alibaba/src/airflow/providers/alibaba/cloud/operators/maxcompute.py",
"license": "Apache License 2.0",
"lines": 125,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/alibaba/tests/system/alibaba/example_maxcompute_sql.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import os
from datetime import datetime

from airflow.models.dag import DAG
from airflow.providers.alibaba.cloud.operators.maxcompute import MaxComputeSQLOperator

# NOTE(review): ENV_ID is read from the environment but never used in this DAG —
# confirm it is intentionally kept for parity with the other system tests.
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
DAG_ID = "maxcompute_sql_dag"
SQL = "SELECT 1"

# A minimal system-test DAG that runs a single trivial SQL statement on MaxCompute.
with DAG(
    dag_id=DAG_ID,
    start_date=datetime(2021, 1, 1),
    schedule="@once",
    tags=["example", "maxcompute"],
    catchup=False,
) as dag:
    run_sql = MaxComputeSQLOperator(
        task_id="run_sql",
        sql=SQL,
    )

    from tests_common.test_utils.watcher import watcher

    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()

from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/alibaba/tests/system/alibaba/example_maxcompute_sql.py",
"license": "Apache License 2.0",
"lines": 42,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/alibaba/tests/unit/alibaba/cloud/hooks/test_base_alibaba.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
from airflow.providers.alibaba.cloud.hooks.base_alibaba import AccessKeyCredentials, AlibabaBaseHook
BASE_ALIBABA_HOOK_MODULE = "airflow.providers.alibaba.cloud.hooks.base_alibaba.{}"
MOCK_MAXCOMPUTE_CONN_ID = "mock_id"
MOCK_ACCESS_KEY_ID = "mock_access_key_id"
MOCK_ACCESS_KEY_SECRET = "mock_access_key_secret"
class TestAlibabaBaseHook:
    """Unit tests for ``AlibabaBaseHook`` credential handling."""

    def setup_method(self):
        # Patch get_connection so constructing the hook never touches a real
        # Airflow connection; the extras carry the mocked credentials.
        with mock.patch(
            BASE_ALIBABA_HOOK_MODULE.format("AlibabaBaseHook.get_connection"),
        ) as mock_get_connection:
            mock_conn = mock.MagicMock()
            mock_conn.extra_dejson = {
                "access_key_id": MOCK_ACCESS_KEY_ID,
                "access_key_secret": MOCK_ACCESS_KEY_SECRET,
            }
            mock_get_connection.return_value = mock_conn
            self.hook = AlibabaBaseHook(alibabacloud_conn_id=MOCK_MAXCOMPUTE_CONN_ID)

    def test_get_access_key_credential(self):
        """The hook returns the credentials stored in the connection extras."""
        # BUG FIX: the original first built an AccessKeyCredentials object and
        # immediately overwrote it with the hook's return value — dead code removed.
        creds = self.hook.get_access_key_credential()
        assert creds.access_key_id == MOCK_ACCESS_KEY_ID
        assert creds.access_key_secret == MOCK_ACCESS_KEY_SECRET
| {
"repo_id": "apache/airflow",
"file_path": "providers/alibaba/tests/unit/alibaba/cloud/hooks/test_base_alibaba.py",
"license": "Apache License 2.0",
"lines": 44,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/alibaba/tests/unit/alibaba/cloud/hooks/test_maxcompute.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
from airflow.providers.alibaba.cloud.hooks.maxcompute import MaxComputeHook
MAXCOMPUTE_HOOK_MODULE = "airflow.providers.alibaba.cloud.hooks.maxcompute.MaxComputeHook.{}"
MOCK_MAXCOMPUTE_CONN_ID = "mock_id"
MOCK_MAXCOMPUTE_PROJECT = "mock_project"
MOCK_MAXCOMPUTE_ENDPOINT = "mock_endpoint"
class TestMaxComputeHook:
    """Unit tests for ``MaxComputeHook``."""

    def setup_method(self):
        # Patch get_connection so constructing the hook never touches a real
        # Airflow connection; project/endpoint come from the mocked extras.
        with mock.patch(
            MAXCOMPUTE_HOOK_MODULE.format("get_connection"),
        ) as mock_get_connection:
            mock_conn = mock.MagicMock()
            mock_conn.extra_dejson = {
                "access_key_id": "mock_access_key_id",
                "access_key_secret": "mock_access_key_secret",
                "project": MOCK_MAXCOMPUTE_PROJECT,
                "endpoint": MOCK_MAXCOMPUTE_ENDPOINT,
            }
            mock_get_connection.return_value = mock_conn
            self.hook = MaxComputeHook(maxcompute_conn_id=MOCK_MAXCOMPUTE_CONN_ID)

    @mock.patch(MAXCOMPUTE_HOOK_MODULE.format("get_client"))
    def test_run_sql(self, mock_get_client):
        """run_sql forwards all arguments to the ODPS client and returns its instance."""
        mock_instance = mock.MagicMock()
        mock_client = mock.MagicMock()
        mock_client.run_sql.return_value = mock_instance
        mock_get_client.return_value = mock_client

        sql = "SELECT 1"
        priority = 1
        running_cluster = "mock_running_cluster"
        hints = {"hint_key": "hint_value"}
        aliases = {"alias_key": "alias_value"}
        default_schema = "mock_default_schema"
        quota_name = "mock_quota_name"

        instance = self.hook.run_sql(
            sql=sql,
            priority=priority,
            running_cluster=running_cluster,
            hints=hints,
            aliases=aliases,
            default_schema=default_schema,
            quota_name=quota_name,
        )

        assert instance == mock_instance
        # BUG FIX: the original wrote
        # `assert mock_client.run_sql.asssert_called_once_with(...)`.
        # A MagicMock auto-creates the misspelled attribute and calling it
        # returns a truthy child mock, so that assertion could never fail and
        # the call arguments were never actually checked.
        mock_client.run_sql.assert_called_once_with(
            sql=sql,
            priority=priority,
            running_cluster=running_cluster,
            hints=hints,
            aliases=aliases,
            default_schema=default_schema,
            quota_name=quota_name,
        )

    @mock.patch(MAXCOMPUTE_HOOK_MODULE.format("get_client"))
    def test_get_instance(self, mock_get_client):
        """get_instance returns the instance looked up on the ODPS client."""
        mock_client = mock.MagicMock()
        mock_client.exist_instance.return_value = True
        mock_instance = mock.MagicMock()
        mock_client.get_instance.return_value = mock_instance
        mock_get_client.return_value = mock_client

        instance_id = "mock_instance_id"
        instance = self.hook.get_instance(
            instance_id=instance_id,
            project=MOCK_MAXCOMPUTE_PROJECT,
            endpoint=MOCK_MAXCOMPUTE_ENDPOINT,
        )

        # The endpoint is consumed by get_client; only id_ and project reach
        # the client's get_instance call.
        mock_client.get_instance.assert_called_once_with(
            id_=instance_id,
            project=MOCK_MAXCOMPUTE_PROJECT,
        )
        assert instance == mock_instance

    @mock.patch(MAXCOMPUTE_HOOK_MODULE.format("get_client"))
    def test_stop_instance_success(self, mock_get_client):
        """stop_instance delegates to the ODPS client's stop_instance."""
        mock_client = mock.MagicMock()
        mock_get_client.return_value = mock_client

        instance_id = "mock_instance_id"
        self.hook.stop_instance(
            instance_id=instance_id,
            project=MOCK_MAXCOMPUTE_PROJECT,
            endpoint=MOCK_MAXCOMPUTE_ENDPOINT,
        )

        mock_client.stop_instance.assert_called_once()
| {
"repo_id": "apache/airflow",
"file_path": "providers/alibaba/tests/unit/alibaba/cloud/hooks/test_maxcompute.py",
"license": "Apache License 2.0",
"lines": 99,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/alibaba/tests/unit/alibaba/cloud/links/test_maxcompute.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
import pytest
from airflow.providers.alibaba.cloud.links.maxcompute import MaxComputeLogViewLink
MAXCOMPUTE_LINK_MODULE = "airflow.providers.alibaba.cloud.links.maxcompute.{}"
MOCK_TASK_ID = "run_sql"
MOCK_SQL = "SELECT 1"
MOCK_INSTANCE_ID = "mock_instance_id"
class TestMaxComputeLogViewLink:
    """Unit tests for the ``MaxComputeLogViewLink`` extra link."""

    @pytest.mark.parametrize(
        ("xcom_value", "expected_link"),
        [
            pytest.param("http://mock_url.com", "http://mock_url.com", id="has-log-link"),
            pytest.param(None, "", id="no-log-link"),
        ],
    )
    @mock.patch(MAXCOMPUTE_LINK_MODULE.format("XCom"))
    def test_get_link(self, xcom_mock, xcom_value, expected_link):
        """get_link returns the stored URL, or an empty string when nothing was pushed."""
        xcom_mock.get_value.return_value = xcom_value
        operator_mock = mock.MagicMock()
        ti_key_mock = mock.MagicMock()
        result = MaxComputeLogViewLink().get_link(
            operator=operator_mock,
            ti_key=ti_key_mock,
        )
        assert result == expected_link

    def test_persist(self):
        """persist pushes the log view URL to XCom under the link's key."""
        ti_mock = mock.MagicMock()
        url = "mock_url"
        MaxComputeLogViewLink.persist(
            context={"task_instance": ti_mock},
            log_view_url=url,
        )
        ti_mock.xcom_push.assert_called_once_with(
            key=MaxComputeLogViewLink.key,
            value=url,
        )
| {
"repo_id": "apache/airflow",
"file_path": "providers/alibaba/tests/unit/alibaba/cloud/links/test_maxcompute.py",
"license": "Apache License 2.0",
"lines": 53,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/alibaba/tests/unit/alibaba/cloud/operators/test_maxcompute.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
from airflow.providers.alibaba.cloud.operators.maxcompute import MaxComputeSQLOperator
MAXCOMPUTE_OPERATOR_MODULE = "airflow.providers.alibaba.cloud.operators.maxcompute.{}"
MOCK_TASK_ID = "run_sql"
MOCK_SQL = "SELECT 1"
MOCK_INSTANCE_ID = "mock_instance_id"
class TestMaxComputeSQLOperator:
    """Unit tests for ``MaxComputeSQLOperator``."""

    @mock.patch(MAXCOMPUTE_OPERATOR_MODULE.format("MaxComputeLogViewLink"))
    @mock.patch(MAXCOMPUTE_OPERATOR_MODULE.format("MaxComputeHook"))
    def test_execute(self, mock_hook, mock_log_view_link):
        """execute runs the SQL via the hook, persists the log view link, and returns the instance id."""
        instance_mock = mock.MagicMock()
        instance_mock.id = MOCK_INSTANCE_ID
        instance_mock.get_logview_address.return_value = "http://mock_logview_address"
        mock_hook.return_value.run_sql.return_value = instance_mock

        op = MaxComputeSQLOperator(
            task_id=MOCK_TASK_ID,
            sql=MOCK_SQL,
        )
        instance_id = op.execute(context=mock.MagicMock())

        assert instance_id == instance_mock.id
        mock_hook.return_value.run_sql.assert_called_once_with(
            project=op.project,
            sql=op.sql,
            endpoint=op.endpoint,
            priority=op.priority,
            running_cluster=op.running_cluster,
            hints=op.hints,
            aliases=op.aliases,
            default_schema=op.default_schema,
            quota_name=op.quota_name,
        )
        mock_log_view_link.persist.assert_called_once_with(
            context=mock.ANY,
            log_view_url=instance_mock.get_logview_address.return_value,
        )

    @mock.patch(MAXCOMPUTE_OPERATOR_MODULE.format("MaxComputeHook"))
    def test_on_kill(self, mock_hook):
        """on_kill stops the instance only when cancel_on_kill is enabled."""
        instance_mock = mock.MagicMock()
        instance_mock.id = MOCK_INSTANCE_ID
        mock_hook.return_value.run_sql.return_value = instance_mock

        op = MaxComputeSQLOperator(
            task_id=MOCK_TASK_ID,
            sql=MOCK_SQL,
            cancel_on_kill=False,
        )
        op.execute(context=mock.MagicMock())

        op.on_kill()
        # BUG FIX: the original asserted `cancel_job.assert_not_called()`, but the
        # operator never calls any `cancel_job` method — that assertion passed
        # vacuously. on_kill cancels via `stop_instance`, so that is the method
        # that must not have been called while cancel_on_kill is False.
        mock_hook.return_value.stop_instance.assert_not_called()

        op.cancel_on_kill = True
        op.on_kill()
        mock_hook.return_value.stop_instance.assert_called_once_with(
            instance_id=instance_mock.id, project=op.project, endpoint=op.endpoint
        )
| {
"repo_id": "apache/airflow",
"file_path": "providers/alibaba/tests/unit/alibaba/cloud/operators/test_maxcompute.py",
"license": "Apache License 2.0",
"lines": 71,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/alibaba/tests/unit/alibaba/cloud/test_exceptions.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow.providers.alibaba.cloud.hooks.maxcompute import MaxComputeConfigurationException
def test_maxcompute_configuration_exception_message():
    """The exception must carry the message it was raised with, verbatim."""
    expected_message = "Project or endpoint missing"
    with pytest.raises(MaxComputeConfigurationException) as exc_info:
        raise MaxComputeConfigurationException(expected_message)
    assert str(exc_info.value) == expected_message
| {
"repo_id": "apache/airflow",
"file_path": "providers/alibaba/tests/unit/alibaba/cloud/test_exceptions.py",
"license": "Apache License 2.0",
"lines": 24,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/api_fastapi/core_api/services/public/task_instances.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from collections.abc import Sequence
import structlog
from fastapi import HTTPException, Query, status
from fastapi.exceptions import RequestValidationError
from pydantic import ValidationError
from sqlalchemy import select, tuple_
from sqlalchemy.orm import joinedload
from sqlalchemy.orm.session import Session
from airflow.api_fastapi.common.dagbag import DagBagDep, get_latest_version_of_dag
from airflow.api_fastapi.common.db.common import SessionDep
from airflow.api_fastapi.core_api.datamodels.common import (
BulkActionNotOnExistence,
BulkActionResponse,
BulkBody,
BulkCreateAction,
BulkDeleteAction,
BulkUpdateAction,
)
from airflow.api_fastapi.core_api.datamodels.task_instances import BulkTaskInstanceBody, PatchTaskInstanceBody
from airflow.api_fastapi.core_api.security import GetUserDep
from airflow.api_fastapi.core_api.services.public.common import BulkService
from airflow.listeners.listener import get_listener_manager
from airflow.models.taskinstance import TaskInstance as TI
from airflow.serialization.definitions.dag import SerializedDAG
from airflow.utils.state import TaskInstanceState
log = structlog.get_logger(__name__)
def _patch_ti_validate_request(
    dag_id: str,
    dag_run_id: str,
    task_id: str,
    dag_bag: DagBagDep,
    body: PatchTaskInstanceBody,
    session: SessionDep,
    map_index: int | None = -1,
    update_mask: list[str] | None = Query(None),
) -> tuple[SerializedDAG, list[TI], dict]:
    """Validate a task-instance PATCH request and load the affected task instances.

    :param dag_id: DAG the task belongs to
    :param dag_run_id: run whose task instances are patched
    :param task_id: task to patch
    :param dag_bag: DAG bag used to resolve the latest DAG version
    :param body: request body with the fields to update
    :param session: database session
    :param map_index: specific map index to patch; ``None`` selects all map indexes
    :param update_mask: optional subset of body fields to apply
    :return: the serialized DAG, the matched task instances, and the dict of fields to update
    :raises HTTPException: 404 when the task or task instance does not exist
    :raises RequestValidationError: when no update_mask is given and the body fails validation
    """
    dag = get_latest_version_of_dag(dag_bag, dag_id, session)

    if not dag.has_task(task_id):
        raise HTTPException(status.HTTP_404_NOT_FOUND, f"Task '{task_id}' not found in DAG '{dag_id}'")

    query = (
        select(TI)
        .where(TI.dag_id == dag_id, TI.run_id == dag_run_id, TI.task_id == task_id)
        .join(TI.dag_run)
        .options(joinedload(TI.rendered_task_instance_fields))
    )
    # map_index=None means "all map indexes"; otherwise narrow to the one requested.
    if map_index is not None:
        query = query.where(TI.map_index == map_index)
    else:
        query = query.order_by(TI.map_index)

    tis = session.scalars(query).all()

    # BUG FIX: the original wrapped this f-string in parentheses WITH a trailing
    # comma, making err_msg_404 a 1-tuple — the 404 detail then surfaced as a
    # tuple instead of a plain string.
    err_msg_404 = (
        f"The Task Instance with dag_id: `{dag_id}`, run_id: `{dag_run_id}`, task_id: `{task_id}` and map_index: `{map_index}` was not found"
    )
    if len(tis) == 0:
        raise HTTPException(status.HTTP_404_NOT_FOUND, err_msg_404)

    fields_to_update = body.model_fields_set
    if update_mask:
        fields_to_update = fields_to_update.intersection(update_mask)
    else:
        try:
            PatchTaskInstanceBody.model_validate(body)
        except ValidationError as e:
            # Chain the cause so the original pydantic error stays visible in tracebacks.
            raise RequestValidationError(errors=e.errors()) from e

    return dag, list(tis), body.model_dump(include=fields_to_update, by_alias=True)
def _patch_task_instance_state(
    task_id: str,
    dag_run_id: str,
    dag: SerializedDAG,
    task_instance_body: BulkTaskInstanceBody | PatchTaskInstanceBody,
    data: dict,
    session: Session,
) -> None:
    """Set the state of the matching task instances and notify listeners.

    Delegates to ``SerializedDAG.set_task_instance_state`` (honoring the
    upstream/downstream/future/past flags from the body) and then fires the
    matching listener hook for each updated task instance.

    :raises HTTPException: 409 when no task instance was updated (already in the target state).
    """
    # PatchTaskInstanceBody has no map_index attribute, hence getattr with default.
    map_index = getattr(task_instance_body, "map_index", None)
    # None means "all map indexes"; otherwise the API expects a list of indexes.
    map_indexes = None if map_index is None else [map_index]
    updated_tis = dag.set_task_instance_state(
        task_id=task_id,
        run_id=dag_run_id,
        map_indexes=map_indexes,
        state=data["new_state"],
        upstream=task_instance_body.include_upstream,
        downstream=task_instance_body.include_downstream,
        future=task_instance_body.include_future,
        past=task_instance_body.include_past,
        commit=True,
        session=session,
    )
    if not updated_tis:
        raise HTTPException(
            status.HTTP_409_CONFLICT,
            f"Task id {task_id} is already in {data['new_state']} state",
        )
    for ti in updated_tis:
        try:
            if data["new_state"] == TaskInstanceState.SUCCESS:
                get_listener_manager().hook.on_task_instance_success(previous_state=None, task_instance=ti)
            elif data["new_state"] == TaskInstanceState.FAILED:
                get_listener_manager().hook.on_task_instance_failed(
                    previous_state=None,
                    task_instance=ti,
                    error=f"TaskInstance's state was manually set to `{TaskInstanceState.FAILED}`.",
                )
            elif data["new_state"] == TaskInstanceState.SKIPPED:
                get_listener_manager().hook.on_task_instance_skipped(previous_state=None, task_instance=ti)
        except Exception:
            # Listener failures must never fail the API request itself; log and continue.
            log.exception("error calling listener")
def _patch_task_instance_note(
    task_instance_body: BulkTaskInstanceBody | PatchTaskInstanceBody,
    tis: list[TI],
    user: GetUserDep,
    update_mask: list[str] | None = Query(None),
) -> None:
    """Apply the note from the request body to every given task instance.

    The note is written when an update_mask is present or the body carries a
    non-None note; a fresh note row is created when none exists yet, otherwise
    the existing row's content and author are updated in place.
    """
    # The condition does not depend on the individual ti, so evaluate it once.
    should_write = bool(update_mask) or task_instance_body.note is not None
    if not should_write:
        return
    for ti in tis:
        existing_note = ti.task_instance_note
        if existing_note is None:
            ti.note = (task_instance_body.note, user.get_id())
        else:
            existing_note.content = task_instance_body.note
            existing_note.user_id = user.get_id()
class BulkTaskInstanceService(BulkService[BulkTaskInstanceBody]):
    """Service for handling bulk operations on task instances."""

    def __init__(
        self,
        session: Session,
        request: BulkBody[BulkTaskInstanceBody],
        dag_id: str,
        dag_run_id: str,
        dag_bag: DagBagDep,
        user: GetUserDep,
    ):
        super().__init__(session, request)
        # Path-level defaults: entities that omit dag_id / dag_run_id fall back to these.
        self.dag_id = dag_id
        self.dag_run_id = dag_run_id
        self.dag_bag = dag_bag
        self.user = user

    def _extract_task_identifiers(
        self, entity: str | BulkTaskInstanceBody
    ) -> tuple[str, str, str, int | None]:
        """
        Extract task identifiers from an id or entity object.

        String entities are bare task_ids and inherit dag_id/dag_run_id from the
        path; body entities may override them. map_index is None when the entity
        addresses all map indexes.

        :param entity: Task identifier as string or BulkTaskInstanceBody object
        :return: tuple of (dag_id, dag_run_id, task_id, map_index)
        """
        if isinstance(entity, str):
            dag_id = self.dag_id
            dag_run_id = self.dag_run_id
            task_id = entity
            map_index = None
        else:
            dag_id = entity.dag_id if entity.dag_id else self.dag_id
            dag_run_id = entity.dag_run_id if entity.dag_run_id else self.dag_run_id
            task_id = entity.task_id
            map_index = entity.map_index
        return dag_id, dag_run_id, task_id, map_index

    def _categorize_entities(
        self,
        entities: Sequence[str | BulkTaskInstanceBody],
        results: BulkActionResponse,
    ) -> tuple[set[tuple[str, str, str, int]], set[tuple[str, str, str]]]:
        """
        Validate entities and categorize them into specific and all map index update sets.

        :param entities: Sequence of entities to validate
        :param results: BulkActionResponse object to track errors
        :return: tuple of (specific_map_index_task_keys, all_map_index_task_keys)
        """
        specific_map_index_task_keys = set()
        all_map_index_task_keys = set()

        for entity in entities:
            dag_id, dag_run_id, task_id, map_index = self._extract_task_identifiers(entity)

            # Validate that we have specific values, not wildcards
            if dag_id == "~" or dag_run_id == "~":
                if isinstance(entity, str):
                    error_msg = f"When using wildcard in path, dag_id and dag_run_id must be specified in BulkTaskInstanceBody object, not as string for task_id: {entity}"
                else:
                    error_msg = f"When using wildcard in path, dag_id and dag_run_id must be specified in request body for task_id: {entity.task_id}"
                results.errors.append(
                    {
                        "error": error_msg,
                        "status_code": status.HTTP_400_BAD_REQUEST,
                    }
                )
                continue

            # Separate logic for "update all" vs "update specific"
            if map_index is not None:
                specific_map_index_task_keys.add((dag_id, dag_run_id, task_id, map_index))
            else:
                all_map_index_task_keys.add((dag_id, dag_run_id, task_id))

        return specific_map_index_task_keys, all_map_index_task_keys

    def _categorize_task_instances(
        self, task_keys: set[tuple[str, str, str, int]]
    ) -> tuple[
        dict[tuple[str, str, str, int], TI], set[tuple[str, str, str, int]], set[tuple[str, str, str, int]]
    ]:
        """
        Categorize the given task_keys into matched and not_found based on existing task instances.

        :param task_keys: set of task_keys (tuple of dag_id, dag_run_id, task_id, and map_index)
        :return: tuple of (task_instances_map, matched_task_keys, not_found_task_keys)
        """
        # Filter at database level using exact tuple matching instead of fetching all combinations
        # and filtering in Python
        task_keys_list = list(task_keys)
        query = select(TI).where(tuple_(TI.dag_id, TI.run_id, TI.task_id, TI.map_index).in_(task_keys_list))
        task_instances = self.session.scalars(query).all()

        # Unmapped task instances store map_index as None in ORM terms; normalize to -1
        # so the keys line up with the request keys.
        task_instances_map = {
            (ti.dag_id, ti.run_id, ti.task_id, ti.map_index if ti.map_index is not None else -1): ti
            for ti in task_instances
        }
        matched_task_keys = set(task_instances_map.keys())
        not_found_task_keys = task_keys - matched_task_keys
        return task_instances_map, matched_task_keys, not_found_task_keys

    def _perform_update(
        self,
        entity: BulkTaskInstanceBody,
        dag_id: str,
        dag_run_id: str,
        task_id: str,
        map_index: int,
        results: BulkActionResponse,
        update_mask: list[str] | None = Query(None),
    ) -> None:
        """Validate and apply a single entity's update (state and/or note) to one task instance."""
        dag, tis, data = _patch_ti_validate_request(
            dag_id=dag_id,
            dag_run_id=dag_run_id,
            task_id=task_id,
            dag_bag=self.dag_bag,
            body=entity,
            session=self.session,
            update_mask=update_mask,
        )

        for key, _ in data.items():
            if key == "new_state":
                _patch_task_instance_state(
                    task_id=task_id,
                    dag_run_id=dag_run_id,
                    dag=dag,
                    task_instance_body=entity,
                    session=self.session,
                    data=data,
                )
            elif key == "note":
                _patch_task_instance_note(
                    task_instance_body=entity,
                    tis=tis,
                    user=self.user,
                )

        results.success.append(f"{dag_id}.{dag_run_id}.{task_id}[{map_index}]")

    def handle_bulk_create(
        self, action: BulkCreateAction[BulkTaskInstanceBody], results: BulkActionResponse
    ) -> None:
        """Task instances cannot be created through the bulk endpoint; record an error."""
        results.errors.append(
            {
                "error": "Task instances bulk create is not supported",
                "status_code": status.HTTP_405_METHOD_NOT_ALLOWED,
            }
        )

    def handle_bulk_update(
        self, action: BulkUpdateAction[BulkTaskInstanceBody], results: BulkActionResponse
    ) -> None:
        """Bulk Update Task Instances."""
        # Validate and categorize entities into specific and all map index update sets
        update_specific_map_index_task_keys, update_all_map_index_task_keys = self._categorize_entities(
            action.entities, results
        )

        try:
            # BUG FIX: index the request entities by their *resolved* identifiers.
            # The original built these maps from raw entity.dag_id / entity.dag_run_id,
            # which may be unset (falling back to the path values). The matched keys
            # below come from _extract_task_identifiers, so such entities were keyed
            # on None, never found on lookup, and their updates silently skipped.
            specific_entity_map: dict[tuple[str, str, str, int], BulkTaskInstanceBody] = {}
            all_map_entity_map: dict[tuple[str, str, str], BulkTaskInstanceBody] = {}
            for entity in action.entities:
                dag_id, dag_run_id, task_id, map_index = self._extract_task_identifiers(entity)
                if map_index is not None:
                    specific_entity_map[(dag_id, dag_run_id, task_id, map_index)] = entity
                else:
                    all_map_entity_map[(dag_id, dag_run_id, task_id)] = entity

            # Handle updates for specific map_index task instances
            if update_specific_map_index_task_keys:
                _, matched_task_keys, not_found_task_keys = self._categorize_task_instances(
                    update_specific_map_index_task_keys
                )

                if action.action_on_non_existence == BulkActionNotOnExistence.FAIL and not_found_task_keys:
                    not_found_task_ids = [
                        {"dag_id": dag_id, "dag_run_id": run_id, "task_id": task_id, "map_index": map_index}
                        for dag_id, run_id, task_id, map_index in not_found_task_keys
                    ]
                    raise HTTPException(
                        status_code=status.HTTP_404_NOT_FOUND,
                        detail=f"The task instances with these identifiers: {not_found_task_ids} were not found",
                    )

                for dag_id, dag_run_id, task_id, map_index in matched_task_keys:
                    entity = specific_entity_map.get((dag_id, dag_run_id, task_id, map_index))
                    if entity is not None:
                        self._perform_update(
                            dag_id=dag_id,
                            dag_run_id=dag_run_id,
                            task_id=task_id,
                            map_index=map_index,
                            entity=entity,
                            results=results,
                            update_mask=action.update_mask,
                        )

            # Handle updates for all map indexes
            if update_all_map_index_task_keys:
                all_dag_ids = {dag_id for dag_id, _, _ in update_all_map_index_task_keys}
                all_run_ids = {run_id for _, run_id, _ in update_all_map_index_task_keys}
                all_task_ids = {task_id for _, _, task_id in update_all_map_index_task_keys}

                batch_task_instances = self.session.scalars(
                    select(TI).where(
                        TI.dag_id.in_(all_dag_ids),
                        TI.run_id.in_(all_run_ids),
                        TI.task_id.in_(all_task_ids),
                    )
                ).all()

                # Group task instances by (dag_id, run_id, task_id)
                task_instances_by_key: dict[tuple[str, str, str], list[TI]] = {}
                for ti in batch_task_instances:
                    key = (ti.dag_id, ti.run_id, ti.task_id)
                    task_instances_by_key.setdefault(key, []).append(ti)

                for dag_id, run_id, task_id in update_all_map_index_task_keys:
                    all_task_instances = task_instances_by_key.get((dag_id, run_id, task_id), [])

                    if (
                        not all_task_instances
                        and action.action_on_non_existence == BulkActionNotOnExistence.FAIL
                    ):
                        raise HTTPException(
                            status_code=status.HTTP_404_NOT_FOUND,
                            detail=f"No task instances found for dag_id: {dag_id}, run_id: {run_id}, task_id: {task_id}",
                        )

                    entity = all_map_entity_map.get((dag_id, run_id, task_id))
                    if entity is not None:
                        for ti in all_task_instances:
                            self._perform_update(
                                dag_id=dag_id,
                                dag_run_id=run_id,
                                task_id=task_id,
                                map_index=ti.map_index if ti.map_index is not None else -1,
                                entity=entity,
                                results=results,
                                update_mask=action.update_mask,
                            )

        except ValidationError as e:
            results.errors.append({"error": f"{e.errors()}"})
        except HTTPException as e:
            results.errors.append({"error": f"{e.detail}", "status_code": e.status_code})

    def handle_bulk_delete(
        self, action: BulkDeleteAction[BulkTaskInstanceBody], results: BulkActionResponse
    ) -> None:
        """Bulk delete task instances."""
        # Validate and categorize entities into specific and all map index delete sets
        delete_specific_map_index_task_keys, delete_all_map_index_task_keys = self._categorize_entities(
            action.entities, results
        )

        try:
            # Handle deletion of specific (dag_id, dag_run_id, task_id, map_index) tuples
            if delete_specific_map_index_task_keys:
                _, matched_task_keys, not_found_task_keys = self._categorize_task_instances(
                    delete_specific_map_index_task_keys
                )

                not_found_task_ids = [
                    {"dag_id": dag_id, "dag_run_id": run_id, "task_id": task_id, "map_index": map_index}
                    for dag_id, run_id, task_id, map_index in not_found_task_keys
                ]
                if action.action_on_non_existence == BulkActionNotOnExistence.FAIL and not_found_task_keys:
                    raise HTTPException(
                        status_code=status.HTTP_404_NOT_FOUND,
                        detail=f"The task instances with these identifiers: {not_found_task_ids} were not found",
                    )

                for dag_id, run_id, task_id, map_index in matched_task_keys:
                    ti = (
                        self.session.execute(
                            select(TI).where(
                                TI.dag_id == dag_id,
                                TI.run_id == run_id,
                                TI.task_id == task_id,
                                TI.map_index == map_index,
                            )
                        )
                        .scalars()
                        .one_or_none()
                    )
                    if ti:
                        self.session.delete(ti)
                        results.success.append(f"{dag_id}.{run_id}.{task_id}[{map_index}]")

            # Handle deletion of all map indexes for certain (dag_id, dag_run_id, task_id) tuples
            if delete_all_map_index_task_keys:
                all_dag_ids = {dag_id for dag_id, _, _ in delete_all_map_index_task_keys}
                all_run_ids = {run_id for _, run_id, _ in delete_all_map_index_task_keys}
                all_task_ids = {task_id for _, _, task_id in delete_all_map_index_task_keys}

                batch_task_instances = self.session.scalars(
                    select(TI).where(
                        TI.dag_id.in_(all_dag_ids),
                        TI.run_id.in_(all_run_ids),
                        TI.task_id.in_(all_task_ids),
                    )
                ).all()

                # Group task instances by (dag_id, run_id, task_id) for efficient lookup
                task_instances_by_key: dict[tuple[str, str, str], list[TI]] = {}
                for ti in batch_task_instances:
                    key = (ti.dag_id, ti.run_id, ti.task_id)
                    task_instances_by_key.setdefault(key, []).append(ti)

                for dag_id, run_id, task_id in delete_all_map_index_task_keys:
                    all_task_instances = task_instances_by_key.get((dag_id, run_id, task_id), [])

                    if (
                        not all_task_instances
                        and action.action_on_non_existence == BulkActionNotOnExistence.FAIL
                    ):
                        raise HTTPException(
                            status_code=status.HTTP_404_NOT_FOUND,
                            detail=f"No task instances found for dag_id: {dag_id}, run_id: {run_id}, task_id: {task_id}",
                        )

                    for ti in all_task_instances:
                        self.session.delete(ti)
                        results.success.append(f"{dag_id}.{run_id}.{task_id}[{ti.map_index}]")

        except HTTPException as e:
            results.errors.append({"error": f"{e.detail}", "status_code": e.status_code})
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/core_api/services/public/task_instances.py",
"license": "Apache License 2.0",
"lines": 430,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/google/src/airflow/providers/google/cloud/transfers/http_to_gcs.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains operator to move data from HTTP endpoint to GCS."""
from __future__ import annotations
from functools import cached_property
from typing import TYPE_CHECKING, Any
from airflow.providers.google.cloud.hooks.gcs import GCSHook
from airflow.providers.google.version_compat import BaseOperator
from airflow.providers.http.hooks.http import HttpHook
if TYPE_CHECKING:
from collections.abc import Sequence
from requests.auth import AuthBase
from airflow.providers.common.compat.sdk import Context
class HttpToGCSOperator(BaseOperator):
    """
    Calls an endpoint on an HTTP system to execute an action and store the result in GCS.

    :param http_conn_id: The :ref:`http connection<howto/connection:http>` to run
        the operator against
    :param endpoint: The relative part of the full url. (templated)
    :param method: The HTTP method to use, default = "GET"
    :param data: The data to pass. POST-data in POST/PUT and params
        in the URL for a GET request. (templated)
    :param headers: The HTTP headers to be added to the GET request
    :param extra_options: Extra options for the 'requests' library, see the
        'requests' documentation (options to modify timeout, ssl, etc.)
    :param log_response: Log the response (default: False)
    :param auth_type: The auth type for the service
    :param tcp_keep_alive: Enable TCP Keep Alive for the connection.
    :param tcp_keep_alive_idle: The TCP Keep Alive Idle parameter (corresponds to ``socket.TCP_KEEPIDLE``).
    :param tcp_keep_alive_count: The TCP Keep Alive count parameter (corresponds to ``socket.TCP_KEEPCNT``)
    :param tcp_keep_alive_interval: The TCP Keep Alive interval parameter (corresponds to
        ``socket.TCP_KEEPINTVL``)
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term credentials,
        or chained list of accounts required to get the access_token of the last account in the list,
        which will be impersonated in the request. If set as a string,
        the account must grant the originating account the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant Service Account Token Creator
        IAM role to the directly preceding identity,
        with first account from the list granting this role to the originating account.
    :param bucket_name: The bucket to upload to.
    :param object_name: The object name to set when uploading the file.
    :param mime_type: The file mime type set when uploading the file.
    :param gzip: Option to compress local file or file data for upload
    :param encoding: bytes encoding for file data if provided as string
    :param chunk_size: Blob chunk size.
    :param timeout: Request timeout in seconds.
    :param num_max_attempts: Number of attempts to try to upload the file.
    :param metadata: The metadata to be uploaded with the file.
    :param cache_control: Cache-Control metadata field.
    :param user_project: The identifier of the Google Cloud project to bill for the request.
        Required for Requester Pays buckets.
    """

    template_fields: Sequence[str] = (
        "http_conn_id",
        "endpoint",
        "data",
        "headers",
        "gcp_conn_id",
        "bucket_name",
        "object_name",
    )
    template_fields_renderers = {"headers": "json", "data": "py"}
    template_ext: Sequence[str] = ()
    ui_color = "#f4a460"

    def __init__(
        self,
        *,
        endpoint: str | None = None,
        method: str = "GET",
        data: Any = None,
        headers: dict[str, str] | None = None,
        extra_options: dict[str, Any] | None = None,
        http_conn_id: str = "http_default",
        log_response: bool = False,
        auth_type: type[AuthBase] | None = None,
        tcp_keep_alive: bool = True,
        tcp_keep_alive_idle: int = 120,
        tcp_keep_alive_count: int = 20,
        tcp_keep_alive_interval: int = 30,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        # Required keyword-only arguments: the GCS destination must always be supplied.
        bucket_name: str,
        object_name: str,
        mime_type: str | None = None,
        gzip: bool = False,
        encoding: str | None = None,
        chunk_size: int | None = None,
        timeout: int | None = None,
        num_max_attempts: int = 3,
        metadata: dict | None = None,
        cache_control: str | None = None,
        user_project: str | None = None,
        **kwargs,
    ):
        super().__init__(**kwargs)
        # HTTP request configuration.
        self.http_conn_id = http_conn_id
        self.method = method
        self.endpoint = endpoint
        self.headers = headers or {}
        self.data = data or {}
        self.extra_options = extra_options or {}
        self.log_response = log_response
        self.auth_type = auth_type
        self.tcp_keep_alive = tcp_keep_alive
        self.tcp_keep_alive_idle = tcp_keep_alive_idle
        self.tcp_keep_alive_count = tcp_keep_alive_count
        self.tcp_keep_alive_interval = tcp_keep_alive_interval
        # GCS upload configuration.
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.bucket_name = bucket_name
        self.object_name = object_name
        self.mime_type = mime_type
        self.gzip = gzip
        self.encoding = encoding
        self.chunk_size = chunk_size
        self.timeout = timeout
        self.num_max_attempts = num_max_attempts
        self.metadata = metadata
        self.cache_control = cache_control
        self.user_project = user_project

    @cached_property
    def http_hook(self) -> HttpHook:
        """Create and return an HttpHook."""
        return HttpHook(
            self.method,
            http_conn_id=self.http_conn_id,
            auth_type=self.auth_type,
            tcp_keep_alive=self.tcp_keep_alive,
            tcp_keep_alive_idle=self.tcp_keep_alive_idle,
            tcp_keep_alive_count=self.tcp_keep_alive_count,
            tcp_keep_alive_interval=self.tcp_keep_alive_interval,
        )

    @cached_property
    def gcs_hook(self) -> GCSHook:
        """Create and return a GCSHook."""
        return GCSHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)

    def execute(self, context: Context) -> list[str]:
        """Return List of destination URIs (gs://bucket_name/object_name) for uploaded file."""
        self.log.info("Calling HTTP method")
        response = self.http_hook.run(
            endpoint=self.endpoint, data=self.data, headers=self.headers, extra_options=self.extra_options
        )
        self.log.info("Uploading to GCS")
        # The raw response body is uploaded; if no explicit encoding was configured,
        # fall back to the encoding reported by the HTTP response itself.
        self.gcs_hook.upload(
            data=response.content,
            bucket_name=self.bucket_name,
            object_name=self.object_name,
            mime_type=self.mime_type,
            gzip=self.gzip,
            encoding=self.encoding or response.encoding,
            chunk_size=self.chunk_size,
            timeout=self.timeout,
            num_max_attempts=self.num_max_attempts,
            metadata=self.metadata,
            cache_control=self.cache_control,
            user_project=self.user_project,
        )
        return [f"gs://{self.bucket_name}/{self.object_name}"]
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/src/airflow/providers/google/cloud/transfers/http_to_gcs.py",
"license": "Apache License 2.0",
"lines": 180,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/google/tests/system/google/cloud/gcs/example_http_to_gcs.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example Airflow DAG for HTTP to Google Cloud Storage transfer operators.
"""
from __future__ import annotations
import os
from datetime import datetime
from typing import Any
from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
if AIRFLOW_V_3_0_PLUS:
from airflow.sdk import task
else:
# Airflow 2 path
from airflow.decorators import task # type: ignore[attr-defined,no-redef]
from airflow.models.dag import DAG
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
from airflow.providers.google.cloud.transfers.http_to_gcs import HttpToGCSOperator
try:
from airflow.sdk import TriggerRule
except ImportError:
# Compatibility for Airflow < 3.1
from airflow.utils.trigger_rule import TriggerRule # type: ignore[no-redef,attr-defined]
from system.google.gcp_api_client_helpers import create_airflow_connection, delete_airflow_connection
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
DAG_ID = "example_http_to_gcs"
BUCKET_NAME = f"bucket-{DAG_ID}-{ENV_ID}"
IS_COMPOSER = bool(os.environ.get("COMPOSER_ENVIRONMENT", ""))
with DAG(
DAG_ID,
schedule="@once",
start_date=datetime(2025, 1, 1),
catchup=False,
tags=["example, http_to_gcs"],
) as dag:
conn_id_name = f"{ENV_ID}-http-conn-id"
create_bucket = GCSCreateBucketOperator(task_id="create_bucket", bucket_name=BUCKET_NAME)
@task(task_id="create_connection")
def create_connection(conn_id_name: str):
connection: dict[str, Any] = {"conn_type": "http", "host": "http://airflow.apache.org"}
create_airflow_connection(
connection_id=conn_id_name,
connection_conf=connection,
is_composer=IS_COMPOSER,
)
set_up_connection = create_connection(conn_id_name)
# [START howto_transfer_http_to_gcs]
http_to_gcs_task = HttpToGCSOperator(
task_id="http_to_gcs_task",
http_conn_id=conn_id_name,
endpoint="/community",
bucket_name=BUCKET_NAME,
object_name="endpoint_http_content_file",
)
# [END howto_transfer_http_to_gcs]
delete_bucket = GCSDeleteBucketOperator(
task_id="delete_bucket", bucket_name=BUCKET_NAME, trigger_rule=TriggerRule.ALL_DONE
)
@task(task_id="delete_connection", trigger_rule=TriggerRule.ALL_DONE)
def delete_connection(connection_id: str) -> None:
delete_airflow_connection(connection_id=connection_id, is_composer=IS_COMPOSER)
delete_connection_task = delete_connection(connection_id=conn_id_name)
(
# TEST SETUP
[create_bucket, set_up_connection]
# TEST BODY
>> http_to_gcs_task
# TEST TEARDOWN
>> [delete_bucket, delete_connection_task]
)
from tests_common.test_utils.watcher import watcher
# This test needs watcher in order to properly mark success/failure
# when "tearDown" task with trigger rule is part of the DAG
list(dag.tasks) >> watcher()
from tests_common.test_utils.system_tests import get_test_run # noqa: E402
# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/tests/system/google/cloud/gcs/example_http_to_gcs.py",
"license": "Apache License 2.0",
"lines": 92,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/google/tests/unit/google/cloud/transfers/test_http_to_gcs.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
from airflow.providers.google.cloud.transfers.http_to_gcs import HttpToGCSOperator
TASK_ID = "test-http-to-gcs-operator"
GCP_CONN_ID = "GCP_CONN_ID"
HTTP_CONN_ID = "HTTP_CONN_ID"
IMPERSONATION_CHAIN = ["ACCOUNT_1", "ACCOUNT_2", "ACCOUNT_3"]
TEST_BUCKET = "test-bucket"
DESTINATION_PATH_FILE = "destination_dir/copy.txt"
ENDPOINT = "/"
HEADERS = {"header_key": "header_value"}
DATA = {"some": "data"}
EXTRA_OPTIONS = {"check_response": False}
DEFAULT_HTTP_METHOD = "GET"
NUM_MAX_ATTEMPTS = 3
TCP_KEEP_ALIVE_IDLE = 120
TCP_KEEP_ALIVE_COUNT = 20
TCP_KEEP_ALIVE_INTERVAL = 30
class TestHttpToGCSOperator:
    """Unit tests for ``HttpToGCSOperator``."""

    def test_init(self):
        """Constructor stores the connection, endpoint and destination arguments."""
        operator = HttpToGCSOperator(
            task_id="http_to_gcs_operator",
            http_conn_id=HTTP_CONN_ID,
            endpoint=ENDPOINT,
            object_name=DESTINATION_PATH_FILE,
            bucket_name=TEST_BUCKET,
        )
        assert operator.http_conn_id == HTTP_CONN_ID
        assert operator.endpoint == ENDPOINT
        assert operator.bucket_name == TEST_BUCKET
        assert operator.object_name == DESTINATION_PATH_FILE

    def _build_operator(self):
        """Create an operator wired with every fixture used by the execute tests."""
        return HttpToGCSOperator(
            task_id="http_to_gcs_operator",
            http_conn_id=HTTP_CONN_ID,
            endpoint=ENDPOINT,
            headers=HEADERS,
            data=DATA,
            extra_options=EXTRA_OPTIONS,
            object_name=DESTINATION_PATH_FILE,
            bucket_name=TEST_BUCKET,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )

    @mock.patch("airflow.providers.google.cloud.transfers.http_to_gcs.GCSHook")
    @mock.patch("airflow.providers.google.cloud.transfers.http_to_gcs.HttpHook")
    def test_execute_copy_single_file(self, http_hook, gcs_hook):
        """execute() runs the HTTP call once and uploads the raw response to GCS."""
        operator = self._build_operator()
        result = operator.execute(None)
        # GCS side: hook built from the GCP connection, upload fed with the HTTP payload.
        gcs_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN)
        operator.gcs_hook.upload.assert_called_once_with(
            bucket_name=TEST_BUCKET,
            object_name=DESTINATION_PATH_FILE,
            data=operator.http_hook.run.return_value.content,
            mime_type=None,
            gzip=False,
            encoding=operator.http_hook.run.return_value.encoding,
            chunk_size=None,
            timeout=None,
            num_max_attempts=NUM_MAX_ATTEMPTS,
            metadata=None,
            cache_control=None,
            user_project=None,
        )
        # HTTP side: hook built with keep-alive defaults, run once with the request fixtures.
        http_hook.assert_called_once_with(
            DEFAULT_HTTP_METHOD,
            http_conn_id=HTTP_CONN_ID,
            auth_type=None,
            tcp_keep_alive=True,
            tcp_keep_alive_idle=TCP_KEEP_ALIVE_IDLE,
            tcp_keep_alive_count=TCP_KEEP_ALIVE_COUNT,
            tcp_keep_alive_interval=TCP_KEEP_ALIVE_INTERVAL,
        )
        operator.http_hook.run.assert_called_once_with(
            endpoint=ENDPOINT, headers=HEADERS, data=DATA, extra_options=EXTRA_OPTIONS
        )
        # execute() reports the destination URI(s) it wrote.
        assert result == [f"gs://{TEST_BUCKET}/{DESTINATION_PATH_FILE}"]

    @mock.patch("airflow.providers.google.cloud.transfers.http_to_gcs.GCSHook")
    @mock.patch("airflow.providers.google.cloud.transfers.http_to_gcs.HttpHook")
    def test_execute_returns_destination_uris(self, http_hook, gcs_hook):
        """Test that execute() returns a list of destination GCS URIs (gs://bucket/object)."""
        operator = self._build_operator()
        assert operator.execute(None) == [f"gs://{TEST_BUCKET}/{DESTINATION_PATH_FILE}"]
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/tests/unit/google/cloud/transfers/test_http_to_gcs.py",
"license": "Apache License 2.0",
"lines": 115,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-ctl/src/airflowctl/ctl/commands/variable_command.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import os
import sys
import rich
from airflowctl.api.client import NEW_API_CLIENT, ClientKind, provide_api_client
from airflowctl.api.datamodels.generated import (
BulkActionOnExistence,
BulkBodyVariableBody,
BulkCreateActionVariableBody,
VariableBody,
)
@provide_api_client(kind=ClientKind.CLI)
def import_(args, api_client=NEW_API_CLIENT) -> list[str]:
    """Import variables from a given file."""
    # Guard clauses: the file must exist and contain valid JSON.
    if not os.path.exists(args.file):
        rich.print(f"[red]Missing variable file: {args.file}")
        sys.exit(1)
    with open(args.file) as var_file:
        try:
            var_json = json.load(var_file)
        except json.JSONDecodeError:
            rich.print(f"[red]Invalid variable file: {args.file}")
            sys.exit(1)

    def _to_body(key, raw):
        # A dict with a truthy "value" entry carries value + optional description;
        # anything else is stored verbatim as the variable's value.
        if isinstance(raw, dict) and raw.get("value"):
            return VariableBody(key=key, value=raw["value"], description=raw.get("description"))
        return VariableBody(key=key, value=raw, description=None)

    entities = [_to_body(key, raw) for key, raw in var_json.items()]
    outcome = api_client.variables.bulk(
        variables=BulkBodyVariableBody(
            actions=[
                BulkCreateActionVariableBody(
                    action="create",
                    entities=entities,
                    action_on_existence=BulkActionOnExistence(args.action_on_existing_key),
                )
            ]
        )
    )
    if outcome.create.errors:
        rich.print(f"[red]Import failed! errors: {outcome.create.errors}[/red]")
        sys.exit(1)
    rich.print(f"[green]Import successful! success: {outcome.create.success}[/green]")
    return outcome.create.success
| {
"repo_id": "apache/airflow",
"file_path": "airflow-ctl/src/airflowctl/ctl/commands/variable_command.py",
"license": "Apache License 2.0",
"lines": 70,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-ctl/tests/airflow_ctl/ctl/commands/test_variable_command.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import pytest
from airflowctl.api.client import ClientKind
from airflowctl.api.datamodels.generated import (
BulkActionResponse,
BulkResponse,
VariableCollectionResponse,
VariableResponse,
)
from airflowctl.ctl import cli_parser
from airflowctl.ctl.commands import variable_command
class TestCliVariableCommands:
    """Tests for the ``airflowctl variables import`` command."""

    key = "key"
    value = "value"
    description = "description"
    export_file_name = "exported_json.json"
    parser = cli_parser.get_parser()
    variable_collection_response = VariableCollectionResponse(
        variables=[
            VariableResponse(
                key=key,
                value=value,
                description=description,
                is_encrypted=False,
            ),
        ],
        total_entries=1,
    )
    bulk_response_success = BulkResponse(
        create=BulkActionResponse(success=[key], errors=[]), update=None, delete=None
    )
    bulk_response_error = BulkResponse(
        create=BulkActionResponse(
            success=[],
            errors=[
                {"error": f"The variables with these keys: {{'{key}'}} already exist.", "status_code": 409}
            ],
        ),
        update=None,
        delete=None,
    )

    def _write_variables_file(self, tmp_path):
        """Dump a single key/value variable file into *tmp_path* and return its path."""
        file_path = tmp_path / self.export_file_name
        file_path.write_text(json.dumps({self.key: self.value}))
        return file_path

    def test_import_success(self, api_client_maker, tmp_path, monkeypatch):
        """A successful bulk create returns the list of imported keys."""
        api_client = api_client_maker(
            path="/api/v2/variables",
            response_json=self.bulk_response_success.model_dump(),
            expected_http_status_code=200,
            kind=ClientKind.CLI,
        )
        monkeypatch.chdir(tmp_path)
        json_path = self._write_variables_file(tmp_path)
        parsed = self.parser.parse_args(["variables", "import", json_path.as_posix()])
        assert variable_command.import_(parsed, api_client=api_client) == [self.key]

    def test_import_error(self, api_client_maker, tmp_path, monkeypatch):
        """Bulk create errors make the command exit with SystemExit."""
        api_client = api_client_maker(
            path="/api/v2/variables",
            response_json=self.bulk_response_error.model_dump(),
            expected_http_status_code=200,
            kind=ClientKind.CLI,
        )
        monkeypatch.chdir(tmp_path)
        json_path = self._write_variables_file(tmp_path)
        with pytest.raises(SystemExit):
            variable_command.import_(
                self.parser.parse_args(["variables", "import", json_path.as_posix()]),
                api_client=api_client,
            )
| {
"repo_id": "apache/airflow",
"file_path": "airflow-ctl/tests/airflow_ctl/ctl/commands/test_variable_command.py",
"license": "Apache License 2.0",
"lines": 94,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/keycloak/src/airflow/providers/keycloak/auth_manager/keycloak_auth_manager.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import base64
import json
import logging
import time
import warnings
from base64 import urlsafe_b64decode
from typing import TYPE_CHECKING, Any
from urllib.parse import urljoin
import requests
from fastapi import FastAPI
from keycloak import KeycloakOpenID
from keycloak.exceptions import KeycloakPostError
from requests.adapters import HTTPAdapter
from urllib3.util import Retry
from airflow.api_fastapi.app import AUTH_MANAGER_FASTAPI_APP_PREFIX
from airflow.api_fastapi.auth.managers.base_auth_manager import BaseAuthManager
from airflow.exceptions import AirflowProviderDeprecationWarning
try:
from airflow.api_fastapi.auth.managers.base_auth_manager import ExtendedResourceMethod
except ImportError:
from airflow.api_fastapi.auth.managers.base_auth_manager import ResourceMethod as ExtendedResourceMethod
from airflow.api_fastapi.common.types import MenuItem
from airflow.cli.cli_config import CLICommand
try:
from airflow.providers.common.compat.sdk import AirflowException, conf
except ModuleNotFoundError:
from airflow.configuration import conf
from airflow.exceptions import AirflowException
from airflow.providers.keycloak.auth_manager.constants import (
CONF_CLIENT_ID_KEY,
CONF_CLIENT_SECRET_KEY,
CONF_REALM_KEY,
CONF_REQUESTS_POOL_SIZE_KEY,
CONF_REQUESTS_RETRIES_KEY,
CONF_SECTION_NAME,
CONF_SERVER_URL_KEY,
)
from airflow.providers.keycloak.auth_manager.resources import KeycloakResource
from airflow.providers.keycloak.auth_manager.user import KeycloakAuthManagerUser
from airflow.utils.helpers import prune_dict
if TYPE_CHECKING:
from airflow.api_fastapi.auth.managers.base_auth_manager import ResourceMethod
from airflow.api_fastapi.auth.managers.models.resource_details import (
AccessView,
AssetAliasDetails,
AssetDetails,
BackfillDetails,
ConfigurationDetails,
ConnectionDetails,
DagAccessEntity,
DagDetails,
PoolDetails,
TeamDetails,
VariableDetails,
)
from airflow.cli.cli_config import CLICommand
log = logging.getLogger(__name__)
# Name of the permission-context attribute that carries the id of the specific
# resource being checked (e.g. a dag id or connection id).
RESOURCE_ID_ATTRIBUTE_NAME = "resource_id"
# Resource kinds whose Keycloak resource name gets a ``:<team>`` suffix when
# multi-team mode is enabled (see ``_is_authorized``).
TEAM_SCOPED_RESOURCES = frozenset(
    {
        KeycloakResource.CONNECTION,
        KeycloakResource.DAG,
        KeycloakResource.POOL,
        KeycloakResource.TEAM,
        KeycloakResource.VARIABLE,
    }
)
class KeycloakAuthManager(BaseAuthManager[KeycloakAuthManagerUser]):
"""
Keycloak auth manager.
Leverages Keycloak to perform authentication and authorization in Airflow.
"""
    def __init__(self):
        super().__init__()
        # Created lazily by the ``http_session`` property on first access.
        self._http_session = None
@property
def http_session(self) -> requests.Session:
"""Lazy-initialize and return the requests session with connection pooling."""
if self._http_session is not None:
return self._http_session
self._http_session = requests.Session()
pool_size = conf.getint(CONF_SECTION_NAME, CONF_REQUESTS_POOL_SIZE_KEY, fallback=10)
retry_total = conf.getint(CONF_SECTION_NAME, CONF_REQUESTS_RETRIES_KEY, fallback=3)
retry_strategy = Retry(
total=retry_total,
backoff_factor=0.1,
status_forcelist=[500, 502, 503, 504],
allowed_methods=["HEAD", "GET", "OPTIONS", "POST"],
)
adapter = HTTPAdapter(pool_connections=pool_size, pool_maxsize=pool_size, max_retries=retry_strategy)
self._http_session.mount("https://", adapter)
self._http_session.mount("http://", adapter)
return self._http_session
def deserialize_user(self, token: dict[str, Any]) -> KeycloakAuthManagerUser:
return KeycloakAuthManagerUser(
user_id=token.pop("user_id"),
name=token.pop("name"),
access_token=token.pop("access_token"),
refresh_token=token.pop("refresh_token"),
)
def serialize_user(self, user: KeycloakAuthManagerUser) -> dict[str, Any]:
return {
"user_id": user.get_id(),
"name": user.get_name(),
"access_token": user.access_token,
"refresh_token": user.refresh_token,
}
def get_url_login(self, **kwargs) -> str:
base_url = conf.get("api", "base_url", fallback="/")
return urljoin(base_url, f"{AUTH_MANAGER_FASTAPI_APP_PREFIX}/login")
def get_url_logout(self) -> str | None:
base_url = conf.get("api", "base_url", fallback="/")
return urljoin(base_url, f"{AUTH_MANAGER_FASTAPI_APP_PREFIX}/logout")
def refresh_user(self, *, user: KeycloakAuthManagerUser) -> KeycloakAuthManagerUser | None:
# According to RFC6749 section 4.4.3, a refresh token should not be included when using
# the Service accounts/client_credentials flow.
# We check whether the user has a refresh token; if not, we assume it's a service account
# and return None.
if not user.refresh_token:
return None
if self._token_expired(user.access_token):
tokens = self.refresh_tokens(user=user)
if tokens:
user.refresh_token = tokens["refresh_token"]
user.access_token = tokens["access_token"]
return user
return None
    def refresh_tokens(self, *, user: KeycloakAuthManagerUser) -> dict[str, str]:
        """Exchange the user's refresh token for fresh tokens via Keycloak.

        Returns an empty dict when the user has no refresh token (service account),
        or when the refresh fails on an Airflow version that lacks
        ``AuthManagerRefreshTokenExpiredException``; otherwise a failed refresh
        raises that exception.
        """
        if not user.refresh_token:
            # It is a service account. It used the client credentials flow and no refresh token is issued.
            return {}
        try:
            log.debug("Refreshing the token")
            client = self.get_keycloak_client()
            return client.refresh_token(user.refresh_token)
        except KeycloakPostError as exc:
            # The dedicated exception only exists on newer Airflow versions; on
            # older ones fall back to returning no tokens instead of raising.
            try:
                from airflow.api_fastapi.auth.managers.exceptions import (
                    AuthManagerRefreshTokenExpiredException,
                )
            except ImportError:
                return {}
            else:
                raise AuthManagerRefreshTokenExpiredException(exc)
def is_authorized_configuration(
self,
*,
method: ResourceMethod,
user: KeycloakAuthManagerUser,
details: ConfigurationDetails | None = None,
) -> bool:
config_section = details.section if details else None
return self._is_authorized(
method=method, resource_type=KeycloakResource.CONFIGURATION, user=user, resource_id=config_section
)
def is_authorized_connection(
self,
*,
method: ResourceMethod,
user: KeycloakAuthManagerUser,
details: ConnectionDetails | None = None,
) -> bool:
connection_id = details.conn_id if details else None
team_name = self._get_team_name(details)
return self._is_authorized(
method=method,
resource_type=KeycloakResource.CONNECTION,
user=user,
resource_id=connection_id,
team_name=team_name,
)
def is_authorized_dag(
self,
*,
method: ResourceMethod,
user: KeycloakAuthManagerUser,
access_entity: DagAccessEntity | None = None,
details: DagDetails | None = None,
) -> bool:
dag_id = details.id if details else None
team_name = self._get_team_name(details)
access_entity_str = access_entity.value if access_entity else None
return self._is_authorized(
method=method,
resource_type=KeycloakResource.DAG,
user=user,
resource_id=dag_id,
team_name=team_name,
attributes={"dag_entity": access_entity_str},
)
def is_authorized_backfill(
self, *, method: ResourceMethod, user: KeycloakAuthManagerUser, details: BackfillDetails | None = None
) -> bool:
# Method can be removed once the min Airflow version is >= 3.2.0.
warnings.warn(
"Use ``is_authorized_dag`` on ``DagAccessEntity.RUN`` instead for a dag level access control.",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
backfill_id = str(details.id) if details else None
return self._is_authorized(
method=method, resource_type=KeycloakResource.BACKFILL, user=user, resource_id=backfill_id
)
def is_authorized_asset(
self, *, method: ResourceMethod, user: KeycloakAuthManagerUser, details: AssetDetails | None = None
) -> bool:
asset_id = details.id if details else None
return self._is_authorized(
method=method, resource_type=KeycloakResource.ASSET, user=user, resource_id=asset_id
)
def is_authorized_asset_alias(
self,
*,
method: ResourceMethod,
user: KeycloakAuthManagerUser,
details: AssetAliasDetails | None = None,
) -> bool:
asset_alias_id = details.id if details else None
return self._is_authorized(
method=method,
resource_type=KeycloakResource.ASSET_ALIAS,
user=user,
resource_id=asset_alias_id,
)
def is_authorized_variable(
self, *, method: ResourceMethod, user: KeycloakAuthManagerUser, details: VariableDetails | None = None
) -> bool:
variable_key = details.key if details else None
team_name = self._get_team_name(details)
return self._is_authorized(
method=method,
resource_type=KeycloakResource.VARIABLE,
user=user,
resource_id=variable_key,
team_name=team_name,
)
def is_authorized_pool(
self, *, method: ResourceMethod, user: KeycloakAuthManagerUser, details: PoolDetails | None = None
) -> bool:
pool_name = details.name if details else None
team_name = self._get_team_name(details)
return self._is_authorized(
method=method,
resource_type=KeycloakResource.POOL,
user=user,
resource_id=pool_name,
team_name=team_name,
)
def is_authorized_team(
self, *, method: ResourceMethod, user: KeycloakAuthManagerUser, details: TeamDetails | None = None
) -> bool:
team_name = details.name if details else None
return self._is_authorized(
method=method,
resource_type=KeycloakResource.TEAM,
user=user,
team_name=team_name,
)
def is_authorized_view(self, *, access_view: AccessView, user: KeycloakAuthManagerUser) -> bool:
return self._is_authorized(
method="GET",
resource_type=KeycloakResource.VIEW,
user=user,
resource_id=access_view.value,
)
def is_authorized_custom_view(
self, *, method: ResourceMethod | str, resource_name: str, user: KeycloakAuthManagerUser
) -> bool:
return self._is_authorized(
method=method, resource_type=KeycloakResource.CUSTOM, user=user, resource_id=resource_name
)
def filter_authorized_menu_items(
self, menu_items: list[MenuItem], *, user: KeycloakAuthManagerUser
) -> list[MenuItem]:
authorized_menus = self._is_batch_authorized(
permissions=[("MENU", menu_item.value) for menu_item in menu_items],
user=user,
)
return [MenuItem(menu[1]) for menu in authorized_menus]
def get_fastapi_app(self) -> FastAPI | None:
from airflow.providers.keycloak.auth_manager.routes.login import login_router
from airflow.providers.keycloak.auth_manager.routes.token import token_router
app = FastAPI(
title="Keycloak auth manager sub application",
description=(
"This is the Keycloak auth manager fastapi sub application. This API is only available if the "
"auth manager used in the Airflow environment is Keycloak auth manager. "
"This sub application provides login routes."
),
)
app.include_router(login_router)
app.include_router(token_router)
return app
@staticmethod
def get_cli_commands() -> list[CLICommand]:
"""Vends CLI commands to be included in Airflow CLI."""
from airflow.providers.keycloak.cli.definition import get_keycloak_cli_commands
return get_keycloak_cli_commands()
@staticmethod
def get_keycloak_client(client_id: str | None = None, client_secret: str | None = None) -> KeycloakOpenID:
"""
Get a KeycloakOpenID client instance.
:param client_id: Optional client ID to override config. If provided, client_secret must also be provided.
:param client_secret: Optional client secret to override config. If provided, client_id must also be provided.
"""
if (client_id is None) != (client_secret is None):
raise ValueError(
"Both `client_id` and `client_secret` must be provided together, or both must be None"
)
if client_id is None:
client_id = conf.get(CONF_SECTION_NAME, CONF_CLIENT_ID_KEY)
client_secret = conf.get(CONF_SECTION_NAME, CONF_CLIENT_SECRET_KEY)
realm = conf.get(CONF_SECTION_NAME, CONF_REALM_KEY)
server_url = conf.get(CONF_SECTION_NAME, CONF_SERVER_URL_KEY)
return KeycloakOpenID(
server_url=server_url,
client_id=client_id,
client_secret_key=client_secret,
realm_name=realm,
)
    def _is_authorized(
        self,
        *,
        method: ResourceMethod | str,
        resource_type: KeycloakResource,
        user: KeycloakAuthManagerUser,
        resource_id: str | None = None,
        team_name: str | None = None,
        attributes: dict[str, str | None] | None = None,
    ) -> bool:
        """
        Ask Keycloak (UMA grant) whether *user* may perform *method* on *resource_type*.

        :param method: the scope being requested (e.g. ``"GET"``, ``"PUT"``).
        :param resource_type: the Keycloak resource the permission applies to.
        :param user: the user whose access token is presented to Keycloak.
        :param resource_id: identifier of the specific entity, pushed to Keycloak as a claim.
        :param team_name: optional team; switches to a team-scoped resource name when
            multi-team mode is enabled and the resource supports it.
        :param attributes: extra claims to push with the token request; ``None`` values are pruned.
        :return: True only on a 200 response; False on 401/403.
        :raises AirflowException: on a 400 response or any unexpected status code.
        """
        client_id = conf.get(CONF_SECTION_NAME, CONF_CLIENT_ID_KEY)
        realm = conf.get(CONF_SECTION_NAME, CONF_REALM_KEY)
        server_url = conf.get(CONF_SECTION_NAME, CONF_SERVER_URL_KEY)
        # Drop None-valued attributes before pushing them to Keycloak as claims.
        context_attributes = prune_dict(attributes or {})
        if resource_id:
            context_attributes[RESOURCE_ID_ATTRIBUTE_NAME] = resource_id
        elif method == "GET":
            # A GET without a specific resource id is a collection read -> LIST scope.
            method = "LIST"
        if (
            team_name
            and conf.getboolean("core", "multi_team", fallback=False)
            and resource_type in TEAM_SCOPED_RESOURCES
        ):
            # Team-scoped resources are registered in Keycloak as "<Resource>:<team>".
            resource_name = f"{resource_type.value}:{team_name}"
        else:
            resource_name = resource_type.value
        permission = f"{resource_name}#{method}"
        resp = self.http_session.post(
            self._get_token_url(server_url, realm),
            data=self._get_payload(client_id, permission, context_attributes),
            headers=self._get_headers(user.access_token),
            timeout=5,
        )
        if resp.status_code == 200:
            return True
        if resp.status_code == 401:
            # Expired/invalid access token: treat as "not authorized", not as an error.
            log.debug("Received 401 from Keycloak: %s", resp.text)
            return False
        if resp.status_code == 403:
            return False
        if resp.status_code == 400:
            error = json.loads(resp.text)
            raise AirflowException(
                f"Request not recognized by Keycloak. {error.get('error')}. {error.get('error_description')}"
            )
        raise AirflowException(f"Unexpected error: {resp.status_code} - {resp.text}")
    def _is_batch_authorized(
        self,
        *,
        permissions: list[tuple[ExtendedResourceMethod, str]],
        user: KeycloakAuthManagerUser,
    ) -> set[tuple[ExtendedResourceMethod, str]]:
        """
        Ask Keycloak which of the given (method, resource) *permissions* are granted to *user*.

        Uses ``response_mode=permissions`` so Keycloak returns the granted subset
        rather than an access token.

        :param permissions: (scope, resource name) pairs to evaluate in one request.
        :param user: the user whose access token is presented to Keycloak.
        :return: the set of granted (scope, resource name) pairs; empty on 401/403.
        :raises AirflowException: on a 400 response or any unexpected status code.
        """
        client_id = conf.get(CONF_SECTION_NAME, CONF_CLIENT_ID_KEY)
        realm = conf.get(CONF_SECTION_NAME, CONF_REALM_KEY)
        server_url = conf.get(CONF_SECTION_NAME, CONF_SERVER_URL_KEY)
        resp = self.http_session.post(
            self._get_token_url(server_url, realm),
            data=self._get_batch_payload(client_id, permissions),
            headers=self._get_headers(user.access_token),
            timeout=5,
        )
        if resp.status_code == 200:
            # Each response entry lists the granted scope(s) and the resource name.
            return {(perm["scopes"][0], perm["rsname"]) for perm in resp.json()}
        if resp.status_code == 401:
            log.debug("Received 401 from Keycloak: %s", resp.text)
            return set()
        if resp.status_code == 403:
            return set()
        if resp.status_code == 400:
            error = json.loads(resp.text)
            raise AirflowException(
                f"Request not recognized by Keycloak. {error.get('error')}. {error.get('error_description')}"
            )
        raise AirflowException(f"Unexpected error: {resp.status_code} - {resp.text}")
@staticmethod
def _get_token_url(server_url, realm):
# Normalize server_url to avoid double slashes (required for Keycloak 26.4+ strict path validation).
return f"{server_url.rstrip('/')}/realms/{realm}/protocol/openid-connect/token"
@staticmethod
def _get_team_name(
details: ConnectionDetails | DagDetails | PoolDetails | VariableDetails | None,
) -> str | None:
return getattr(details, "team_name", None) if details else None
@staticmethod
def _get_payload(client_id: str, permission: str, attributes: dict[str, str] | None = None):
payload: dict[str, Any] = {
"grant_type": "urn:ietf:params:oauth:grant-type:uma-ticket",
"audience": client_id,
"permission": permission,
}
if attributes:
# Per UMA spec, push claims using claim_token parameter with base64-encoded JSON
# Values must be arrays of strings per Keycloak documentation
# See: https://www.keycloak.org/docs/latest/authorization_services/index.html#_service_pushing_claims
claims = {key: [value] for key, value in attributes.items()}
claim_json = json.dumps(claims, sort_keys=True)
claim_token = base64.b64encode(claim_json.encode()).decode()
payload["claim_token"] = claim_token
payload["claim_token_format"] = "urn:ietf:params:oauth:token-type:jwt"
return payload
@staticmethod
def _get_batch_payload(client_id: str, permissions: list[tuple[ExtendedResourceMethod, str]]):
payload: dict[str, Any] = {
"grant_type": "urn:ietf:params:oauth:grant-type:uma-ticket",
"audience": client_id,
"permission": [f"{permission[1]}#{permission[0]}" for permission in permissions],
"response_mode": "permissions",
}
return payload
@staticmethod
def _get_headers(access_token):
return {
"Authorization": f"Bearer {access_token}",
"Content-Type": "application/x-www-form-urlencoded",
}
@staticmethod
def _token_expired(token: str) -> bool:
"""
Check whether a JWT token is expired.
:meta private:
:param token: the token
"""
payload_b64 = token.split(".")[1] + "=="
payload_bytes = urlsafe_b64decode(payload_b64)
payload = json.loads(payload_bytes)
return payload["exp"] < int(time.time())
| {
"repo_id": "apache/airflow",
"file_path": "providers/keycloak/src/airflow/providers/keycloak/auth_manager/keycloak_auth_manager.py",
"license": "Apache License 2.0",
"lines": 463,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/keycloak/src/airflow/providers/keycloak/auth_manager/routes/login.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import logging
from typing import cast
from urllib.parse import quote
from fastapi import Request # noqa: TC002
from fastapi.responses import HTMLResponse, RedirectResponse
from airflow.api_fastapi.app import get_auth_manager
from airflow.api_fastapi.auth.managers.base_auth_manager import COOKIE_NAME_JWT_TOKEN
from airflow.providers.keycloak.version_compat import AIRFLOW_V_3_1_1_PLUS
# Fallback for Airflow versions that predate ``AuthManagerRefreshTokenExpiredException``.
try:
    from airflow.api_fastapi.auth.managers.exceptions import AuthManagerRefreshTokenExpiredException
except ImportError:
    class AuthManagerRefreshTokenExpiredException(Exception):  # type: ignore[no-redef]
        """In case it is using a version of Airflow without ``AuthManagerRefreshTokenExpiredException``."""
        pass
from airflow.api_fastapi.common.router import AirflowRouter
from airflow.providers.common.compat.sdk import conf
from airflow.providers.keycloak.auth_manager.keycloak_auth_manager import KeycloakAuthManager
from airflow.providers.keycloak.auth_manager.user import KeycloakAuthManagerUser
log = logging.getLogger(__name__)
login_router = AirflowRouter(tags=["KeycloakAuthManagerLogin"])
# Cookie holding the Keycloak ID token, kept separate from the JWT cookie
# because a single cookie holding all tokens would exceed typical size limits.
COOKIE_NAME_ID_TOKEN = "_id_token"
@login_router.get("/login")
def login(request: Request) -> RedirectResponse:
"""Initiate the authentication."""
client = KeycloakAuthManager.get_keycloak_client()
redirect_uri = request.url_for("login_callback")
auth_url = client.auth_url(redirect_uri=str(redirect_uri), scope="openid")
return RedirectResponse(auth_url)
@login_router.get("/login_callback")
def login_callback(request: Request):
"""Authenticate the user."""
code = request.query_params.get("code")
if not code:
return HTMLResponse("Missing code", status_code=400)
client = KeycloakAuthManager.get_keycloak_client()
redirect_uri = request.url_for("login_callback")
tokens = client.token(
grant_type="authorization_code",
code=code,
redirect_uri=str(redirect_uri),
)
userinfo_raw: dict | bytes = client.userinfo(tokens["access_token"])
# Decode bytes to dict if necessary
userinfo: dict = json.loads(userinfo_raw) if isinstance(userinfo_raw, bytes) else userinfo_raw
user = KeycloakAuthManagerUser(
user_id=userinfo["sub"],
name=userinfo["preferred_username"],
access_token=tokens["access_token"],
refresh_token=tokens["refresh_token"],
)
token = get_auth_manager().generate_jwt(user)
response = RedirectResponse(url=conf.get("api", "base_url", fallback="/"), status_code=303)
secure = bool(conf.get("api", "ssl_cert", fallback=""))
# In Airflow 3.1.1 authentication changes, front-end no longer handle the token
# See https://github.com/apache/airflow/pull/55506
if AIRFLOW_V_3_1_1_PLUS:
response.set_cookie(COOKIE_NAME_JWT_TOKEN, token, secure=secure, httponly=True)
else:
response.set_cookie(COOKIE_NAME_JWT_TOKEN, token, secure=secure)
# Save id token as separate cookie.
# Cookies have a size limit (usually 4k), saving all the tokens in a same cookie goes beyond this limit
response.set_cookie(COOKIE_NAME_ID_TOKEN, tokens["id_token"], secure=secure, httponly=True)
return response
@login_router.get("/logout")
def logout(request: Request):
"""Log out the user from Keycloak."""
auth_manager = cast("KeycloakAuthManager", get_auth_manager())
keycloak_config = auth_manager.get_keycloak_client().well_known()
end_session_endpoint = keycloak_config["end_session_endpoint"]
id_token = request.cookies.get(COOKIE_NAME_ID_TOKEN)
post_logout_redirect_uri = request.url_for("logout_callback")
if id_token:
encoded_id_token = quote(id_token, safe="")
logout_url = (
f"{end_session_endpoint}?post_logout_redirect_uri={post_logout_redirect_uri}"
f"&id_token_hint={encoded_id_token}"
)
else:
logout_url = str(post_logout_redirect_uri)
return RedirectResponse(logout_url)
@login_router.get("/logout_callback")
def logout_callback(request: Request):
"""
Complete the log-out.
This callback is redirected by Keycloak after the user has been logged out from Keycloak.
"""
login_url = get_auth_manager().get_url_login()
secure = request.base_url.scheme == "https" or bool(conf.get("api", "ssl_cert", fallback=""))
response = RedirectResponse(login_url)
response.delete_cookie(
key=COOKIE_NAME_JWT_TOKEN,
secure=secure,
httponly=True,
)
response.delete_cookie(
key=COOKIE_NAME_ID_TOKEN,
secure=secure,
httponly=True,
)
return response
| {
"repo_id": "apache/airflow",
"file_path": "providers/keycloak/src/airflow/providers/keycloak/auth_manager/routes/login.py",
"license": "Apache License 2.0",
"lines": 118,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/keycloak/src/airflow/providers/keycloak/auth_manager/user.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.api_fastapi.auth.managers.models.base_user import BaseUser
class KeycloakAuthManagerUser(BaseUser):
    """User model for users managed by Keycloak auth manager."""

    def __init__(self, *, user_id: str, name: str, access_token: str, refresh_token: str | None) -> None:
        # Tokens are stored on the user so they round-trip through serialization and
        # can be presented to Keycloak for authorization checks / token refresh.
        # refresh_token may be None (e.g. client_credentials-style logins).
        self.user_id = user_id
        self.name = name
        self.access_token = access_token
        self.refresh_token = refresh_token

    def get_id(self) -> str:
        # Keycloak subject ("sub") identifier.
        return self.user_id

    def get_name(self) -> str:
        # Keycloak preferred username.
        return self.name
| {
"repo_id": "apache/airflow",
"file_path": "providers/keycloak/src/airflow/providers/keycloak/auth_manager/user.py",
"license": "Apache License 2.0",
"lines": 29,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/keycloak/tests/unit/keycloak/auth_manager/routes/test_login.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import ANY, Mock, patch
import pytest
from airflow.api_fastapi.app import AUTH_MANAGER_FASTAPI_APP_PREFIX
class TestLoginRouter:
    """Unit tests for the login/logout routes exposed by the Keycloak auth manager."""

    @patch("airflow.providers.keycloak.auth_manager.routes.login.KeycloakAuthManager.get_keycloak_client")
    def test_login(self, mock_get_keycloak_client, client):
        # /login must redirect the browser to the authorization URL built by the Keycloak client.
        redirect_url = "redirect_url"
        mock_keycloak_client = Mock()
        mock_keycloak_client.auth_url.return_value = redirect_url
        mock_get_keycloak_client.return_value = mock_keycloak_client
        response = client.get(AUTH_MANAGER_FASTAPI_APP_PREFIX + "/login", follow_redirects=False)
        assert response.status_code == 307
        assert "location" in response.headers
        assert response.headers["location"] == redirect_url

    @patch("airflow.providers.keycloak.auth_manager.routes.login.get_auth_manager")
    @patch("airflow.providers.keycloak.auth_manager.routes.login.KeycloakAuthManager.get_keycloak_client")
    def test_login_callback(self, mock_get_keycloak_client, mock_get_auth_manager, client):
        # Happy path: the code is exchanged for tokens, a JWT is generated,
        # and both the JWT and the id token are set as cookies.
        code = "code"
        token = "token"
        mock_keycloak_client = Mock()
        mock_keycloak_client.token.return_value = {
            "access_token": "access_token",
            "refresh_token": "refresh_token",
            "id_token": "id_token",
        }
        mock_keycloak_client.userinfo.return_value = {
            "sub": "sub",
            "preferred_username": "preferred_username",
        }
        mock_get_keycloak_client.return_value = mock_keycloak_client
        mock_auth_manager = Mock()
        mock_get_auth_manager.return_value = mock_auth_manager
        mock_auth_manager.generate_jwt.return_value = token
        response = client.get(
            AUTH_MANAGER_FASTAPI_APP_PREFIX + f"/login_callback?code={code}", follow_redirects=False
        )
        mock_keycloak_client.token.assert_called_once_with(
            grant_type="authorization_code",
            code=code,
            redirect_uri=ANY,
        )
        mock_keycloak_client.userinfo.assert_called_once_with("access_token")
        mock_auth_manager.generate_jwt.assert_called_once()
        # The user handed to generate_jwt must carry the Keycloak identity and tokens.
        user = mock_auth_manager.generate_jwt.call_args[0][0]
        assert user.get_id() == "sub"
        assert user.get_name() == "preferred_username"
        assert user.access_token == "access_token"
        assert user.refresh_token == "refresh_token"
        assert response.status_code == 303
        assert "location" in response.headers
        assert "_token" in response.cookies
        assert response.cookies["_token"] == token
        assert response.cookies["_id_token"] == "id_token"

    def test_login_callback_without_code(self, client):
        # A callback without ?code= is a client error.
        response = client.get(AUTH_MANAGER_FASTAPI_APP_PREFIX + "/login_callback")
        assert response.status_code == 400

    @pytest.mark.parametrize(
        ("id_token", "logout_callback_url"),
        [
            (None, "http://testserver/auth/logout_callback"),
            (
                "id_token",
                "logout_url?post_logout_redirect_uri=http://testserver/auth/logout_callback&id_token_hint=id_token",
            ),
        ],
    )
    @patch("airflow.providers.keycloak.auth_manager.routes.login.KeycloakAuthManager.get_keycloak_client")
    def test_logout(self, mock_get_keycloak_client, id_token, logout_callback_url, client):
        # Without an id-token cookie /logout redirects straight to the callback;
        # with one it targets Keycloak's end-session endpoint with an id_token_hint.
        mock_keycloak_client = Mock()
        mock_keycloak_client.well_known.return_value = {"end_session_endpoint": "logout_url"}
        mock_get_keycloak_client.return_value = mock_keycloak_client
        response = client.get(
            AUTH_MANAGER_FASTAPI_APP_PREFIX + "/logout",
            cookies={"_id_token": id_token},
            follow_redirects=False,
        )
        assert response.status_code == 307
        assert "location" in response.headers
        assert response.headers["location"] == logout_callback_url
| {
"repo_id": "apache/airflow",
"file_path": "providers/keycloak/tests/unit/keycloak/auth_manager/routes/test_login.py",
"license": "Apache License 2.0",
"lines": 96,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/keycloak/tests/unit/keycloak/auth_manager/test_keycloak_auth_manager.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import base64
import json
from contextlib import ExitStack
from unittest.mock import Mock, patch
import pytest
from keycloak import KeycloakPostError
from airflow.api_fastapi.app import AUTH_MANAGER_FASTAPI_APP_PREFIX
from airflow.api_fastapi.auth.managers.models.resource_details import (
AccessView,
AssetAliasDetails,
AssetDetails,
BackfillDetails,
ConfigurationDetails,
ConnectionDetails,
DagAccessEntity,
DagDetails,
PoolDetails,
VariableDetails,
)
from tests_common.test_utils.version_compat import AIRFLOW_V_3_1_7_PLUS, AIRFLOW_V_3_2_PLUS
if AIRFLOW_V_3_2_PLUS:
from airflow.api_fastapi.auth.managers.models.resource_details import TeamDetails
else:
TeamDetails = None # type: ignore[assignment,misc]
from airflow.api_fastapi.common.types import MenuItem
from airflow.exceptions import AirflowProviderDeprecationWarning
try:
from airflow.providers.common.compat.sdk import AirflowException
except ModuleNotFoundError:
from airflow.exceptions import AirflowException
from airflow.providers.keycloak.auth_manager.constants import (
CONF_CLIENT_ID_KEY,
CONF_CLIENT_SECRET_KEY,
CONF_REALM_KEY,
CONF_SECTION_NAME,
CONF_SERVER_URL_KEY,
)
from airflow.providers.keycloak.auth_manager.keycloak_auth_manager import (
RESOURCE_ID_ATTRIBUTE_NAME,
KeycloakAuthManager,
)
from airflow.providers.keycloak.auth_manager.user import KeycloakAuthManagerUser
from tests_common.test_utils.config import conf_vars
def _build_access_token(payload: dict[str, object]) -> str:
header = {"alg": "none", "typ": "JWT"}
header_b64 = base64.urlsafe_b64encode(json.dumps(header).encode()).decode().rstrip("=")
payload_b64 = base64.urlsafe_b64encode(json.dumps(payload).encode()).decode().rstrip("=")
return f"{header_b64}.{payload_b64}."
@pytest.fixture
def auth_manager():
    # Auth manager configured against dummy Keycloak settings (multi-team disabled).
    with conf_vars(
        {
            (CONF_SECTION_NAME, CONF_CLIENT_ID_KEY): "client_id",
            (CONF_SECTION_NAME, CONF_CLIENT_SECRET_KEY): "client_secret",
            (CONF_SECTION_NAME, CONF_REALM_KEY): "realm",
            (CONF_SECTION_NAME, CONF_SERVER_URL_KEY): "server_url",
        }
    ):
        yield KeycloakAuthManager()
@pytest.fixture
def auth_manager_multi_team():
    # Same as ``auth_manager`` but with core.multi_team enabled, so team-scoped
    # permission names ("<Resource>:<team>") are exercised.
    with conf_vars(
        {
            ("core", "multi_team"): "True",
            (CONF_SECTION_NAME, CONF_CLIENT_ID_KEY): "client_id",
            (CONF_SECTION_NAME, CONF_CLIENT_SECRET_KEY): "client_secret",
            (CONF_SECTION_NAME, CONF_REALM_KEY): "realm",
            (CONF_SECTION_NAME, CONF_SERVER_URL_KEY): "server_url",
        }
    ):
        yield KeycloakAuthManager()
@pytest.fixture
def user():
    # Minimal stand-in for a KeycloakAuthManagerUser: only the token attributes
    # read by the auth manager are set.
    user = Mock()
    user.access_token = "access_token"
    user.refresh_token = "refresh_token"
    return user
class TestKeycloakAuthManager:
def test_deserialize_user(self, auth_manager):
result = auth_manager.deserialize_user(
{
"user_id": "user_id",
"name": "name",
"access_token": "access_token",
"refresh_token": "refresh_token",
}
)
assert result.user_id == "user_id"
assert result.name == "name"
assert result.access_token == "access_token"
assert result.refresh_token == "refresh_token"
def test_serialize_user(self, auth_manager):
result = auth_manager.serialize_user(
KeycloakAuthManagerUser(
user_id="user_id", name="name", access_token="access_token", refresh_token="refresh_token"
)
)
assert result == {
"user_id": "user_id",
"name": "name",
"access_token": "access_token",
"refresh_token": "refresh_token",
}
def test_get_url_login(self, auth_manager):
result = auth_manager.get_url_login()
assert result == f"{AUTH_MANAGER_FASTAPI_APP_PREFIX}/login"
def test_get_url_logout(self, auth_manager):
result = auth_manager.get_url_logout()
assert result == f"{AUTH_MANAGER_FASTAPI_APP_PREFIX}/logout"
@patch.object(KeycloakAuthManager, "_token_expired")
def test_refresh_user_not_expired(self, mock_token_expired, auth_manager):
mock_token_expired.return_value = False
result = auth_manager.refresh_user(user=Mock())
assert result is None
def test_refresh_user_no_refresh_token(self, auth_manager):
"""Test that refresh_user returns None when refresh_token is empty (client_credentials case)."""
user_without_refresh = Mock()
user_without_refresh.refresh_token = None
user_without_refresh.access_token = "access_token"
result = auth_manager.refresh_user(user=user_without_refresh)
assert result is None
@patch.object(KeycloakAuthManager, "get_keycloak_client")
@patch.object(KeycloakAuthManager, "_token_expired")
def test_refresh_user_expired(self, mock_token_expired, mock_get_keycloak_client, auth_manager, user):
mock_token_expired.return_value = True
keycloak_client = Mock()
keycloak_client.refresh_token.return_value = {
"access_token": "new_access_token",
"refresh_token": "new_refresh_token",
}
mock_get_keycloak_client.return_value = keycloak_client
result = auth_manager.refresh_user(user=user)
keycloak_client.refresh_token.assert_called_with("refresh_token")
assert result.access_token == "new_access_token"
assert result.refresh_token == "new_refresh_token"
@patch.object(KeycloakAuthManager, "get_keycloak_client")
@patch.object(KeycloakAuthManager, "_token_expired")
def test_refresh_user_expired_with_invalid_token(
self, mock_token_expired, mock_get_keycloak_client, auth_manager, user
):
mock_token_expired.return_value = True
keycloak_client = Mock()
keycloak_client.refresh_token.side_effect = KeycloakPostError(
response_code=400,
response_body=b'{"error":"invalid_grant","error_description":"Token is not active"}',
)
mock_get_keycloak_client.return_value = keycloak_client
if AIRFLOW_V_3_1_7_PLUS:
from airflow.api_fastapi.auth.managers.exceptions import AuthManagerRefreshTokenExpiredException
with pytest.raises(AuthManagerRefreshTokenExpiredException):
auth_manager.refresh_user(user=user)
else:
auth_manager.refresh_user(user=user)
keycloak_client.refresh_token.assert_called_with("refresh_token")
@pytest.mark.parametrize(
("function", "method", "details", "permission", "attributes"),
[
[
"is_authorized_configuration",
"GET",
ConfigurationDetails(section="test"),
"Configuration#GET",
{RESOURCE_ID_ATTRIBUTE_NAME: "test"},
],
["is_authorized_configuration", "GET", None, "Configuration#LIST", None],
[
"is_authorized_configuration",
"PUT",
ConfigurationDetails(section="test"),
"Configuration#PUT",
{RESOURCE_ID_ATTRIBUTE_NAME: "test"},
],
[
"is_authorized_connection",
"DELETE",
ConnectionDetails(conn_id="test"),
"Connection#DELETE",
{RESOURCE_ID_ATTRIBUTE_NAME: "test"},
],
["is_authorized_connection", "GET", None, "Connection#LIST", {}],
[
"is_authorized_backfill",
"POST",
BackfillDetails(id=1),
"Backfill#POST",
{RESOURCE_ID_ATTRIBUTE_NAME: "1"},
],
["is_authorized_backfill", "GET", None, "Backfill#LIST", {}],
[
"is_authorized_asset",
"GET",
AssetDetails(id="test"),
"Asset#GET",
{RESOURCE_ID_ATTRIBUTE_NAME: "test"},
],
["is_authorized_asset", "GET", None, "Asset#LIST", {}],
[
"is_authorized_asset_alias",
"GET",
AssetAliasDetails(id="test"),
"AssetAlias#GET",
{RESOURCE_ID_ATTRIBUTE_NAME: "test"},
],
["is_authorized_asset_alias", "GET", None, "AssetAlias#LIST", {}],
[
"is_authorized_variable",
"PUT",
VariableDetails(key="test"),
"Variable#PUT",
{RESOURCE_ID_ATTRIBUTE_NAME: "test"},
],
["is_authorized_variable", "GET", None, "Variable#LIST", {}],
[
"is_authorized_pool",
"POST",
PoolDetails(name="test"),
"Pool#POST",
{RESOURCE_ID_ATTRIBUTE_NAME: "test"},
],
["is_authorized_pool", "GET", None, "Pool#LIST", {}],
["is_authorized_team", "GET", None, "Team#LIST", {}],
],
)
@pytest.mark.parametrize(
("status_code", "expected"),
[
[200, True],
[401, False],
[403, False],
],
)
def test_is_authorized(
self,
function,
method,
details,
permission,
attributes,
status_code,
expected,
auth_manager,
user,
):
mock_response = Mock()
mock_response.status_code = status_code
auth_manager.http_session.post = Mock(return_value=mock_response)
with ExitStack() as stack:
if function == "is_authorized_backfill":
stack.enter_context(
pytest.warns(
AirflowProviderDeprecationWarning,
match="Use ``is_authorized_dag`` on ``DagAccessEntity.RUN`` instead for a dag level access control.",
)
)
result = getattr(auth_manager, function)(method=method, user=user, details=details)
token_url = auth_manager._get_token_url("server_url", "realm")
payload = auth_manager._get_payload("client_id", permission, attributes)
headers = auth_manager._get_headers(user.access_token)
auth_manager.http_session.post.assert_called_once_with(
token_url, data=payload, headers=headers, timeout=5
)
assert result == expected
@pytest.mark.skipif(not AIRFLOW_V_3_2_PLUS, reason="TeamDetails not available before Airflow 3.2.0")
@pytest.mark.parametrize(
("status_code", "expected"),
[
[200, True],
[401, False],
[403, False],
],
)
def test_is_authorized_team_with_details(self, status_code, expected, auth_manager, user):
details = TeamDetails(name="team-a")
mock_response = Mock()
mock_response.status_code = status_code
auth_manager.http_session.post = Mock(return_value=mock_response)
result = auth_manager.is_authorized_team(method="GET", user=user, details=details)
token_url = auth_manager._get_token_url("server_url", "realm")
payload = auth_manager._get_payload("client_id", "Team#LIST", {})
headers = auth_manager._get_headers(user.access_token)
auth_manager.http_session.post.assert_called_once_with(
token_url, data=payload, headers=headers, timeout=5
)
assert result == expected
@pytest.mark.parametrize(
"function",
[
"is_authorized_configuration",
"is_authorized_connection",
"is_authorized_dag",
"is_authorized_backfill",
"is_authorized_asset",
"is_authorized_asset_alias",
"is_authorized_variable",
"is_authorized_pool",
"is_authorized_team",
],
)
def test_is_authorized_failure(self, function, auth_manager, user):
resp = Mock()
resp.status_code = 500
auth_manager.http_session.post = Mock(return_value=resp)
with ExitStack() as stack:
if function == "is_authorized_backfill":
stack.enter_context(
pytest.warns(
AirflowProviderDeprecationWarning,
match="Use ``is_authorized_dag`` on ``DagAccessEntity.RUN`` instead for a dag level access control.",
)
)
with pytest.raises(AirflowException) as e:
getattr(auth_manager, function)(method="GET", user=user)
assert "Unexpected error" in str(e.value)
@pytest.mark.parametrize(
"function",
[
"is_authorized_configuration",
"is_authorized_connection",
"is_authorized_dag",
"is_authorized_backfill",
"is_authorized_asset",
"is_authorized_asset_alias",
"is_authorized_variable",
"is_authorized_pool",
"is_authorized_team",
],
)
def test_is_authorized_invalid_request(self, function, auth_manager, user):
resp = Mock()
resp.status_code = 400
resp.text = json.dumps({"error": "invalid_scope", "error_description": "Invalid scopes: GET"})
auth_manager.http_session.post = Mock(return_value=resp)
with ExitStack() as stack:
if function == "is_authorized_backfill":
stack.enter_context(
pytest.warns(
AirflowProviderDeprecationWarning,
match="Use ``is_authorized_dag`` on ``DagAccessEntity.RUN`` instead for a dag level access control.",
)
)
with pytest.raises(AirflowException) as e:
getattr(auth_manager, function)(method="GET", user=user)
assert "Request not recognized by Keycloak. invalid_scope. Invalid scopes: GET" in str(e.value)
@pytest.mark.parametrize(
("method", "access_entity", "details", "permission", "attributes"),
[
[
"GET",
None,
None,
"Dag#LIST",
{},
],
[
"GET",
DagAccessEntity.TASK_INSTANCE,
DagDetails(id="test"),
"Dag#GET",
{RESOURCE_ID_ATTRIBUTE_NAME: "test", "dag_entity": "TASK_INSTANCE"},
],
[
"GET",
None,
DagDetails(id="test"),
"Dag#GET",
{RESOURCE_ID_ATTRIBUTE_NAME: "test"},
],
[
"GET",
DagAccessEntity.TASK_INSTANCE,
None,
"Dag#LIST",
{"dag_entity": "TASK_INSTANCE"},
],
],
)
@pytest.mark.parametrize(
("status_code", "expected"),
[
[200, True],
[403, False],
],
)
def test_is_authorized_dag(
self,
method,
access_entity,
details,
permission,
attributes,
status_code,
expected,
auth_manager,
user,
):
mock_response = Mock()
mock_response.status_code = status_code
auth_manager.http_session.post = Mock(return_value=mock_response)
result = auth_manager.is_authorized_dag(
method=method, user=user, access_entity=access_entity, details=details
)
token_url = auth_manager._get_token_url("server_url", "realm")
payload = auth_manager._get_payload("client_id", permission, attributes)
headers = auth_manager._get_headers(user.access_token)
auth_manager.http_session.post.assert_called_once_with(
token_url, data=payload, headers=headers, timeout=5
)
assert result == expected
@pytest.mark.skipif(not AIRFLOW_V_3_2_PLUS, reason="team_name not supported before Airflow 3.2.0")
@pytest.mark.parametrize(
("function", "method", "details_cls", "details_kwargs", "permission"),
[
("is_authorized_dag", "GET", DagDetails, {"id": "test", "team_name": "team-a"}, "Dag#GET"),
(
"is_authorized_connection",
"DELETE",
ConnectionDetails,
{"conn_id": "test", "team_name": "team-a"},
"Connection#DELETE",
),
(
"is_authorized_variable",
"PUT",
VariableDetails,
{"key": "test", "team_name": "team-a"},
"Variable#PUT",
),
("is_authorized_pool", "POST", PoolDetails, {"name": "test", "team_name": "team-a"}, "Pool#POST"),
("is_authorized_team", "GET", TeamDetails, {"name": "team-a"}, "Team#LIST"),
],
)
def test_team_name_ignored_when_multi_team_disabled(
self, auth_manager, user, function, method, details_cls, details_kwargs, permission
):
details = details_cls(**details_kwargs)
mock_response = Mock()
mock_response.status_code = 200
auth_manager.http_session.post = Mock(return_value=mock_response)
getattr(auth_manager, function)(method=method, user=user, details=details)
actual_permission = auth_manager.http_session.post.call_args.kwargs["data"]["permission"]
assert actual_permission == permission
    @pytest.mark.skipif(not AIRFLOW_V_3_2_PLUS, reason="team_name not supported before Airflow 3.2.0")
    @pytest.mark.parametrize(
        ("function", "details_cls", "details_kwargs", "permission"),
        [
            ("is_authorized_dag", DagDetails, {"id": "test", "team_name": "team-a"}, "Dag:team-a#GET"),
            (
                "is_authorized_connection",
                ConnectionDetails,
                {"conn_id": "test", "team_name": "team-a"},
                "Connection:team-a#GET",
            ),
            (
                "is_authorized_variable",
                VariableDetails,
                {"key": "test", "team_name": "team-a"},
                "Variable:team-a#GET",
            ),
            ("is_authorized_pool", PoolDetails, {"name": "test", "team_name": "team-a"}, "Pool:team-a#GET"),
            ("is_authorized_team", TeamDetails, {"name": "team-a"}, "Team:team-a#LIST"),
        ],
    )
    def test_with_team_name_uses_team_scoped_permission(
        self, auth_manager_multi_team, user, function, details_cls, details_kwargs, permission
    ):
        """With multi-team enabled, a team_name in details produces a team-scoped permission string ('Resource:team#METHOD')."""
        details = details_cls(**details_kwargs)
        mock_response = Mock()
        mock_response.status_code = 200
        auth_manager_multi_team.http_session.post = Mock(return_value=mock_response)
        getattr(auth_manager_multi_team, function)(method="GET", user=user, details=details)
        # The permission string is sent as form data in the Keycloak request.
        actual_permission = auth_manager_multi_team.http_session.post.call_args.kwargs["data"]["permission"]
        assert actual_permission == permission
    @pytest.mark.parametrize(
        ("function", "details", "permission"),
        [
            ("is_authorized_dag", DagDetails(id="test"), "Dag#GET"),
            ("is_authorized_connection", ConnectionDetails(conn_id="test"), "Connection#GET"),
            ("is_authorized_variable", VariableDetails(key="test"), "Variable#GET"),
            ("is_authorized_pool", PoolDetails(name="test"), "Pool#GET"),
            ("is_authorized_team", None, "Team#LIST"),
        ],
    )
    def test_without_team_name_uses_global_permission(
        self, auth_manager_multi_team, user, function, details, permission
    ):
        """Even with multi-team enabled, details without a team_name fall back to the global permission string."""
        mock_response = Mock()
        mock_response.status_code = 200
        auth_manager_multi_team.http_session.post = Mock(return_value=mock_response)
        getattr(auth_manager_multi_team, function)(method="GET", user=user, details=details)
        actual_permission = auth_manager_multi_team.http_session.post.call_args.kwargs["data"]["permission"]
        assert actual_permission == permission
    @pytest.mark.parametrize(
        ("function", "permission"),
        [
            ("is_authorized_dag", "Dag#LIST"),
            ("is_authorized_connection", "Connection#LIST"),
            ("is_authorized_variable", "Variable#LIST"),
            ("is_authorized_pool", "Pool#LIST"),
            ("is_authorized_team", "Team#LIST"),
        ],
    )
    def test_list_without_team_name_uses_global_permission(
        self, auth_manager_multi_team, user, function, permission
    ):
        """Calling an is_authorized_* method with no details at all yields the global '#LIST' permission."""
        mock_response = Mock()
        mock_response.status_code = 200
        auth_manager_multi_team.http_session.post = Mock(return_value=mock_response)
        # No `details` argument: GET without a resource id is treated as a LIST check.
        getattr(auth_manager_multi_team, function)(method="GET", user=user)
        actual_permission = auth_manager_multi_team.http_session.post.call_args.kwargs["data"]["permission"]
        assert actual_permission == permission
    @pytest.mark.skipif(not AIRFLOW_V_3_2_PLUS, reason="team_name not supported before Airflow 3.2.0")
    @pytest.mark.parametrize(
        ("function", "details_cls", "details_kwargs", "permission"),
        [
            ("is_authorized_dag", DagDetails, {"team_name": "team-a"}, "Dag:team-a#LIST"),
            (
                "is_authorized_connection",
                ConnectionDetails,
                {"team_name": "team-a"},
                "Connection:team-a#LIST",
            ),
            ("is_authorized_variable", VariableDetails, {"team_name": "team-a"}, "Variable:team-a#LIST"),
            ("is_authorized_pool", PoolDetails, {"team_name": "team-a"}, "Pool:team-a#LIST"),
            ("is_authorized_team", TeamDetails, {"name": "team-a"}, "Team:team-a#LIST"),
        ],
    )
    def test_list_with_team_name_uses_team_scoped_permission(
        self, auth_manager_multi_team, user, function, details_cls, details_kwargs, permission
    ):
        """Details carrying only a team_name (no resource id) produce a team-scoped LIST permission."""
        details = details_cls(**details_kwargs)
        # Rebuild the user's token with a groups claim containing the requested
        # team — presumably required for the team-scoped path; see _build_access_token.
        user.access_token = _build_access_token({"groups": ["team-a"]})
        mock_response = Mock()
        mock_response.status_code = 200
        auth_manager_multi_team.http_session.post = Mock(return_value=mock_response)
        getattr(auth_manager_multi_team, function)(method="GET", user=user, details=details)
        actual_permission = auth_manager_multi_team.http_session.post.call_args.kwargs["data"]["permission"]
        assert actual_permission == permission
@pytest.mark.skipif(not AIRFLOW_V_3_2_PLUS, reason="team_name not supported before Airflow 3.2.0")
@patch.object(KeycloakAuthManager, "is_authorized_dag", return_value=False)
def test_filter_authorized_dag_ids_team_mismatch(self, mock_is_authorized, auth_manager_multi_team, user):
result = auth_manager_multi_team.filter_authorized_dag_ids(
dag_ids={"dag-a"}, user=user, team_name="team-b"
)
mock_is_authorized.assert_called_once()
assert result == set()
@pytest.mark.skipif(not AIRFLOW_V_3_2_PLUS, reason="team_name not supported before Airflow 3.2.0")
@patch.object(KeycloakAuthManager, "is_authorized_dag", return_value=True)
def test_filter_authorized_dag_ids_team_match(self, mock_is_authorized, auth_manager_multi_team, user):
result = auth_manager_multi_team.filter_authorized_dag_ids(
dag_ids={"dag-a"}, user=user, team_name="team-a"
)
mock_is_authorized.assert_called_once()
assert result == {"dag-a"}
    @pytest.mark.skipif(not AIRFLOW_V_3_2_PLUS, reason="team_name not supported before Airflow 3.2.0")
    @patch.object(KeycloakAuthManager, "is_authorized_pool", return_value=False)
    def test_filter_authorized_pools_no_team_returns_empty(
        self, mock_is_authorized, auth_manager_multi_team, user
    ):
        """With team_name=None and a denying pool check, no pool names survive the filter."""
        result = auth_manager_multi_team.filter_authorized_pools(
            pool_names={"pool-a"}, user=user, team_name=None
        )
        # The filter still consults is_authorized_pool exactly once per pool.
        mock_is_authorized.assert_called_once()
        assert result == set()
    @pytest.mark.skipif(not AIRFLOW_V_3_2_PLUS, reason="team_name not supported before Airflow 3.2.0")
    @pytest.mark.parametrize(
        ("function", "details_cls", "details_kwargs"),
        [
            ("is_authorized_dag", DagDetails, {"team_name": "team-b"}),
            ("is_authorized_connection", ConnectionDetails, {"team_name": "team-b"}),
            ("is_authorized_variable", VariableDetails, {"team_name": "team-b"}),
            ("is_authorized_pool", PoolDetails, {"team_name": "team-b"}),
            ("is_authorized_team", TeamDetails, {"name": "team-b"}),
        ],
    )
    def test_list_with_mismatched_team_delegates_to_keycloak(
        self, auth_manager_multi_team, user, function, details_cls, details_kwargs
    ):
        """A team the user does not belong to is still checked against Keycloak; a 403 response means denied."""
        details = details_cls(**details_kwargs)
        mock_response = Mock()
        # 403 from the token endpoint: Keycloak refused the permission.
        mock_response.status_code = 403
        auth_manager_multi_team.http_session.post = Mock(return_value=mock_response)
        result = getattr(auth_manager_multi_team, function)(method="GET", user=user, details=details)
        # The decision is delegated (exactly one HTTP call), not short-circuited locally.
        auth_manager_multi_team.http_session.post.assert_called_once()
        assert result is False
def test_filter_authorized_menu_items_with_batch_authorized(self, auth_manager, user):
with patch.object(
KeycloakAuthManager,
"_is_batch_authorized",
return_value={("MENU", menu.value) for menu in MenuItem},
):
result = auth_manager.filter_authorized_menu_items(list(MenuItem), user=user)
assert set(result) == set(MenuItem)
    @pytest.mark.parametrize(
        ("status_code", "expected"),
        [
            [200, True],
            [403, False],
        ],
    )
    def test_is_authorized_view(
        self,
        status_code,
        expected,
        auth_manager,
        user,
    ):
        """is_authorized_view maps the access view to a 'View#GET' permission with the view name as resource id."""
        mock_response = Mock()
        mock_response.status_code = status_code
        auth_manager.http_session.post = Mock(return_value=mock_response)
        result = auth_manager.is_authorized_view(access_view=AccessView.CLUSTER_ACTIVITY, user=user)
        # Rebuild the request pieces to assert the exact Keycloak call below.
        token_url = auth_manager._get_token_url("server_url", "realm")
        payload = auth_manager._get_payload(
            "client_id", "View#GET", {RESOURCE_ID_ATTRIBUTE_NAME: "CLUSTER_ACTIVITY"}
        )
        headers = auth_manager._get_headers(user.access_token)
        auth_manager.http_session.post.assert_called_once_with(
            token_url, data=payload, headers=headers, timeout=5
        )
        assert result == expected
    @pytest.mark.parametrize(
        ("status_code", "expected"),
        [
            [200, True],
            [403, False],
        ],
    )
    def test_is_authorized_custom_view(
        self,
        status_code,
        expected,
        auth_manager,
        user,
    ):
        """is_authorized_custom_view builds a 'Custom#<method>' permission with the resource name as resource id."""
        mock_response = Mock()
        mock_response.status_code = status_code
        auth_manager.http_session.post = Mock(return_value=mock_response)
        result = auth_manager.is_authorized_custom_view(method="GET", resource_name="test", user=user)
        # Rebuild the request pieces to assert the exact Keycloak call below.
        token_url = auth_manager._get_token_url("server_url", "realm")
        payload = auth_manager._get_payload("client_id", "Custom#GET", {RESOURCE_ID_ATTRIBUTE_NAME: "test"})
        headers = auth_manager._get_headers(user.access_token)
        auth_manager.http_session.post.assert_called_once_with(
            token_url, data=payload, headers=headers, timeout=5
        )
        assert result == expected
    @pytest.mark.parametrize(
        ("status_code", "response", "expected"),
        [
            [
                200,
                [{"scopes": ["MENU"], "rsname": "Assets"}, {"scopes": ["MENU"], "rsname": "Connections"}],
                {MenuItem.ASSETS, MenuItem.CONNECTIONS},
            ],
            [200, [{"scopes": ["MENU"], "rsname": "Assets"}], {MenuItem.ASSETS}],
            [200, [], set()],
            # 401/403 from Keycloak: nothing is authorized, regardless of the body.
            [401, [{"scopes": ["MENU"], "rsname": "Assets"}], set()],
            [403, [{"scopes": ["MENU"], "rsname": "Assets"}], set()],
        ],
    )
    def test_filter_authorized_menu_items(self, status_code, response, expected, auth_manager, user):
        """Menu items are kept only when the batch Keycloak response lists them (by 'rsname') with a MENU scope."""
        mock_response = Mock()
        mock_response.status_code = status_code
        mock_response.json.return_value = response
        auth_manager.http_session.post = Mock(return_value=mock_response)
        menu_items = [MenuItem.ASSETS, MenuItem.CONNECTIONS]
        result = auth_manager.filter_authorized_menu_items(menu_items, user=user)
        # All requested items go to Keycloak in a single batch payload.
        token_url = auth_manager._get_token_url("server_url", "realm")
        payload = auth_manager._get_batch_payload(
            "client_id", [("MENU", MenuItem.ASSETS.value), ("MENU", MenuItem.CONNECTIONS.value)]
        )
        headers = auth_manager._get_headers(user.access_token)
        auth_manager.http_session.post.assert_called_once_with(
            token_url, data=payload, headers=headers, timeout=5
        )
        assert set(result) == expected
    @pytest.mark.parametrize(
        "status_code",
        [400, 500],
    )
    def test_filter_authorized_menu_items_with_failure(self, status_code, auth_manager, user):
        """Unexpected status codes (neither success nor 401/403) raise AirflowException instead of returning a result."""
        resp = Mock()
        resp.status_code = status_code
        resp.text = json.dumps({})
        auth_manager.http_session.post = Mock(return_value=resp)
        menu_items = [MenuItem.ASSETS, MenuItem.CONNECTIONS]
        with pytest.raises(AirflowException):
            auth_manager.filter_authorized_menu_items(menu_items, user=user)
        # Even on failure, exactly one batch request was attempted.
        token_url = auth_manager._get_token_url("server_url", "realm")
        payload = auth_manager._get_batch_payload(
            "client_id", [("MENU", MenuItem.ASSETS.value), ("MENU", MenuItem.CONNECTIONS.value)]
        )
        headers = auth_manager._get_headers(user.access_token)
        auth_manager.http_session.post.assert_called_once_with(
            token_url, data=payload, headers=headers, timeout=5
        )
def test_get_cli_commands_return_cli_commands(self, auth_manager):
assert len(auth_manager.get_cli_commands()) == 1
    @pytest.mark.parametrize(
        ("expiration", "expected"),
        [
            # Negative lifetime: token already expired at generation time.
            (-30, True),
            (30, False),
        ],
    )
    def test_token_expired(self, auth_manager, expiration, expected):
        """_token_expired reports True for an already-expired token and False for one still valid."""
        token = auth_manager._get_token_signer(expiration_time_in_seconds=expiration).generate({})
        assert KeycloakAuthManager._token_expired(token) is expected
@pytest.mark.parametrize(
("client_id", "client_secret"),
[
("test_client", None),
(None, "test_secret"),
],
)
def test_get_keycloak_client_with_partial_credentials_raises_error(
self, auth_manager, client_id, client_secret
):
"""Test that providing only client_id or only client_secret raises ValueError."""
with pytest.raises(
ValueError, match="Both `client_id` and `client_secret` must be provided together"
):
auth_manager.get_keycloak_client(client_id=client_id, client_secret=client_secret)
@patch("airflow.providers.keycloak.auth_manager.keycloak_auth_manager.KeycloakOpenID")
def test_get_keycloak_client_with_both_credentials(self, mock_keycloak_openid, auth_manager):
"""Test that providing both client_id and client_secret works correctly."""
client = auth_manager.get_keycloak_client(client_id="test_client", client_secret="test_secret")
mock_keycloak_openid.assert_called_once_with(
server_url="server_url",
realm_name="realm",
client_id="test_client",
client_secret_key="test_secret",
)
assert client == mock_keycloak_openid.return_value
@patch("airflow.providers.keycloak.auth_manager.keycloak_auth_manager.KeycloakOpenID")
def test_get_keycloak_client_with_no_credentials(self, mock_keycloak_openid, auth_manager):
"""Test that providing neither credential uses config defaults."""
client = auth_manager.get_keycloak_client()
mock_keycloak_openid.assert_called_once_with(
server_url="server_url",
realm_name="realm",
client_id="client_id",
client_secret_key="client_secret",
)
assert client == mock_keycloak_openid.return_value
    @pytest.mark.parametrize(
        ("client_id", "permission", "attributes", "expected_claims"),
        [
            # Test without attributes - no claim_token should be added
            ("test_client", "DAG#GET", None, None),
            # Test with single attribute
            ("test_client", "DAG#READ", {"resource_id": "test_dag"}, {"resource_id": ["test_dag"]}),
            # Test with multiple attributes
            (
                "test_client",
                "DAG#GET",
                {"resource_id": "my_dag", "dag_entity": "RUN"},
                {"dag_entity": ["RUN"], "resource_id": ["my_dag"]},  # sorted by key
            ),
            # Test with different attribute types
            (
                "my_client",
                "Connection#POST",
                {"resource_id": "conn123", "extra": "value"},
                {"extra": ["value"], "resource_id": ["conn123"]},  # sorted by key
            ),
        ],
    )
    def test_get_payload(self, client_id, permission, attributes, expected_claims, auth_manager):
        """Test _get_payload with various attribute configurations."""
        import base64
        payload = auth_manager._get_payload(client_id, permission, attributes)
        # Verify basic payload structure
        assert payload["grant_type"] == "urn:ietf:params:oauth:grant-type:uma-ticket"
        assert payload["audience"] == client_id
        assert payload["permission"] == permission
        if attributes is None:
            # When no attributes, claim_token should not be present
            assert "claim_token" not in payload
            assert "claim_token_format" not in payload
        else:
            # When attributes are provided, claim_token should be present
            assert "claim_token" in payload
            assert "claim_token_format" in payload
            assert payload["claim_token_format"] == "urn:ietf:params:oauth:token-type:jwt"
            # Decode and verify the claim_token contains the attributes as arrays
            # (each scalar attribute value is expected to be wrapped in a list).
            decoded_claim = base64.b64decode(payload["claim_token"]).decode()
            claims = json.loads(decoded_claim)
            assert claims == expected_claims
    @pytest.mark.parametrize(
        ("server_url", "expected_url"),
        [
            (
                "https://keycloak.example.com/auth",
                "https://keycloak.example.com/auth/realms/myrealm/protocol/openid-connect/token",
            ),
            # A single trailing slash must not produce a double slash in the URL.
            (
                "https://keycloak.example.com/auth/",
                "https://keycloak.example.com/auth/realms/myrealm/protocol/openid-connect/token",
            ),
            # Multiple trailing slashes are all stripped.
            (
                "https://keycloak.example.com/auth///",
                "https://keycloak.example.com/auth/realms/myrealm/protocol/openid-connect/token",
            ),
            (
                "https://keycloak.example.com/",
                "https://keycloak.example.com/realms/myrealm/protocol/openid-connect/token",
            ),
        ],
    )
    def test_get_token_url_normalization(self, auth_manager, server_url, expected_url):
        """Test that _get_token_url normalizes server_url by stripping trailing slashes."""
        token_url = auth_manager._get_token_url(server_url, "myrealm")
        assert token_url == expected_url
| {
"repo_id": "apache/airflow",
"file_path": "providers/keycloak/tests/unit/keycloak/auth_manager/test_keycloak_auth_manager.py",
"license": "Apache License 2.0",
"lines": 829,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/keycloak/tests/unit/keycloak/auth_manager/test_user.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow.providers.keycloak.auth_manager.user import KeycloakAuthManagerUser
@pytest.fixture
def user():
    """Build a fully-populated Keycloak user for the accessor tests below."""
    credentials = {
        "user_id": "user_id",
        "name": "name",
        "access_token": "access_token",
        "refresh_token": "refresh_token",
    }
    return KeycloakAuthManagerUser(**credentials)
class TestKeycloakAuthManagerUser:
    """Verify the user model exposes exactly the values it was constructed with."""

    def test_get_id(self, user):
        user_id = user.get_id()
        assert user_id == "user_id"

    def test_get_name(self, user):
        display_name = user.get_name()
        assert display_name == "name"

    def test_get_access_token(self, user):
        token = user.access_token
        assert token == "access_token"

    def test_get_refresh_token(self, user):
        token = user.refresh_token
        assert token == "refresh_token"
| {
"repo_id": "apache/airflow",
"file_path": "providers/keycloak/tests/unit/keycloak/auth_manager/test_user.py",
"license": "Apache License 2.0",
"lines": 33,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-ctl/src/airflowctl/ctl/commands/pool_command.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Command functions for managing Airflow pools."""
from __future__ import annotations
import json
from json import JSONDecodeError
from pathlib import Path
import rich
from airflowctl.api.client import NEW_API_CLIENT, Client, ClientKind, provide_api_client
from airflowctl.api.datamodels.generated import (
BulkActionOnExistence,
BulkBodyPoolBody,
BulkCreateActionPoolBody,
PoolBody,
)
@provide_api_client(kind=ClientKind.CLI)
def import_(args, api_client: Client = NEW_API_CLIENT) -> None:
    """Import pools from file."""
    pools_path = Path(args.file)
    if not pools_path.exists():
        raise SystemExit(f"Missing pools file {args.file}")
    created, failed = _import_helper(api_client, pools_path)
    if failed:
        raise SystemExit(f"Failed to update pool(s): {failed}")
    rich.print(created)
@provide_api_client(kind=ClientKind.CLI)
def export(args, api_client: Client = NEW_API_CLIENT) -> None:
    """
    Export all pools.

    If output is json, write to file. Otherwise, print to console.

    :param args: Parsed CLI arguments; ``args.file`` is the target path and
        ``args.output`` selects the output format.
    :param api_client: API client used to list the pools.
    :raises SystemExit: If listing or writing the pools fails for any reason.
    """
    try:
        pools_response = api_client.pools.list()
        # Flatten each pool into a plain dict so it can be JSON-encoded or printed.
        pools_list = [
            {
                "name": pool.name,
                "slots": pool.slots,
                "description": pool.description,
                "include_deferred": pool.include_deferred,
                "occupied_slots": pool.occupied_slots,
                "running_slots": pool.running_slots,
                "queued_slots": pool.queued_slots,
                "scheduled_slots": pool.scheduled_slots,
                "open_slots": pool.open_slots,
                "deferred_slots": pool.deferred_slots,
            }
            for pool in pools_response.pools
        ]
        if args.output == "json":
            file_path = Path(args.file)
            # Explicit encoding avoids platform-dependent default codecs.
            with open(file_path, "w", encoding="utf-8") as f:
                json.dump(pools_list, f, indent=4, sort_keys=True)
            rich.print(f"Exported {pools_response.total_entries} pool(s) to {args.file}")
        else:
            # For non-json formats, print the pools directly to console
            rich.print(pools_list)
    except Exception as e:
        # Chain the original exception so the root cause survives in tracebacks.
        raise SystemExit(f"Failed to export pools: {e}") from e
def _import_helper(api_client: Client, filepath: Path):
    """
    Help import pools from the json file.

    :param api_client: API client used to issue the bulk create request.
    :param filepath: Path to a JSON file containing a list of pool objects.
    :return: Tuple of (successful entities, errors) from the bulk create response.
    :raises SystemExit: If the file is not valid JSON, is not a list, or a pool
        entry lacks the mandatory ``name``/``slots`` keys.
    """
    try:
        # Explicit encoding avoids platform-dependent default codecs.
        with open(filepath, encoding="utf-8") as f:
            pools_json = json.load(f)
    except JSONDecodeError as e:
        # Chain the decode error so the root cause survives in tracebacks.
        raise SystemExit(f"Invalid json file: {e}") from e
    if not isinstance(pools_json, list):
        raise SystemExit("Invalid format: Expected a list of pool objects")
    pools_to_update = []
    for pool_config in pools_json:
        # "name" and "slots" are mandatory; the remaining fields have defaults.
        if not isinstance(pool_config, dict) or "name" not in pool_config or "slots" not in pool_config:
            raise SystemExit(f"Invalid pool configuration: {pool_config}")
        pools_to_update.append(
            PoolBody(
                name=pool_config["name"],
                slots=pool_config["slots"],
                description=pool_config.get("description", ""),
                include_deferred=pool_config.get("include_deferred", False),
            )
        )
    bulk_body = BulkBodyPoolBody(
        actions=[
            BulkCreateActionPoolBody(
                action="create",
                entities=pools_to_update,
                # FAIL on existing pools so an import never silently overwrites.
                action_on_existence=BulkActionOnExistence.FAIL,
            )
        ]
    )
    result = api_client.pools.bulk(pools=bulk_body)
    # Return the successful and failed entities directly from the response
    return result.create.success, result.create.errors
| {
"repo_id": "apache/airflow",
"file_path": "airflow-ctl/src/airflowctl/ctl/commands/pool_command.py",
"license": "Apache License 2.0",
"lines": 105,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-ctl/tests/airflow_ctl/ctl/commands/test_pool_command.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Tests for pool commands."""
from __future__ import annotations
import json
from unittest import mock
import pytest
from airflowctl.api.client import Client
from airflowctl.api.datamodels.generated import (
BulkActionOnExistence,
BulkBodyPoolBody,
BulkCreateActionPoolBody,
)
from airflowctl.ctl.commands import pool_command
@pytest.fixture
def mock_client():
    """Yield a MagicMock standing in for the API client the commands obtain via get_client."""
    with mock.patch("airflowctl.api.client.get_client") as patched_get_client:
        fake_client = mock.MagicMock(spec=Client)
        patched_get_client.return_value.__enter__.return_value = fake_client
        yield fake_client
class TestPoolImportCommand:
    """Test cases for pool import command."""
    def test_import_missing_file(self, mock_client, tmp_path):
        """Test import with missing file."""
        non_existent = tmp_path / "non_existent.json"
        with pytest.raises(SystemExit, match=f"Missing pools file {non_existent}"):
            pool_command.import_(mock.MagicMock(file=non_existent))
    def test_import_invalid_json(self, mock_client, tmp_path):
        """Test import with invalid JSON file."""
        invalid_json = tmp_path / "invalid.json"
        invalid_json.write_text("invalid json")
        with pytest.raises(SystemExit, match="Invalid json file"):
            pool_command.import_(mock.MagicMock(file=invalid_json))
    def test_import_invalid_pool_config(self, mock_client, tmp_path):
        """Test import with invalid pool configuration."""
        invalid_pool = tmp_path / "invalid_pool.json"
        # Valid JSON, but missing the mandatory "name"/"slots" keys.
        invalid_pool.write_text(json.dumps([{"invalid": "config"}]))
        with pytest.raises(SystemExit, match="Invalid pool configuration: {'invalid': 'config'}"):
            pool_command.import_(mock.MagicMock(file=invalid_pool))
    def test_import_success(self, mock_client, tmp_path, capsys):
        """Test successful pool import."""
        pools_file = tmp_path / "pools.json"
        pools_data = [
            {
                "name": "test_pool",
                "slots": 1,
                "description": "Test pool",
                "include_deferred": True,
            }
        ]
        pools_file.write_text(json.dumps(pools_data))
        # Mock the bulk response with the correct structure
        # (the command reads result.create.success / result.create.errors).
        mock_response = mock.MagicMock()
        mock_response.success = ["test_pool"]
        mock_response.errors = []
        mock_bulk_builder = mock.MagicMock()
        mock_bulk_builder.create = mock_response
        mock_client.pools.bulk.return_value = mock_bulk_builder
        pool_command.import_(mock.MagicMock(file=pools_file))
        # Verify bulk operation was called with correct parameters
        mock_client.pools.bulk.assert_called_once()
        call_args = mock_client.pools.bulk.call_args[1]
        assert isinstance(call_args["pools"], BulkBodyPoolBody)
        assert len(call_args["pools"].actions) == 1
        action = call_args["pools"].actions[0]
        assert isinstance(action, BulkCreateActionPoolBody)
        assert action.action == "create"
        assert action.action_on_existence == BulkActionOnExistence.FAIL
        assert len(action.entities) == 1
        assert action.entities[0].name == "test_pool"
        assert action.entities[0].slots == 1
        assert action.entities[0].description == "Test pool"
        assert action.entities[0].include_deferred is True
        # Update the assertion to match the actual output format
        captured = capsys.readouterr()
        assert str(["test_pool"]) in captured.out
class TestPoolExportCommand:
    """Test cases for pool export command."""
    def test_export_json_to_file(self, mock_client, tmp_path, capsys):
        """Test successful pool export to file with json output."""
        export_file = tmp_path / "export.json"
        # Create a proper pool object with dictionary attributes instead of MagicMock
        pool = {
            "name": "test_pool",
            "slots": 1,
            "description": "Test pool",
            "include_deferred": True,
            "occupied_slots": 0,
            "running_slots": 0,
            "queued_slots": 0,
            "scheduled_slots": 0,
            "open_slots": 1,
            "deferred_slots": 0,
        }
        # Create a mock response with proper dictionary attributes
        # (a throwaway type() class gives real attribute access, not mock auto-attrs).
        mock_pools = mock.MagicMock()
        mock_pools.pools = [type("Pool", (), pool)()]
        mock_pools.total_entries = 1
        mock_client.pools.list.return_value = mock_pools
        pool_command.export(mock.MagicMock(file=export_file, output="json"))
        # Verify the exported file content
        exported_data = json.loads(export_file.read_text())
        assert len(exported_data) == 1
        assert exported_data[0]["name"] == "test_pool"
        assert exported_data[0]["slots"] == 1
        assert exported_data[0]["description"] == "Test pool"
        assert exported_data[0]["include_deferred"] is True
        # Verify output message
        captured = capsys.readouterr()
        expected_output = f"Exported {len(exported_data)} pool(s) to {export_file}"
        assert expected_output in captured.out.replace("\n", "")
    def test_export_non_json_output(self, mock_client, tmp_path, capsys):
        """Test pool export with non-json output format."""
        # Create a proper dictionary structure
        mock_pool = {
            "name": "test_pool",
            "slots": 1,
            "description": "Test pool",
            "include_deferred": True,
            "occupied_slots": 0,
            "running_slots": 0,
            "queued_slots": 0,
            "scheduled_slots": 0,
            "open_slots": 1,
            "deferred_slots": 0,
        }
        # Create a mock response with a proper pools attribute
        mock_pools = mock.MagicMock()
        mock_pools.pools = [mock.MagicMock(**mock_pool)]
        mock_pools.total_entries = 1
        mock_client.pools.list.return_value = mock_pools
        # With a non-json output the file argument is never written to.
        pool_command.export(mock.MagicMock(file=tmp_path / "unused.json", output="table"))
        # Verify console output contains the raw dict
        captured = capsys.readouterr()
        assert "test_pool" in captured.out
        assert "slots" in captured.out
        assert "description" in captured.out
        assert "include_deferred" in captured.out
    def test_export_failure(self, mock_client, tmp_path):
        """Test pool export with API failure."""
        export_file = tmp_path / "export.json"
        mock_client.pools.list.side_effect = Exception("API Error")
        with pytest.raises(SystemExit, match="Failed to export pools: API Error"):
            pool_command.export(mock.MagicMock(file=export_file, output="json"))
| {
"repo_id": "apache/airflow",
"file_path": "airflow-ctl/tests/airflow_ctl/ctl/commands/test_pool_command.py",
"license": "Apache License 2.0",
"lines": 159,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/keycloak/docs/conf.py | # Disable Flake8 because of all the sphinx imports
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Configuration of Providers docs building."""
from __future__ import annotations
import os
os.environ["AIRFLOW_PACKAGE_NAME"] = "apache-airflow-providers-keycloak"
from docs.provider_conf import * # noqa: F403
| {
"repo_id": "apache/airflow",
"file_path": "providers/keycloak/docs/conf.py",
"license": "Apache License 2.0",
"lines": 23,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:helm-tests/tests/helm_tests/apiserver/test_apiserver.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import jmespath
from chart_utils.helm_template_generator import render_chart
class TestAPIServerDeployment:
    """Tests API Server deployment."""
    def test_airflow_2(self):
        """
        API Server only supports Airflow 3.0.0 and later.
        """
        docs = render_chart(
            values={"airflowVersion": "2.11.0"},
            show_only=["templates/api-server/api-server-deployment.yaml"],
        )
        # No rendered document means the template was skipped entirely.
        assert len(docs) == 0
    def test_should_not_create_api_server_configmap_when_lower_than_3(self):
        """
        API Server configmap is only created for Airflow 3.0.0 and later.
        """
        docs = render_chart(
            values={"airflowVersion": "2.11.0"},
            show_only=["templates/configmaps/api-server-configmap.yaml"],
        )
        assert len(docs) == 0
    def test_should_add_annotations_to_api_server_configmap(self):
        """Custom configMapAnnotations end up on the rendered configmap metadata."""
        docs = render_chart(
            values={
                "airflowVersion": "3.0.0",
                "apiServer": {
                    "apiServerConfig": "CSRF_ENABLED = True  # {{ .Release.Name }}",
                    "configMapAnnotations": {"test_annotation": "test_annotation_value"},
                },
            },
            show_only=["templates/configmaps/api-server-configmap.yaml"],
        )
        assert "annotations" in jmespath.search("metadata", docs[0])
        assert jmespath.search("metadata.annotations", docs[0])["test_annotation"] == "test_annotation_value"
    def test_should_add_volume_and_volume_mount_when_exist_api_server_config(self):
        """Providing apiServerConfig wires a configmap-backed volume and a read-only mount."""
        docs = render_chart(
            values={"apiServer": {"apiServerConfig": "CSRF_ENABLED = True"}, "airflowVersion": "3.0.0"},
            show_only=["templates/api-server/api-server-deployment.yaml"],
        )
        assert {
            "name": "api-server-config",
            "configMap": {"name": "release-name-api-server-config"},
        } in jmespath.search("spec.template.spec.volumes", docs[0])
        assert {
            "name": "api-server-config",
            "mountPath": "/opt/airflow/webserver_config.py",
            "subPath": "webserver_config.py",
            "readOnly": True,
        } in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0])
class TestAPIServerJWTSecret:
    """Tests API Server JWT secret."""

    def test_should_add_annotations_to_jwt_secret(self):
        """Custom jwtSecretAnnotations appear on the rendered JWT secret metadata."""
        rendered = render_chart(
            values={"jwtSecretAnnotations": {"test_annotation": "test_annotation_value"}},
            show_only=["templates/secrets/jwt-secret.yaml"],
        )[0]
        metadata = jmespath.search("metadata", rendered)
        assert "annotations" in metadata
        assert metadata["annotations"]["test_annotation"] == "test_annotation_value"
class TestApiSecretKeySecret:
    """Tests api secret key secret."""

    def test_should_add_annotations_to_api_secret_key_secret(self):
        """Custom apiSecretAnnotations appear on the rendered api secret key secret metadata."""
        rendered = render_chart(
            values={
                "airflowVersion": "3.0.0",
                "apiSecretAnnotations": {"test_annotation": "test_annotation_value"},
            },
            show_only=["templates/secrets/api-secret-key-secret.yaml"],
        )[0]
        metadata = jmespath.search("metadata", rendered)
        assert "annotations" in metadata
        assert metadata["annotations"]["test_annotation"] == "test_annotation_value"
class TestApiserverConfigmap:
    """Tests apiserver configmap."""
    def test_no_apiserver_config_configmap_by_default(self):
        """Without ``apiServerConfig`` set, the configmap template renders nothing."""
        docs = render_chart(show_only=["templates/configmaps/api-server-configmap.yaml"])
        assert len(docs) == 0
    def test_no_apiserver_config_configmap_with_configmap_name(self):
        """An externally supplied configmap name suppresses chart-managed configmap creation."""
        docs = render_chart(
            values={
                "apiServer": {
                    "apiServerConfig": "CSRF_ENABLED = True # {{ .Release.Name }}",
                    "apiServerConfigConfigMapName": "my-configmap",
                }
            },
            show_only=["templates/configmaps/api-server-configmap.yaml"],
        )
        # Even with apiServerConfig set, no configmap is created when a name is provided.
        assert len(docs) == 0
    def test_apiserver_with_custom_configmap_name(self):
        """The deployment should reference the user-provided configmap by name."""
        docs = render_chart(
            values={
                "apiServer": {
                    "apiServerConfigConfigMapName": "my-custom-configmap",
                }
            },
            show_only=["templates/api-server/api-server-deployment.yaml"],
        )
        assert (
            jmespath.search("spec.template.spec.volumes[1].configMap.name", docs[0]) == "my-custom-configmap"
        )
    def test_apiserver_config_configmap(self):
        """The rendered configmap templates ``{{ .Release.Name }}`` into the config payload."""
        docs = render_chart(
            values={"apiServer": {"apiServerConfig": "CSRF_ENABLED = True # {{ .Release.Name }}"}},
            show_only=["templates/configmaps/api-server-configmap.yaml"],
        )
        assert docs[0]["kind"] == "ConfigMap"
        assert jmespath.search("metadata.name", docs[0]) == "release-name-api-server-config"
        # Helm templating must have substituted the release name into the config body.
        assert (
            jmespath.search('data."webserver_config.py"', docs[0]).strip()
            == "CSRF_ENABLED = True # release-name"
        )
| {
"repo_id": "apache/airflow",
"file_path": "helm-tests/tests/helm_tests/apiserver/test_apiserver.py",
"license": "Apache License 2.0",
"lines": 129,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:dev/breeze/src/airflow_breeze/commands/workflow_commands.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import os
import re
import sys
import click
from airflow_breeze.commands.common_options import argument_doc_packages
from airflow_breeze.utils.click_utils import BreezeGroup
from airflow_breeze.utils.console import get_console
from airflow_breeze.utils.custom_param_types import BetterChoice
from airflow_breeze.utils.gh_workflow_utils import trigger_workflow_and_monitor
from airflow_breeze.utils.run_utils import run_command
# Mapping from breeze-facing command names to GitHub Actions workflow file names.
WORKFLOW_NAME_MAPS = {
    "publish-docs": "publish-docs-to-s3.yml",
    "airflow-refresh-site": "build.yml",
    "sync-s3-to-github": "s3-to-github.yml",
}
# Repositories targeted by the workflow-run commands.
APACHE_AIRFLOW_REPO = "apache/airflow"
APACHE_AIRFLOW_SITE_REPO = "apache/airflow-site"
APACHE_AIRFLOW_SITE_ARCHIVE_REPO = "apache/airflow-site-archive"
@click.group(cls=BreezeGroup, name="workflow-run", help="Tools to manage Airflow repository workflows ")
def workflow_run_group():
    """Click group hosting the ``breeze workflow-run`` sub-commands."""
    pass
@workflow_run_group.command(name="publish-docs", help="Trigger publish docs to S3 workflow")
@click.option(
"--ref",
help="Git reference tag to checkout to build documentation.",
required=True,
)
@click.option(
"--skip-tag-validation",
help="Skip validation of the tag. Allows to use `main` or commit hash. Use with caution.",
is_flag=True,
)
@click.option(
"--exclude-docs",
help="Comma separated short name list of docs packages to exclude from the publish. (example: apache.druid,google)",
default="no-docs-excluded",
)
@click.option(
"--site-env",
help="S3 bucket to which the documentation will be published.",
default="auto",
type=BetterChoice(["auto", "live", "staging"]),
)
@click.option(
"--skip-write-to-stable-folder",
help="Skip writing to stable folder.",
is_flag=True,
)
@click.option(
"--airflow-version",
help="Override Airflow Version to use for the docs build. "
"If not provided, it will be extracted from the ref. If only base version is provided, it will be "
"set to the same as the base version.",
default=None,
type=str,
)
@click.option(
"--airflow-base-version",
help="Override Airflow Base Version to use for the docs build. "
"If not provided, it will be extracted from the ref. If airflow-version is provided, the "
"base version of the version provided (i.e. stripped pre-/post-/dev- suffixes).",
default=None,
type=str,
)
@click.option(
"--apply-commits",
help="Apply commits before building the docs - for example to patch fixes "
"to the docs (comma separated list of commits). ",
default=None,
type=str,
)
@click.option(
"--workflow-branch",
help="Branch to run the workflow on. Defaults to 'main'.",
default="main",
type=str,
)
@argument_doc_packages
def workflow_run_publish(
    ref: str,
    exclude_docs: str,
    site_env: str,
    skip_tag_validation: bool,
    doc_packages: tuple[str, ...],
    skip_write_to_stable_folder: bool = False,
    airflow_version: str | None = None,
    airflow_base_version: str | None = None,
    apply_commits: str | None = None,
    workflow_branch: str = "main",
):
    """
    Trigger the docs publishing pipeline.

    Runs three workflows in sequence: publish docs to S3 (monitored), refresh
    the airflow-site build (monitored), and sync the S3 docs back to the
    archive repository (fire-and-forget).

    :param ref: git tag (or ref, with --skip-tag-validation) to build docs from.
    :param exclude_docs: comma separated doc package short names to exclude.
    :param site_env: "live", "staging" or "auto" (resolved from the ref pattern).
    :param skip_tag_validation: skip checking that ``ref`` is an existing tag.
    :param doc_packages: doc packages to publish; must be non-empty.
    :param skip_write_to_stable_folder: do not update the "stable" docs folder.
    :param airflow_version: optional override for the Airflow version.
    :param airflow_base_version: optional override for the Airflow base version.
    :param apply_commits: optional comma separated commits to apply before building.
    :param workflow_branch: branch of apache/airflow on which to run the workflow.

    Exits with code 1 on invalid input (no packages, unknown ref, bad version).
    """
    if len(doc_packages) == 0:
        get_console().print(
            "[red]Error: No doc packages provided. Please provide at least one doc package.[/red]",
        )
        sys.exit(1)
    # `gh` prefers GITHUB_TOKEN over credentials stored via `gh auth login`,
    # so warn when the variable is set - it may lack workflow permissions.
    if os.environ.get("GITHUB_TOKEN", ""):
        get_console().print("\n[warning]Your authentication will use GITHUB_TOKEN environment variable.")
        get_console().print(
            "\nThis might not be what you want unless your token has "
            "sufficient permissions to trigger workflows."
        )
        get_console().print(
            "If you remove GITHUB_TOKEN, workflow_run will use the authentication you already "
            "set-up with `gh auth login`.\n"
        )
    get_console().print(
        f"[blue]Validating ref: {ref}[/blue]",
    )
    if not skip_tag_validation:
        # Ask the GitHub API whether the tag exists in apache/airflow.
        tag_result = run_command(
            ["gh", "api", f"repos/apache/airflow/git/refs/tags/{ref}"],
            capture_output=True,
            check=False,
        )
        stdout = tag_result.stdout.decode("utf-8")
        tag_response = json.loads(stdout)
        if not tag_response.get("ref"):
            get_console().print(
                f"[red]Error: Ref {ref} does not exists in repo apache/airflow .[/red]",
            )
            get_console().print("\nYou can add --skip-tag-validation to skip this validation.")
            sys.exit(1)
    get_console().print(
        f"[blue]Triggering workflow {WORKFLOW_NAME_MAPS['publish-docs']}: at {APACHE_AIRFLOW_REPO}[/blue]",
    )
    from packaging.version import InvalidVersion, Version

    if airflow_version:
        try:
            Version(airflow_version)
        except InvalidVersion as e:
            # Fixed: this message used to be a bare (no-op) f-string expression
            # and was never shown to the user before exiting.
            get_console().print(
                f"[red]Invalid version passed as --airflow-version: {airflow_version}[/red]: {e}"
            )
            sys.exit(1)
        get_console().print(
            f"[blue]Using provided Airflow version: {airflow_version}[/blue]",
        )
    if airflow_base_version:
        try:
            Version(airflow_base_version)
        except InvalidVersion as e:
            # Fixed: message was a no-op expression and also reported the wrong
            # variable (airflow_version instead of airflow_base_version).
            get_console().print(
                f"[red]Invalid base version passed as --airflow-base-version: {airflow_base_version}[/red]: {e}"
            )
            sys.exit(1)
        get_console().print(
            f"[blue]Using provided Airflow base version: {airflow_base_version}[/blue]",
        )
    # Derive whichever of version/base-version was not explicitly provided.
    if not airflow_version and airflow_base_version:
        airflow_version = airflow_base_version
    if airflow_version and not airflow_base_version:
        airflow_base_version = Version(airflow_version).base_version
    joined_packages = " ".join(doc_packages)
    # Provider docs also require the umbrella "apache-airflow-providers" package.
    if "providers" in joined_packages and "apache-airflow-providers" not in joined_packages:
        joined_packages = joined_packages + " apache-airflow-providers"
    workflow_fields = {
        "ref": ref,
        "destination": site_env,
        "include-docs": joined_packages,
        "exclude-docs": exclude_docs,
        "skip-write-to-stable-folder": skip_write_to_stable_folder,
        "build-sboms": "true" if "apache-airflow" in doc_packages else "false",
        "apply-commits": apply_commits if apply_commits else "",
    }
    if airflow_version:
        workflow_fields["airflow-version"] = airflow_version
    if airflow_base_version:
        workflow_fields["airflow-base-version"] = airflow_base_version
    trigger_workflow_and_monitor(
        workflow_name=WORKFLOW_NAME_MAPS["publish-docs"],
        repo=APACHE_AIRFLOW_REPO,
        branch=workflow_branch,
        **workflow_fields,
    )
    # "auto" resolves to "live" for release-like refs (ending in X.Y.Z), else "staging".
    if site_env == "auto":
        pattern = re.compile(r"^.*[0-9]+\.[0-9]+\.[0-9]+$")
        if pattern.match(ref):
            site_env = "live"
        else:
            site_env = "staging"
    branch = "main" if site_env == "live" else "staging"
    get_console().print(
        f"[blue]Refreshing site at {APACHE_AIRFLOW_SITE_REPO}[/blue]",
    )
    wf_name = WORKFLOW_NAME_MAPS["airflow-refresh-site"]
    get_console().print(
        f"[blue]Triggering workflow {wf_name}: at {APACHE_AIRFLOW_SITE_REPO}[/blue]",
    )
    trigger_workflow_and_monitor(
        workflow_name=wf_name,
        repo=APACHE_AIRFLOW_SITE_REPO,
        branch=branch,
    )
    get_console().print(
        f"[blue]Refreshing completed workflow {wf_name}: at {APACHE_AIRFLOW_SITE_REPO}[/blue]",
    )
    workflow_fields = {"source": site_env}
    get_console().print(
        f"[blue]Syncing S3 docs to GitHub repository at {APACHE_AIRFLOW_SITE_ARCHIVE_REPO}[/blue]",
    )
    # Fire-and-forget: the archive sync is not monitored.
    trigger_workflow_and_monitor(
        workflow_name=WORKFLOW_NAME_MAPS["sync-s3-to-github"],
        repo=APACHE_AIRFLOW_SITE_ARCHIVE_REPO,
        branch=branch,
        **workflow_fields,
        monitor=False,
    )
| {
"repo_id": "apache/airflow",
"file_path": "dev/breeze/src/airflow_breeze/commands/workflow_commands.py",
"license": "Apache License 2.0",
"lines": 221,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:dev/breeze/src/airflow_breeze/commands/workflow_commands_config.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
# Top-level description of the "workflow-run" command group shown in breeze help.
WORKFLOW_RUN_COMMANDS: dict[str, str | list[str]] = {
    "name": "Airflow github actions workflow commands",
    "commands": ["publish-docs"],
}
# Option groupings used to render the help output of each workflow-run command.
WORKFLOW_RUN_PARAMETERS: dict[str, list[dict[str, str | list[str]]]] = {
    "breeze workflow-run publish-docs": [
        {
            "name": "Select branch or tag to build docs",
            "options": [
                "--ref",
                "--skip-tag-validation",
                "--apply-commits",
                "--workflow-branch",
            ],
        },
        {
            "name": "Optional airflow versions to build.",
            "options": [
                "--airflow-version",
                "--airflow-base-version",
            ],
        },
        {
            "name": "Select docs to exclude and destination",
            "options": [
                "--exclude-docs",
                "--site-env",
                "--skip-write-to-stable-folder",
            ],
        },
    ],
}
| {
"repo_id": "apache/airflow",
"file_path": "dev/breeze/src/airflow_breeze/commands/workflow_commands_config.py",
"license": "Apache License 2.0",
"lines": 49,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:dev/breeze/src/airflow_breeze/utils/gh_workflow_utils.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import re
import subprocess
import sys
import time
from shutil import which
from airflow_breeze.global_constants import MIN_GH_VERSION
from airflow_breeze.utils.console import get_console
from airflow_breeze.utils.run_utils import run_command
def tigger_workflow(workflow_name: str, repo: str, branch: str = "main", **kwargs):
    """
    Trigger a GitHub Actions workflow using the `gh` CLI.

    NOTE(review): the function name has a typo ("tigger"); renaming it would
    break callers, so it is kept as-is here.

    Exits the process with code 1 if the `gh` command fails.

    :param workflow_name: The name of the workflow to trigger.
    :param repo: Workflow repository example: 'apache/airflow'
    :param branch: The branch to run the workflow on.
    :param kwargs: Additional parameters to pass to the workflow.
    """
    command = ["gh", "workflow", "run", workflow_name, "--ref", branch, "--repo", repo]
    # These are the input parameters to workflow
    for key, value_raw in kwargs.items():
        # GH cli requires bool inputs to be converted to string format
        if isinstance(value_raw, bool):
            value = "true" if value_raw else "false"
        else:
            value = value_raw
        command.extend(["-f", f"{key}={value}"])
    get_console().print(f"[blue]Running command: {' '.join(command)}[/blue]")
    result = run_command(command, capture_output=True, check=False)
    if result.returncode != 0:
        # NOTE(review): result.stderr is presumably bytes here (stdout is
        # decoded by callers elsewhere) - confirm and decode for readability.
        get_console().print(f"[red]Error running workflow: {result.stderr}[/red]")
        sys.exit(1)
    # Wait for a few seconds to start the workflow run
    time.sleep(5)
def make_sure_gh_is_installed():
    """Abort with exit code 1 unless a sufficiently recent `gh` CLI is available on PATH."""
    if which("gh") is None:
        get_console().print(
            "[red]Error! The `gh` tool is not installed.[/]\n\n"
            "[yellow]You need to install `gh` tool (see https://github.com/cli/cli) and "
            "run `gh auth login` to connect your repo to GitHub."
        )
        sys.exit(1)
    version_output = subprocess.check_output(["gh", "version"]).decode("utf-8")
    version_match = re.search(r"gh version (\d+\.\d+\.\d+)", version_output)
    # Guard clause: bail out early when the version string cannot be parsed.
    if not version_match:
        get_console().print(
            "[red]Error! Could not determine the version of the `gh` tool. Please ensure it is installed correctly.[/]"
        )
        sys.exit(1)
    from packaging.version import Version

    if Version(version_match.group(1)) < Version(MIN_GH_VERSION):
        get_console().print(
            f"[red]Error! The `gh` tool version is too old. "
            f"Please upgrade to at least version {MIN_GH_VERSION}[/]"
        )
        sys.exit(1)
def get_workflow_run_id(workflow_name: str, repo: str) -> int:
    """
    Get the latest workflow run ID for a given workflow name and repository.

    Exits the process with code 1 if the `gh` command fails or no runs exist.

    NOTE(review): "latest run" is assumed to be the run just triggered by the
    caller; a concurrently triggered run could be picked up instead - confirm.

    :param workflow_name: The name of the workflow to check.
    :param repo: The repository in the format 'owner/repo'.
    """
    make_sure_gh_is_installed()
    # Ask gh for only the most recent run's database ID, as JSON.
    command = [
        "gh",
        "run",
        "list",
        "--workflow",
        workflow_name,
        "--repo",
        repo,
        "--limit",
        "1",
        "--json",
        "databaseId",
    ]
    result = run_command(command, capture_output=True, check=False)
    if result.returncode != 0:
        get_console().print(f"[red]Error fetching workflow run ID: {result.stderr}[/red]")
        sys.exit(1)
    runs_data = result.stdout.strip()
    if not runs_data:
        get_console().print("[red]No workflow runs found.[/red]")
        sys.exit(1)
    run_id = json.loads(runs_data)[0].get("databaseId")
    get_console().print(
        f"[blue]Running workflow {workflow_name} at https://github.com/{repo}/actions/runs/{run_id}[/blue]",
    )
    return run_id
def get_workflow_run_info(run_id: str, repo: str, fields: str) -> dict:
    """
    Fetch selected fields of a single workflow run as a dict.

    Exits the process with code 1 if the `gh` command fails.

    :param run_id: The ID of the workflow run to inspect.
    :param repo: Workflow repository example: 'apache/airflow'
    :param fields: Comma-separated fields to retrieve, e.g. "status,conclusion,name,jobs"
    """
    make_sure_gh_is_installed()
    view_command = ["gh", "run", "view", run_id, "--json", fields, "--repo", repo]
    view_result = run_command(view_command, capture_output=True, check=False)
    if view_result.returncode != 0:
        get_console().print(f"[red]Error fetching workflow run status: {view_result.stderr}[/red]")
        sys.exit(1)
    return json.loads(view_result.stdout.strip())
def monitor_workflow_run(run_id: str, repo: str):
    """
    Monitor the status of a workflow run until it completes.

    Polls the run every 30 seconds, printing each job's progress. Exits the
    process with code 1 if the run concludes with "failure". Conclusions other
    than "success"/"failure" (e.g. cancellation) end the loop silently.

    :param run_id: The ID of the workflow run to monitor.
    :param repo: Workflow repository example: 'apache/airflow'
    """
    # Job names already reported as completed, so each is printed green only once.
    completed_jobs = []
    while True:
        jobs_data = get_workflow_run_info(run_id, repo, "jobs")
        for job in jobs_data.get("jobs", []):
            name = job["name"]
            status = job["status"]
            conclusion = job["conclusion"]
            # In-progress jobs are re-printed (yellow) on every poll iteration.
            if name not in completed_jobs and status != "completed":
                get_console().print(
                    f"[yellow]- Job: {name} | Status: {status} | Conclusion: {conclusion}[/yellow]"
                )
                continue
            # Newly completed job: print once (green) and remember it.
            if name not in completed_jobs:
                get_console().print(
                    f"[green]- Job: {name} | Status: {status} | Conclusion: {conclusion}[/green]"
                )
                completed_jobs.append(name)
        # Separate query for the overall run state (not per-job).
        workflow_run_status_conclusion = get_workflow_run_info(run_id, repo, "status,conclusion,name")
        status = workflow_run_status_conclusion.get("status")
        conclusion = workflow_run_status_conclusion.get("conclusion")
        name = workflow_run_status_conclusion.get("name")
        if status == "completed":
            if conclusion == "success":
                get_console().print(f"[green]Workflow {name} run {run_id} completed successfully.[/green]")
            elif conclusion == "failure":
                get_console().print(
                    f"[red]Workflow {name} run {run_id} failed, see for more info: https://github.com/{repo}/actions/runs/{run_id}[/red]"
                )
                sys.exit(1)
            break
        # Check status of jobs every 30 seconds
        time.sleep(30)
def trigger_workflow_and_monitor(
    workflow_name: str, repo: str, branch: str = "main", monitor=True, **workflow_fields
):
    """
    Trigger a GitHub Actions workflow and optionally monitor it to completion.

    :param workflow_name: workflow file name to trigger (e.g. "build.yml").
    :param repo: Workflow repository example: 'apache/airflow'
    :param branch: branch to run the workflow on.
    :param monitor: when False, return right after triggering without polling.
    :param workflow_fields: input parameters forwarded to the workflow.
    """
    make_sure_gh_is_installed()
    # Delegates to the (typo-named) tigger_workflow helper defined above.
    tigger_workflow(
        workflow_name=workflow_name,
        repo=repo,
        branch=branch,
        **workflow_fields,
    )
    # Assumes the run just triggered is the latest one for this workflow.
    workflow_run_id = get_workflow_run_id(
        workflow_name=workflow_name,
        repo=repo,
    )
    get_console().print(
        f"[blue]Workflow run ID: {workflow_run_id}[/blue]",
    )
    if not monitor:
        return
    monitor_workflow_run(
        run_id=str(workflow_run_id),
        repo=repo,
    )
| {
"repo_id": "apache/airflow",
"file_path": "dev/breeze/src/airflow_breeze/utils/gh_workflow_utils.py",
"license": "Apache License 2.0",
"lines": 182,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/src/airflow/api_fastapi/core_api/services/public/config.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import warnings
from fastapi import HTTPException, status
from fastapi.responses import Response
from airflow.api_fastapi.common.types import Mimetype
from airflow.api_fastapi.core_api.datamodels.config import Config
from airflow.configuration import conf
def _check_expose_config() -> bool:
    """
    Ensure the configuration may be exposed through the API.

    The deprecated value ``non-sensitive-only`` for ``[api] expose_config`` is
    treated as ``true`` (with a deprecation warning), since sensitive values
    are always masked regardless of the setting.

    :raises HTTPException: 403 when ``[api] expose_config`` is disabled.
    :return: whether sensitive values may be displayed - always ``False``.
        (The previous implementation initialized a ``bool | None`` variable and
        assigned ``False`` on both branches; this makes the invariant explicit.)
    """
    if conf.get("api", "expose_config").lower() == "non-sensitive-only":
        expose_config = True
        warnings.warn(
            "The value 'non-sensitive-only' for [api] expose_config is deprecated. "
            "Use 'true' instead; sensitive configuration values are always masked.",
            DeprecationWarning,
            stacklevel=2,
        )
    else:
        expose_config = conf.getboolean("api", "expose_config")
    if not expose_config:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Your Airflow administrator chose not to expose the configuration, most likely for security reasons.",
        )
    # Sensitive configuration values are always masked in API responses.
    return False
def _response_based_on_accept(accept: Mimetype, config: Config):
    """Return a plain-text Response when the client asked for text, otherwise the Config model unchanged."""
    if accept == Mimetype.TEXT:
        return Response(content=config.text_format, media_type=Mimetype.TEXT)
    return config
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/core_api/services/public/config.py",
"license": "Apache License 2.0",
"lines": 47,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/src/airflow/api_fastapi/core_api/services/public/providers.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import re
from airflow.api_fastapi.core_api.datamodels.providers import ProviderResponse
from airflow.providers_manager import ProviderInfo
def _remove_rst_syntax(value: str) -> str:
return re.sub("[`_<>]", "", value.strip(" \n."))
def _provider_mapper(provider: ProviderInfo) -> ProviderResponse:
    """Map a ProviderInfo entry onto the public ProviderResponse model, cleaning RST markup from the description."""
    return ProviderResponse(
        package_name=provider.data["package-name"],
        description=_remove_rst_syntax(provider.data["description"]),
        version=provider.version,
        documentation_url=provider.data["documentation-url"],
    )
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/core_api/services/public/providers.py",
"license": "Apache License 2.0",
"lines": 29,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:helm-tests/tests/helm_tests/security/test_fernetkey_secret.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import base64
import jmespath
from chart_utils.helm_template_generator import render_chart
from cryptography.fernet import Fernet
class TestFernetKeySecret:
    """Tests fernet key secret."""
    def test_should_add_annotations_to_fernetkey_secret(self):
        """A user-provided fernet key and annotations should be rendered into a valid secret."""
        # Create a Fernet key
        fernet_key_provided = Fernet.generate_key().decode()
        docs = render_chart(
            values={
                "fernetKey": fernet_key_provided,
                "fernetKeySecretAnnotations": {"test_annotation": "test_annotation_value"},
            },
            show_only=["templates/secrets/fernetkey-secret.yaml"],
        )[0]
        assert "annotations" in jmespath.search("metadata", docs)
        assert jmespath.search("metadata.annotations", docs)["test_annotation"] == "test_annotation_value"
        # Extract the base64 encoded fernet key from the secret
        fernet_key_b64 = jmespath.search('data."fernet-key"', docs).strip('"')
        fernet_key = base64.b64decode(fernet_key_b64).decode()
        # Verify the key is valid by creating a Fernet instance
        Fernet(fernet_key.encode())  # Raise: Fernet key must be 32 url-safe base64-encoded bytes.
    def test_should_generate_valid_fernetkey_secret(self):
        """Test that a valid Fernet key is generated."""
        docs = render_chart(
            values={},  # No fernetKey provided
            show_only=["templates/secrets/fernetkey-secret.yaml"],
        )[0]
        # Extract the base64 encoded fernet key from the secret
        fernet_key_b64 = jmespath.search('data."fernet-key"', docs).strip('"')
        fernet_key = base64.b64decode(fernet_key_b64).decode()
        # Verify the key is valid by creating a Fernet instance
        Fernet(fernet_key.encode())  # Raise: Fernet key must be 32 url-safe base64-encoded bytes.
| {
"repo_id": "apache/airflow",
"file_path": "helm-tests/tests/helm_tests/security/test_fernetkey_secret.py",
"license": "Apache License 2.0",
"lines": 51,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:scripts/in_container/run_capture_airflowctl_help.py | #!/usr/bin/env python3
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import hashlib
import os
import pty
import subprocess
import sys
from pathlib import Path
from airflowctl import __file__ as AIRFLOW_CTL_SRC_PATH
from rich.console import Console
sys.path.insert(0, str(Path(__file__).parent.resolve()))
# Paths derived from the installed airflowctl package location.
AIRFLOW_CTL_ROOT_PATH = Path(AIRFLOW_CTL_SRC_PATH).parents[2]
AIRFLOW_CTL_SOURCES_PATH = AIRFLOW_CTL_ROOT_PATH / "src"
# NOTE(review): duplicate of the sys.path.insert above - one of the two is redundant.
sys.path.insert(0, str(Path(__file__).parent.resolve()))  # make sure common_prek_utils is imported
AIRFLOWCTL_IMAGES_PATH = AIRFLOW_CTL_ROOT_PATH / "docs" / "images"
# NOTE(review): HASH_FILE appears unused - the regenerate function builds its
# own hash_file path from AIRFLOWCTL_IMAGES_PATH. Confirm and remove if so.
HASH_FILE = AIRFLOW_CTL_ROOT_PATH / "docs" / "images" / "command_hashes.txt"
# Top-level airflowctl commands whose help screens are captured as SVGs.
COMMANDS = [
    "",  # for `airflowctl -h`, main help
    "assets",
    "auth",
    "backfill",
    "config",
    "connections",
    "dags",
    "dagrun",
    "jobs",
    "pools",
    "providers",
    "variables",
    "version",
]
# Nested sub-commands that also get a captured help screen.
SUBCOMMANDS = [
    "auth login",
]
# Fixed-width, non-interactive console so captured output is deterministic.
console = Console(color_system="standard", force_terminal=True, width=200, force_interactive=False)
# Get new hashes
def get_airflowctl_command_hash_dict(commands):
    """Return a mapping of command name -> MD5 hash of its `-h` help output.

    The empty-string command is stored under the key "main" (the top-level help).
    """
    hash_dict = {}
    env = os.environ.copy()
    env["CI"] = "true"  # Set CI environment variable to ensure consistent behavior
    env["COLUMNS"] = "80"
    for command in commands:
        console.print(f"[bright_blue]Getting hash for command: {command}[/]")
        run_command = command if command != "main" else ""
        # NOTE(review): shell=True with an interpolated path - fine for trusted
        # local paths, but shell=False with an argv list would be safer.
        output = subprocess.check_output(
            [f"python {AIRFLOW_CTL_SOURCES_PATH}/airflowctl/__main__.py {run_command} -h"],
            shell=True,
            text=True,
            env=env,
        )
        help_text = output.strip()
        # MD5 is used only as a change detector here, not for security.
        hash_dict[command if command != "" else "main"] = hashlib.md5(help_text.encode("utf-8")).hexdigest()
    return hash_dict
def regenerate_help_images_for_all_airflowctl_commands(commands: list[str], skip_hash_check: bool) -> int:
    """Regenerate help-screen SVGs for airflowctl commands whose help output changed.

    Compares MD5 hashes of each command's help text against the stored hash
    file; only changed (or, with skip_hash_check, all) commands get their SVG
    re-rendered via `airflowctl ... --preview`. Always returns 0 on success.

    :param commands: raw command strings ("" means the top-level help).
    :param skip_hash_check: force regeneration of every command's SVG.
    """
    hash_file = AIRFLOWCTL_IMAGES_PATH / "command_hashes.txt"
    os.makedirs(AIRFLOWCTL_IMAGES_PATH, exist_ok=True)
    env = os.environ.copy()
    env["TERM"] = "xterm-256color"
    env["COLUMNS"] = "75"
    old_hash_dict = {}
    new_hash_dict = {}
    if not skip_hash_check:
        # Load old hashes if present
        if hash_file.exists():
            for line in hash_file.read_text().splitlines():
                if line.strip():
                    cmd, hash_val = line.split(":", 1)
                    old_hash_dict[cmd] = hash_val
        new_hash_dict = get_airflowctl_command_hash_dict(commands)
    # Check for changes
    changed_commands = []
    for command_raw in commands:
        # "" (top-level help) is normalized to the key "main".
        command = command_raw or "main"
        console.print(f"[bright_blue]Checking command: {command}[/]", end="")
        if skip_hash_check:
            console.print("[yellow] forced generation")
            changed_commands.append(command)
        elif old_hash_dict.get(command) != new_hash_dict[command]:
            console.print("[yellow] has changed")
            changed_commands.append(command)
        else:
            console.print("[green] has not changed")
    if not changed_commands:
        console.print("[bright_blue]The hash dumps old/new are the same. Returning with return code 0.")
        return 0
    # Generate SVGs for changed commands
    for command in changed_commands:
        path = (AIRFLOWCTL_IMAGES_PATH / f"output_{command.replace(' ', '_')}.svg").as_posix()
        run_command = command if command != "main" else ""
        # Update environment and use pty.spawn to allocate a pseudo-TTY for proper terminal rendering
        original_env = os.environ.copy()
        os.environ.update(env)
        try:
            cmd_args = ["airflowctl"] + (run_command.split() if run_command else []) + ["--preview", path]
            exit_code = pty.spawn(cmd_args)
            if exit_code != 0:
                raise subprocess.CalledProcessError(exit_code, f"airflowctl {run_command} --preview {path}")
            console.print(f"[bright_blue]Generated SVG for command: {command}")
        finally:
            # Restore original environment
            os.environ.clear()
            os.environ.update(original_env)
    # Write new hashes
    # NOTE(review): with skip_hash_check=True, new_hash_dict is empty, so the
    # hash file is truncated to empty - confirm this is intended.
    with open(hash_file, "w") as f:
        for cmd, hash_val in new_hash_dict.items():
            f.write(f"{cmd}:{hash_val}\n")
    console.print("[info]New hash of airflowctl commands written")
    return 0
# Script entry: parse ad-hoc flags from sys.argv, then regenerate help SVGs.
_skip_hash_check = False
if "--skip-hash-check" in sys.argv:
    _skip_hash_check = True
    console.print("[bright_blue]Skipping hash check")
    sys.argv.remove("--skip-hash-check")
# Any remaining positional args restrict which commands are processed.
if len(sys.argv) > 1:
    selected_commands = sys.argv[1:]
    console.print(f"[bright_blue]Filtering commands to: {selected_commands}")
else:
    selected_commands = COMMANDS + SUBCOMMANDS
try:
    # NOTE(review): the function's int return value is ignored; it currently
    # only ever returns 0, so success implicitly exits 0.
    regenerate_help_images_for_all_airflowctl_commands(selected_commands, _skip_hash_check)
except Exception as e:
    # Top-level boundary: report any failure and exit non-zero.
    print(f"Error: {e}")
    sys.exit(1)
| {
"repo_id": "apache/airflow",
"file_path": "scripts/in_container/run_capture_airflowctl_help.py",
"license": "Apache License 2.0",
"lines": 140,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/tests/unit/timetables/test_once_timetable.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
import pytest
import time_machine
from airflow._shared.timezones import timezone
from airflow.timetables.base import DagRunInfo, TimeRestriction
from airflow.timetables.simple import OnceTimetable
# Wall-clock time frozen for the test via time_machine.
FROZEN_NOW = timezone.coerce_datetime(datetime.datetime(2025, 3, 4, 5, 6, 7, 8))
# Simulates a previously completed @once run one day in the past.
PREVIOUS_INFO = DagRunInfo.exact(FROZEN_NOW - datetime.timedelta(days=1))
@pytest.mark.parametrize(
    ("prev_info", "end_date", "expected_next_info"),
    [
        (None, None, DagRunInfo.exact(FROZEN_NOW)),
        (None, FROZEN_NOW + datetime.timedelta(days=1), DagRunInfo.exact(FROZEN_NOW)),
        (None, FROZEN_NOW - datetime.timedelta(days=1), None),
        (PREVIOUS_INFO, None, None),
        (PREVIOUS_INFO, FROZEN_NOW + datetime.timedelta(days=1), None),
        (PREVIOUS_INFO, FROZEN_NOW - datetime.timedelta(days=1), None),
    ],
)
@pytest.mark.parametrize("catchup", [True, False])  # Irrelevant for @once.
@time_machine.travel(FROZEN_NOW)
def test_no_start_date_means_now(catchup, prev_info, end_date, expected_next_info):
    """With no start date, @once schedules exactly one run at "now" - and never again after it ran."""
    timetable = OnceTimetable()
    next_info = timetable.next_dagrun_info(
        last_automated_data_interval=prev_info,
        restriction=TimeRestriction(earliest=None, latest=end_date, catchup=catchup),
    )
    assert next_info == expected_next_info
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/timetables/test_once_timetable.py",
"license": "Apache License 2.0",
"lines": 46,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/edge3/src/airflow/providers/edge3/cli/signalling.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
import os
import signal
import sys
from pathlib import Path
import psutil
from lockfile.pidlockfile import (
read_pid_from_pidfile,
remove_existing_pidfile,
write_pid_to_pidfile as write_pid,
)
from airflow.utils import cli as cli_utils
from airflow.utils.platform import IS_WINDOWS
EDGE_WORKER_PROCESS_NAME = "edge-worker"
logger = logging.getLogger(__name__)
def _status_signal() -> signal.Signals:
if IS_WINDOWS:
return signal.SIGBREAK # type: ignore[attr-defined]
return signal.SIGUSR2
SIG_STATUS = _status_signal()
def write_pid_to_pidfile(pid_file_path: str):
"""Write PIDs for Edge Workers to disk, handling existing PID files."""
if Path(pid_file_path).exists():
# Handle existing PID files on disk
logger.info("An existing PID file has been found: %s.", pid_file_path)
pid_stored_in_pid_file = read_pid_from_pidfile(pid_file_path)
if os.getpid() == pid_stored_in_pid_file:
raise SystemExit("A PID file has already been written")
# PID file was written by dead or already running instance
if psutil.pid_exists(pid_stored_in_pid_file):
# case 1: another instance uses the same path for its PID file
raise SystemExit(
f"The PID file {pid_file_path} contains the PID of another running process. "
"Configuration issue: edge worker instance must use different PID file paths!"
)
# case 2: previous instance crashed without cleaning up its PID file
logger.warning("PID file is orphaned. Cleaning up.")
remove_existing_pidfile(pid_file_path)
logger.debug("PID file written to %s.", pid_file_path)
write_pid(pid_file_path)
def pid_file_path(pid_file: str | None) -> str:
return cli_utils.setup_locations(process=EDGE_WORKER_PROCESS_NAME, pid=pid_file)[0]
def get_pid(pid_file: str | None) -> int:
pid = read_pid_from_pidfile(pid_file_path(pid_file))
if not pid:
logger.warning("Could not find PID of worker.")
sys.exit(1)
return pid
def status_file_path(pid_file: str | None) -> str:
return cli_utils.setup_locations(process=EDGE_WORKER_PROCESS_NAME, pid=pid_file)[1]
def maintenance_marker_file_path(pid_file: str | None) -> str:
return cli_utils.setup_locations(process=EDGE_WORKER_PROCESS_NAME, pid=pid_file)[1][:-4] + ".in"
| {
"repo_id": "apache/airflow",
"file_path": "providers/edge3/src/airflow/providers/edge3/cli/signalling.py",
"license": "Apache License 2.0",
"lines": 69,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/edge3/src/airflow/providers/edge3/cli/worker.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
import os
import signal
import sys
import traceback
from asyncio import Task, create_task, get_running_loop, sleep
from datetime import datetime
from functools import cache
from http import HTTPStatus
from multiprocessing import Process, Queue
from pathlib import Path
from typing import TYPE_CHECKING
import anyio
from aiofiles import open as aio_open
from aiohttp import ClientResponseError
from lockfile.pidlockfile import remove_existing_pidfile
from airflow import __version__ as airflow_version
from airflow.configuration import conf
from airflow.providers.common.compat.sdk import timezone
from airflow.providers.edge3 import __version__ as edge_provider_version
from airflow.providers.edge3.cli.api_client import (
jobs_fetch,
jobs_set_state,
logs_push,
worker_register,
worker_set_state,
)
from airflow.providers.edge3.cli.dataclasses import Job, MaintenanceMarker, WorkerStatus
from airflow.providers.edge3.cli.signalling import (
SIG_STATUS,
maintenance_marker_file_path,
status_file_path,
write_pid_to_pidfile,
)
from airflow.providers.edge3.models.edge_worker import (
EdgeWorkerDuplicateException,
EdgeWorkerState,
EdgeWorkerVersionException,
)
from airflow.utils.net import getfqdn
from airflow.utils.state import TaskInstanceState
if TYPE_CHECKING:
from airflow.executors.workloads import ExecuteTask
logger = logging.getLogger(__name__)
base_log_folder = conf.get("logging", "base_log_folder", fallback="NOT AVAILABLE")
push_logs = conf.getboolean("edge", "push_logs")
push_log_chunk_size = conf.getint("edge", "push_log_chunk_size")
if sys.platform == "darwin":
setproctitle = lambda title: logger.debug("Mac OS detected, skipping setproctitle")
else:
from setproctitle import setproctitle
def _edge_hostname() -> str:
"""Get the hostname of the edge worker that should be reported by tasks."""
return os.environ.get("HOSTNAME", getfqdn())
@cache
def _execution_api_server_url() -> str:
"""Get the execution api server url from config or environment."""
api_url = conf.get("edge", "api_url")
execution_api_server_url = conf.get("core", "execution_api_server_url", fallback="")
if not execution_api_server_url and api_url:
# Derive execution api url from edge api url as fallback
execution_api_server_url = api_url.replace("edge_worker/v1/rpcapi", "execution")
logger.info("Using execution api server url: %s", execution_api_server_url)
return execution_api_server_url
class EdgeWorker:
"""Runner instance which executes the Edge Worker."""
jobs: list[Job] = []
"""List of jobs that the worker is running currently."""
drain: bool = False
"""Flag if job processing should be completed and no new jobs fetched for a graceful stop/shutdown."""
maintenance_mode: bool = False
"""Flag if job processing should be completed and no new jobs fetched for maintenance mode. """
maintenance_comments: str | None = None
"""Comments for maintenance mode."""
background_tasks: set[Task] = set()
def __init__(
self,
pid_file_path: str,
hostname: str,
queues: list[str] | None,
concurrency: int,
job_poll_interval: int,
heartbeat_interval: int,
daemon: bool = False,
):
self.pid_file_path = pid_file_path
self.job_poll_interval = job_poll_interval
self.hb_interval = heartbeat_interval
self.hostname = hostname
self.queues = queues
self.concurrency = concurrency
self.daemon = daemon
@property
def free_concurrency(self) -> int:
"""Calculate the free concurrency of the worker."""
used_concurrency = sum(job.edge_job.concurrency_slots for job in self.jobs)
return self.concurrency - used_concurrency
def signal_status(self):
marker_path = Path(maintenance_marker_file_path(None))
if marker_path.exists():
request = MaintenanceMarker.from_json(marker_path.read_text())
logger.info("Requested to set maintenance mode to %s", request.maintenance)
self.maintenance_mode = request.maintenance == "on"
if self.maintenance_mode and request.comments:
logger.info("Comments: %s", request.comments)
self.maintenance_comments = request.comments
marker_path.unlink()
# send heartbeat immediately to update state
task = get_running_loop().create_task(self.heartbeat(self.maintenance_comments))
self.background_tasks.add(task)
task.add_done_callback(self.background_tasks.discard)
else:
logger.info("Request to get status of Edge Worker received.")
status_path = Path(status_file_path(None))
status_path.write_text(
WorkerStatus(
job_count=len(self.jobs),
jobs=[job.edge_job.key for job in self.jobs],
state=self._get_state(),
maintenance=self.maintenance_mode,
maintenance_comments=self.maintenance_comments,
drain=self.drain,
).json
)
def signal_drain(self):
self.drain = True
logger.info("Request to shut down Edge Worker received, waiting for jobs to complete.")
def shutdown_handler(self):
self.drain = True
msg = "SIGTERM received. Sending SIGTERM to all jobs and quit"
logger.info(msg)
for job in self.jobs:
if job.process.pid:
os.setpgid(job.process.pid, 0)
os.kill(job.process.pid, signal.SIGTERM)
def _get_sysinfo(self) -> dict:
"""Produce the sysinfo from worker to post to central site."""
return {
"airflow_version": airflow_version,
"edge_provider_version": edge_provider_version,
"concurrency": self.concurrency,
"free_concurrency": self.free_concurrency,
}
def _get_state(self) -> EdgeWorkerState:
"""State of the Edge Worker."""
if self.jobs:
if self.drain:
return EdgeWorkerState.TERMINATING
if self.maintenance_mode:
return EdgeWorkerState.MAINTENANCE_PENDING
return EdgeWorkerState.RUNNING
if self.drain:
if self.maintenance_mode:
return EdgeWorkerState.OFFLINE_MAINTENANCE
return EdgeWorkerState.OFFLINE
if self.maintenance_mode:
return EdgeWorkerState.MAINTENANCE_MODE
return EdgeWorkerState.IDLE
def _run_job_via_supervisor(self, workload: ExecuteTask, results_queue: Queue) -> int:
from airflow.sdk.execution_time.supervisor import supervise
# Ignore ctrl-c in this process -- we don't want to kill _this_ one. we let tasks run to completion
os.setpgrp()
logger.info("Worker starting up pid=%d", os.getpid())
ti = workload.ti
setproctitle(
"airflow edge supervisor: "
f"dag_id={ti.dag_id} task_id={ti.task_id} run_id={ti.run_id} map_index={ti.map_index} "
f"try_number={ti.try_number}"
)
try:
supervise(
# This is the "wrong" ti type, but it duck types the same. TODO: Create a protocol for this.
# Same like in airflow/executors/local_executor.py:_execute_work()
ti=ti, # type: ignore[arg-type]
dag_rel_path=workload.dag_rel_path,
bundle_info=workload.bundle_info,
token=workload.token,
server=_execution_api_server_url(),
log_path=workload.log_path,
)
return 0
except Exception as e:
logger.exception("Task execution failed")
results_queue.put(e)
return 1
def _launch_job(self, workload: ExecuteTask) -> tuple[Process, Queue[Exception]]:
# Improvement: Use frozen GC to prevent child process from copying unnecessary memory
# See _spawn_workers_with_gc_freeze() in airflow-core/src/airflow/executors/local_executor.py
results_queue: Queue[Exception] = Queue()
process = Process(
target=self._run_job_via_supervisor,
kwargs={"workload": workload, "results_queue": results_queue},
)
process.start()
return process, results_queue
async def _push_logs_in_chunks(self, job: Job):
aio_logfile = anyio.Path(job.logfile)
if push_logs and await aio_logfile.exists() and (await aio_logfile.stat()).st_size > job.logsize:
async with aio_open(job.logfile, mode="rb") as logf:
await logf.seek(job.logsize, os.SEEK_SET)
read_data = await logf.read()
job.logsize += len(read_data)
# backslashreplace to keep not decoded characters and not raising exception
# replace null with question mark to fix issue during DB push
log_data = read_data.decode(errors="backslashreplace").replace("\x00", "\ufffd")
while True:
chunk_data = log_data[:push_log_chunk_size]
log_data = log_data[push_log_chunk_size:]
if not chunk_data:
break
await logs_push(
task=job.edge_job.key,
log_chunk_time=timezone.utcnow(),
log_chunk_data=chunk_data,
)
async def start(self):
"""Start the execution in a loop until terminated."""
try:
await worker_register(self.hostname, EdgeWorkerState.STARTING, self.queues, self._get_sysinfo())
except EdgeWorkerVersionException as e:
logger.info("Version mismatch of Edge worker and Core. Shutting down worker.")
raise SystemExit(str(e))
except EdgeWorkerDuplicateException as e:
logger.error(str(e))
raise SystemExit(str(e))
except ClientResponseError as e:
# Note: Method not allowed is raised by FastAPI if the API is not enabled (not 404)
if e.status in {HTTPStatus.NOT_FOUND, HTTPStatus.METHOD_NOT_ALLOWED}:
raise SystemExit(
"Error: API endpoint is not ready, please set [edge] api_enabled=True. Or check if the URL is correct to your deployment."
)
raise SystemExit(str(e))
if not self.daemon:
write_pid_to_pidfile(self.pid_file_path)
loop = get_running_loop()
loop.add_signal_handler(signal.SIGINT, self.signal_drain)
loop.add_signal_handler(SIG_STATUS, self.signal_status)
loop.add_signal_handler(signal.SIGTERM, self.shutdown_handler)
setproctitle(f"airflow edge worker: {self.hostname}")
os.environ["HOSTNAME"] = self.hostname
os.environ["AIRFLOW__CORE__HOSTNAME_CALLABLE"] = f"{_edge_hostname.__module__}._edge_hostname"
try:
await self.loop()
logger.info("Quitting worker, signal being offline.")
try:
await worker_set_state(
self.hostname,
EdgeWorkerState.OFFLINE_MAINTENANCE if self.maintenance_mode else EdgeWorkerState.OFFLINE,
0,
self.queues,
self._get_sysinfo(),
)
except EdgeWorkerVersionException:
logger.info("Version mismatch of Edge worker and Core. Quitting worker anyway.")
finally:
if not self.daemon:
remove_existing_pidfile(self.pid_file_path)
async def loop(self):
"""Run a loop of scheduling and monitoring tasks."""
last_hb = datetime.now()
worker_state_changed = True # force heartbeat at start
previous_jobs = 0
while not self.drain or self.jobs:
if (
self.drain
or datetime.now().timestamp() - last_hb.timestamp() > self.hb_interval
or worker_state_changed # send heartbeat immediately if the state is different in db
or previous_jobs != len(self.jobs) # when number of jobs changes
):
worker_state_changed = await self.heartbeat()
last_hb = datetime.now()
previous_jobs = len(self.jobs)
if self.maintenance_mode:
logger.info("in maintenance mode%s", f", {len(self.jobs)} draining jobs" if self.jobs else "")
elif not self.drain and self.free_concurrency > 0:
task = create_task(self.fetch_and_run_job())
self.background_tasks.add(task)
task.add_done_callback(self.background_tasks.discard)
else:
logger.info("%i %s running", len(self.jobs), "job is" if len(self.jobs) == 1 else "jobs are")
await self.interruptible_sleep()
async def fetch_and_run_job(self) -> None:
"""Fetch, start and monitor a new job."""
logger.debug("Attempting to fetch a new job...")
edge_job = await jobs_fetch(self.hostname, self.queues, self.free_concurrency)
if not edge_job:
logger.info(
"No new job to process%s",
f", {len(self.jobs)} still running" if self.jobs else "",
)
return
logger.info("Received job: %s", edge_job.identifier)
workload: ExecuteTask = edge_job.command
process, results_queue = self._launch_job(workload)
if TYPE_CHECKING:
assert workload.log_path # We need to assume this is defined in here
logfile = Path(base_log_folder, workload.log_path)
job = Job(edge_job, process, logfile)
self.jobs.append(job)
await jobs_set_state(edge_job.key, TaskInstanceState.RUNNING)
# As we got one job, directly fetch another one if possible
if self.free_concurrency > 0:
task = create_task(self.fetch_and_run_job())
self.background_tasks.add(task)
task.add_done_callback(self.background_tasks.discard)
while job.is_running:
await self._push_logs_in_chunks(job)
for _ in range(0, self.job_poll_interval * 10):
await sleep(0.1)
if not job.is_running:
break
await self._push_logs_in_chunks(job)
self.jobs.remove(job)
if job.is_success:
logger.info("Job completed: %s", job.edge_job.identifier)
await jobs_set_state(job.edge_job.key, TaskInstanceState.SUCCESS)
else:
if results_queue.empty():
ex_txt = "(Unknown error, no exception details available)"
else:
ex = results_queue.get()
ex_txt = "\n".join(traceback.format_exception(ex))
logger.error("Job failed: %s with:\n%s", job.edge_job.identifier, ex_txt)
# Push it upwards to logs for better diagnostic as well
await logs_push(
task=job.edge_job.key,
log_chunk_time=timezone.utcnow(),
log_chunk_data=f"Error starting job:\n{ex_txt}",
)
await jobs_set_state(job.edge_job.key, TaskInstanceState.FAILED)
async def heartbeat(self, new_maintenance_comments: str | None = None) -> bool:
"""Report liveness state of worker to central site with stats."""
state = self._get_state()
sysinfo = self._get_sysinfo()
worker_state_changed: bool = False
try:
worker_info = await worker_set_state(
self.hostname,
state,
len(self.jobs),
self.queues,
sysinfo,
new_maintenance_comments,
)
self.queues = worker_info.queues
if worker_info.state == EdgeWorkerState.MAINTENANCE_REQUEST:
logger.info("Maintenance mode requested!")
self.maintenance_mode = True
elif (
worker_info.state in [EdgeWorkerState.IDLE, EdgeWorkerState.RUNNING] and self.maintenance_mode
):
logger.info("Exit Maintenance mode requested!")
self.maintenance_mode = False
if self.maintenance_mode:
self.maintenance_comments = worker_info.maintenance_comments
else:
self.maintenance_comments = None
if worker_info.state == EdgeWorkerState.SHUTDOWN_REQUEST:
logger.info("Shutdown requested!")
self.drain = True
worker_state_changed = worker_info.state != state
except EdgeWorkerVersionException:
logger.info("Version mismatch of Edge worker and Core. Shutting down worker.")
self.drain = True
return worker_state_changed
async def interruptible_sleep(self):
"""Sleeps but stops sleeping if drain is made or some job completed."""
drain_before_sleep = self.drain
jobcount_before_sleep = len(self.jobs)
for _ in range(0, self.job_poll_interval * 10):
await sleep(0.1)
if drain_before_sleep != self.drain or len(self.jobs) < jobcount_before_sleep:
return
| {
"repo_id": "apache/airflow",
"file_path": "providers/edge3/src/airflow/providers/edge3/cli/worker.py",
"license": "Apache License 2.0",
"lines": 390,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/edge3/tests/unit/edge3/cli/test_signalling.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
import os
from unittest.mock import patch
import pytest
from airflow.providers.edge3.cli.signalling import write_pid_to_pidfile
def test_write_pid_to_pidfile_success(caplog, tmp_path):
with caplog.at_level(logging.DEBUG):
pid_file_path = tmp_path / "file.pid"
write_pid_to_pidfile(pid_file_path)
assert pid_file_path.exists()
assert "An existing PID file has been found" not in caplog.text
def test_write_pid_to_pidfile_called_twice(tmp_path):
pid_file_path = tmp_path / "file.pid"
write_pid_to_pidfile(pid_file_path)
with pytest.raises(SystemExit, match=r"A PID file has already been written"):
write_pid_to_pidfile(pid_file_path)
assert pid_file_path.exists()
def test_write_pid_to_pidfile_created_by_other_instance(tmp_path):
# write a PID file with the PID of this process
pid_file_path = tmp_path / "file.pid"
write_pid_to_pidfile(pid_file_path)
# write a PID file, but set the current PID to 0
with patch("os.getpid", return_value=0):
with pytest.raises(SystemExit, match=r"contains the PID of another running process"):
write_pid_to_pidfile(pid_file_path)
def test_write_pid_to_pidfile_created_by_crashed_instance(tmp_path):
# write a PID file with process ID 0
with patch("os.getpid", return_value=0):
pid_file_path = tmp_path / "file.pid"
write_pid_to_pidfile(pid_file_path)
assert pid_file_path.read_text().strip() == "0"
# write a PID file with the current process ID, call should not raise an exception
write_pid_to_pidfile(pid_file_path)
assert str(os.getpid()) == pid_file_path.read_text().strip()
| {
"repo_id": "apache/airflow",
"file_path": "providers/edge3/tests/unit/edge3/cli/test_signalling.py",
"license": "Apache License 2.0",
"lines": 51,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/edge3/tests/unit/edge3/cli/test_worker.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import contextlib
import importlib
import json
from datetime import datetime
from io import StringIO
from multiprocessing import Process, Queue
from pathlib import Path
from unittest import mock
from unittest.mock import call, patch
import anyio
import pytest
import time_machine
from aiohttp import ClientResponseError, RequestInfo
from yarl import URL
from airflow.cli import cli_parser
from airflow.providers.common.compat.sdk import timezone
from airflow.providers.edge3.cli import edge_command, worker as worker_module
from airflow.providers.edge3.cli.dataclasses import Job
from airflow.providers.edge3.cli.worker import EdgeWorker, _execution_api_server_url
from airflow.providers.edge3.models.edge_worker import (
EdgeWorkerModel,
EdgeWorkerState,
EdgeWorkerVersionException,
)
from airflow.providers.edge3.worker_api.datamodels import (
EdgeJobFetched,
WorkerRegistrationReturn,
WorkerSetStateReturn,
)
from tests_common.test_utils.config import conf_vars
from tests_common.test_utils.version_compat import AIRFLOW_V_3_2_PLUS
pytest.importorskip("pydantic", minversion="2.0.0")
pytestmark = [pytest.mark.asyncio]
MOCK_COMMAND = {
"token": "mock",
"ti": {
"id": "4d828a62-a417-4936-a7a6-2b3fabacecab",
"task_id": "mock",
"dag_id": "mock",
"run_id": "mock",
"try_number": 1,
"dag_version_id": "01234567-89ab-cdef-0123-456789abcdef",
"pool_slots": 1,
"queue": "default",
"priority_weight": 1,
"start_date": "2023-01-01T00:00:00+00:00",
"map_index": -1,
},
"dag_rel_path": "mock.py",
"log_path": "mock.log",
"bundle_info": {"name": "hello", "version": "abc"},
}
class _MockProcess(Process):
def __init__(self, returncode=None):
self.generated_returncode = None
def poll(self):
pass
@property
def returncode(self):
return self.generated_returncode
class TestEdgeWorker:
@pytest.fixture(autouse=True)
def setup_parser(self):
if AIRFLOW_V_3_2_PLUS:
importlib.reload(cli_parser)
self.parser = cli_parser.get_parser()
else:
with patch(
"airflow.executors.executor_loader.ExecutorLoader.get_executor_names",
) as mock_get_executor_names:
mock_get_executor_names.return_value = [
mock.MagicMock(
name="EdgeExecutor", module_path="airflow.providers.edge3.executors.EdgeExecutor"
)
]
importlib.reload(cli_parser)
self.parser = cli_parser.get_parser()
@pytest.fixture
def mock_joblist(self, tmp_path: Path) -> list[Job]:
logfile = tmp_path / "file.log"
logfile.touch()
return [
Job(
edge_job=EdgeJobFetched(
dag_id="test",
task_id="test1",
run_id="test",
map_index=-1,
try_number=1,
concurrency_slots=1,
command=MOCK_COMMAND, # type: ignore[arg-type]
),
process=_MockProcess(),
logfile=logfile,
logsize=0,
),
]
@pytest.fixture
def worker_with_job(self, tmp_path: Path, mock_joblist: list[Job]) -> EdgeWorker:
test_worker = EdgeWorker(str(tmp_path / "mock.pid"), "mock", None, 8, 5, 5)
EdgeWorker.jobs = mock_joblist
return test_worker
@pytest.fixture
def mock_edgeworker(self) -> EdgeWorkerModel:
test_edgeworker = EdgeWorkerModel(
worker_name="test_edge_worker",
state="idle",
queues=["default"],
)
return test_edgeworker
@pytest.mark.parametrize(
("configs", "expected_url"),
[
(
{("edge", "api_url"): "https://api-host/edge_worker/v1/rpcapi"},
"https://api-host/execution",
),
(
{("edge", "api_url"): "https://api:1234/subpath/edge_worker/v1/rpcapi"},
"https://api:1234/subpath/execution",
),
(
{
("edge", "api_url"): "https://api-endpoint",
("core", "execution_api_server_url"): "https://other-endpoint",
},
"https://other-endpoint",
),
],
)
def test_execution_api_server_url(
self,
configs,
expected_url,
):
with conf_vars(configs):
_execution_api_server_url.cache_clear()
url = _execution_api_server_url()
assert url == expected_url
@patch("airflow.sdk.execution_time.supervisor.supervise")
@pytest.mark.asyncio
async def test_supervise_launch(
self,
mock_supervise,
worker_with_job: EdgeWorker,
):
edge_job = worker_with_job.jobs.pop().edge_job
q = mock.MagicMock()
result = worker_with_job._run_job_via_supervisor(edge_job.command, q)
assert result == 0
q.put.assert_not_called()
@patch("airflow.sdk.execution_time.supervisor.supervise")
@pytest.mark.asyncio
async def test_supervise_launch_fail(
self,
mock_supervise,
worker_with_job: EdgeWorker,
):
mock_supervise.side_effect = Exception("Supervise failed")
edge_job = worker_with_job.jobs.pop().edge_job
q = mock.MagicMock()
result = worker_with_job._run_job_via_supervisor(edge_job.command, q)
assert result == 1
q.put.assert_called_once()
@patch("airflow.providers.edge3.cli.worker.jobs_fetch")
@patch("airflow.providers.edge3.cli.worker.EdgeWorker._launch_job", return_value=(Process(), Queue()))
@pytest.mark.asyncio
async def test_fetch_and_run_job_no_job(
self,
mock_launch_job,
mock_jobs_fetch,
worker_with_job: EdgeWorker,
):
mock_jobs_fetch.return_value = None
await worker_with_job.fetch_and_run_job()
mock_jobs_fetch.assert_called_once()
assert len(worker_with_job.jobs) == 1 # no new job added
mock_launch_job.assert_not_called()
@patch("airflow.providers.edge3.cli.worker.jobs_fetch")
@patch("airflow.providers.edge3.cli.worker.EdgeWorker._launch_job", return_value=(Process(), Queue()))
@patch("airflow.providers.edge3.cli.worker.jobs_set_state")
@patch("airflow.providers.edge3.cli.worker.EdgeWorker._push_logs_in_chunks")
@patch("airflow.providers.edge3.cli.worker.logs_push")
@patch.object(Job, "is_running", property(lambda _: False))
@patch.object(Job, "is_success", property(lambda _: True))
@pytest.mark.asyncio
async def test_fetch_and_run_job_one_job(
self,
mock_logs_push,
mock_push_log_chunks,
mock_jobs_set_state,
mock_launch_job,
mock_jobs_fetch,
worker_with_job: EdgeWorker,
):
mock_jobs_fetch.side_effect = [
EdgeJobFetched(
dag_id="test",
task_id="test",
run_id="test",
map_index=-1,
try_number=1,
concurrency_slots=1,
command=MOCK_COMMAND, # type: ignore[arg-type]
),
None,
]
worker_with_job.concurrency = 1 # only one job at a time
assert worker_with_job.free_concurrency == 0
await worker_with_job.fetch_and_run_job()
mock_jobs_fetch.assert_called_once()
mock_launch_job.assert_called_once()
assert mock_jobs_set_state.call_count == 2
mock_push_log_chunks.assert_called_once()
assert len(worker_with_job.jobs) == 1 # no new job added (was removed at the end...)
mock_logs_push.assert_not_called()
@patch("airflow.providers.edge3.cli.worker.jobs_fetch")
@patch("airflow.providers.edge3.cli.worker.EdgeWorker._launch_job", return_value=(Process(), Queue()))
@patch("airflow.providers.edge3.cli.worker.jobs_set_state")
@patch("airflow.providers.edge3.cli.worker.EdgeWorker._push_logs_in_chunks")
@patch("airflow.providers.edge3.cli.worker.logs_push")
@patch.object(Job, "is_running", property(lambda _: False))
@patch.object(Job, "is_success", property(lambda _: False))
@patch("traceback.format_exception", return_value=[])
@pytest.mark.asyncio
async def test_fetch_and_run_job_one_job_fail(
self,
mock_traceback,
mock_logs_push,
mock_push_log_chunks,
mock_jobs_set_state,
mock_launch_job,
mock_jobs_fetch,
worker_with_job: EdgeWorker,
):
mock_jobs_fetch.side_effect = [
EdgeJobFetched(
dag_id="test",
task_id="test",
run_id="test",
map_index=-1,
try_number=1,
concurrency_slots=1,
command=MOCK_COMMAND, # type: ignore[arg-type]
),
None,
]
worker_with_job.concurrency = 1 # only one job at a time
assert worker_with_job.free_concurrency == 0
await worker_with_job.fetch_and_run_job()
mock_jobs_fetch.assert_called_once()
mock_launch_job.assert_called_once()
assert mock_jobs_set_state.call_count == 2
mock_push_log_chunks.assert_called_once()
assert len(worker_with_job.jobs) == 1 # no new job added (was removed at the end...)
mock_logs_push.assert_called_once()
@time_machine.travel(datetime.now(), tick=False)
@patch("airflow.providers.edge3.cli.worker.logs_push")
@pytest.mark.asyncio
async def test_push_logs_in_chunks(self, mock_logs_push, worker_with_job: EdgeWorker):
job = EdgeWorker.jobs[0]
await anyio.Path(job.logfile).write_text("some log content")
with conf_vars({("edge", "api_url"): "https://invalid-api-test-endpoint"}):
await worker_with_job._push_logs_in_chunks(job)
assert len(EdgeWorker.jobs) == 1
mock_logs_push.assert_called_once_with(
task=job.edge_job.key, log_chunk_time=timezone.utcnow(), log_chunk_data="some log content"
)
@time_machine.travel(datetime.now(), tick=False)
@patch("airflow.providers.edge3.cli.worker.logs_push")
@pytest.mark.asyncio
async def test_check_running_jobs_log_push_increment(self, mock_logs_push, worker_with_job: EdgeWorker):
job = EdgeWorker.jobs[0]
aio_logfile = anyio.Path(job.logfile)
await aio_logfile.write_text("hello ")
job.logsize = (await aio_logfile.stat()).st_size
await aio_logfile.write_text("hello world")
with conf_vars({("edge", "api_url"): "https://invalid-api-test-endpoint"}):
await worker_with_job._push_logs_in_chunks(job)
assert len(EdgeWorker.jobs) == 1
mock_logs_push.assert_called_once_with(
task=job.edge_job.key, log_chunk_time=timezone.utcnow(), log_chunk_data="world"
)
@time_machine.travel(datetime.now(), tick=False)
@patch("airflow.providers.edge3.cli.worker.logs_push")
@patch.object(worker_module, "push_log_chunk_size", 4)
@pytest.mark.asyncio
async def test_check_running_jobs_log_push_chunks(self, mock_logs_push, worker_with_job: EdgeWorker):
    """Logs larger than push_log_chunk_size are split; undecodable bytes are escaped."""
    job = EdgeWorker.jobs[0]
    # "ü" encoded as latin-1 is the single byte 0xFC, which is not valid UTF-8.
    job.logfile.write_bytes("log1log2ülog3".encode("latin-1"))
    with conf_vars({("edge", "api_url"): "https://invalid-api-test-endpoint"}):
        await worker_with_job._push_logs_in_chunks(job)
    assert len(EdgeWorker.jobs) == 1
    calls = mock_logs_push.call_args_list
    # chunk size 4 -> "log1", "log2", the escaped invalid byte, "log3"
    assert len(calls) == 4
    assert calls[0] == call(
        task=job.edge_job.key, log_chunk_time=timezone.utcnow(), log_chunk_data="log1"
    )
    assert calls[1] == call(
        task=job.edge_job.key, log_chunk_time=timezone.utcnow(), log_chunk_data="log2"
    )
    # The invalid 0xFC byte is pushed backslash-escaped as the literal text "\xfc".
    assert calls[2] == call(
        task=job.edge_job.key, log_chunk_time=timezone.utcnow(), log_chunk_data="\\xfc"
    )
    assert calls[3] == call(
        task=job.edge_job.key, log_chunk_time=timezone.utcnow(), log_chunk_data="log3"
    )
@pytest.mark.parametrize(
    ("drain", "maintenance_mode", "jobs", "expected_state"),
    [
        pytest.param(False, False, False, EdgeWorkerState.IDLE, id="idle"),
        pytest.param(False, False, True, EdgeWorkerState.RUNNING, id="running_jobs"),
        pytest.param(False, True, False, EdgeWorkerState.MAINTENANCE_MODE, id="maintenance_no_job"),
        pytest.param(
            False, True, True, EdgeWorkerState.MAINTENANCE_PENDING, id="maintenance_running_jobs"
        ),
        pytest.param(True, False, False, EdgeWorkerState.OFFLINE, id="shut_down"),
        pytest.param(True, False, True, EdgeWorkerState.TERMINATING, id="terminating"),
        pytest.param(True, True, False, EdgeWorkerState.OFFLINE_MAINTENANCE, id="offline_maintenance"),
        pytest.param(True, True, True, EdgeWorkerState.TERMINATING, id="maintenance_shut_down"),
    ],
)
@patch("airflow.providers.edge3.cli.worker.worker_set_state")
async def test_heartbeat(
    self, mock_set_state, drain, maintenance_mode, jobs, expected_state, worker_with_job: EdgeWorker
):
    """Heartbeat reports the state derived from the drain/maintenance/jobs flags."""
    if not jobs:
        EdgeWorker.jobs = []
    EdgeWorker.drain = drain
    EdgeWorker.maintenance_mode = maintenance_mode
    mock_set_state.return_value = WorkerSetStateReturn(
        state=EdgeWorkerState.RUNNING, queues=["queue1", "queue2"]
    )
    with conf_vars({("edge", "api_url"): "https://invalid-api-test-endpoint"}):
        await worker_with_job.heartbeat()
    # The second positional argument of worker_set_state is the reported worker state.
    assert mock_set_state.call_args.args[1] == expected_state
    # The queue list returned by the API must be taken over by the worker.
    queue_list = worker_with_job.queues or []
    assert len(queue_list) == 2
    assert "queue1" in (queue_list)
    assert "queue2" in (queue_list)
@patch("airflow.providers.edge3.cli.worker.worker_set_state")
async def test_version_mismatch(self, mock_set_state, worker_with_job):
    """A version mismatch reported by the API must drain (shut down) the worker."""
    mock_set_state.side_effect = EdgeWorkerVersionException("")
    await worker_with_job.heartbeat()
    assert worker_with_job.drain
@pytest.mark.parametrize(
    "http_error",
    [
        pytest.param(404, id="HTTP 404 Not Found"),
        pytest.param(405, id="HTTP 405 Method Not Allowed"),
    ],
)
@patch("airflow.providers.edge3.cli.worker.worker_register")
async def test_start_missing_apiserver(
    self, mock_register_worker, http_error, worker_with_job: EdgeWorker
):
    """404/405 during registration is treated as 'Edge API not deployed' and exits cleanly."""
    mock_register_worker.side_effect = ClientResponseError(
        request_info=RequestInfo(url=URL("mock.com"), method="GET", headers=None),  # type:ignore[arg-type]
        message=f"Something with {http_error}: Means API is not active",
        status=http_error,
        history=(),
    )
    with pytest.raises(SystemExit, match=r"API endpoint is not ready"):
        await worker_with_job.start()
@patch("airflow.providers.edge3.cli.worker.worker_register")
async def test_start_server_error(self, mock_register_worker, worker_with_job: EdgeWorker):
    """Registration errors other than 404/405 propagate with the original message."""
    mock_register_worker.side_effect = ClientResponseError(
        request_info=RequestInfo(url=URL("mock.com"), method="GET", headers=None),  # type:ignore[arg-type]
        message="Something other error not FourhundretFour",
        status=500,
        history=(),
    )
    with pytest.raises(SystemExit, match=r"Something other"):
        await worker_with_job.start()
@patch("airflow.providers.edge3.cli.worker.worker_register")
@patch("airflow.providers.edge3.cli.worker.EdgeWorker.loop")
@patch("airflow.providers.edge3.cli.worker.worker_set_state")
async def test_start_and_run_one(
    self, mock_set_state, mock_loop, mock_register, worker_with_job: EdgeWorker
):
    """start() registers once, runs the main loop once and reports state on shutdown."""

    def stop_running():
        # Make the first loop iteration also the last one.
        worker_with_job.drain = True
        worker_with_job.jobs = []

    mock_loop.side_effect = stop_running
    mock_register.side_effect = [WorkerRegistrationReturn(last_update=datetime.now())]
    await worker_with_job.start()
    mock_register.assert_called_once()
    mock_loop.assert_called_once()
    # The final state is reported exactly once when the worker shuts down.
    assert mock_set_state.call_count == 1
def test_get_sysinfo(self, worker_with_job: EdgeWorker):
    """_get_sysinfo reports version fields and the configured concurrency."""
    expected_concurrency = 8
    worker_with_job.concurrency = expected_concurrency
    sysinfo = worker_with_job._get_sysinfo()
    # All reported fields must be present, and concurrency must echo the setting.
    for field in ("airflow_version", "edge_provider_version", "concurrency"):
        assert field in sysinfo
    assert sysinfo["concurrency"] == expected_concurrency
@pytest.mark.db_test
def test_list_edge_workers(self, mock_edgeworker: EdgeWorkerModel):
    """The list-workers CLI command renders every expected column as JSON."""
    args = self.parser.parse_args(["edge", "list-workers", "--output", "json"])
    with contextlib.redirect_stdout(StringIO()) as temp_stdout:
        with (
            # Skip the real database connectivity check.
            patch(
                "airflow.providers.edge3.cli.edge_command._check_valid_db_connection",
            ),
            # Serve a single fake worker instead of querying the DB.
            patch(
                "airflow.providers.edge3.models.edge_worker.get_registered_edge_hosts",
                return_value=[mock_edgeworker],
            ),
        ):
            edge_command.list_edge_workers(args)
        out = temp_stdout.getvalue()
    edge_workers = json.loads(out)
    # Every column of the table output must be present on each worker record.
    for key in [
        "worker_name",
        "state",
        "queues",
        "jobs_active",
        "concurrency",
        "free_concurrency",
        "maintenance_comment",
    ]:
        assert key in edge_workers[0]
    assert any("test_edge_worker" in h["worker_name"] for h in edge_workers)
| {
"repo_id": "apache/airflow",
"file_path": "providers/edge3/tests/unit/edge3/cli/test_worker.py",
"license": "Apache License 2.0",
"lines": 441,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/amazon/src/airflow/providers/amazon/aws/executors/aws_lambda/docker/app.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import logging
import os
import subprocess
from tempfile import mkdtemp
import boto3
"""
Example Lambda function to execute an Airflow command or workload. Use or modify this code as needed.
"""
# NOTE(review): the string above appears after the imports, so it is a plain no-op
# expression rather than the module docstring (which must be the first statement).
# Consider moving it to the very top of the file.

log = logging.getLogger()
log.setLevel(logging.INFO)

# Get the S3 URI from the environment variable. Set either on the Lambda function or in the
# docker image used for the lambda invocations.
S3_URI = os.environ.get("S3_URI", None)

# Input and output keys
# (keys of the incoming Lambda event payload and of the SQS result message)
TASK_KEY_KEY = "task_key"
COMMAND_KEY = "command"
EXECUTOR_CONFIG_KEY = "executor_config"
RETURN_CODE_KEY = "return_code"
def lambda_handler(event, context):
    """
    Lambda entry point: run the Airflow command from the event and report the result.

    :param event: Lambda event payload; expected to carry the command, task key and
        optionally an executor_config (see the *_KEY module constants).
    :param context: Lambda runtime context object (only logged here).
    """
    log.info("Received event: %s", event)
    log.info("Received context: %s", context)
    command = event.get(COMMAND_KEY)
    task_key = event.get(TASK_KEY_KEY)
    executor_config = event.get(EXECUTOR_CONFIG_KEY, {})  # noqa: F841

    # Any pre-processing or validation of the command or use of the executor_config can be done here or above.

    # Sync dags from s3 to the local dags directory
    if S3_URI:
        fetch_dags_from_s3(S3_URI)

    # This function must be called, it executes the Airflow command and reports to SQS.
    run_and_report(command, task_key)

    # Any post-processing or cleanup can be done here.
def run_and_report(command, task_key):
    """Execute the provided Airflow command or workload and report the result via SQS."""
    # Assume failure until the subprocess actually completes.
    return_code = 1
    try:
        log.info("Starting execution for task: %s", task_key)
        # A string command goes through the shell; a list is executed directly.
        completed = subprocess.run(
            command,
            check=False,
            shell=isinstance(command, str),
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
        )
        return_code = completed.returncode
        log.info("Execution completed for task %s with return code %s", task_key, return_code)
        log.info("Output:")
        log.info("%s", completed.stdout.decode())
    except Exception:
        log.exception("Error executing task %s: ", task_key)

    # Report the outcome to SQS so the executor can update the Airflow task state.
    queue_url = get_queue_url()
    message = json.dumps({TASK_KEY_KEY: task_key, RETURN_CODE_KEY: return_code})
    try:
        get_sqs_client().send_message(QueueUrl=queue_url, MessageBody=message)
        log.info("Sent result to SQS %s", message)
    except Exception:
        log.exception("Failed to send message to SQS for task %s", task_key)
def get_sqs_client():
    """Create an SQS client. Credentials and region are automatically picked up from the environment."""
    # boto3 resolves credentials/region from the Lambda execution role and environment.
    return boto3.client("sqs")
def get_queue_url():
    """
    Get the SQS queue URL from the environment variable.

    Set either on the Lambda function or in the image used for the lambda invocations.
    """
    # The executor-specific variable wins; QUEUE_URL is only consulted when it is unset.
    primary = os.environ.get("AIRFLOW__AWS_LAMBDA_EXECUTOR__QUEUE_URL")
    queue_url = primary if primary is not None else os.environ.get("QUEUE_URL")
    if not queue_url:
        raise RuntimeError(
            "No Queue URL detected (either AIRFLOW__AWS_LAMBDA_EXECUTOR__QUEUE_URL or "
            "QUEUE_URL); Will be unable to send task results. Exiting!"
        )
    return queue_url
def fetch_dags_from_s3(s3_uri):
    """Fetch DAGs from S3 and sync them to the local dags directory."""
    log.info("Fetching DAGs from S3 URI: %s", s3_uri)

    # Use a named temporary directory for the local dags folder, only tmp is writeable in Lambda
    local_dags_dir = mkdtemp(prefix="airflow_dags_")
    log.info("Setting AIRFLOW__CORE__DAGS_FOLDER to: %s", local_dags_dir)
    os.environ["AIRFLOW__CORE__DAGS_FOLDER"] = local_dags_dir

    # S3 URI format s3://bucket-name/path/to/dags/
    uri_parts = s3_uri.split("/")
    bucket_name = uri_parts[2]
    prefix = "/".join(uri_parts[3:])

    bucket = boto3.resource("s3").Bucket(bucket_name)
    for s3_object in bucket.objects.filter(Prefix=prefix):
        object_key = s3_object.key
        if object_key.endswith("/"):
            # Skip directories
            continue
        target_path = os.path.join(local_dags_dir, os.path.basename(object_key))
        log.info("Downloading %s to %s", object_key, target_path)
        bucket.download_file(object_key, target_path)
| {
"repo_id": "apache/airflow",
"file_path": "providers/amazon/src/airflow/providers/amazon/aws/executors/aws_lambda/docker/app.py",
"license": "Apache License 2.0",
"lines": 110,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/amazon/src/airflow/providers/amazon/aws/executors/aws_lambda/lambda_executor.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import time
from collections import deque
from collections.abc import Sequence
from typing import TYPE_CHECKING
# Both exceptions are declared in botocore.exceptions; importing them from
# boto3.session / botocore.utils is wrong (NoCredentialsError is not exported
# by boto3.session at all, and ClientError's canonical home is botocore.exceptions).
from botocore.exceptions import ClientError, NoCredentialsError
from airflow.executors.base_executor import BaseExecutor
from airflow.models.taskinstancekey import TaskInstanceKey
from airflow.providers.amazon.aws.executors.aws_lambda.utils import (
CONFIG_GROUP_NAME,
INVALID_CREDENTIALS_EXCEPTIONS,
AllLambdaConfigKeys,
CommandType,
LambdaQueuedTask,
)
from airflow.providers.amazon.aws.executors.utils.exponential_backoff_retry import (
calculate_next_attempt_delay,
exponential_backoff_retry,
)
from airflow.providers.amazon.aws.hooks.lambda_function import LambdaHook
from airflow.providers.amazon.aws.hooks.sqs import SqsHook
from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
from airflow.providers.common.compat.sdk import AirflowException, Stats, timezone
if TYPE_CHECKING:
from sqlalchemy.orm import Session
from airflow.executors import workloads
from airflow.models.taskinstance import TaskInstance
class AwsLambdaExecutor(BaseExecutor):
    """
    An Airflow Executor that submits tasks to AWS Lambda asynchronously.

    When execute_async() is called, the executor invokes a specified AWS Lambda function (asynchronously)
    with a payload that includes the task command and a unique task key.

    The Lambda function writes its result directly to an SQS queue, which is then polled by this executor
    to update task state in Airflow.
    """

    # Executor supports Airflow multi-team deployments (per-team configuration via self.conf).
    supports_multi_team: bool = True

    if TYPE_CHECKING and AIRFLOW_V_3_0_PLUS:
        # In the v3 path, we store workloads, not commands as strings.
        # TODO: TaskSDK: move this type change into BaseExecutor
        queued_tasks: dict[TaskInstanceKey, workloads.All]  # type: ignore[assignment]

    def __init__(self, *args, **kwargs):
        """Read the executor configuration and create the boto clients (health unchecked)."""
        super().__init__(*args, **kwargs)
        # Tasks waiting to be invoked on Lambda; retried with backoff on invoke failure.
        self.pending_tasks: deque = deque()
        # Maps serialized task key (the string round-tripped via Lambda/SQS) -> real key.
        self.running_tasks: dict[str, TaskInstanceKey] = {}
        # Check if self has the ExecutorConf set on the self.conf attribute, and if not, set it to the global
        # configuration object. This allows the changes to be backwards compatible with older versions of
        # Airflow.
        # Can be removed when minimum supported provider version is equal to the version of core airflow
        # which introduces multi-team configuration.
        if not hasattr(self, "conf"):
            from airflow.providers.common.compat.sdk import conf

            self.conf = conf
        self.lambda_function_name = self.conf.get(CONFIG_GROUP_NAME, AllLambdaConfigKeys.FUNCTION_NAME)
        self.sqs_queue_url = self.conf.get(CONFIG_GROUP_NAME, AllLambdaConfigKeys.QUEUE_URL)
        self.dlq_url = self.conf.get(CONFIG_GROUP_NAME, AllLambdaConfigKeys.DLQ_URL)
        self.qualifier = self.conf.get(CONFIG_GROUP_NAME, AllLambdaConfigKeys.QUALIFIER, fallback=None)
        # Maximum number of retries to invoke Lambda.
        self.max_invoke_attempts = self.conf.get(
            CONFIG_GROUP_NAME,
            AllLambdaConfigKeys.MAX_INVOKE_ATTEMPTS,
        )

        self.attempts_since_last_successful_connection = 0
        self.IS_BOTO_CONNECTION_HEALTHY = False
        # Create the clients now; health is verified later (start()/sync()).
        self.load_connections(check_connection=False)

    def start(self):
        """Call this when the Executor is run for the first time by the scheduler."""
        check_health = self.conf.getboolean(CONFIG_GROUP_NAME, AllLambdaConfigKeys.CHECK_HEALTH_ON_STARTUP)
        if not check_health:
            return
        self.log.info("Starting Lambda Executor and determining health...")
        try:
            self.check_health()
        except AirflowException:
            self.log.error("Stopping the Airflow Scheduler from starting until the issue is resolved.")
            raise

    def check_health(self):
        """
        Check the health of the Lambda and SQS connections.

        For lambda: Use get_function to test if the lambda connection works and the function can be
        described.

        For SQS: Use get_queue_attributes is used as a close analog to describe to test if the SQS
        connection is working.
        """
        self.IS_BOTO_CONNECTION_HEALTHY = False

        def _check_queue(queue_url):
            # Cheap read-only call that fails fast if the queue is missing or unreachable.
            sqs_get_queue_attrs_response = self.sqs_client.get_queue_attributes(
                QueueUrl=queue_url, AttributeNames=["ApproximateNumberOfMessages"]
            )
            approx_num_msgs = sqs_get_queue_attrs_response.get("Attributes").get(
                "ApproximateNumberOfMessages"
            )
            self.log.info(
                "SQS connection is healthy and queue %s is present with %s messages.",
                queue_url,
                approx_num_msgs,
            )

        self.log.info("Checking Lambda and SQS connections")
        try:
            # Check Lambda health
            lambda_get_response = self.lambda_client.get_function(FunctionName=self.lambda_function_name)
            if self.lambda_function_name not in lambda_get_response["Configuration"]["FunctionName"]:
                # NOTE(review): AirflowException does not %-format constructor args, so this
                # message would surface as a tuple — consider an f-string here.
                raise AirflowException("Lambda function %s not found.", self.lambda_function_name)
            self.log.info(
                "Lambda connection is healthy and function %s is present.", self.lambda_function_name
            )

            # Check SQS results queue
            _check_queue(self.sqs_queue_url)
            # Check SQS dead letter queue
            _check_queue(self.dlq_url)

            # If we reach this point, both connections are healthy and all resources are present
            self.IS_BOTO_CONNECTION_HEALTHY = True
        except Exception:
            self.log.exception("Lambda Executor health check failed")
            raise AirflowException(
                "The Lambda executor will not be able to run Airflow tasks until the issue is addressed."
            )

    def load_connections(self, check_connection: bool = True):
        """
        Retrieve the AWS connection via Hooks to leverage the Airflow connection system.

        :param check_connection: If True, check the health of the connection after loading it.
        """
        self.log.info("Loading Connections")
        aws_conn_id = self.conf.get(CONFIG_GROUP_NAME, AllLambdaConfigKeys.AWS_CONN_ID)
        region_name = self.conf.get(CONFIG_GROUP_NAME, AllLambdaConfigKeys.REGION_NAME, fallback=None)
        self.sqs_client = SqsHook(aws_conn_id=aws_conn_id, region_name=region_name).conn
        self.lambda_client = LambdaHook(aws_conn_id=aws_conn_id, region_name=region_name).conn

        # Bookkeeping for the exponential backoff used when connections are unhealthy.
        self.attempts_since_last_successful_connection += 1
        self.last_connection_reload = timezone.utcnow()

        if check_connection:
            self.check_health()
            self.attempts_since_last_successful_connection = 0

    def sync(self):
        """
        Sync the executor with the current state of tasks.

        Check in on currently running tasks and attempt to run any new tasks that have been queued.
        """
        if not self.IS_BOTO_CONNECTION_HEALTHY:
            # Try to re-establish the connection with exponential backoff; bail out of
            # this sync cycle if it is still unhealthy afterwards.
            exponential_backoff_retry(
                self.last_connection_reload,
                self.attempts_since_last_successful_connection,
                self.load_connections,
            )
            if not self.IS_BOTO_CONNECTION_HEALTHY:
                return
        try:
            self.sync_running_tasks()
            self.attempt_task_runs()
        except (ClientError, NoCredentialsError) as error:
            # NOTE(review): NoCredentialsError has no `.response` attribute; if it is the
            # exception actually caught, this lookup would raise AttributeError — confirm.
            error_code = error.response["Error"]["Code"]
            if error_code in INVALID_CREDENTIALS_EXCEPTIONS:
                self.IS_BOTO_CONNECTION_HEALTHY = False
                self.log.warning(
                    "AWS credentials are either missing or expired: %s.\nRetrying connection", error
                )
        except Exception:
            # We catch any and all exceptions because otherwise they would bubble
            # up and kill the scheduler process.
            self.log.exception("An error occurred while syncing tasks")

    def queue_workload(self, workload: workloads.All, session: Session | None) -> None:
        """Accept an ExecuteTask workload from the scheduler and store it for execution."""
        from airflow.executors import workloads

        if not isinstance(workload, workloads.ExecuteTask):
            raise RuntimeError(f"{type(self)} cannot handle workloads of type {type(workload)}")
        ti = workload.ti
        self.queued_tasks[ti.key] = workload

    def _process_workloads(self, workloads: Sequence[workloads.All]) -> None:
        """Move queued ExecuteTask workloads into the pending queue via execute_async."""
        from airflow.executors.workloads import ExecuteTask

        for w in workloads:
            if not isinstance(w, ExecuteTask):
                raise RuntimeError(f"{type(self)} cannot handle workloads of type {type(w)}")
            # The workload is wrapped in a single-element list; execute_async unwraps it.
            command = [w]
            key = w.ti.key
            queue = w.ti.queue
            executor_config = w.ti.executor_config or {}

            del self.queued_tasks[key]
            self.execute_async(key=key, command=command, queue=queue, executor_config=executor_config)  # type: ignore[arg-type]
            self.running.add(key)

    def execute_async(self, key: TaskInstanceKey, command: CommandType, queue=None, executor_config=None):
        """
        Save the task to be executed in the next sync by inserting the commands into a queue.

        :param key: A unique task key (typically a tuple identifying the task instance).
        :param command: The shell command string to execute.
        :param queue: (Unused) to keep the same signature as the base.
        :param executor_config: (Unused) to keep the same signature as the base.
        """
        if len(command) == 1:
            from airflow.executors.workloads import ExecuteTask

            if isinstance(command[0], ExecuteTask):
                workload = command[0]
                ser_input = workload.model_dump_json()
                # Translate the workload into the CLI command the Lambda runtime executes.
                command = [
                    "python",
                    "-m",
                    "airflow.sdk.execution_time.execute_workload",
                    "--json-string",
                    ser_input,
                ]
            else:
                raise RuntimeError(
                    f"LambdaExecutor doesn't know how to handle workload of type: {type(command[0])}"
                )
        # attempt_number starts at 1; next_attempt_time of "now" makes it immediately runnable.
        self.pending_tasks.append(
            LambdaQueuedTask(
                key, command, queue if queue else "", executor_config or {}, 1, timezone.utcnow()
            )
        )

    def attempt_task_runs(self):
        """
        Attempt to run tasks that are queued in the pending_tasks.

        Each task is submitted to AWS Lambda with a payload containing the task key and command.
        The task key is used to track the task's state in Airflow.
        """
        queue_len = len(self.pending_tasks)
        for _ in range(queue_len):
            task_to_run = self.pending_tasks.popleft()
            task_key = task_to_run.key
            cmd = task_to_run.command
            attempt_number = task_to_run.attempt_number
            failure_reasons = []
            # The serialized key round-trips through the Lambda payload and the SQS result.
            ser_task_key = json.dumps(task_key._asdict())
            payload = {
                "task_key": ser_task_key,
                "command": cmd,
                "executor_config": task_to_run.executor_config,
            }
            if timezone.utcnow() < task_to_run.next_attempt_time:
                # Still inside the backoff window; requeue and revisit next sync.
                self.pending_tasks.append(task_to_run)
                continue

            self.log.info("Submitting task %s to Lambda function %s", task_key, self.lambda_function_name)

            try:
                invoke_kwargs = {
                    "FunctionName": self.lambda_function_name,
                    # "Event" = asynchronous invocation; the result arrives later via SQS.
                    "InvocationType": "Event",
                    "Payload": json.dumps(payload),
                }
                if self.qualifier:
                    invoke_kwargs["Qualifier"] = self.qualifier
                response = self.lambda_client.invoke(**invoke_kwargs)
            except NoCredentialsError:
                self.pending_tasks.append(task_to_run)
                raise
            except ClientError as e:
                error_code = e.response["Error"]["Code"]
                if error_code in INVALID_CREDENTIALS_EXCEPTIONS:
                    self.pending_tasks.append(task_to_run)
                    raise
                failure_reasons.append(str(e))
            except Exception as e:
                # Failed to even get a response back from the Boto3 API or something else went
                # wrong. For any possible failure we want to add the exception reasons to the
                # failure list so that it is logged to the user and most importantly the task is
                # added back to the pending list to be retried later.
                failure_reasons.append(str(e))

            if failure_reasons:
                # Make sure the number of attempts does not exceed max invoke attempts
                if int(attempt_number) < int(self.max_invoke_attempts):
                    task_to_run.attempt_number += 1
                    task_to_run.next_attempt_time = timezone.utcnow() + calculate_next_attempt_delay(
                        attempt_number
                    )
                    self.pending_tasks.append(task_to_run)
                else:
                    reasons_str = ", ".join(failure_reasons)
                    self.log.error(
                        "Lambda invoke %s has failed a maximum of %s times. Marking as failed. Reasons: %s",
                        task_key,
                        attempt_number,
                        reasons_str,
                    )
                    self.log_task_event(
                        event="lambda invoke failure",
                        ti_key=task_key,
                        extra=(
                            f"Task could not be queued after {attempt_number} attempts. "
                            f"Marking as failed. Reasons: {reasons_str}"
                        ),
                    )
                    self.fail(task_key)
            else:
                status_code = response.get("StatusCode")
                self.log.info("Invoked Lambda for task %s with status %s", task_key, status_code)
                self.running_tasks[ser_task_key] = task_key
                # Add the serialized task key as the info, this will be assigned on the ti as the external_executor_id
                self.running_state(task_key, ser_task_key)

    def sync_running_tasks(self):
        """
        Poll the SQS queue for messages indicating task completion.

        Each message is expected to contain a JSON payload with 'task_key' and 'return_code'.
        Based on the return code, update the task state accordingly.
        """
        if not len(self.running_tasks):
            self.log.debug("No running tasks to process.")
            return

        self.process_queue(self.sqs_queue_url)
        # Only check the DLQ if there are still unresolved tasks after the main queue.
        if self.dlq_url and self.running_tasks:
            self.process_queue(self.dlq_url)

    def process_queue(self, queue_url: str):
        """
        Poll the SQS queue for messages indicating task completion.

        Each message is expected to contain a JSON payload with 'task_key' and 'return_code'.
        Based on the return code, update the task state accordingly.
        """
        response = self.sqs_client.receive_message(
            QueueUrl=queue_url,
            MaxNumberOfMessages=10,
        )
        # Pagination? Maybe we don't need it. But we don't always delete messages after viewing them so we
        # could possibly accumulate a lot of messages in the queue and get stuck if we don't read bigger
        # chunks and paginate.
        messages = response.get("Messages", [])

        # The keys that we validate in the messages below will be different depending on whether or not
        # the message is from the dead letter queue or the main results queue.
        message_keys = ("return_code", "task_key")
        if messages and queue_url == self.dlq_url:
            self.log.warning("%d messages received from the dead letter queue", len(messages))
            # DLQ messages carry the original invoke payload, hence "command" not "return_code".
            message_keys = ("command", "task_key")

        for message in messages:
            delete_message = False
            receipt_handle = message["ReceiptHandle"]
            try:
                body = json.loads(message["Body"])
            except json.JSONDecodeError:
                self.log.warning(
                    "Received a message from the queue that could not be parsed as JSON: %s",
                    message["Body"],
                )
                delete_message = True
            # If the message is not already marked for deletion, check if it has the required keys.
            if not delete_message and not all(key in body for key in message_keys):
                self.log.warning(
                    "Message is not formatted correctly, %s and/or %s are missing: %s", *message_keys, body
                )
                delete_message = True
            if delete_message:
                self.log.warning("Deleting the message to avoid processing it again.")
                self.sqs_client.delete_message(QueueUrl=queue_url, ReceiptHandle=receipt_handle)
                continue

            return_code = body.get("return_code")
            ser_task_key = body.get("task_key")
            # Fetch the real task key from the running_tasks dict, using the serialized task key.
            try:
                task_key = self.running_tasks[ser_task_key]
            except KeyError:
                self.log.debug(
                    "Received task %s from the queue which is not found in running tasks, it is likely "
                    "from another Lambda Executor sharing this queue or might be a stale message that needs "
                    "deleting manually. Marking the message as visible again.",
                    ser_task_key,
                )
                # Mark task as visible again in SQS so that another executor can pick it up.
                self.sqs_client.change_message_visibility(
                    QueueUrl=queue_url,
                    ReceiptHandle=receipt_handle,
                    VisibilityTimeout=0,
                )
                continue

            if task_key:
                if return_code == 0:
                    self.success(task_key)
                    self.log.info(
                        "Successful Lambda invocation for task %s received from SQS queue.", task_key
                    )
                else:
                    self.fail(task_key)
                    if queue_url == self.dlq_url and return_code is None:
                        # DLQ failure: AWS Lambda service could not complete the invocation after retries.
                        # This indicates a Lambda-level failure (timeout, memory limit, crash, etc.)
                        # where the function was unable to successfully execute to return a result.
                        self.log.error(
                            "DLQ message received: Lambda invocation for task: %s was unable to successfully execute. This likely indicates a Lambda-level failure (timeout, memory limit, crash, etc.).",
                            task_key,
                        )
                    else:
                        # In this case the Lambda likely started but failed at run time since we got a non-zero
                        # return code. We could consider retrying these tasks within the executor, because this _likely_
                        # means the Airflow task did not run to completion, however we can't be sure (maybe the
                        # lambda runtime code has a bug and is returning a non-zero when it actually passed?). So
                        # perhaps not retrying is the safest option.
                        self.log.debug(
                            "Lambda invocation for task: %s completed but the underlying Airflow task has returned a non-zero exit code %s",
                            task_key,
                            return_code,
                        )
                # Remove the task from the tracking mapping.
                self.running_tasks.pop(ser_task_key)

            # Delete the message from the queue.
            self.sqs_client.delete_message(QueueUrl=queue_url, ReceiptHandle=receipt_handle)

    def try_adopt_task_instances(self, tis: Sequence[TaskInstance]) -> Sequence[TaskInstance]:
        """
        Adopt task instances which have an external_executor_id (the serialized task key).

        Anything that is not adopted will be cleared by the scheduler and becomes eligible for re-scheduling.

        :param tis: The task instances to adopt.
        """
        with Stats.timer("lambda_executor.adopt_task_instances.duration"):
            adopted_tis: list[TaskInstance] = []

            if serialized_task_keys := [
                (ti, ti.external_executor_id) for ti in tis if ti.external_executor_id
            ]:
                for ti, ser_task_key in serialized_task_keys:
                    try:
                        task_key = TaskInstanceKey.from_dict(json.loads(ser_task_key))
                    except Exception:
                        # If that task fails to deserialize, we should just skip it.
                        self.log.exception(
                            "Task failed to be adopted because the key could not be deserialized"
                        )
                        continue
                    self.running_tasks[ser_task_key] = task_key
                    adopted_tis.append(ti)

            if adopted_tis:
                tasks = [f"{task} in state {task.state}" for task in adopted_tis]
                task_instance_str = "\n\t".join(tasks)
                self.log.info(
                    "Adopted the following %d tasks from a dead executor:\n\t%s",
                    len(adopted_tis),
                    task_instance_str,
                )

            not_adopted_tis = [ti for ti in tis if ti not in adopted_tis]
            return not_adopted_tis

    def end(self, heartbeat_interval=10):
        """
        End execution. Poll until all outstanding tasks are marked as completed.

        This is a blocking call and async Lambda tasks can not be cancelled, so this will wait until
        all tasks are either completed or the timeout is reached.

        :param heartbeat_interval: The interval in seconds to wait between checks for task completion.
        """
        self.log.info("Received signal to end, waiting for outstanding tasks to finish.")
        # end_wait_timeout of 0 means "wait forever".
        time_to_wait = int(
            self.conf.get(CONFIG_GROUP_NAME, AllLambdaConfigKeys.END_WAIT_TIMEOUT, fallback="0")
        )
        start_time = timezone.utcnow()
        while True:
            if time_to_wait:
                current_time = timezone.utcnow()
                elapsed_time = (current_time - start_time).total_seconds()
                if elapsed_time > time_to_wait:
                    self.log.warning(
                        "Timed out waiting for tasks to finish. Some tasks may not be handled gracefully"
                        " as the executor is force ending due to timeout."
                    )
                    break
            self.sync()
            if not self.running_tasks:
                self.log.info("All tasks completed; executor ending.")
                break
            self.log.info("Waiting for %d task(s) to complete.", len(self.running_tasks))
            time.sleep(heartbeat_interval)

    def terminate(self):
        """Get called when the daemon receives a SIGTERM."""
        self.log.warning("Terminating Lambda executor. In-flight tasks cannot be stopped.")
| {
"repo_id": "apache/airflow",
"file_path": "providers/amazon/src/airflow/providers/amazon/aws/executors/aws_lambda/lambda_executor.py",
"license": "Apache License 2.0",
"lines": 465,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/amazon/src/airflow/providers/amazon/aws/executors/aws_lambda/utils.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
from collections.abc import Sequence
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any
from airflow.providers.amazon.aws.executors.utils.base_config_keys import BaseConfigKeys
if TYPE_CHECKING:
from airflow.models.taskinstancekey import TaskInstanceKey
# Name of the airflow.cfg section holding this executor's settings.
CONFIG_GROUP_NAME = "aws_lambda_executor"

# Boto error codes that indicate missing/expired credentials; the executor reacts
# to these by reloading its connections instead of failing the task outright.
INVALID_CREDENTIALS_EXCEPTIONS = [
    "ExpiredTokenException",
    "InvalidClientTokenId",
    "UnrecognizedClientException",
]
@dataclass
class LambdaQueuedTask:
    """Represents a Lambda task that is queued. The task will be run in the next heartbeat."""

    key: TaskInstanceKey  # unique identifier of the task instance
    command: CommandType  # command (argv sequence) the Lambda function should execute
    queue: str  # Airflow queue name (kept for interface parity; not visibly used by this executor)
    executor_config: ExecutorConfigType  # per-task executor configuration mapping
    attempt_number: int  # number of invoke attempts made so far (starts at 1)
    next_attempt_time: datetime.datetime  # earliest time the next invoke attempt may happen
class InvokeLambdaKwargsConfigKeys(BaseConfigKeys):
    """Config keys loaded which are valid lambda invoke args."""

    # Option names under CONFIG_GROUP_NAME that map onto Lambda invoke parameters.
    FUNCTION_NAME = "function_name"
    QUALIFIER = "function_qualifier"
class AllLambdaConfigKeys(InvokeLambdaKwargsConfigKeys):
"""All config keys which are related to the Lambda Executor."""
AWS_CONN_ID = "conn_id"
CHECK_HEALTH_ON_STARTUP = "check_health_on_startup"
MAX_INVOKE_ATTEMPTS = "max_invoke_attempts"
REGION_NAME = "region_name"
QUEUE_URL = "queue_url"
DLQ_URL = "dead_letter_queue_url"
END_WAIT_TIMEOUT = "end_wait_timeout"
CommandType = Sequence[str]
ExecutorConfigType = dict[str, Any]
| {
"repo_id": "apache/airflow",
"file_path": "providers/amazon/src/airflow/providers/amazon/aws/executors/aws_lambda/utils.py",
"license": "Apache License 2.0",
"lines": 54,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/amazon/tests/unit/amazon/aws/executors/aws_lambda/test_lambda_executor.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime as dt
import json
from unittest import mock
import pytest
from botocore.exceptions import ClientError
from semver import VersionInfo
from airflow.executors.base_executor import BaseExecutor
from airflow.models.taskinstance import TaskInstance
from airflow.models.taskinstancekey import TaskInstanceKey
from airflow.providers.amazon.aws.executors.aws_lambda import lambda_executor
from airflow.providers.amazon.aws.executors.aws_lambda.lambda_executor import AwsLambdaExecutor
from airflow.providers.amazon.aws.executors.aws_lambda.utils import CONFIG_GROUP_NAME, AllLambdaConfigKeys
from airflow.providers.common.compat.sdk import AirflowException
from airflow.utils.state import TaskInstanceState
from airflow.version import version as airflow_version_str
from tests_common.test_utils.compat import timezone
from tests_common.test_utils.config import conf_vars
from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS, AIRFLOW_V_3_1_PLUS
airflow_version = VersionInfo(*map(int, airflow_version_str.split(".")[:3]))
DEFAULT_QUEUE_URL = "queue-url"
DEFAULT_DLQ_URL = "dlq-url"
DEFAULT_FUNCTION_NAME = "function-name"
@pytest.fixture
def set_env_vars():
overrides: dict[tuple[str, str], str] = {
(CONFIG_GROUP_NAME, AllLambdaConfigKeys.AWS_CONN_ID): "aws_default",
(CONFIG_GROUP_NAME, AllLambdaConfigKeys.REGION_NAME): "us-west-1",
(CONFIG_GROUP_NAME, AllLambdaConfigKeys.FUNCTION_NAME): DEFAULT_FUNCTION_NAME,
(CONFIG_GROUP_NAME, AllLambdaConfigKeys.QUEUE_URL): DEFAULT_QUEUE_URL,
(CONFIG_GROUP_NAME, AllLambdaConfigKeys.DLQ_URL): DEFAULT_DLQ_URL,
(CONFIG_GROUP_NAME, AllLambdaConfigKeys.QUALIFIER): "1",
(CONFIG_GROUP_NAME, AllLambdaConfigKeys.MAX_INVOKE_ATTEMPTS): "3",
(CONFIG_GROUP_NAME, AllLambdaConfigKeys.CHECK_HEALTH_ON_STARTUP): "True",
}
with conf_vars(overrides):
yield
@pytest.fixture
def mock_airflow_key():
def _key():
key_mock = mock.Mock()
# Use a "random" value (memory id of the mock obj) so each key serializes uniquely
key_mock._asdict = mock.Mock(return_value={"mock_key": id(key_mock)})
return key_mock
return _key
def _generate_mock_cmd():
return ["airflow", "tasks", "run", "dag_id", "task_id", "run_id", "--local"]
# The following two fixtures look different because no existing test
# cares if they have unique values, so the same value is always used.
@pytest.fixture
def mock_cmd():
return _generate_mock_cmd()
@pytest.fixture
def mock_executor(set_env_vars) -> AwsLambdaExecutor:
"""Mock Lambda to a repeatable starting state.."""
executor = AwsLambdaExecutor()
executor.IS_BOTO_CONNECTION_HEALTHY = True
# Replace boto3 clients with mocks
lambda_mock = mock.Mock(spec=executor.lambda_client)
lambda_mock.invoke.return_value = {"StatusCode": 0, "failures": []}
executor.lambda_client = lambda_mock
sqs_mock = mock.Mock(spec=executor.sqs_client)
sqs_mock.receive_message.return_value = {"Messages": []}
executor.sqs_client = sqs_mock
return executor
class TestAwsLambdaExecutor:
@mock.patch(
"airflow.providers.amazon.aws.executors.aws_lambda.lambda_executor.AwsLambdaExecutor.change_state"
)
def test_execute(self, change_state_mock, mock_airflow_key, mock_executor, mock_cmd):
"""Test execution from end-to-end."""
airflow_key = mock_airflow_key()
ser_airflow_key = json.dumps(airflow_key._asdict())
assert len(mock_executor.pending_tasks) == 0
mock_executor.execute_async(airflow_key, mock_cmd)
assert len(mock_executor.pending_tasks) == 1
mock_executor.attempt_task_runs()
mock_executor.lambda_client.invoke.assert_called_once()
payload = json.loads(mock_executor.lambda_client.invoke.call_args.kwargs["Payload"])
assert payload["executor_config"] == {}
# Task is stored in active worker.
assert len(mock_executor.running_tasks) == 1
assert json.dumps(airflow_key._asdict()) in mock_executor.running_tasks
change_state_mock.assert_called_once_with(
airflow_key, TaskInstanceState.RUNNING, ser_airflow_key, remove_running=False
)
@pytest.mark.skipif(not AIRFLOW_V_3_0_PLUS, reason="Test requires Airflow 3+")
@mock.patch(
"airflow.providers.amazon.aws.executors.aws_lambda.lambda_executor.AwsLambdaExecutor.change_state"
)
def test_task_sdk(self, change_state_mock, mock_airflow_key, mock_executor, mock_cmd):
"""Test task sdk execution from end-to-end."""
from airflow.executors.workloads import ExecuteTask
airflow_key = mock_airflow_key()
ser_airflow_key = json.dumps(airflow_key._asdict())
executor_config = {"config_key": "config_value"}
workload = mock.Mock(spec=ExecuteTask)
workload.ti = mock.Mock(spec=TaskInstance)
workload.ti.key = airflow_key
workload.ti.executor_config = executor_config
ser_workload = json.dumps({"test_key": "test_value"})
workload.model_dump_json.return_value = ser_workload
mock_executor.queue_workload(workload, mock.Mock())
assert mock_executor.queued_tasks[workload.ti.key] == workload
assert len(mock_executor.pending_tasks) == 0
assert len(mock_executor.running) == 0
mock_executor._process_workloads([workload])
assert len(mock_executor.queued_tasks) == 0
assert len(mock_executor.running) == 1
assert workload.ti.key in mock_executor.running
assert len(mock_executor.pending_tasks) == 1
assert mock_executor.pending_tasks[0].command == [
"python",
"-m",
"airflow.sdk.execution_time.execute_workload",
"--json-string",
'{"test_key": "test_value"}',
]
mock_executor.attempt_task_runs()
mock_executor.lambda_client.invoke.assert_called_once()
payload = json.loads(mock_executor.lambda_client.invoke.call_args.kwargs["Payload"])
assert payload["executor_config"] == executor_config
assert len(mock_executor.pending_tasks) == 0
# Task is stored in active worker.
assert len(mock_executor.running_tasks) == 1
assert mock_executor.running_tasks[ser_airflow_key] == workload.ti.key
change_state_mock.assert_called_once_with(
workload.ti.key, TaskInstanceState.RUNNING, ser_airflow_key, remove_running=False
)
@mock.patch.object(lambda_executor, "calculate_next_attempt_delay", return_value=dt.timedelta(seconds=0))
def test_success_execute_api_exception(self, mock_backoff, mock_executor, mock_cmd, mock_airflow_key):
"""Test what happens when Lambda throws an initial exception on invoke, but ultimately passes on retries."""
airflow_key = mock_airflow_key()
ser_airflow_key = json.dumps(airflow_key._asdict())
invoke_exception = Exception("Test exception")
invoke_success = {"StatusCode": 0, "failures": []}
mock_executor.lambda_client.invoke.side_effect = [invoke_exception, invoke_exception, invoke_success]
mock_executor.execute_async(airflow_key, mock_cmd)
expected_retry_count = 2
# Fail 2 times
for _ in range(expected_retry_count):
mock_executor.attempt_task_runs()
# Task is not stored in active workers.
assert len(mock_executor.running_tasks) == 0
# Pass in last attempt
mock_executor.attempt_task_runs()
assert len(mock_executor.pending_tasks) == 0
assert ser_airflow_key in mock_executor.running_tasks
assert mock_backoff.call_count == expected_retry_count
for attempt_number in range(1, expected_retry_count):
mock_backoff.assert_has_calls([mock.call(attempt_number)])
def test_failed_execute_api_exception(self, mock_executor, mock_cmd, mock_airflow_key):
"""Test what happens when Lambda refuses to execute a task and throws an exception"""
mock_airflow_key = mock_airflow_key()
mock_executor.lambda_client.invoke.side_effect = Exception("Test exception")
mock_executor.execute_async(mock_airflow_key, mock_cmd)
# No matter what, don't schedule until invoke becomes successful.
for _ in range(int(mock_executor.max_invoke_attempts) * 2):
mock_executor.attempt_task_runs()
# Task is not stored in running tasks
assert len(mock_executor.running_tasks) == 0
def test_failed_execute_creds_exception(self, mock_executor, mock_cmd, mock_airflow_key):
"""Test what happens when Lambda refuses to execute a task and throws an exception due to credentials"""
airflow_key = mock_airflow_key()
mock_executor.IS_BOTO_CONNECTION_HEALTHY = True
mock_executor.execute_async(airflow_key, mock_cmd)
assert mock_executor.pending_tasks[0].attempt_number == 1
error_to_raise = ClientError(
{"Error": {"Code": "ExpiredTokenException", "Message": "foobar"}}, "OperationName"
)
mock_executor.lambda_client.invoke.side_effect = error_to_raise
# Sync will ultimately call attempt_task_runs, which is the code under test
mock_executor.sync()
# Task should end up back in the queue
assert mock_executor.pending_tasks[0].key == airflow_key
# The connection should get marked as unhealthy
assert not mock_executor.IS_BOTO_CONNECTION_HEALTHY
# We retry on connections issues indefinitely, so the attempt number should be 1
assert mock_executor.pending_tasks[0].attempt_number == 1
def test_failed_execute_client_error_exception(self, mock_executor, mock_cmd, mock_airflow_key):
"""Test what happens when Lambda refuses to execute a task and throws an exception for non-credentials issue"""
airflow_key = mock_airflow_key()
mock_executor.IS_BOTO_CONNECTION_HEALTHY = True
mock_executor.execute_async(airflow_key, mock_cmd)
assert mock_executor.pending_tasks[0].attempt_number == 1
error_to_raise = ClientError(
{"Error": {"Code": "RandomeError", "Message": "foobar"}}, "OperationName"
)
mock_executor.lambda_client.invoke.side_effect = error_to_raise
# Sync will ultimately call attempt_task_runs, which is the code under test
mock_executor.sync()
# Task should end up back in the queue
assert mock_executor.pending_tasks[0].key == airflow_key
# The connection should stay marked as healthy because the error is something else
assert mock_executor.IS_BOTO_CONNECTION_HEALTHY
# Not a retry so increment attempts
assert mock_executor.pending_tasks[0].attempt_number == 2
@mock.patch.object(lambda_executor, "calculate_next_attempt_delay", return_value=dt.timedelta(seconds=0))
def test_attempt_task_runs_attempts_when_tasks_fail(self, _, mock_executor):
"""
Test case when all tasks fail to run.
The executor should attempt each task exactly once per sync() iteration.
It should preserve the order of tasks, and attempt each task up to
`max_invoke_attempts` times before dropping the task.
"""
airflow_keys = [
TaskInstanceKey("a", "task_a", "c", 1, -1),
TaskInstanceKey("a", "task_b", "c", 1, -1),
]
airflow_cmd1 = _generate_mock_cmd()
airflow_cmd2 = _generate_mock_cmd()
commands = [airflow_cmd1, airflow_cmd2]
failures = [Exception("Failure 1"), Exception("Failure 2")]
mock_executor.execute_async(airflow_keys[0], commands[0])
mock_executor.execute_async(airflow_keys[1], commands[1])
assert len(mock_executor.pending_tasks) == 2
assert len(mock_executor.running_tasks) == 0
mock_executor.lambda_client.invoke.side_effect = failures
mock_executor.attempt_task_runs()
for i in range(2):
payload = json.loads(mock_executor.lambda_client.invoke.call_args_list[i].kwargs["Payload"])
assert airflow_keys[i].task_id in payload["task_key"]
assert len(mock_executor.pending_tasks) == 2
assert len(mock_executor.running_tasks) == 0
mock_executor.lambda_client.invoke.call_args_list.clear()
mock_executor.lambda_client.invoke.side_effect = failures
mock_executor.attempt_task_runs()
for i in range(2):
payload = json.loads(mock_executor.lambda_client.invoke.call_args_list[i].kwargs["Payload"])
assert airflow_keys[i].task_id in payload["task_key"]
assert len(mock_executor.pending_tasks) == 2
assert len(mock_executor.running_tasks) == 0
mock_executor.lambda_client.invoke.call_args_list.clear()
mock_executor.lambda_client.invoke.side_effect = failures
mock_executor.attempt_task_runs()
assert (
len(mock_executor.pending_tasks) == 0
) # Pending now zero since we've had three failures to invoke
assert len(mock_executor.running_tasks) == 0
if airflow_version >= (2, 10, 0):
events = [(x.event, x.task_id, x.try_number) for x in mock_executor._task_event_logs]
assert events == [
("lambda invoke failure", "task_a", 1),
("lambda invoke failure", "task_b", 1),
]
@mock.patch.object(lambda_executor, "calculate_next_attempt_delay", return_value=dt.timedelta(seconds=0))
def test_attempt_task_runs_attempts_when_some_tasks_fal(self, _, mock_executor):
"""
Test case when one task fail to run, others succeed, and a new task gets queued.
"""
airflow_keys = [
TaskInstanceKey("a", "task_a", "c", 1, -1),
TaskInstanceKey("a", "task_b", "c", 1, -1),
]
airflow_cmd1 = _generate_mock_cmd()
airflow_cmd2 = _generate_mock_cmd()
airflow_commands = [airflow_cmd1, airflow_cmd2]
success_response = {"StatusCode": 0, "failures": []}
responses = [Exception("Failure 1"), success_response]
mock_executor.execute_async(airflow_keys[0], airflow_commands[0])
mock_executor.execute_async(airflow_keys[1], airflow_commands[1])
assert len(mock_executor.pending_tasks) == 2
mock_executor.lambda_client.invoke.side_effect = responses
mock_executor.attempt_task_runs()
for i in range(2):
payload = json.loads(mock_executor.lambda_client.invoke.call_args_list[i].kwargs["Payload"])
assert airflow_keys[i].task_id in payload["task_key"]
assert len(mock_executor.pending_tasks) == 1
assert len(mock_executor.running_tasks) == 1
mock_executor.lambda_client.invoke.call_args_list.clear()
# queue new task
airflow_keys[1] = TaskInstanceKey("a", "task_c", "c", 1, -1)
airflow_commands[1] = _generate_mock_cmd()
mock_executor.execute_async(airflow_keys[1], airflow_commands[1])
assert len(mock_executor.pending_tasks) == 2
# assert that the order of pending tasks is preserved i.e. the first task is 1st etc.
assert mock_executor.pending_tasks[0].key == airflow_keys[0]
assert mock_executor.pending_tasks[0].command == airflow_commands[0]
responses = [Exception("Failure 1"), success_response]
mock_executor.lambda_client.invoke.side_effect = responses
mock_executor.attempt_task_runs()
for i in range(2):
payload = json.loads(mock_executor.lambda_client.invoke.call_args_list[i].kwargs["Payload"])
assert airflow_keys[i].task_id in payload["task_key"]
assert len(mock_executor.pending_tasks) == 1
assert len(mock_executor.running_tasks) == 2
mock_executor.lambda_client.invoke.call_args_list.clear()
responses = [Exception("Failure 1")]
mock_executor.lambda_client.invoke.side_effect = responses
mock_executor.attempt_task_runs()
payload = json.loads(mock_executor.lambda_client.invoke.call_args_list[0].kwargs["Payload"])
assert airflow_keys[0].task_id in payload["task_key"]
if airflow_version >= (2, 10, 0):
events = [(x.event, x.task_id, x.try_number) for x in mock_executor._task_event_logs]
assert events == [("lambda invoke failure", "task_a", 1)]
@mock.patch.object(BaseExecutor, "fail")
@mock.patch.object(BaseExecutor, "success")
def test_sync_running_dlq(self, success_mock, fail_mock, mock_executor, mock_airflow_key):
airflow_key = mock_airflow_key()
ser_airflow_key = json.dumps(airflow_key._asdict())
mock_executor.running_tasks.clear()
mock_executor.running_tasks[ser_airflow_key] = airflow_key
mock_executor.sqs_client.receive_message.side_effect = [
{}, # First request from the results queue will be empty
{
# Second request from the DLQ will have a message
"Messages": [
{
"ReceiptHandle": "receipt_handle",
"Body": json.dumps(
{
"task_key": ser_airflow_key,
# DLQ messages will have the input (task_key, command) instead of return_code
"command": "command",
}
),
}
]
},
]
mock_executor.sync_running_tasks()
# Receive messages should be called twice
assert mock_executor.sqs_client.receive_message.call_count == 2
assert mock_executor.sqs_client.receive_message.call_args_list[0].kwargs == {
"QueueUrl": DEFAULT_QUEUE_URL,
"MaxNumberOfMessages": 10,
}
assert mock_executor.sqs_client.receive_message.call_args_list[1].kwargs == {
"QueueUrl": DEFAULT_DLQ_URL,
"MaxNumberOfMessages": 10,
}
# Task is not stored in active workers.
assert len(mock_executor.running_tasks) == 0
success_mock.assert_not_called()
fail_mock.assert_called_once()
assert mock_executor.sqs_client.delete_message.call_count == 1
assert mock_executor.sqs_client.delete_message.call_args_list[0].kwargs == {
"QueueUrl": DEFAULT_DLQ_URL,
"ReceiptHandle": "receipt_handle",
}
@mock.patch.object(BaseExecutor, "fail")
@mock.patch.object(BaseExecutor, "success")
def test_sync_running_success(self, success_mock, fail_mock, mock_executor, mock_airflow_key):
airflow_key = mock_airflow_key()
ser_airflow_key = json.dumps(airflow_key._asdict())
mock_executor.running_tasks.clear()
mock_executor.running_tasks[ser_airflow_key] = airflow_key
# Success message
mock_executor.sqs_client.receive_message.return_value = {
"Messages": [
{
"ReceiptHandle": "receipt_handle",
"Body": json.dumps(
{
"task_key": ser_airflow_key,
"return_code": 0,
}
),
}
]
}
mock_executor.sync_running_tasks()
mock_executor.sqs_client.receive_message.assert_called_once()
assert mock_executor.sqs_client.receive_message.call_args_list[0].kwargs == {
"QueueUrl": DEFAULT_QUEUE_URL,
"MaxNumberOfMessages": 10,
}
# Task is not stored in active workers.
assert len(mock_executor.running_tasks) == 0
# Task is immediately succeeded.
success_mock.assert_called_once()
fail_mock.assert_not_called()
assert mock_executor.sqs_client.delete_message.call_count == 1
assert mock_executor.sqs_client.delete_message.call_args_list[0].kwargs == {
"QueueUrl": DEFAULT_QUEUE_URL,
"ReceiptHandle": "receipt_handle",
}
@mock.patch.object(BaseExecutor, "fail")
@mock.patch.object(BaseExecutor, "success")
def test_sync_running_fail(self, success_mock, fail_mock, mock_executor, mock_airflow_key):
airflow_key = mock_airflow_key()
ser_airflow_key = json.dumps(airflow_key._asdict())
mock_executor.running_tasks.clear()
mock_executor.running_tasks[ser_airflow_key] = airflow_key
# Failure message
mock_executor.sqs_client.receive_message.return_value = {
"Messages": [
{
"ReceiptHandle": "receipt_handle",
"Body": json.dumps(
{
"task_key": ser_airflow_key,
"return_code": 1, # Non-zero return code, task failed
}
),
}
]
}
mock_executor.sync_running_tasks()
mock_executor.sqs_client.receive_message.assert_called_once()
# Task is not stored in active workers.
assert len(mock_executor.running_tasks) == 0
# Task is immediately succeeded.
success_mock.assert_not_called()
fail_mock.assert_called_once()
assert mock_executor.sqs_client.delete_message.call_count == 1
def test_sync_running_fail_bad_json(self, mock_executor, mock_airflow_key):
airflow_key = mock_airflow_key()
ser_airflow_key = json.dumps(airflow_key._asdict())
mock_executor.running_tasks.clear()
mock_executor.running_tasks[ser_airflow_key] = airflow_key
mock_executor.sqs_client.receive_message.side_effect = [
{
"Messages": [
{
"ReceiptHandle": "receipt_handle",
"Body": "Banana", # Body not json format
}
]
},
{}, # Second request from the DLQ will be empty
]
mock_executor.sync_running_tasks()
# Assert that the message is deleted if the message is not formatted as json
assert mock_executor.sqs_client.receive_message.call_count == 2
assert mock_executor.sqs_client.delete_message.call_count == 1
def test_sync_running_fail_bad_format(self, mock_executor, mock_airflow_key):
airflow_key = mock_airflow_key()
ser_airflow_key = json.dumps(airflow_key._asdict())
mock_executor.running_tasks.clear()
mock_executor.running_tasks[ser_airflow_key] = airflow_key
mock_executor.sqs_client.receive_message.side_effect = [
{
"Messages": [
{
"ReceiptHandle": "receipt_handle",
"Body": json.dumps(
{
"foo": "bar", # Missing expected keys like "task_key"
"return_code": 1, # Non-zero return code, task failed
}
),
}
]
},
{}, # Second request from the DLQ will be empty
]
mock_executor.sync_running_tasks()
# Assert that the message is deleted if the message does not contain the expected keys
assert mock_executor.sqs_client.receive_message.call_count == 2
assert mock_executor.sqs_client.delete_message.call_count == 1
def test_sync_running_fail_bad_format_dlq(self, mock_executor, mock_airflow_key):
airflow_key = mock_airflow_key()
ser_airflow_key = json.dumps(airflow_key._asdict())
mock_executor.running_tasks.clear()
mock_executor.running_tasks[ser_airflow_key] = airflow_key
# Failure message
mock_executor.sqs_client.receive_message.side_effect = [
{}, # First request from the results queue will be empty
{
# Second request from the DLQ will have a message
"Messages": [
{
"ReceiptHandle": "receipt_handle",
"Body": json.dumps(
{
"foo": "bar", # Missing expected keys like "task_key"
"return_code": 1,
}
),
}
]
},
]
mock_executor.sync_running_tasks()
# Assert that the message is deleted if the message does not contain the expected keys
assert mock_executor.sqs_client.receive_message.call_count == 2
assert mock_executor.sqs_client.delete_message.call_count == 1
@mock.patch.object(BaseExecutor, "fail")
@mock.patch.object(BaseExecutor, "success")
def test_sync_running_short_circuit(self, success_mock, fail_mock, mock_executor, mock_airflow_key):
mock_executor.running_tasks.clear()
# No running tasks, so we will short circuit
mock_executor.sync_running_tasks()
mock_executor.sqs_client.receive_message.assert_not_called()
# Task is still stored in active workers.
assert len(mock_executor.running_tasks) == 0
# Task is immediately succeeded.
success_mock.assert_not_called()
fail_mock.assert_not_called()
assert mock_executor.sqs_client.delete_message.call_count == 0
@mock.patch.object(BaseExecutor, "fail")
@mock.patch.object(BaseExecutor, "success")
def test_sync_running_no_updates(self, success_mock, fail_mock, mock_executor, mock_airflow_key):
airflow_key = mock_airflow_key()
ser_airflow_key = json.dumps(airflow_key._asdict())
mock_executor.running_tasks.clear()
mock_executor.running_tasks[ser_airflow_key] = airflow_key
# No messages, so we will not loop
mock_executor.sqs_client.receive_message.return_value = {"Messages": []}
mock_executor.sync_running_tasks()
# Both the results queue and DLQ should have been checked
assert mock_executor.sqs_client.receive_message.call_count == 2
# Task is still stored in active workers.
assert len(mock_executor.running_tasks) == 1
# Task is immediately succeeded.
success_mock.assert_not_called()
fail_mock.assert_not_called()
assert mock_executor.sqs_client.delete_message.call_count == 0
@mock.patch.object(BaseExecutor, "fail")
@mock.patch.object(BaseExecutor, "success")
def test_sync_running_two_tasks_one_relevant(
self, success_mock, fail_mock, mock_executor, mock_airflow_key
):
airflow_key = mock_airflow_key()
ser_airflow_key = json.dumps(airflow_key._asdict())
airflow_key_2 = mock_airflow_key()
ser_airflow_key_2 = json.dumps(airflow_key_2._asdict())
mock_executor.running_tasks.clear()
mock_executor.running_tasks[ser_airflow_key] = airflow_key
mock_executor.running_tasks[ser_airflow_key_2] = airflow_key_2
# Success message
mock_executor.sqs_client.receive_message.side_effect = [
{
"Messages": [
{
"ReceiptHandle": "receipt_handle",
"Body": json.dumps(
{
"task_key": ser_airflow_key,
"return_code": 0,
}
),
}
]
},
{}, # No messages from DLQ
]
mock_executor.sync_running_tasks()
# Both the results queue and DLQ should have been checked
assert mock_executor.sqs_client.receive_message.call_count == 2
# One task left running
assert len(mock_executor.running_tasks) == 1
# Task one completed, task two is still running
assert ser_airflow_key_2 in mock_executor.running_tasks
# Task is immediately succeeded.
success_mock.assert_called_once()
fail_mock.assert_not_called()
assert mock_executor.sqs_client.delete_message.call_count == 1
@mock.patch.object(BaseExecutor, "fail")
@mock.patch.object(BaseExecutor, "success")
def test_sync_running_unknown_task(self, success_mock, fail_mock, mock_executor, mock_airflow_key):
airflow_key = mock_airflow_key()
ser_airflow_key = json.dumps(airflow_key._asdict())
airflow_key_2 = mock_airflow_key()
ser_airflow_key_2 = json.dumps(airflow_key_2._asdict())
mock_executor.running_tasks.clear()
# Only add one of the tasks to the running list, the other will be unknown
mock_executor.running_tasks[ser_airflow_key] = airflow_key
# Receive the known task and unknown task
known_task_receipt = "receipt_handle_known"
unknown_task_receipt = "receipt_handle_unknown"
mock_executor.sqs_client.receive_message.return_value = {
"Messages": [
{
"ReceiptHandle": known_task_receipt,
"Body": json.dumps(
{
"task_key": ser_airflow_key,
"return_code": 0,
}
),
},
{
"ReceiptHandle": unknown_task_receipt,
"Body": json.dumps(
{
"task_key": ser_airflow_key_2,
"return_code": 0,
}
),
},
]
}
mock_executor.sync_running_tasks()
mock_executor.sqs_client.receive_message.assert_called_once()
# The known task is set to succeeded, unknown task is dropped
assert len(mock_executor.running_tasks) == 0
success_mock.assert_called_once()
fail_mock.assert_not_called()
# Only the known message from the queue should be deleted, the other should be marked as visible again
assert mock_executor.sqs_client.delete_message.call_count == 1
assert mock_executor.sqs_client.change_message_visibility.call_count == 1
# The argument to delete_message should be the known task
assert mock_executor.sqs_client.delete_message.call_args_list[0].kwargs == {
"QueueUrl": DEFAULT_QUEUE_URL,
"ReceiptHandle": known_task_receipt,
}
# The change_message_visibility should be called with the unknown task
assert mock_executor.sqs_client.change_message_visibility.call_args_list[0].kwargs == {
"QueueUrl": DEFAULT_QUEUE_URL,
"ReceiptHandle": unknown_task_receipt,
"VisibilityTimeout": 0,
}
def test_start_no_check_health(self, mock_executor):
mock_executor.check_health = mock.Mock()
with conf_vars({(CONFIG_GROUP_NAME, AllLambdaConfigKeys.CHECK_HEALTH_ON_STARTUP): "False"}):
mock_executor.start()
assert mock_executor.check_health.call_count == 0
def test_start_check_health_success(self, mock_executor):
mock_executor.check_health = mock.Mock()
with conf_vars({(CONFIG_GROUP_NAME, AllLambdaConfigKeys.CHECK_HEALTH_ON_STARTUP): "True"}):
mock_executor.start()
assert mock_executor.check_health.call_count == 1
def test_start_check_health_fail(self, mock_executor):
mock_executor.check_health = mock.Mock()
mock_executor.check_health.side_effect = AirflowException("Test exception")
with conf_vars({(CONFIG_GROUP_NAME, AllLambdaConfigKeys.CHECK_HEALTH_ON_STARTUP): "True"}):
with pytest.raises(AirflowException):
mock_executor.start()
assert mock_executor.check_health.call_count == 1
def test_check_health_success(self, mock_executor):
mock_executor.IS_BOTO_CONNECTION_HEALTHY = False
mock_executor.sqs_client.get_queue_attributes.return_value = {
"Attributes": {"ApproximateNumberOfMessages": 0}
}
mock_executor.lambda_client.get_function.return_value = {
"Configuration": {
"FunctionName": DEFAULT_FUNCTION_NAME,
"State": "Active",
}
}
mock_executor.check_health()
assert mock_executor.sqs_client.get_queue_attributes.call_count == 2
assert mock_executor.lambda_client.get_function.call_count == 1
assert mock_executor.IS_BOTO_CONNECTION_HEALTHY
def test_check_health_lambda_fails(self, mock_executor):
mock_executor.IS_BOTO_CONNECTION_HEALTHY = False
mock_executor.sqs_client.get_queue_attributes.return_value = {
"Attributes": {"ApproximateNumberOfMessages": 0}
}
mock_executor.lambda_client.get_function.return_value = ClientError(
{"Error": {"Code": "ResourceNotFoundException", "Message": "foobar"}}, "OperationName"
)
with pytest.raises(AirflowException):
mock_executor.check_health()
assert mock_executor.lambda_client.get_function.call_count == 1
# Lambda has already failed so SQS should not be called
assert mock_executor.sqs_client.get_queue_attributes.call_count == 0
assert not mock_executor.IS_BOTO_CONNECTION_HEALTHY
def test_check_health_sqs_fails(self, mock_executor):
mock_executor.IS_BOTO_CONNECTION_HEALTHY = False
mock_executor.sqs_client.get_queue_attributes.return_value = ClientError(
{"Error": {"Code": "ResourceNotFoundException", "Message": "foobar"}}, "OperationName"
)
mock_executor.lambda_client.get_function.return_value = {
"Configuration": {
"FunctionName": DEFAULT_FUNCTION_NAME,
"State": "Active",
}
}
with pytest.raises(AirflowException):
mock_executor.check_health()
assert mock_executor.lambda_client.get_function.call_count == 1
# Lambda has already failed so SQS should not be called
assert mock_executor.sqs_client.get_queue_attributes.call_count == 1
assert not mock_executor.IS_BOTO_CONNECTION_HEALTHY
def test_check_health_sqs_results_queue_success_dlq_fails(self, mock_executor):
mock_executor.IS_BOTO_CONNECTION_HEALTHY = False
mock_executor.sqs_client.get_queue_attributes.side_effect = [
{"Attributes": {"ApproximateNumberOfMessages": 0}},
ClientError(
{"Error": {"Code": "ResourceNotFoundException", "Message": "foobar"}}, "OperationName"
),
]
mock_executor.lambda_client.get_function.return_value = {
"Configuration": {
"FunctionName": DEFAULT_FUNCTION_NAME,
"State": "Active",
}
}
with pytest.raises(AirflowException):
mock_executor.check_health()
assert mock_executor.lambda_client.get_function.call_count == 1
# Lambda has already failed so SQS should not be called
assert mock_executor.sqs_client.get_queue_attributes.call_count == 2
assert not mock_executor.IS_BOTO_CONNECTION_HEALTHY
def test_sync_already_unhealthy(self, mock_executor):
# Something has set the connection to unhealthy (tested elsewhere)
mock_executor.IS_BOTO_CONNECTION_HEALTHY = False
mock_executor.sync_running_tasks = mock.Mock()
mock_executor.attempt_task_runs = mock.Mock()
mock_executor.load_connections = mock.Mock()
# Set the last connection reload to be more than 60 seconds ago so that we get a reload
mock_executor.last_connection_reload = timezone.utcnow() - dt.timedelta(seconds=100)
# We should not be able to sync
mock_executor.sync()
assert not mock_executor.IS_BOTO_CONNECTION_HEALTHY
mock_executor.sync_running_tasks.assert_not_called()
mock_executor.attempt_task_runs.assert_not_called()
mock_executor.load_connections.assert_called_once()
def test_sync_already_unhealthy_then_repaired(self, mock_executor):
# Something has set the connection to unhealthy (tested elsewhere)
mock_executor.IS_BOTO_CONNECTION_HEALTHY = False
mock_executor.sync_running_tasks = mock.Mock()
mock_executor.attempt_task_runs = mock.Mock()
def check_health_side_effect():
mock_executor.IS_BOTO_CONNECTION_HEALTHY = True
mock_executor.check_health = mock.Mock(side_effect=check_health_side_effect)
# Set the last connection reload to be more than 60 seconds ago so that we get a reload
mock_executor.last_connection_reload = timezone.utcnow() - dt.timedelta(seconds=100)
# Sync should repair itself and continue to call the sync methods
mock_executor.sync()
assert mock_executor.IS_BOTO_CONNECTION_HEALTHY
mock_executor.sync_running_tasks.assert_called_once()
mock_executor.attempt_task_runs.assert_called_once()
@pytest.mark.parametrize(
    "error_code",
    [
        "ExpiredTokenException",
        "InvalidClientTokenId",
        "UnrecognizedClientException",
    ],
)
def test_sync_become_unhealthy_no_creds(self, error_code, mock_executor):
    """A credentials-related ClientError during sync marks the Boto connection unhealthy."""
    # Connection starts healthy; the raised credentials error should flip it to unhealthy.
    mock_executor.IS_BOTO_CONNECTION_HEALTHY = True
    mock_executor.log.warning = mock.Mock()
    mock_executor.attempt_task_runs = mock.Mock()
    error_to_raise = ClientError({"Error": {"Code": error_code, "Message": "foobar"}}, "OperationName")
    mock_executor.sync_running_tasks = mock.Mock(side_effect=error_to_raise)
    # sync should catch the error and handle it, setting connection to unhealthy
    mock_executor.sync()
    assert not mock_executor.IS_BOTO_CONNECTION_HEALTHY
    mock_executor.sync_running_tasks.assert_called_once()
    # attempt_task_runs is skipped because the connection was marked unhealthy mid-sync
    mock_executor.attempt_task_runs.assert_not_called()
    # Check that the substring "AWS credentials are either missing or expired" was logged
    mock_executor.log.warning.assert_called_once()
    assert "AWS credentials are either missing or expired" in mock_executor.log.warning.call_args[0][0]
def test_sync_exception(self, mock_executor):
    """A generic (non-credentials) exception during sync is logged but keeps the connection healthy."""
    # Connection starts healthy and should stay that way: the raised error is not
    # identifiable as a credentials problem.
    mock_executor.IS_BOTO_CONNECTION_HEALTHY = True
    mock_executor.log.exception = mock.Mock()
    mock_executor.attempt_task_runs = mock.Mock()
    mock_executor.sync_running_tasks = mock.Mock(side_effect=Exception())
    # sync should catch the error and log, don't kill scheduler by letting it raise up higher.
    mock_executor.sync()
    # Not a credentials error that we can tell, so connection stays healthy
    assert mock_executor.IS_BOTO_CONNECTION_HEALTHY
    mock_executor.sync_running_tasks.assert_called_once()
    mock_executor.attempt_task_runs.assert_not_called()
    # Check that the substring "An error occurred while syncing tasks" was logged
    mock_executor.log.exception.assert_called_once()
    assert "An error occurred while syncing tasks" in mock_executor.log.exception.call_args[0][0]
def test_try_adopt_task_instances(self, mock_executor, mock_airflow_key):
    """Test that executor can adopt orphaned task instances from a SchedulerJob shutdown event."""
    airflow_key_1 = TaskInstanceKey("a", "task_a", "c", 1, -1)
    airflow_key_2 = TaskInstanceKey("a", "task_b", "c", 1, -1)
    ser_airflow_key_1 = json.dumps(airflow_key_1._asdict())
    ser_airflow_key_2 = json.dumps(airflow_key_2._asdict())

    orphaned_tasks = [mock.Mock(spec=TaskInstance) for _ in range(3)]
    orphaned_tasks[0].external_executor_id = ser_airflow_key_1
    orphaned_tasks[1].external_executor_id = ser_airflow_key_2
    # A task without an external_executor_id cannot be adopted.
    orphaned_tasks[2].external_executor_id = None
    for task in orphaned_tasks:
        task.try_number = 1

    not_adopted_tasks = mock_executor.try_adopt_task_instances(orphaned_tasks)

    # Two of the three tasks should be adopted.
    assert len(mock_executor.running_tasks) == len(orphaned_tasks) - 1
    assert ser_airflow_key_1 in mock_executor.running_tasks
    assert mock_executor.running_tasks[ser_airflow_key_1] == airflow_key_1
    assert ser_airflow_key_2 in mock_executor.running_tasks
    assert mock_executor.running_tasks[ser_airflow_key_2] == airflow_key_2
    # The remaining task could not be adopted and is handed back to the caller.
    assert not_adopted_tasks == [orphaned_tasks[2]]
@mock.patch.object(BaseExecutor, "fail")
@mock.patch.object(BaseExecutor, "success")
def test_end(self, success_mock, fail_mock, mock_executor, mock_airflow_key):
    """Test that executor can end successfully; waiting for all tasks to naturally exit."""
    airflow_key = mock_airflow_key()
    ser_airflow_key = json.dumps(airflow_key._asdict())
    mock_executor.running_tasks.clear()
    mock_executor.running_tasks[ser_airflow_key] = airflow_key
    # The first two responses carry no messages, so end() keeps polling while waiting for
    # the in-flight task to finish; the third response reports a successful completion.
    mock_executor.sqs_client.receive_message.side_effect = [
        {},
        {},
        {
            "Messages": [
                {
                    "ReceiptHandle": "receipt_handle",
                    "Body": json.dumps(
                        {
                            "task_key": ser_airflow_key,
                            "return_code": 0,
                        }
                    ),
                }
            ]
        },
    ]
    mock_executor.end(heartbeat_interval=0)
    # Assert that the sqs_client mock method receive_message was called exactly three times
    # (two empty polls plus the final poll that returned the result message)
    assert mock_executor.sqs_client.receive_message.call_count == 3
    # Task is not stored in active workers.
    assert len(mock_executor.running_tasks) == 0
    success_mock.assert_called_once()
    fail_mock.assert_not_called()
    # The consumed result message must be deleted from the queue.
    assert mock_executor.sqs_client.delete_message.call_count == 1
@mock.patch("airflow.providers.amazon.aws.executors.aws_lambda.lambda_executor.timezone")
def test_end_timeout(self, mock_timezone, mock_executor, mock_airflow_key):
    """Test that end() gives up with a warning when tasks do not finish before the timeout."""
    # Mock the sync method of the mock_executor object so we can count how many times it was called
    mock_executor.sync = mock.Mock()
    mock_executor.log.warning = mock.Mock()
    current_time = timezone.utcnow()
    # Simulated clock: two reads at t0, then t0+5s and t0+10s, stepping past the
    # 5 second END_WAIT_TIMEOUT configured below.
    mock_timezone.utcnow.side_effect = [
        current_time,
        current_time,
        current_time + dt.timedelta(seconds=5),
        current_time + dt.timedelta(seconds=10),
    ]
    airflow_key = mock_airflow_key()
    ser_airflow_key = json.dumps(airflow_key._asdict())
    mock_executor.running_tasks.clear()
    mock_executor.running_tasks[ser_airflow_key] = airflow_key
    with conf_vars({(CONFIG_GROUP_NAME, AllLambdaConfigKeys.END_WAIT_TIMEOUT): "5"}):
        mock_executor.end(heartbeat_interval=0)
    # Task is still stored in active workers: it never finished before the timeout.
    assert len(mock_executor.running_tasks) == 1
    assert mock_executor.sync.call_count == 2
    mock_executor.log.warning.assert_called_once_with(
        "Timed out waiting for tasks to finish. Some tasks may not be handled gracefully"
        " as the executor is force ending due to timeout."
    )
def test_terminate(self, mock_executor, mock_airflow_key):
    """Terminate only warns: Lambda invocations already in flight cannot be stopped."""
    key = mock_airflow_key()
    serialized_key = json.dumps(key._asdict())
    mock_executor.running_tasks.clear()
    mock_executor.running_tasks[serialized_key] = key
    mock_executor.log.warning = mock.Mock()

    mock_executor.terminate()

    mock_executor.log.warning.assert_called_once_with(
        "Terminating Lambda executor. In-flight tasks cannot be stopped."
    )
    # The running task is left untouched.
    assert len(mock_executor.running_tasks) == 1
@pytest.mark.skipif(not AIRFLOW_V_3_1_PLUS, reason="Multi-team support requires Airflow 3.1+")
def test_team_config(self):
    """Test that the executor uses team-specific configuration when provided via self.conf."""
    from unittest.mock import patch

    # Team name to be used throughout
    team_name = "team_a"
    # Patch environment to include two sets of configs for the Lambda executor. One that is related to a
    # team and one that is not. Then we will create two executors (one with a team and one without) and
    # ensure the correct configs are used.
    # NOTE: team-scoped keys use the AIRFLOW__{team}___{section}__{key} form (triple
    # underscore between the team name and the config section).
    config_overrides = [
        (f"AIRFLOW__{CONFIG_GROUP_NAME}__{AllLambdaConfigKeys.FUNCTION_NAME}", "global-function"),
        (f"AIRFLOW__{CONFIG_GROUP_NAME}__{AllLambdaConfigKeys.QUEUE_URL}", "global-queue-url"),
        (f"AIRFLOW__{CONFIG_GROUP_NAME}__{AllLambdaConfigKeys.DLQ_URL}", "global-dlq-url"),
        (f"AIRFLOW__{CONFIG_GROUP_NAME}__{AllLambdaConfigKeys.QUALIFIER}", "global-qualifier"),
        (f"AIRFLOW__{CONFIG_GROUP_NAME}__{AllLambdaConfigKeys.REGION_NAME}", "us-west-1"),
        (f"AIRFLOW__{CONFIG_GROUP_NAME}__{AllLambdaConfigKeys.AWS_CONN_ID}", "aws_default"),
        (f"AIRFLOW__{CONFIG_GROUP_NAME}__{AllLambdaConfigKeys.MAX_INVOKE_ATTEMPTS}", "3"),
        (
            f"AIRFLOW__{CONFIG_GROUP_NAME}__{AllLambdaConfigKeys.CHECK_HEALTH_ON_STARTUP}",
            "False",
        ),
        # Team Config (no team-level AWS_CONN_ID: the team executor should fall back to global)
        (
            f"AIRFLOW__{team_name}___{CONFIG_GROUP_NAME}__{AllLambdaConfigKeys.FUNCTION_NAME}",
            "team_a_function",
        ),
        (
            f"AIRFLOW__{team_name}___{CONFIG_GROUP_NAME}__{AllLambdaConfigKeys.QUEUE_URL}",
            "team_a_queue_url",
        ),
        (
            f"AIRFLOW__{team_name}___{CONFIG_GROUP_NAME}__{AllLambdaConfigKeys.DLQ_URL}",
            "team_a_dlq_url",
        ),
        (
            f"AIRFLOW__{team_name}___{CONFIG_GROUP_NAME}__{AllLambdaConfigKeys.QUALIFIER}",
            "team_a_qualifier",
        ),
        (
            f"AIRFLOW__{team_name}___{CONFIG_GROUP_NAME}__{AllLambdaConfigKeys.REGION_NAME}",
            "us-west-2",
        ),
        (
            f"AIRFLOW__{team_name}___{CONFIG_GROUP_NAME}__{AllLambdaConfigKeys.MAX_INVOKE_ATTEMPTS}",
            "5",
        ),
        (
            f"AIRFLOW__{team_name}___{CONFIG_GROUP_NAME}__{AllLambdaConfigKeys.CHECK_HEALTH_ON_STARTUP}",
            "True",
        ),
    ]
    with patch("os.environ", {key.upper(): value for key, value in config_overrides}):
        # Create a team-specific executor
        team_executor = AwsLambdaExecutor(team_name=team_name)
        assert team_executor.lambda_function_name == "team_a_function"
        assert team_executor.sqs_queue_url == "team_a_queue_url"
        assert team_executor.dlq_url == "team_a_dlq_url"
        assert team_executor.qualifier == "team_a_qualifier"
        assert team_executor.conf.get(CONFIG_GROUP_NAME, AllLambdaConfigKeys.MAX_INVOKE_ATTEMPTS) == "5"
        assert team_executor.conf.get(CONFIG_GROUP_NAME, AllLambdaConfigKeys.REGION_NAME) == "us-west-2"
        assert team_executor.conf.get(CONFIG_GROUP_NAME, AllLambdaConfigKeys.AWS_CONN_ID) == "aws_default"
        assert (
            team_executor.conf.get(CONFIG_GROUP_NAME, AllLambdaConfigKeys.CHECK_HEALTH_ON_STARTUP)
            == "True"
        )
        # Now create an executor without a team and ensure the global configs are used
        global_executor = AwsLambdaExecutor()
        assert global_executor.lambda_function_name == "global-function"
        assert global_executor.sqs_queue_url == "global-queue-url"
        assert global_executor.dlq_url == "global-dlq-url"
        assert global_executor.qualifier == "global-qualifier"
        assert global_executor.conf.get(CONFIG_GROUP_NAME, AllLambdaConfigKeys.MAX_INVOKE_ATTEMPTS) == "3"
        assert global_executor.conf.get(CONFIG_GROUP_NAME, AllLambdaConfigKeys.REGION_NAME) == "us-west-1"
        assert (
            global_executor.conf.get(CONFIG_GROUP_NAME, AllLambdaConfigKeys.AWS_CONN_ID) == "aws_default"
        )
        assert (
            global_executor.conf.get(CONFIG_GROUP_NAME, AllLambdaConfigKeys.CHECK_HEALTH_ON_STARTUP)
            == "False"
        )
| {
"repo_id": "apache/airflow",
"file_path": "providers/amazon/tests/unit/amazon/aws/executors/aws_lambda/test_lambda_executor.py",
"license": "Apache License 2.0",
"lines": 952,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-ctl/docs/conf.py | # Disable Flake8 because of all the sphinx imports
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Configuration of Providers docs building."""
from __future__ import annotations
import logging
import os
import pathlib
import re
from typing import Any
from docs.utils.conf_constants import (
AIRFLOW_CTL_DOC_STATIC_PATH,
AIRFLOW_CTL_SRC_PATH,
AIRFLOW_FAVICON_PATH,
AUTOAPI_OPTIONS,
BASIC_AUTOAPI_IGNORE_PATTERNS,
BASIC_SPHINX_EXTENSIONS,
REDOC_SCRIPT_URL,
SMARTQUOTES_EXCLUDES,
SPELLING_WORDLIST_PATH,
SPHINX_DESIGN_STATIC_PATH,
SPHINX_REDOC_EXTENSIONS,
SUPPRESS_WARNINGS,
filter_autoapi_ignore_entries,
get_autodoc_mock_imports,
get_configs_and_deprecations,
get_google_intersphinx_mapping,
get_html_context,
get_html_sidebars,
get_html_theme_options,
get_intersphinx_mapping,
get_rst_epilogue,
get_rst_filepath_from_path,
skip_util_classes_extension,
)
from packaging.version import Version, parse as parse_version
import airflowctl
from airflow.configuration import retrieve_configuration_description
PACKAGE_NAME = "apache-airflow-ctl"
PACKAGE_VERSION = airflowctl.__version__

# NOTE(review): annotation-only statement -- it declares a type for SYSTEM_TESTS_DIR but
# assigns nothing, so the name does not exist at runtime while the line below stays
# commented out. Any reader of SYSTEM_TESTS_DIR would hit a NameError; confirm intent.
SYSTEM_TESTS_DIR: pathlib.Path | None
# SYSTEM_TESTS_DIR = AIRFLOW_REPO_ROOT_PATH / "airflow-ctl" / "tests" / "system" / "core"

os.environ["AIRFLOW_PACKAGE_NAME"] = PACKAGE_NAME
# Disable color output for documentation generation
os.environ["NO_COLOR"] = "1"

# Hack to allow a piece of the code to behave differently while
# the docs are being built. The main objective was to alter the
# behavior of the utils.apply_default that was hiding function headers
os.environ["BUILDING_AIRFLOW_DOCS"] = "TRUE"

# General information about the project.
project = PACKAGE_NAME

# The version info for the project you're documenting
version = PACKAGE_VERSION
# The full version, including alpha/beta/rc tags.
release = PACKAGE_VERSION

rst_epilog = get_rst_epilogue(PACKAGE_VERSION, False)

# Smart-quotes handling for content autogenerated by Sphinx. Refer to Sphinx documentation.
smartquotes_excludes = SMARTQUOTES_EXCLUDES

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = BASIC_SPHINX_EXTENSIONS

# -- Options for sphinxcontrib.redoc -------------------------------------------
# See: https://sphinxcontrib-redoc.readthedocs.io/en/stable/
extensions.extend(SPHINX_REDOC_EXTENSIONS)
redoc_script_url = REDOC_SCRIPT_URL

extensions.extend(
    [
        "autoapi.extension",
        "sphinx_jinja",
        "sphinx.ext.graphviz",
        "sphinxcontrib.httpdomain",
        "extra_files_with_substitutions",
    ]
)

exclude_patterns = [
    # We only link to selected subpackages.
    "_api/airflowctl/index.rst",
    "_api/airflowctl/api/*",
    "_api/airflowctl/api/datamodels/*",
    "_api/airflowctl/ctl/*",
    "_api/airflowctl/exceptions/index.rst",
    "_api/airflowctl/utils/*",
    "README.rst",
]

# Exclude top-level packages
# do not exclude these top-level modules from the doc build:
ALLOWED_TOP_LEVEL_FILES = ("exceptions.py",)
def add_airflow_ctl_exclude_patterns_to_sphinx(exclude_patterns: list[str]):
    """
    Add excluded files to Sphinx exclude patterns.

    Excludes every top-level *file* of the ``airflowctl`` package from the doc build,
    except the ones listed in ``ALLOWED_TOP_LEVEL_FILES``. Sub-packages (directories)
    are not touched here.

    :param exclude_patterns: The Sphinx ``exclude_patterns`` list, mutated in place.
    """
    # Exclude everything that is not explicitly allowed.
    root = AIRFLOW_CTL_SRC_PATH / "airflowctl"
    for path in root.iterdir():
        if path.is_file() and path.name not in ALLOWED_TOP_LEVEL_FILES:
            exclude_patterns.append(get_rst_filepath_from_path(path, root.parent))
            print(f"Excluding {path} from Sphinx docs")
add_airflow_ctl_exclude_patterns_to_sphinx(exclude_patterns)

# Add any paths that contain templates here, relative to this directory.
templates_path = ["templates"]

# If true, keep warnings as "system message" paragraphs in the built documents.
keep_warnings = True

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "sphinx_airflow_theme"

html_title = "airflowctl Documentation"
# A shorter title for the navigation bar. Default is the same as html_title.
html_short_title = ""

# If given, this must be the name of an image file that is the favicon of the docs
html_favicon = AIRFLOW_FAVICON_PATH.as_posix()

# Custom static files (such as style sheets) here,
html_static_path = [AIRFLOW_CTL_DOC_STATIC_PATH.as_posix(), SPHINX_DESIGN_STATIC_PATH.as_posix()]

# A list of JavaScript filenames.
html_js_files = ["gh-jira-links.js", "redirects.js"]

# Pages that get link substitutions applied after HTML generation.
manual_substitutions_in_generated_html = [
    "installation/installing-from-pypi.html",
    "installation/installing-from-sources.html",
    "installation/prerequisites.html",
]

html_css_files = ["custom.css"]

html_sidebars = get_html_sidebars(PACKAGE_VERSION)

# If false, no index is generated.
html_use_index = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = False

# html theme options
html_theme_options: dict[str, Any] = get_html_theme_options()

conf_py_path = "/airflow-ctl/docs/"
# A dictionary of values to pass into the template engine's context for all pages.
html_context = get_html_context(conf_py_path)

# -- Options for sphinx_jinja ------------------------------------------
# See: https://github.com/tardyp/sphinx-jinja

# Extract the bare X.Y.Z version from the package __init__, dropping any .dev/beta suffix.
airflowctl_version: Version = parse_version(
    re.search(  # type: ignore[union-attr,arg-type]
        r"__version__ = \"([0-9.]*)(\.dev[0-9]*|b[0-9])?\"",
        (AIRFLOW_CTL_SRC_PATH / "airflowctl" / "__init__.py").read_text(),
    ).groups(0)[0]
)
config_descriptions = retrieve_configuration_description(include_providers=False)
configs, deprecated_options = get_configs_and_deprecations(airflowctl_version, config_descriptions)
jinja_contexts = {
    "config_ctx": {"configs": configs, "deprecated_options": deprecated_options},
    "quick_start_ctx": {"doc_root_url": f"https://airflow.apache.org/docs/apache-airflow/{PACKAGE_VERSION}/"},
    "official_download_page": {
        "base_url": f"https://downloads.apache.org/airflow/airflow-ctl/{PACKAGE_VERSION}",
        "closer_lua_url": f"https://www.apache.org/dyn/closer.lua/airflow/airflow-ctl/{PACKAGE_VERSION}",
        "airflowctl_version": PACKAGE_VERSION,
    },
}
# Used to generate rst_epilog and other post-generation substitutions
global_substitutions = {
    "version": PACKAGE_VERSION,
    "experimental": "This is an :ref:`experimental feature <experimental>`.",
}

# -- Options for sphinx.ext.autodoc --------------------------------------------
# See: https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html

# This value contains a list of modules to be mocked up. This is useful when some external dependencies
# are not met at build time and break the building process.
autodoc_mock_imports = get_autodoc_mock_imports()

# The default options for autodoc directives. They are applied to all autodoc directives automatically.
autodoc_default_options = {"show-inheritance": True, "members": True}

autodoc_typehints = "description"
autodoc_typehints_description_target = "documented"
autodoc_typehints_format = "short"

# -- Options for sphinx.ext.intersphinx ----------------------------------------
# See: https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html

# This config value contains names of other projects that should
# be linked to in this documentation.
# Inventories are only downloaded once by exts/docs_build/fetch_inventories.py.
intersphinx_mapping = get_intersphinx_mapping()
intersphinx_mapping.update(get_google_intersphinx_mapping())

# -- Options for sphinx.ext.viewcode -------------------------------------------
# See: https://www.sphinx-doc.org/es/master/usage/extensions/viewcode.html

# If this is True, viewcode extension will emit viewcode-follow-imported event to resolve the name of
# the module by other extensions. The default is True.
viewcode_follow_imported_members = True

# -- Options for sphinx-autoapi ------------------------------------------------
# See: https://sphinx-autoapi.readthedocs.io/en/latest/config.html

# Directories to generate the API documentation from.
autoapi_dirs = [AIRFLOW_CTL_SRC_PATH.as_posix()]

# A directory that has user-defined templates to override our default templates.
autoapi_template_dir = "autoapi_templates"

# A list of patterns to ignore when finding files
autoapi_ignore = BASIC_AUTOAPI_IGNORE_PATTERNS

# Filter noisy log entries emitted for the ignored patterns above.
autoapi_log = logging.getLogger("sphinx.autoapi.mappers.base")
autoapi_log.addFilter(filter_autoapi_ignore_entries)

# Keep the AutoAPI generated files on the filesystem after the run.
# Useful for debugging.
autoapi_keep_files = True

# Relative path to output the AutoAPI files into. This can also be used to place the generated documentation
# anywhere in your documentation hierarchy.
autoapi_root = "_api"

# Whether to insert the generated documentation into the TOC tree. If this is False, the default AutoAPI
# index page is not generated and you will need to include the generated documentation in a
# TOC tree entry yourself.
autoapi_add_toctree_entry = False

# By default autoapi will include private members -- we don't want that!
autoapi_options = AUTOAPI_OPTIONS

suppress_warnings = SUPPRESS_WARNINGS

# -- Options for ext.exampleinclude --------------------------------------------
exampleinclude_sourceroot = os.path.abspath("..")

# -- Options for ext.redirects -------------------------------------------------
redirects_file = "redirects.txt"

# -- Options for sphinxcontrib-spelling ----------------------------------------
spelling_word_list_filename = [SPELLING_WORDLIST_PATH.as_posix()]
spelling_exclude_patterns = ["project.rst", "changelog.rst"]
spelling_ignore_contributor_names = False
spelling_ignore_importable_modules = True

# Render graphviz diagrams as SVG.
graphviz_output_format = "svg"
def setup(sphinx):
    """Sphinx setup hook: skip selected util classes when autoapi collects members."""
    sphinx.connect("autoapi-skip-member", skip_util_classes_extension)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-ctl/docs/conf.py",
"license": "Apache License 2.0",
"lines": 235,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/databricks/src/airflow/providers/databricks/sensors/databricks.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import annotations
from collections.abc import Sequence
from functools import cached_property
from typing import TYPE_CHECKING, Any
from airflow.providers.common.compat.sdk import AirflowException, BaseSensorOperator, conf
from airflow.providers.databricks.hooks.databricks import DatabricksHook, SQLStatementState
from airflow.providers.databricks.operators.databricks import DEFER_METHOD_NAME
from airflow.providers.databricks.utils.mixins import DatabricksSQLStatementsMixin
if TYPE_CHECKING:
from airflow.providers.common.compat.sdk import Context
# XCom key under which the submitted SQL statement's ID is pushed.
XCOM_STATEMENT_ID_KEY = "statement_id"
class DatabricksSQLStatementsSensor(DatabricksSQLStatementsMixin, BaseSensorOperator):
    """
    Sensor that submits and/or monitors a SQL statement on a Databricks SQL warehouse.

    Exactly one of ``statement`` / ``statement_id`` must be provided: either a new
    statement is submitted (with ``wait_timeout=0s``, i.e. asynchronously) and then
    monitored, or an already-submitted statement is monitored by its ID.

    :param warehouse_id: ID of the Databricks SQL warehouse the statement runs on (required).
    :param statement: SQL statement text to execute. Mutually exclusive with ``statement_id``.
    :param statement_id: ID of an already-submitted statement to monitor. Mutually
        exclusive with ``statement``.
    :param catalog: Optional catalog to run the statement against.
    :param schema: Optional schema to run the statement against.
    :param parameters: Optional list of statement parameter dicts passed to the API.
    :param databricks_conn_id: Airflow connection ID for Databricks,
        defaults to ``databricks_default``.
    :param polling_period_seconds: Seconds between state polls (forwarded to the
        deferral trigger).
    :param databricks_retry_limit: Maximum retries of Databricks API calls.
    :param databricks_retry_delay: Seconds between retries of Databricks API calls.
    :param databricks_retry_args: Optional extra retry keyword arguments for the hook.
    :param do_xcom_push: If True, push the statement ID to XCom after submission.
    :param wait_for_termination: If False, return right after submission instead of
        waiting (the DatabricksSQLStatementsOperator is recommended in that case).
    :param timeout: Overall timeout in seconds for the statement execution.
    :param deferrable: Run the sensor in deferrable mode.
    """

    template_fields: Sequence[str] = (
        "databricks_conn_id",
        "statement",
        "statement_id",
    )
    template_ext: Sequence[str] = (".json-tpl",)
    ui_color = "#1CB1C2"
    ui_fgcolor = "#fff"

    def __init__(
        self,
        warehouse_id: str,
        *,
        statement: str | None = None,
        statement_id: str | None = None,
        catalog: str | None = None,
        schema: str | None = None,
        parameters: list[dict[str, Any]] | None = None,
        databricks_conn_id: str = "databricks_default",
        polling_period_seconds: int = 30,
        databricks_retry_limit: int = 3,
        databricks_retry_delay: int = 1,
        databricks_retry_args: dict[Any, Any] | None = None,
        do_xcom_push: bool = True,
        wait_for_termination: bool = True,
        timeout: float = 3600,
        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
        **kwargs,
    ):
        # Handle the scenario where either both statement and statement_id are set/not set
        if statement and statement_id:
            raise AirflowException("Cannot provide both statement and statement_id.")
        if not statement and not statement_id:
            raise AirflowException("One of either statement or statement_id must be provided.")
        if not warehouse_id:
            raise AirflowException("warehouse_id must be provided.")
        super().__init__(**kwargs)
        self.statement = statement
        self.statement_id = statement_id
        self.warehouse_id = warehouse_id
        self.catalog = catalog
        self.schema = schema
        self.parameters = parameters
        self.databricks_conn_id = databricks_conn_id
        self.polling_period_seconds = polling_period_seconds
        self.databricks_retry_limit = databricks_retry_limit
        self.databricks_retry_delay = databricks_retry_delay
        self.databricks_retry_args = databricks_retry_args
        self.wait_for_termination = wait_for_termination
        self.deferrable = deferrable
        self.timeout = timeout
        self.do_xcom_push = do_xcom_push

    @cached_property
    def _hook(self):
        # Lazily-built hook; cached so all calls share one configured client.
        return self._get_hook(caller="DatabricksSQLStatementsSensor")

    def _get_hook(self, caller: str) -> DatabricksHook:
        return DatabricksHook(
            self.databricks_conn_id,
            retry_limit=self.databricks_retry_limit,
            retry_delay=self.databricks_retry_delay,
            retry_args=self.databricks_retry_args,
            caller=caller,
        )

    def execute(self, context: Context):
        """Submit the statement if needed, then either return, defer, or fall through to poke()."""
        if not self.statement_id:
            # Otherwise, we'll go ahead and "submit" the statement
            json = {
                "statement": self.statement,
                "warehouse_id": self.warehouse_id,
                "catalog": self.catalog,
                "schema": self.schema,
                "parameters": self.parameters,
                "wait_timeout": "0s",
            }
            self.statement_id = self._hook.post_sql_statement(json)
            self.log.info("SQL Statement submitted with statement_id: %s", self.statement_id)
        if self.do_xcom_push and context is not None:
            context["ti"].xcom_push(key=XCOM_STATEMENT_ID_KEY, value=self.statement_id)
        # If we're not waiting for the query to complete execution, then we'll go ahead and return. However, a
        # recommendation to use the DatabricksSQLStatementOperator is made in this case
        if not self.wait_for_termination:
            self.log.info(
                "If setting wait_for_termination = False, consider using the DatabricksSQLStatementsOperator instead."
            )
            return
        if self.deferrable:
            self._handle_deferrable_execution(defer_method_name=DEFER_METHOD_NAME)  # type: ignore[misc]

    def poke(self, context: Context):
        """
        Handle non-deferrable Sensor execution.

        :param context: (Context)
        :return: (bool) True when the statement finished successfully, False while running.
        :raises AirflowException: if the statement reached a failed terminal state.
        """
        # This is going to very closely mirror the execute_complete
        statement_state: SQLStatementState = self._hook.get_sql_statement_state(self.statement_id)
        if statement_state.is_running:
            self.log.info("SQL Statement with ID %s is running", self.statement_id)
            return False
        if statement_state.is_successful:
            self.log.info("SQL Statement with ID %s completed successfully.", self.statement_id)
            return True
        # Fix: report the statement *ID* here -- the previous message interpolated the
        # state object where the ID belongs.
        raise AirflowException(
            f"SQL Statement with ID {self.statement_id} failed with error: {statement_state.error_message}"
        )
| {
"repo_id": "apache/airflow",
"file_path": "providers/databricks/src/airflow/providers/databricks/sensors/databricks.py",
"license": "Apache License 2.0",
"lines": 134,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/databricks/src/airflow/providers/databricks/utils/mixins.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import annotations
import time
from logging import Logger
from typing import TYPE_CHECKING, Any, Protocol
from airflow.providers.common.compat.sdk import AirflowException
from airflow.providers.databricks.hooks.databricks import DatabricksHook, SQLStatementState
from airflow.providers.databricks.triggers.databricks import DatabricksSQLStatementExecutionTrigger
if TYPE_CHECKING:
from airflow.sdk import Context
class GetHookHasFields(Protocol):
    """Structural type for objects that can build a DatabricksHook (used by _get_hook-style callers)."""

    # Airflow connection ID of the Databricks connection.
    databricks_conn_id: str
    # Optional extra retry keyword arguments forwarded to the hook.
    databricks_retry_args: dict[Any, Any] | None
    # Seconds to wait between retries of Databricks API calls.
    databricks_retry_delay: int
    # Maximum number of retries of Databricks API calls.
    databricks_retry_limit: int
class HandleExecutionHasFields(Protocol):
    """Structural type for _handle_execution: the attributes the mixin method reads from self."""

    # Hook used to poll and cancel the SQL statement.
    _hook: DatabricksHook
    # Task logger.
    log: Logger
    # Seconds to sleep between state polls.
    polling_period_seconds: int
    # Airflow task ID, used in log and error messages.
    task_id: str
    # Overall execution timeout in seconds.
    timeout: int
    # ID of the submitted SQL statement being tracked.
    statement_id: str
class HandleDeferrableExecutionHasFields(Protocol):
    """Structural type for _handle_deferrable_execution: the attributes the mixin method reads from self."""

    # Hook used to poll the SQL statement state.
    _hook: DatabricksHook
    # Airflow connection ID, forwarded to the trigger.
    databricks_conn_id: str
    # Optional extra retry keyword arguments, forwarded to the trigger.
    databricks_retry_args: dict[Any, Any] | None
    # Seconds between retries, forwarded to the trigger.
    databricks_retry_delay: int
    # Maximum number of retries, forwarded to the trigger.
    databricks_retry_limit: int
    # BaseOperator.defer, used to hand off to the trigger.
    defer: Any
    # Task logger.
    log: Logger
    # Seconds between state polls, forwarded to the trigger.
    polling_period_seconds: int
    # ID of the submitted SQL statement being tracked.
    statement_id: str
    # Airflow task ID, used in log and error messages.
    task_id: str
    # Overall execution timeout in seconds (drives the trigger's end_time).
    timeout: int
class ExecuteCompleteHasFields(Protocol):
    """Structural type for execute_complete: the attributes the mixin method reads from self."""

    # ID of the SQL statement; re-assigned from the trigger event after deferral.
    statement_id: str
    # Hook associated with the task (unused directly here but part of the shared contract).
    _hook: DatabricksHook
    # Task logger.
    log: Logger
class OnKillHasFields(Protocol):
    """Structural type for on_kill: the attributes the mixin method reads from self."""

    # Hook used to cancel the SQL statement.
    _hook: DatabricksHook
    # Task logger.
    log: Logger
    # ID of the SQL statement to cancel (may be falsy if submission never happened).
    statement_id: str
    # Airflow task ID, used in log messages.
    task_id: str
class DatabricksSQLStatementsMixin:
    """
    Mixin class to be used by both the DatabricksSqlStatementsOperator, and the DatabricksSqlStatementSensor.

    Provides the shared statement life-cycle handling:

    - _handle_execution: poll a statement synchronously until it terminates or times out
    - _handle_deferrable_execution: defer to a trigger that watches the statement
    - execute_complete: resume point after the trigger fires
    - on_kill: cancel the running statement when the task is killed
    """

    def _handle_execution(self: HandleExecutionHasFields) -> None:
        """Execute a SQL statement in non-deferrable mode.

        Polls the statement state every ``polling_period_seconds`` until it reaches a
        terminal state (raising on failure). If ``timeout`` elapses first, the statement
        is cancelled server-side and an exception is raised.
        """
        # Determine the time at which the Task will timeout. statement_state is
        # initialized to None *before* the loop so the timeout message below never
        # references an unbound name when the while-loop body is never entered
        # (previously this raised NameError for timeout <= 0).
        end_time = time.time() + self.timeout
        statement_state: SQLStatementState | None = None
        while end_time > time.time():
            statement_state = self._hook.get_sql_statement_state(self.statement_id)
            if statement_state.is_terminal:
                if statement_state.is_successful:
                    self.log.info("%s completed successfully.", self.task_id)
                    return
                error_message = (
                    f"{self.task_id} failed with terminal state: {statement_state.state} "
                    f"and with the error code {statement_state.error_code} "
                    f"and error message {statement_state.error_message}"
                )
                raise AirflowException(error_message)
            self.log.info("%s in run state: %s", self.task_id, statement_state.state)
            self.log.info("Sleeping for %s seconds.", self.polling_period_seconds)
            time.sleep(self.polling_period_seconds)
        # Once the timeout is exceeded, the query is cancelled. This is an important step;
        # if a query takes too long, it needs to be killed. Otherwise, there may be
        # "zombie" queries running that are no longer being orchestrated.
        self._hook.cancel_sql_statement(self.statement_id)
        raise AirflowException(
            f"{self.task_id} timed out after {self.timeout} seconds with state: {statement_state}",
        )

    def _handle_deferrable_execution(
        self: HandleDeferrableExecutionHasFields, defer_method_name: str = "execute_complete"
    ) -> None:
        """Execute a SQL statement in deferrable mode.

        If the statement has not yet terminated, defers to a
        DatabricksSQLStatementExecutionTrigger; otherwise resolves the terminal state
        immediately, raising on failure.

        :param defer_method_name: name of the method to invoke when the trigger fires.
        """
        statement_state: SQLStatementState = self._hook.get_sql_statement_state(self.statement_id)
        end_time: float = time.time() + self.timeout
        if not statement_state.is_terminal:
            # If the query is still running and there is no statement_id, this is somewhat of a "zombie"
            # query, and should throw an exception
            if not self.statement_id:
                raise AirflowException("Failed to retrieve statement_id after submitting SQL statement.")
            self.defer(
                trigger=DatabricksSQLStatementExecutionTrigger(
                    statement_id=self.statement_id,
                    databricks_conn_id=self.databricks_conn_id,
                    end_time=end_time,
                    polling_period_seconds=self.polling_period_seconds,
                    retry_limit=self.databricks_retry_limit,
                    retry_delay=self.databricks_retry_delay,
                    retry_args=self.databricks_retry_args,
                ),
                method_name=defer_method_name,
            )
        else:
            if statement_state.is_successful:
                self.log.info("%s completed successfully.", self.task_id)
            else:
                error_message = (
                    f"{self.task_id} failed with terminal state: {statement_state.state} "
                    f"and with the error code {statement_state.error_code} "
                    f"and error message {statement_state.error_message}"
                )
                raise AirflowException(error_message)

    def execute_complete(self: ExecuteCompleteHasFields, context: Context, event: dict):
        """Resume after deferral and validate the terminal state reported by the trigger.

        :param context: Airflow task context (unused).
        :param event: trigger payload carrying "state", "error" and "statement_id" keys.
        :raises AirflowException: if the statement did not complete successfully.
        """
        statement_state = SQLStatementState.from_json(event["state"])
        error = event["error"]
        # Save as instance attribute again after coming back from defer (e.g., for later use in listeners)
        self.statement_id = event["statement_id"]
        if statement_state.is_successful:
            self.log.info("SQL Statement with ID %s completed successfully.", self.statement_id)
            return
        error_message = f"SQL Statement execution failed with terminal state: {statement_state} and with the error {error}"
        raise AirflowException(error_message)

    def on_kill(self: OnKillHasFields) -> None:
        """Best-effort cancellation of the running SQL statement when the task is killed."""
        if self.statement_id:
            self._hook.cancel_sql_statement(self.statement_id)
            self.log.info(
                "Task: %s with statement ID: %s was requested to be cancelled.",
                self.task_id,
                self.statement_id,
            )
        else:
            self.log.error(
                "Error: Task: %s with invalid statement_id was requested to be cancelled.", self.task_id
            )
| {
"repo_id": "apache/airflow",
"file_path": "providers/databricks/src/airflow/providers/databricks/utils/mixins.py",
"license": "Apache License 2.0",
"lines": 155,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/databricks/tests/unit/databricks/sensors/test_databricks.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
import pytest
from airflow.providers.common.compat.sdk import AirflowException, TaskDeferred
from airflow.providers.databricks.hooks.databricks import SQLStatementState
from airflow.providers.databricks.sensors.databricks import DatabricksSQLStatementsSensor
from airflow.providers.databricks.triggers.databricks import DatabricksSQLStatementExecutionTrigger
DEFAULT_CONN_ID = "databricks_default"
STATEMENT = "select * from test.test;"
STATEMENT_ID = "statement_id"
TASK_ID = "task_id"
WAREHOUSE_ID = "warehouse_id"
class TestDatabricksSQLStatementsSensor:
    """
    Validate and test the functionality of the DatabricksSQLStatementsSensor. This Sensor borrows heavily
    from the DatabricksSQLStatementOperator, meaning that much of the testing logic is also reused.
    """

    def test_init_statement(self):
        """Test initialization for traditional use-case (statement)."""
        op = DatabricksSQLStatementsSensor(task_id=TASK_ID, statement=STATEMENT, warehouse_id=WAREHOUSE_ID)
        assert op.statement == STATEMENT
        assert op.warehouse_id == WAREHOUSE_ID

    def test_init_statement_id(self):
        """Test initialization when a statement_id is passed, rather than a statement."""
        op = DatabricksSQLStatementsSensor(
            task_id=TASK_ID, statement_id=STATEMENT_ID, warehouse_id=WAREHOUSE_ID
        )
        assert op.statement_id == STATEMENT_ID
        assert op.warehouse_id == WAREHOUSE_ID

    @mock.patch("airflow.providers.databricks.sensors.databricks.DatabricksHook")
    def test_exec_success(self, db_mock_class):
        """
        Test the execute function for non-deferrable execution. This same exact behavior is expected when the
        statement itself fails, so no test_exec_failure_statement is implemented.
        """
        # Payload the sensor is expected to POST to the Databricks SQL Statements API.
        expected_json = {
            "statement": STATEMENT,
            "warehouse_id": WAREHOUSE_ID,
            "catalog": None,
            "schema": None,
            "parameters": None,
            "wait_timeout": "0s",
        }
        op = DatabricksSQLStatementsSensor(task_id=TASK_ID, statement=STATEMENT, warehouse_id=WAREHOUSE_ID)
        db_mock = db_mock_class.return_value
        db_mock.post_sql_statement.return_value = STATEMENT_ID
        op.execute(None)  # No context is being passed in
        # The hook must be constructed with the sensor's retry configuration.
        db_mock_class.assert_called_once_with(
            DEFAULT_CONN_ID,
            retry_limit=op.databricks_retry_limit,
            retry_delay=op.databricks_retry_delay,
            retry_args=None,
            caller="DatabricksSQLStatementsSensor",
        )
        # Since a statement is being passed in rather than a statement_id, we're asserting that the
        # post_sql_statement method is called once
        db_mock.post_sql_statement.assert_called_once_with(expected_json)
        assert op.statement_id == STATEMENT_ID

    @mock.patch("airflow.providers.databricks.sensors.databricks.DatabricksHook")
    def test_on_kill(self, db_mock_class):
        """
        Test the on_kill method. This is actually part of the DatabricksSQLStatementMixin, so the
        test logic will match that with the same name for DatabricksSQLStatementOperator.
        """
        # Behavior here will remain the same whether a statement or statement_id is passed
        op = DatabricksSQLStatementsSensor(task_id=TASK_ID, statement=STATEMENT, warehouse_id=WAREHOUSE_ID)
        db_mock = db_mock_class.return_value
        op.statement_id = STATEMENT_ID
        # When on_kill is executed, it should call the cancel_sql_statement method
        op.on_kill()
        db_mock.cancel_sql_statement.assert_called_once_with(STATEMENT_ID)

    def test_wait_for_termination_is_default(self):
        """Validate that the default value for wait_for_termination is True."""
        op = DatabricksSQLStatementsSensor(
            task_id=TASK_ID, statement="select * from test.test;", warehouse_id=WAREHOUSE_ID
        )
        assert op.wait_for_termination

    @pytest.mark.parametrize(
        argnames=("statement_state", "expected_poke_result"),
        argvalues=[
            ("RUNNING", False),
            ("SUCCEEDED", True),
        ],
    )
    @mock.patch("airflow.providers.databricks.sensors.databricks.DatabricksHook")
    def test_poke(self, db_mock_class, statement_state, expected_poke_result):
        """poke() returns False while running and True once the statement succeeded."""
        op = DatabricksSQLStatementsSensor(
            task_id=TASK_ID,
            statement=STATEMENT,
            warehouse_id=WAREHOUSE_ID,
        )
        db_mock = db_mock_class.return_value
        db_mock.get_sql_statement_state.return_value = SQLStatementState(statement_state)
        poke_result = op.poke(None)
        assert poke_result == expected_poke_result

    @mock.patch("airflow.providers.databricks.sensors.databricks.DatabricksHook")
    def test_poke_failure(self, db_mock_class):
        """A FAILED statement state makes poke() raise rather than return False."""
        op = DatabricksSQLStatementsSensor(
            task_id=TASK_ID,
            statement=STATEMENT,
            warehouse_id=WAREHOUSE_ID,
        )
        db_mock = db_mock_class.return_value
        db_mock.get_sql_statement_state.return_value = SQLStatementState("FAILED")
        with pytest.raises(AirflowException):
            op.poke(None)

    @mock.patch("airflow.providers.databricks.sensors.databricks.DatabricksHook")
    def test_execute_task_deferred(self, db_mock_class):
        """
        Test that the statement is successfully deferred. This behavior will remain the same whether a
        statement or a statement_id is passed.
        """
        op = DatabricksSQLStatementsSensor(
            task_id=TASK_ID,
            statement=STATEMENT,
            warehouse_id=WAREHOUSE_ID,
            deferrable=True,
        )
        db_mock = db_mock_class.return_value
        db_mock.get_sql_statement_state.return_value = SQLStatementState("RUNNING")
        with pytest.raises(TaskDeferred) as exc:
            op.execute(None)
        # The deferral must target the Databricks trigger and resume in execute_complete.
        assert isinstance(exc.value.trigger, DatabricksSQLStatementExecutionTrigger)
        assert exc.value.method_name == "execute_complete"

    def test_execute_complete_success(self):
        """
        Test the execute_complete function in case the Trigger has returned a successful completion event.
        This method is part of the DatabricksSQLStatementsMixin. Note that this is only being tested when
        in deferrable mode.
        """
        event = {
            "statement_id": STATEMENT_ID,
            "state": SQLStatementState("SUCCEEDED").to_json(),
            "error": {},
        }
        op = DatabricksSQLStatementsSensor(
            task_id=TASK_ID,
            statement=STATEMENT,
            warehouse_id=WAREHOUSE_ID,
            deferrable=True,
        )
        assert op.execute_complete(context=None, event=event) is None

    @mock.patch("airflow.providers.databricks.sensors.databricks.DatabricksHook")
    def test_execute_complete_failure(self, db_mock_class):
        """Test execute_complete function in case the Trigger has returned a failure completion event."""
        event = {
            "statement_id": STATEMENT_ID,
            "state": SQLStatementState("FAILED").to_json(),
            "error": SQLStatementState(
                state="FAILED", error_code="500", error_message="Something Went Wrong"
            ).to_json(),
        }
        op = DatabricksSQLStatementsSensor(
            task_id=TASK_ID,
            statement=STATEMENT,
            warehouse_id=WAREHOUSE_ID,
            deferrable=True,
        )
        with pytest.raises(AirflowException, match="^SQL Statement execution failed with terminal state: .*"):
            op.execute_complete(context=None, event=event)
| {
"repo_id": "apache/airflow",
"file_path": "providers/databricks/tests/unit/databricks/sensors/test_databricks.py",
"license": "Apache License 2.0",
"lines": 177,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/databricks/tests/unit/databricks/utils/test_mixins.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import MagicMock
import pytest
from airflow.providers.common.compat.sdk import AirflowException
from airflow.providers.databricks.utils.mixins import DatabricksSQLStatementsMixin
class DatabricksSQLStatements(DatabricksSQLStatementsMixin):
    """Minimal concrete host used to exercise DatabricksSQLStatementsMixin in isolation."""

    def __init__(self):
        # Configuration attributes the mixin reads from the instance.
        config = {
            "databricks_conn_id": "databricks_conn_id",
            "databricks_retry_limit": 3,
            "databricks_retry_delay": 60,
            "databricks_retry_args": None,
            "polling_period_seconds": 10,
            "statement_id": "statement_id",
            "task_id": "task_id",
            "timeout": 60,
        }
        for attr, value in config.items():
            setattr(self, attr, value)
        # Collaborators stubbed out so tests can assert on interactions.
        for stub in ("_hook", "defer", "log"):
            setattr(self, stub, MagicMock())
@pytest.fixture
def databricks_sql_statements():
    """Return a fresh mixin-host instance (with mocked collaborators) per test."""
    return DatabricksSQLStatements()
@pytest.fixture
def terminal_success_state():
    """Statement-state stub reporting a successful terminal run."""
    state = MagicMock()
    state.configure_mock(is_terminal=True, is_successful=True)
    return state
@pytest.fixture
def terminal_failure_state():
    """Statement-state stub reporting a failed terminal run with error details."""
    state = MagicMock()
    state.configure_mock(
        is_terminal=True,
        is_successful=False,
        state="FAILED",
        error_code="123",
        error_message="Query failed",
    )
    return state
class TestDatabricksSQLStatementsMixin:
    """
    We'll provide tests for each of the following methods:
    - _handle_execution
    - _handle_deferrable_execution
    - execute_complete
    - on_kill
    """

    def test_handle_execution_success(self, databricks_sql_statements, terminal_success_state):
        # Test an immediate success of the SQL statement
        databricks_sql_statements._hook.get_sql_statement_state.return_value = terminal_success_state
        databricks_sql_statements._handle_execution()
        # Success must never trigger a cancel call.
        databricks_sql_statements._hook.cancel_sql_statement.assert_not_called()

    def test_handle_execution_failure(self, databricks_sql_statements, terminal_failure_state):
        # Test an immediate failure of the SQL statement
        databricks_sql_statements._hook.get_sql_statement_state.return_value = terminal_failure_state
        with pytest.raises(AirflowException):
            databricks_sql_statements._handle_execution()
        # A terminal failure raises but does not cancel (the query already ended).
        databricks_sql_statements._hook.cancel_sql_statement.assert_not_called()

    def test_handle_deferrable_execution_running(self, databricks_sql_statements):
        terminal_running_state = MagicMock()
        terminal_running_state.is_terminal = False
        # Test an immediate success of the SQL statement
        databricks_sql_statements._hook.get_sql_statement_state.return_value = terminal_running_state
        databricks_sql_statements._handle_deferrable_execution()
        # A non-terminal state must hand off to the trigger exactly once.
        databricks_sql_statements.defer.assert_called_once()

    def test_handle_deferrable_execution_success(self, databricks_sql_statements, terminal_success_state):
        # Test an immediate success of the SQL statement
        databricks_sql_statements._hook.get_sql_statement_state.return_value = terminal_success_state
        databricks_sql_statements._handle_deferrable_execution()
        # Already-terminal success means there is nothing to defer.
        databricks_sql_statements.defer.assert_not_called()

    def test_handle_deferrable_execution_failure(self, databricks_sql_statements, terminal_failure_state):
        # Test an immediate failure of the SQL statement
        databricks_sql_statements._hook.get_sql_statement_state.return_value = terminal_failure_state
        with pytest.raises(AirflowException):
            databricks_sql_statements._handle_deferrable_execution()

    def test_execute_complete(self):
        # Both the TestDatabricksSQLStatementsOperator and TestDatabricksSQLStatementsSensor tests implement
        # a test_execute_complete_failure and test_execute_complete_success method, so we'll pass here
        pass

    def test_on_kill(self):
        # This test is implemented in both the TestDatabricksSQLStatementsOperator and
        # TestDatabricksSQLStatementsSensor tests, so it will not be implemented here
        pass
| {
"repo_id": "apache/airflow",
"file_path": "providers/databricks/tests/unit/databricks/utils/test_mixins.py",
"license": "Apache License 2.0",
"lines": 98,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_05_20.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from cadwyn import ResponseInfo, VersionChange, convert_response_to_previous_version_for, schema
from airflow.api_fastapi.execution_api.datamodels.taskinstance import TIRunContext
class DowngradeUpstreamMapIndexes(VersionChange):
    """Downgrade the upstream map indexes type for older clients."""

    description = __doc__
    # The previous wire schema only allowed a flat mapping of task id -> single index (or None).
    instructions_to_migrate_to_previous_version = (
        schema(TIRunContext).field("upstream_map_indexes").had(type=dict[str, int | None] | None),
    )

    @convert_response_to_previous_version_for(TIRunContext)  # type: ignore[arg-type]
    def downgrade_upstream_map_indexes(response: ResponseInfo = None) -> None:  # type: ignore
        """
        Downgrades the `upstream_map_indexes` field when converting to the previous version.

        Ensures that the field is only a dictionary of [str, int] (old format): list values
        are collapsed to their first element, and anything else becomes None.
        """
        resp = response.body.get("upstream_map_indexes")
        if isinstance(resp, dict):
            # Fixed annotation: values are always collapsed to int or None below
            # (a list never survives — its first element is taken), matching the
            # dict[str, int | None] schema declared for the previous version.
            downgraded: dict[str, int | None] = {}
            for k, v in resp.items():
                if isinstance(v, int):
                    downgraded[k] = v
                elif isinstance(v, list) and v and all(isinstance(i, int) for i in v):
                    # Old clients can only consume a single index; keep the first.
                    downgraded[k] = v[0]
                else:
                    # Keep values like None as is — the Task SDK expects them unchanged during mapped task expansion,
                    # and modifying them can cause unexpected failures.
                    downgraded[k] = None
            response.body["upstream_map_indexes"] = downgraded
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_05_20.py",
"license": "Apache License 2.0",
"lines": 44,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/tests/unit/api_fastapi/execution_api/versions/v2025_04_28/test_task_instances.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow._shared.timezones import timezone
from airflow.api_fastapi.common.dagbag import dag_bag_from_app
from airflow.models.dagbag import DBDagBag
from airflow.utils.state import State
from tests_common.test_utils.db import clear_db_assets, clear_db_runs
pytestmark = pytest.mark.db_test
DEFAULT_START_DATE = timezone.parse("2024-10-31T11:00:00Z")
DEFAULT_END_DATE = timezone.parse("2024-10-31T12:00:00Z")
@pytest.fixture
def ver_client(client):
    """Test client pinned to API version 2025-04-28 so responses are downgraded to that schema."""
    client.headers["Airflow-API-Version"] = "2025-04-28"
    return client
class TestTIUpdateState:
    """Exercise the versioned task-instance /run endpoint against a real test DB."""

    def setup_method(self):
        # Start from a clean slate so leftover runs/assets cannot affect assertions.
        clear_db_assets()
        clear_db_runs()

    def teardown_method(self):
        clear_db_assets()
        clear_db_runs()

    def test_ti_run(
        self,
        ver_client,
        session,
        create_task_instance,
        time_machine,
        get_execution_app,
    ):
        """
        Test that this version of the endpoint works.
        upstream_map_indexes is now always None as it's computed by the Task SDK.
        """
        instant_str = "2024-09-30T12:00:00Z"
        instant = timezone.parse(instant_str)
        # Freeze time so start_date comparisons in the endpoint are deterministic.
        time_machine.move_to(instant, tick=False)
        ti = create_task_instance(
            task_id="test_ti_run_state_to_running",
            state=State.QUEUED,
            session=session,
            start_date=instant,
        )
        dagbag = DBDagBag()
        # Override the app's dagbag dependency with the DB-backed one for this test.
        execution_app = get_execution_app(ver_client)
        execution_app.dependency_overrides[dag_bag_from_app] = lambda: dagbag
        session.commit()
        response = ver_client.patch(
            f"/execution/task-instances/{ti.id}/run",
            json={
                "state": "running",
                "hostname": "random-hostname",
                "unixname": "random-unixname",
                "pid": 100,
                "start_date": instant_str,
            },
        )
        assert response.status_code == 200
        result = response.json()
        # upstream_map_indexes is now computed by SDK, server returns None
        assert result["upstream_map_indexes"] is None
        assert result["dag_run"]["dag_id"] == "dag"
        assert result["task_reschedule_count"] == 0
        assert result["max_tries"] == 0
        assert result["should_retry"] is False
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/api_fastapi/execution_api/versions/v2025_04_28/test_task_instances.py",
"license": "Apache License 2.0",
"lines": 80,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/standard/src/airflow/providers/standard/exceptions.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Exceptions used by Standard Provider."""
from __future__ import annotations
from airflow.providers.common.compat.sdk import AirflowException
class AirflowExternalTaskSensorException(AirflowException):
    """Base class for all ExternalTaskSensor-related errors; catch this to handle any of them."""
class ExternalDagNotFoundError(AirflowExternalTaskSensorException):
    """Raised when the external DAG being sensed does not exist."""
class ExternalDagDeletedError(AirflowExternalTaskSensorException):
    """Raised when the external DAG being sensed was deleted."""
class ExternalTaskNotFoundError(AirflowExternalTaskSensorException):
    """Raised when the external task being sensed does not exist."""
class ExternalTaskGroupNotFoundError(AirflowExternalTaskSensorException):
    """Raised when the external task group being sensed does not exist."""
class ExternalTaskFailedError(AirflowExternalTaskSensorException):
    """Raised when the external task being sensed failed."""
class ExternalTaskGroupFailedError(AirflowExternalTaskSensorException):
    """Raised when the external task group being sensed failed."""
class ExternalDagFailedError(AirflowExternalTaskSensorException):
    """Raised when the external DAG being sensed failed."""
class DuplicateStateError(AirflowExternalTaskSensorException):
    """Raised when the same state appears in more than one of the allowed, skipped and failed state sets."""
# NOTE(review): unlike the sensor errors above this derives from plain Exception,
# not AirflowException — confirm that is intentional for HITL trigger handling.
class HITLTriggerEventError(Exception):
    """Raised when a TriggerEvent contains an error."""
class HITLTimeoutError(HITLTriggerEventError):
    """Raised when HITLOperator times out."""
class HITLRejectException(AirflowException):
    """Raised when an ApprovalOperator receives a "Reject" response when fail_on_reject is set to True."""
| {
"repo_id": "apache/airflow",
"file_path": "providers/standard/src/airflow/providers/standard/exceptions.py",
"license": "Apache License 2.0",
"lines": 43,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/standard/tests/unit/standard/test_exceptions.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow.providers.standard.exceptions import (
AirflowExternalTaskSensorException,
DuplicateStateError,
ExternalDagDeletedError,
ExternalDagFailedError,
ExternalDagNotFoundError,
ExternalTaskFailedError,
ExternalTaskGroupFailedError,
ExternalTaskGroupNotFoundError,
ExternalTaskNotFoundError,
)
def test_external_task_sensor_exception():
    """The base AirflowExternalTaskSensorException is raisable and carries its message."""
    message = "Task execution failed"
    with pytest.raises(AirflowExternalTaskSensorException, match=message):
        raise AirflowExternalTaskSensorException(message)
def test_external_dag_not_found_error():
    """ExternalDagNotFoundError is raisable and is caught by the sensor base exception."""
    message = "External DAG not found"
    # Catching the base class must also catch this error.
    with pytest.raises(AirflowExternalTaskSensorException):
        raise ExternalDagNotFoundError(message)
    # Direct raise with message matching.
    with pytest.raises(ExternalDagNotFoundError, match=message):
        raise ExternalDagNotFoundError(message)
def test_external_dag_deleted_error():
    """ExternalDagDeletedError is raisable and is caught by the sensor base exception."""
    message = "External DAG was deleted"
    with pytest.raises(AirflowExternalTaskSensorException):
        raise ExternalDagDeletedError(message)
    with pytest.raises(ExternalDagDeletedError, match=message):
        raise ExternalDagDeletedError(message)
def test_external_task_not_found_error():
    """ExternalTaskNotFoundError is raisable and is caught by the sensor base exception."""
    message = "External task not found"
    with pytest.raises(AirflowExternalTaskSensorException):
        raise ExternalTaskNotFoundError(message)
    with pytest.raises(ExternalTaskNotFoundError, match=message):
        raise ExternalTaskNotFoundError(message)
def test_external_task_group_not_found_error():
    """ExternalTaskGroupNotFoundError is raisable and is caught by the sensor base exception."""
    message = "External task group not found"
    with pytest.raises(AirflowExternalTaskSensorException):
        raise ExternalTaskGroupNotFoundError(message)
    with pytest.raises(ExternalTaskGroupNotFoundError, match=message):
        raise ExternalTaskGroupNotFoundError(message)
def test_external_task_failed_error():
    """ExternalTaskFailedError is raisable and is caught by the sensor base exception."""
    message = "External task failed"
    with pytest.raises(AirflowExternalTaskSensorException):
        raise ExternalTaskFailedError(message)
    with pytest.raises(ExternalTaskFailedError, match=message):
        raise ExternalTaskFailedError(message)
def test_external_task_group_failed_error():
    """ExternalTaskGroupFailedError is raisable and is caught by the sensor base exception."""
    message = "External task group failed"
    with pytest.raises(AirflowExternalTaskSensorException):
        raise ExternalTaskGroupFailedError(message)
    with pytest.raises(ExternalTaskGroupFailedError, match=message):
        raise ExternalTaskGroupFailedError(message)
def test_external_dag_failed_error():
    """ExternalDagFailedError is raisable and is caught by the sensor base exception."""
    message = "External DAG failed"
    with pytest.raises(AirflowExternalTaskSensorException):
        raise ExternalDagFailedError(message)
    with pytest.raises(ExternalDagFailedError, match=message):
        raise ExternalDagFailedError(message)
def test_duplicate_state_error():
    """DuplicateStateError is raisable and is caught by the sensor base exception."""
    message = "Duplicate state provided"
    with pytest.raises(AirflowExternalTaskSensorException):
        raise DuplicateStateError(message)
    with pytest.raises(DuplicateStateError, match=message):
        raise DuplicateStateError(message)
| {
"repo_id": "apache/airflow",
"file_path": "providers/standard/tests/unit/standard/test_exceptions.py",
"license": "Apache License 2.0",
"lines": 82,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:docs/images/documentation_architecture.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from pathlib import Path
from diagrams import Cluster, Diagram, Edge
from diagrams.aws.network import CloudFront
from diagrams.aws.storage import S3
from diagrams.custom import Custom
from diagrams.onprem.client import User, Users
from rich.console import Console
# Resolve paths relative to this script so it works from any working directory.
MY_DIR = Path(__file__).parent
# Script name without extension; reused as the diagram's output filename.
MY_FILENAME = Path(__file__).with_suffix("").name
# Wide console so rich does not wrap the long status lines.
console = Console(width=400, color_system="standard")
# Logo image assets rendered as custom diagram nodes.
GITHUB_LOGO = MY_DIR / "logos" / "github.png"
FASTLY_LOGO = MY_DIR / "logos" / "fastly.png"
ASF_LOGO = MY_DIR / "logos" / "asf_logo_wide.png"
# Graphviz attributes forwarded to the Diagram.
graph_attr = {
    "concentrate": "false",
    "splines": "splines",
}
edge_attr = {
    "minlen": "1",
}
def generate_documentation_architecture_diagram():
    """Render the Airflow documentation publishing architecture as a PNG next to this script."""
    image_file = (MY_DIR / MY_FILENAME).with_suffix(".png")
    console.print(f"[bright_blue]Generating architecture image {image_file}")
    with Diagram(
        name="",
        show=False,
        direction="LR",
        filename=MY_FILENAME,
        edge_attr=edge_attr,
        graph_attr=graph_attr,
    ):
        # Human actors in the publishing flow.
        release_manager = User("Release Manager\n")
        committer = User("Committer")
        with Cluster("Airflow GitHub repos", graph_attr={"margin": "30"}):
            apache_airflow_repo = Custom("apache-airflow", GITHUB_LOGO.as_posix())
            apache_airflow_site_repo = Custom("apache-airflow-site", GITHUB_LOGO.as_posix())
        # Repo-to-actor publishing edges.
        (
            apache_airflow_site_repo
            >> Edge(color="black", style="solid", label="Publish site (manual)\nMerge PR")
            >> committer
        )
        (
            apache_airflow_site_repo
            >> Edge(color="blue", style="solid", label="\n\n\nPull Theme (auto)")
            >> release_manager
        )
        (
            apache_airflow_repo
            >> Edge(
                color="black",
                style="solid",
                label="Publish package docs (manual)\nPublish Docs to S3 Workflow",
            )
            >> release_manager
        )
        with Cluster("Live Docs", graph_attr={"margin": "80"}):
            # S3 bucket, archive repo, live site and its CDN cache.
            live_bucket = S3("live-docs-airflow-apache-org")
            apache_airflow_site_archive_repo = Custom("apache-airflow-site-archive", GITHUB_LOGO.as_posix())
            apache_live_webserver = Custom("https://airflow.apache.org", ASF_LOGO.as_posix())
            cloudfront_live_cache = CloudFront("CloudFront Live: https://d7fnmbhf26p21.cloudfront.net")
        # Flow of docs from actors into the live bucket and out through the caches.
        release_manager >> Edge(color="black", style="solid", label="Publish package docs") >> live_bucket
        (
            committer
            >> Edge(color="black", style="solid", label="Publish site\n\nInclude .htaccess proxy")
            >> apache_live_webserver
        )
        (
            live_bucket
            >> Edge(color="black", style="solid", label="Proxy docs from S3", minlen="2")
            >> cloudfront_live_cache
        )
        (
            cloudfront_live_cache
            >> Edge(color="black", style="solid", label="Cloudfront cache", minlen="2")
            >> apache_live_webserver
        )
        (
            live_bucket
            >> Edge(color="black", style="dashed", label="Archive docs (auto)", minlen="2")
            >> apache_airflow_site_archive_repo
        )
        (
            apache_airflow_site_archive_repo
            >> Edge(color="red", style="dotted", label="Fix docs (manual)", reverse=False, forward=False)
            >> live_bucket
        )
        # Public edge: the ASF webserver is fronted by Fastly for end users.
        fastly = Custom("Fastly CDN", FASTLY_LOGO.as_posix())
        users = Users("Users")
        apache_live_webserver >> Edge(color="black", style="solid", label="Exposed to Fastly") >> fastly
        fastly >> Edge(color="black", style="solid", label="Served to users") >> users
    console.print(f"[green]Generated architecture image {image_file}")
# Allow running this module directly to regenerate the PNG.
if __name__ == "__main__":
    generate_documentation_architecture_diagram()
| {
"repo_id": "apache/airflow",
"file_path": "docs/images/documentation_architecture.py",
"license": "Apache License 2.0",
"lines": 110,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/databricks/src/airflow/providers/databricks/utils/openlineage.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
import json
import logging
from typing import TYPE_CHECKING, Any
import requests
from airflow.providers.common.compat.openlineage.check import require_openlineage_version
from airflow.utils import timezone
if TYPE_CHECKING:
from openlineage.client.event_v2 import RunEvent
from openlineage.client.facet_v2 import JobFacet
from airflow.providers.databricks.hooks.databricks import DatabricksHook
from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook
log = logging.getLogger(__name__)
def _get_parent_run_facet(task_instance):
    """
    Build the ParentRunFacet for a given Airflow task instance.

    The facet links OpenLineage events of child jobs - such as queries executed within
    external systems (e.g., Databricks) by the Airflow task - back to the original
    Airflow task execution, enabling better lineage tracking and observability.
    """
    from openlineage.client.facet_v2 import parent_run

    from airflow.providers.openlineage.plugins.macros import (
        lineage_job_name,
        lineage_job_namespace,
        lineage_root_job_name,
        lineage_root_run_id,
        lineage_run_id,
    )

    try:
        # `lineage_root_job_namespace` macro was only added in OL provider 2.9.0.
        from airflow.providers.openlineage.plugins.macros import lineage_root_job_namespace

        root_job_namespace = lineage_root_job_namespace(task_instance)
    except ImportError:
        root_job_namespace = lineage_job_namespace()

    parent_job = parent_run.Job(
        namespace=lineage_job_namespace(),
        name=lineage_job_name(task_instance),
    )
    root = parent_run.Root(
        run=parent_run.RootRun(runId=lineage_root_run_id(task_instance)),
        job=parent_run.RootJob(
            name=lineage_root_job_name(task_instance),
            namespace=root_job_namespace,
        ),
    )
    return parent_run.ParentRunFacet(
        run=parent_run.Run(runId=lineage_run_id(task_instance)),
        job=parent_job,
        root=root,
    )
def _run_api_call(hook: DatabricksSqlHook | DatabricksHook, query_ids: list[str]) -> list[dict]:
    """Fetch execution details for the given queries from Databricks's query history API."""
    auth_token = hook._get_token(raise_error=True)
    # API reference: https://docs.databricks.com/api/azure/workspace/queryhistory/list
    payload = json.dumps({"filter_by": {"statement_ids": query_ids}})
    response = requests.get(
        url=f"https://{hook.host}/api/2.0/sql/history/queries",
        headers={"Authorization": f"Bearer {auth_token}"},
        data=payload,
        timeout=3,
    )
    response.raise_for_status()
    return response.json()["res"]
def _process_data_from_api(data: list[dict[str, Any]]) -> list[dict[str, Any]]:
"""Convert timestamp fields to UTC datetime objects."""
for row in data:
for key in ("query_start_time_ms", "query_end_time_ms"):
row[key] = datetime.datetime.fromtimestamp(row[key] / 1000, tz=datetime.timezone.utc)
return data
def _get_queries_details_from_databricks(
    hook: DatabricksSqlHook | DatabricksHook, query_ids: list[str]
) -> dict[str, dict[str, Any]]:
    """
    Best-effort retrieval of per-query metadata from Databricks, keyed by query ID.

    Returns an empty dict when no IDs are given or when any error occurs while
    talking to the API (the error is logged, never raised).
    """
    if not query_ids:
        return {}

    query_details: dict[str, dict[str, Any]] = {}
    try:
        rows = _process_data_from_api(_run_api_call(hook=hook, query_ids=query_ids))
        collected = {}
        for row in rows:
            if not row["query_id"]:
                continue  # Skip entries without a usable query ID.
            collected[row["query_id"]] = {
                "status": row.get("status"),
                "start_time": row.get("query_start_time_ms"),
                "end_time": row.get("query_end_time_ms"),
                "query_text": row.get("query_text"),
                "error_message": row.get("error_message"),
            }
        # Assign only after the whole payload processed cleanly, so a mid-stream
        # failure still yields the default empty mapping.
        query_details = collected
    except Exception as e:
        log.info(
            "OpenLineage encountered an error while retrieving additional metadata about SQL queries"
            " from Databricks. The process will continue with default values. Error details: %s",
            e,
        )
    return query_details
def _create_ol_event_pair(
    job_namespace: str,
    job_name: str,
    start_time: datetime.datetime,
    end_time: datetime.datetime,
    is_successful: bool,
    run_facets: dict | None = None,
    job_facets: dict | None = None,
) -> tuple[RunEvent, RunEvent]:
    """Build START and terminal (COMPLETE/FAIL) OpenLineage events sharing one Run and Job."""
    from openlineage.client.event_v2 import Job, Run, RunEvent, RunState
    from openlineage.client.uuid import generate_new_uuid

    # Both events must reference the same run/job so consumers can pair them up.
    shared_run = Run(runId=str(generate_new_uuid()), facets=run_facets or {})
    shared_job = Job(namespace=job_namespace, name=job_name, facets=job_facets or {})
    terminal_state = RunState.COMPLETE if is_successful else RunState.FAIL

    start_event = RunEvent(
        eventType=RunState.START,
        eventTime=start_time.isoformat(),
        run=shared_run,
        job=shared_job,
    )
    end_event = RunEvent(
        eventType=terminal_state,
        eventTime=end_time.isoformat(),
        run=shared_run,
        job=shared_job,
    )
    return start_event, end_event
@require_openlineage_version(provider_min_version="2.5.0")
def emit_openlineage_events_for_databricks_queries(
    task_instance,
    hook: DatabricksSqlHook | DatabricksHook | None = None,
    query_ids: list[str] | None = None,
    query_source_namespace: str | None = None,
    query_for_extra_metadata: bool = False,
    additional_run_facets: dict | None = None,
    additional_job_facets: dict | None = None,
) -> None:
    """
    Emit OpenLineage events for executed Databricks queries.

    Metadata retrieval from Databricks is attempted only if `query_for_extra_metadata` is True
    and hook is provided.
    If metadata is available, execution details such as start time, end time, execution status,
    error messages, and SQL text are included in the events. If no metadata is found, the function
    defaults to using the Airflow task instance's state and the current timestamp.

    Note that both START and COMPLETE event for each query will be emitted at the same time.
    If we are able to query Databricks for query execution metadata, event times
    will correspond to actual query execution times.

    Args:
        task_instance: The Airflow task instance that run these queries.
        hook: A supported Databricks hook instance used to retrieve query metadata if available.
            If omitted, `query_ids` and `query_source_namespace` must be provided explicitly and
            `query_for_extra_metadata` must be `False`.
        query_ids: A list of Databricks query IDs to emit events for, can only be None if `hook` is provided
            and `hook.query_ids` are present (DatabricksHook does not store query_ids).
        query_source_namespace: The namespace to be included in ExternalQueryRunFacet,
            can be `None` only if hook is provided.
        query_for_extra_metadata: Whether to query Databricks for additional metadata about queries.
            Must be `False` if `hook` is not provided.
        additional_run_facets: Additional run facets to include in OpenLineage events.
        additional_job_facets: Additional job facets to include in OpenLineage events.

    Raises:
        ValueError: When query IDs / namespace cannot be determined, or when
            `query_for_extra_metadata` is requested without a hook.
    """
    from openlineage.client.facet_v2 import job_type_job

    from airflow.providers.common.compat.openlineage.facet import (
        ErrorMessageRunFacet,
        ExternalQueryRunFacet,
        RunFacet,
        SQLJobFacet,
    )
    from airflow.providers.openlineage.conf import namespace
    from airflow.providers.openlineage.plugins.listener import get_openlineage_listener

    log.info("OpenLineage will emit events for Databricks queries.")

    # Validate / derive query_ids and query_source_namespace depending on hook availability.
    if hook:
        if not query_ids:
            log.debug("No Databricks query IDs provided; Checking `hook.query_ids` property.")
            query_ids = getattr(hook, "query_ids", [])
            if not query_ids:
                raise ValueError("No Databricks query IDs provided and `hook.query_ids` are not present.")
        if not query_source_namespace:
            log.debug("No Databricks query namespace provided; Creating one from scratch.")
            if hasattr(hook, "get_openlineage_database_info") and hasattr(hook, "get_conn_id"):
                from airflow.providers.openlineage.sqlparser import SQLParser

                query_source_namespace = SQLParser.create_namespace(
                    hook.get_openlineage_database_info(hook.get_connection(hook.get_conn_id()))
                )
            else:
                query_source_namespace = f"databricks://{hook.host}" if hook.host else "databricks"
    else:
        if not query_ids:
            raise ValueError("If 'hook' is not provided, 'query_ids' must be set.")
        if not query_source_namespace:
            raise ValueError("If 'hook' is not provided, 'query_source_namespace' must be set.")
        if query_for_extra_metadata:
            raise ValueError("If 'hook' is not provided, 'query_for_extra_metadata' must be False.")

    query_ids = list(query_ids)  # Make a copy to make sure we do not change hook's attribute

    if query_for_extra_metadata and hook:
        log.debug("Retrieving metadata for %s queries from Databricks.", len(query_ids))
        databricks_metadata = _get_queries_details_from_databricks(hook, query_ids)
    else:
        log.debug("`query_for_extra_metadata` is False. No extra metadata will be fetched from Databricks.")
        databricks_metadata = {}

    # If real metadata is unavailable, we send events with eventTime=now
    default_event_time = timezone.utcnow()
    # ti.state has no `value` attr (AF2) when task it's still running, in AF3 we get 'running', in that case
    # assuming it's user call and query succeeded, so we replace it with success.
    # Adjust state for DBX logic, where "finished" means "success"
    default_state = (
        getattr(task_instance.state, "value", "running") if hasattr(task_instance, "state") else ""
    )
    default_state = "finished" if default_state in ("running", "success") else default_state

    log.debug("Generating OpenLineage facets")
    common_run_facets = {"parent": _get_parent_run_facet(task_instance)}
    common_job_facets: dict[str, JobFacet] = {
        "jobType": job_type_job.JobTypeJobFacet(
            jobType="QUERY",
            integration="DATABRICKS",
            processingType="BATCH",
        )
    }
    additional_run_facets = additional_run_facets or {}
    additional_job_facets = additional_job_facets or {}

    events: list[RunEvent] = []
    for counter, query_id in enumerate(query_ids, 1):
        query_metadata = databricks_metadata.get(query_id, {})
        log.debug(
            "Metadata for query no. %s, (ID `%s`): `%s`",
            counter,
            query_id,
            query_metadata if query_metadata else "not found",
        )
        query_specific_run_facets: dict[str, RunFacet] = {
            "externalQuery": ExternalQueryRunFacet(externalQueryId=query_id, source=query_source_namespace)
        }
        if query_metadata.get("error_message"):
            query_specific_run_facets["error"] = ErrorMessageRunFacet(
                message=query_metadata["error_message"],
                programmingLanguage="SQL",
            )
        query_specific_job_facets = {}
        if query_metadata.get("query_text"):
            query_specific_job_facets["sql"] = SQLJobFacet(query=query_metadata["query_text"])
        log.debug("Creating OpenLineage event pair for query ID: %s", query_id)
        event_batch = _create_ol_event_pair(
            job_namespace=namespace(),
            job_name=f"{task_instance.dag_id}.{task_instance.task_id}.query.{counter}",
            start_time=query_metadata.get("start_time") or default_event_time,
            end_time=query_metadata.get("end_time") or default_event_time,
            # Only finished status means it completed without failures
            is_successful=(query_metadata.get("status") or default_state).lower() == "finished",
            run_facets={**query_specific_run_facets, **common_run_facets, **additional_run_facets},
            job_facets={**query_specific_job_facets, **common_job_facets, **additional_job_facets},
        )
        events.extend(event_batch)

    log.debug("Generated %s OpenLineage events; emitting now.", len(events))
    adapter = get_openlineage_listener().adapter
    for event in events:
        adapter.emit(event)
    log.info("OpenLineage has successfully finished processing information about Databricks queries.")
| {
"repo_id": "apache/airflow",
"file_path": "providers/databricks/src/airflow/providers/databricks/utils/openlineage.py",
"license": "Apache License 2.0",
"lines": 271,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/databricks/tests/unit/databricks/utils/test_openlineage.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import copy
import datetime
from unittest import mock
import pytest
from openlineage.client.event_v2 import Job, Run, RunEvent, RunState
from openlineage.client.facet_v2 import job_type_job, parent_run
from airflow.providers.common.compat.openlineage.facet import (
ErrorMessageRunFacet,
ExternalQueryRunFacet,
SQLJobFacet,
)
from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
from airflow.providers.databricks.hooks.databricks import DatabricksHook
from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook
from airflow.providers.databricks.utils.openlineage import (
_create_ol_event_pair,
_get_parent_run_facet,
_get_queries_details_from_databricks,
_process_data_from_api,
_run_api_call,
emit_openlineage_events_for_databricks_queries,
)
from airflow.providers.openlineage.conf import namespace
from airflow.utils import timezone
from airflow.utils.state import TaskInstanceState
def test_get_parent_run_facet():
    """The facet must carry deterministic run IDs plus the dag/task job coordinates."""
    logical_date = timezone.datetime(2025, 1, 1)
    dag_run = mock.MagicMock(logical_date=logical_date, clear_number=0)
    ti = mock.MagicMock(
        dag_id="dag_id",
        task_id="task_id",
        map_index=1,
        try_number=1,
        logical_date=logical_date,
        state=TaskInstanceState.SUCCESS,
        dag_run=dag_run,
    )
    ti.get_template_context.return_value = {"dag_run": dag_run}

    facet = _get_parent_run_facet(ti)

    # Run IDs are deterministic for a fixed logical date / try number.
    assert facet.run.runId == "01941f29-7c00-7087-8906-40e512c257bd"
    assert facet.job.namespace == namespace()
    assert facet.job.name == "dag_id.task_id"
    assert facet.root.run.runId == "01941f29-7c00-743e-b109-28b18d0a19c5"
    assert facet.root.job.namespace == namespace()
    assert facet.root.job.name == "dag_id"
def test_run_api_call_success():
    """A successful response's "res" payload is returned unchanged."""
    hook = mock.MagicMock()
    hook._get_token.return_value = "mock_token"
    hook.host = "mock_host"
    payload = [{"query_id": "123", "status": "success"}]
    response = mock.MagicMock()
    response.status_code = 200
    response.json.return_value = {"res": payload}

    with mock.patch("requests.get", return_value=response):
        assert _run_api_call(hook, ["123"]) == payload
def test_run_api_call_request_error():
    """A network-level failure in ``requests.get`` must propagate to the caller."""
    mock_hook = mock.MagicMock()
    mock_hook._get_token.return_value = "mock_token"
    mock_hook.host = "mock_host"
    # No response fixture is needed: the patched call raises before any response
    # handling (the original test built an unused MagicMock response here).
    with mock.patch("requests.get", side_effect=RuntimeError("request error")):
        with pytest.raises(RuntimeError):
            _run_api_call(mock_hook, ["123"])
def test_run_api_call_token_error():
    """A failure while fetching the auth token propagates before any HTTP call is made."""
    hook = mock.MagicMock()
    hook._get_token.side_effect = RuntimeError("Token error")
    hook.host = "mock_host"
    stub_response = mock.MagicMock()
    stub_response.status_code = 200
    with mock.patch("requests.get", return_value=stub_response):
        with pytest.raises(RuntimeError):
            _run_api_call(hook, ["123"])
def test_process_data_from_api():
    """Millisecond epoch fields are converted to aware UTC datetimes; other keys are untouched."""
    utc = datetime.timezone.utc
    start_dt = datetime.datetime(2020, 7, 21, 18, 44, 46, 200000, tzinfo=utc)
    end_dt = datetime.datetime(2020, 7, 21, 18, 44, 47, 200000, tzinfo=utc)
    raw = [
        {
            "query_id": "ABC",
            "status": "FINISHED",
            "query_start_time_ms": 1595357086200,
            "query_end_time_ms": 1595357087200,
            "query_text": "SELECT * FROM table1;",
            "error_message": "Error occurred",
        },
        {
            "query_id": "DEF",
            "query_start_time_ms": 1595357086200,
            "query_end_time_ms": 1595357087200,
        },
    ]
    expected = [
        {
            "query_id": "ABC",
            "status": "FINISHED",
            "query_start_time_ms": start_dt,
            "query_end_time_ms": end_dt,
            "query_text": "SELECT * FROM table1;",
            "error_message": "Error occurred",
        },
        {
            "query_id": "DEF",
            "query_start_time_ms": start_dt,
            "query_end_time_ms": end_dt,
        },
    ]

    processed = _process_data_from_api(data=raw)

    assert len(processed) == 2
    assert processed == expected
def test_process_data_from_api_error():
    """A row missing one of the timestamp keys surfaces as a KeyError."""
    incomplete_row = {"query_start_time_ms": 1595357086200}
    with pytest.raises(KeyError):
        _process_data_from_api(data=[incomplete_row])
def test_get_queries_details_from_databricks_empty_query_ids():
    # An empty ID list short-circuits before the hook is touched, so None is acceptable.
    assert _get_queries_details_from_databricks(None, []) == {}
@mock.patch("airflow.providers.databricks.utils.openlineage._run_api_call")
def test_get_queries_details_from_databricks_error(mock_api_call):
    """Any API failure is swallowed and an empty mapping is returned."""
    mock_api_call.side_effect = RuntimeError("Token error")
    hook = DatabricksSqlHook()
    requested_ids = ["ABC"]

    assert _get_queries_details_from_databricks(hook, requested_ids) == {}
    mock_api_call.assert_called_once_with(hook=hook, query_ids=requested_ids)
@mock.patch("airflow.providers.databricks.utils.openlineage._run_api_call")
def test_get_queries_details_from_databricks(mock_api_call):
    """A successful API response is reshaped into a per-query-ID details mapping."""
    hook = DatabricksSqlHook()
    requested_ids = ["ABC"]
    mock_api_call.return_value = [
        {
            "query_id": "ABC",
            "status": "FINISHED",
            "query_start_time_ms": 1595357086200,
            "query_end_time_ms": 1595357087200,
            "query_text": "SELECT * FROM table1;",
            "error_message": "Error occurred",
        }
    ]

    details = _get_queries_details_from_databricks(hook, requested_ids)

    mock_api_call.assert_called_once_with(hook=hook, query_ids=requested_ids)
    utc = datetime.timezone.utc
    assert details == {
        "ABC": {
            "status": "FINISHED",
            "start_time": datetime.datetime(2020, 7, 21, 18, 44, 46, 200000, tzinfo=utc),
            "end_time": datetime.datetime(2020, 7, 21, 18, 44, 47, 200000, tzinfo=utc),
            "query_text": "SELECT * FROM table1;",
            "error_message": "Error occurred",
        }
    }
@mock.patch("airflow.providers.databricks.utils.openlineage._run_api_call")
def test_get_queries_details_from_databricks_no_data_found(mock_api_call):
    """An empty API result yields an empty mapping even though IDs were requested."""
    mock_api_call.return_value = []
    hook = DatabricksSqlHook()
    requested_ids = ["ABC", "DEF"]

    assert _get_queries_details_from_databricks(hook, requested_ids) == {}
    mock_api_call.assert_called_once_with(hook=hook, query_ids=requested_ids)
@pytest.mark.parametrize("is_successful", [True, False])
@mock.patch("openlineage.client.uuid.generate_new_uuid")
def test_create_ol_event_pair_success(mock_generate_uuid, is_successful):
    """The event pair shares one Run/Job and carries the correct states and times."""
    fake_uuid = "01941f29-7c00-7087-8906-40e512c257bd"
    mock_generate_uuid.return_value = fake_uuid
    job_namespace = "test_namespace"
    job_name = "test_job"
    start_time = timezone.datetime(2021, 1, 1, 10, 0, 0)
    end_time = timezone.datetime(2021, 1, 1, 10, 30, 0)
    run_facets = {"run_key": "run_value"}
    job_facets = {"job_key": "job_value"}
    start_event, end_event = _create_ol_event_pair(
        job_namespace,
        job_name,
        start_time,
        end_time,
        is_successful=is_successful,
        run_facets=run_facets,
        job_facets=job_facets,
    )
    assert start_event.eventType == RunState.START
    assert start_event.eventTime == start_time.isoformat()
    # Bug fix: the original `assert a == b if cond else c` parsed as
    # `assert (a == b) if cond else c`, so for is_successful=False the assert
    # evaluated the truthy RunState.FAIL and could never fail.
    expected_end_state = RunState.COMPLETE if is_successful else RunState.FAIL
    assert end_event.eventType == expected_end_state
    assert end_event.eventTime == end_time.isoformat()
    assert start_event.run.runId == fake_uuid
    assert start_event.run.facets == run_facets
    assert start_event.job.namespace == job_namespace
    assert start_event.job.name == job_name
    assert start_event.job.facets == job_facets
    assert start_event.run is end_event.run
    assert start_event.job == end_event.job
@mock.patch("importlib.metadata.version", return_value="3.0.0")
@mock.patch("openlineage.client.uuid.generate_new_uuid")
def test_emit_openlineage_events_for_databricks_queries(mock_generate_uuid, mock_version, time_machine):
    """
    Full check of event emission with partially available Databricks metadata.

    query1 has complete metadata (FINISHED -> START/COMPLETE), query2 has metadata
    including an error message (CANCELED -> START/FAIL plus error facet), and query3
    has no metadata at all, so it falls back to the frozen "now" timestamp and the
    task instance's FAILED state.
    """
    fake_uuid = "01958e68-03a2-79e3-9ae9-26865cc40e2f"
    mock_generate_uuid.return_value = fake_uuid
    # Freeze time so events emitted without metadata get a predictable eventTime.
    default_event_time = timezone.datetime(2025, 1, 5, 0, 0, 0)
    time_machine.move_to(default_event_time, tick=False)
    query_ids = ["query1", "query2", "query3"]
    original_query_ids = copy.deepcopy(query_ids)
    logical_date = timezone.datetime(2025, 1, 1)
    mock_dagrun = mock.MagicMock(logical_date=logical_date, clear_number=0)
    mock_ti = mock.MagicMock(
        dag_id="dag_id",
        task_id="task_id",
        map_index=1,
        try_number=1,
        logical_date=logical_date,
        state=TaskInstanceState.FAILED,  # This will be query default state if no metadata found
        dag_run=mock_dagrun,
    )
    mock_ti.get_template_context.return_value = {"dag_run": mock_dagrun}
    fake_metadata = {
        "query1": {
            "status": "FINISHED",
            "start_time": datetime.datetime(2020, 7, 21, 18, 44, 46, 200000, tzinfo=datetime.timezone.utc),
            "end_time": datetime.datetime(2020, 7, 21, 18, 44, 47, 200000, tzinfo=datetime.timezone.utc),
            "query_text": "SELECT * FROM table1",
            # No error for query1
        },
        "query2": {
            "status": "CANCELED",
            "start_time": datetime.datetime(2020, 7, 21, 18, 44, 48, 200000, tzinfo=datetime.timezone.utc),
            "end_time": datetime.datetime(2020, 7, 21, 18, 44, 49, 200000, tzinfo=datetime.timezone.utc),
            "query_text": "SELECT * FROM table2",
            "error_message": "Error occurred",
        },
        # No metadata for query3
    }
    additional_run_facets = {"custom_run": "value_run"}
    additional_job_facets = {"custom_job": "value_job"}
    # Capture emitted events through a fake listener/adapter pair.
    fake_adapter = mock.MagicMock()
    fake_adapter.emit = mock.MagicMock()
    fake_listener = mock.MagicMock()
    fake_listener.adapter = fake_adapter
    with (
        mock.patch(
            "airflow.providers.databricks.utils.openlineage._get_queries_details_from_databricks",
            return_value=fake_metadata,
        ),
        mock.patch(
            "airflow.providers.openlineage.plugins.listener.get_openlineage_listener",
            return_value=fake_listener,
        ),
    ):
        emit_openlineage_events_for_databricks_queries(
            query_ids=query_ids,
            query_source_namespace="databricks_ns",
            task_instance=mock_ti,
            hook=mock.MagicMock(),
            query_for_extra_metadata=True,
            additional_run_facets=additional_run_facets,
            additional_job_facets=additional_job_facets,
        )
    assert query_ids == original_query_ids  # Verify that the input query_ids list is unchanged.
    assert fake_adapter.emit.call_count == 6  # Expect two events per query.
    expected_common_job_facets = {
        "jobType": job_type_job.JobTypeJobFacet(
            jobType="QUERY",
            processingType="BATCH",
            integration="DATABRICKS",
        ),
        "custom_job": "value_job",
    }
    expected_common_run_facets = {
        "parent": parent_run.ParentRunFacet(
            run=parent_run.Run(runId="01941f29-7c00-7087-8906-40e512c257bd"),
            job=parent_run.Job(namespace=namespace(), name="dag_id.task_id"),
            root=parent_run.Root(
                run=parent_run.RootRun(runId="01941f29-7c00-743e-b109-28b18d0a19c5"),
                job=parent_run.RootJob(namespace=namespace(), name="dag_id"),
            ),
        ),
        "custom_run": "value_run",
    }
    # Expected events, in emission order: (START, terminal) per query.
    expected_calls = [
        mock.call(  # Query1: START event
            RunEvent(
                eventTime="2020-07-21T18:44:46.200000+00:00",
                eventType=RunState.START,
                run=Run(
                    runId=fake_uuid,
                    facets={
                        "externalQuery": ExternalQueryRunFacet(
                            externalQueryId="query1", source="databricks_ns"
                        ),
                        **expected_common_run_facets,
                    },
                ),
                job=Job(
                    namespace=namespace(),
                    name="dag_id.task_id.query.1",
                    facets={
                        "sql": SQLJobFacet(query="SELECT * FROM table1"),
                        **expected_common_job_facets,
                    },
                ),
            )
        ),
        mock.call(  # Query1: COMPLETE event
            RunEvent(
                eventTime="2020-07-21T18:44:47.200000+00:00",
                eventType=RunState.COMPLETE,
                run=Run(
                    runId=fake_uuid,
                    facets={
                        "externalQuery": ExternalQueryRunFacet(
                            externalQueryId="query1", source="databricks_ns"
                        ),
                        **expected_common_run_facets,
                    },
                ),
                job=Job(
                    namespace=namespace(),
                    name="dag_id.task_id.query.1",
                    facets={
                        "sql": SQLJobFacet(query="SELECT * FROM table1"),
                        **expected_common_job_facets,
                    },
                ),
            )
        ),
        mock.call(  # Query2: START event
            RunEvent(
                eventTime="2020-07-21T18:44:48.200000+00:00",
                eventType=RunState.START,
                run=Run(
                    runId=fake_uuid,
                    facets={
                        "externalQuery": ExternalQueryRunFacet(
                            externalQueryId="query2", source="databricks_ns"
                        ),
                        "error": ErrorMessageRunFacet(
                            message="Error occurred", programmingLanguage="SQL"
                        ),
                        **expected_common_run_facets,
                    },
                ),
                job=Job(
                    namespace=namespace(),
                    name="dag_id.task_id.query.2",
                    facets={
                        "sql": SQLJobFacet(query="SELECT * FROM table2"),
                        **expected_common_job_facets,
                    },
                ),
            )
        ),
        mock.call(  # Query2: FAIL event
            RunEvent(
                eventTime="2020-07-21T18:44:49.200000+00:00",
                eventType=RunState.FAIL,
                run=Run(
                    runId=fake_uuid,
                    facets={
                        "externalQuery": ExternalQueryRunFacet(
                            externalQueryId="query2", source="databricks_ns"
                        ),
                        "error": ErrorMessageRunFacet(
                            message="Error occurred", programmingLanguage="SQL"
                        ),
                        **expected_common_run_facets,
                    },
                ),
                job=Job(
                    namespace=namespace(),
                    name="dag_id.task_id.query.2",
                    facets={
                        "sql": SQLJobFacet(query="SELECT * FROM table2"),
                        **expected_common_job_facets,
                    },
                ),
            )
        ),
        mock.call(  # Query3: START event (no metadata)
            RunEvent(
                eventTime=default_event_time.isoformat(),  # no metadata for query3
                eventType=RunState.START,
                run=Run(
                    runId=fake_uuid,
                    facets={
                        "externalQuery": ExternalQueryRunFacet(
                            externalQueryId="query3", source="databricks_ns"
                        ),
                        **expected_common_run_facets,
                    },
                ),
                job=Job(
                    namespace=namespace(),
                    name="dag_id.task_id.query.3",
                    facets=expected_common_job_facets,
                ),
            )
        ),
        mock.call(  # Query3: FAIL event (no metadata)
            RunEvent(
                eventTime=default_event_time.isoformat(),  # no metadata for query3
                eventType=RunState.FAIL,
                run=Run(
                    runId=fake_uuid,
                    facets={
                        "externalQuery": ExternalQueryRunFacet(
                            externalQueryId="query3", source="databricks_ns"
                        ),
                        **expected_common_run_facets,
                    },
                ),
                job=Job(
                    namespace=namespace(),
                    name="dag_id.task_id.query.3",
                    facets=expected_common_job_facets,
                ),
            )
        ),
    ]
    assert fake_adapter.emit.call_args_list == expected_calls
@mock.patch("importlib.metadata.version", return_value="3.0.0")
@mock.patch("openlineage.client.uuid.generate_new_uuid")
def test_emit_openlineage_events_for_databricks_queries_without_metadata(
    mock_generate_uuid, mock_version, time_machine
):
    """
    Emission without querying Databricks for extra metadata.

    `query_for_extra_metadata` stays at its False default, so both events use the
    frozen "now" timestamp, and the SUCCESS task state maps to a COMPLETE terminal event.
    """
    fake_uuid = "01958e68-03a2-79e3-9ae9-26865cc40e2f"
    mock_generate_uuid.return_value = fake_uuid
    # Freeze time so both emitted events get a predictable eventTime.
    default_event_time = timezone.datetime(2025, 1, 5, 0, 0, 0)
    time_machine.move_to(default_event_time, tick=False)
    query_ids = ["query1"]
    original_query_ids = copy.deepcopy(query_ids)
    logical_date = timezone.datetime(2025, 1, 1)
    mock_ti = mock.MagicMock(
        dag_id="dag_id",
        task_id="task_id",
        map_index=1,
        try_number=1,
        logical_date=logical_date,
        state=TaskInstanceState.SUCCESS,  # This will be query default state if no metadata found
        dag_run=mock.MagicMock(logical_date=logical_date, clear_number=0),
    )
    mock_ti.get_template_context.return_value = {
        "dag_run": mock.MagicMock(logical_date=logical_date, clear_number=0)
    }
    additional_run_facets = {"custom_run": "value_run"}
    additional_job_facets = {"custom_job": "value_job"}
    # Capture emitted events through a fake listener/adapter pair.
    fake_adapter = mock.MagicMock()
    fake_adapter.emit = mock.MagicMock()
    fake_listener = mock.MagicMock()
    fake_listener.adapter = fake_adapter
    with mock.patch(
        "airflow.providers.openlineage.plugins.listener.get_openlineage_listener",
        return_value=fake_listener,
    ):
        emit_openlineage_events_for_databricks_queries(
            query_ids=query_ids,
            query_source_namespace="databricks_ns",
            task_instance=mock_ti,
            hook=mock.MagicMock(),
            # query_for_extra_metadata=False,  # False by default
            additional_run_facets=additional_run_facets,
            additional_job_facets=additional_job_facets,
        )
    assert query_ids == original_query_ids  # Verify that the input query_ids list is unchanged.
    assert fake_adapter.emit.call_count == 2  # Expect two events per query.
    expected_common_job_facets = {
        "jobType": job_type_job.JobTypeJobFacet(
            jobType="QUERY",
            processingType="BATCH",
            integration="DATABRICKS",
        ),
        "custom_job": "value_job",
    }
    expected_common_run_facets = {
        "parent": parent_run.ParentRunFacet(
            run=parent_run.Run(runId="01941f29-7c00-7087-8906-40e512c257bd"),
            job=parent_run.Job(namespace=namespace(), name="dag_id.task_id"),
            root=parent_run.Root(
                run=parent_run.RootRun(runId="01941f29-7c00-743e-b109-28b18d0a19c5"),
                job=parent_run.RootJob(namespace=namespace(), name="dag_id"),
            ),
        ),
        "custom_run": "value_run",
    }
    expected_calls = [
        mock.call(  # Query1: START event (no metadata)
            RunEvent(
                eventTime=default_event_time.isoformat(),
                eventType=RunState.START,
                run=Run(
                    runId=fake_uuid,
                    facets={
                        "externalQuery": ExternalQueryRunFacet(
                            externalQueryId="query1", source="databricks_ns"
                        ),
                        **expected_common_run_facets,
                    },
                ),
                job=Job(
                    namespace=namespace(),
                    name="dag_id.task_id.query.1",
                    facets=expected_common_job_facets,
                ),
            )
        ),
        mock.call(  # Query1: COMPLETE event (no metadata)
            RunEvent(
                eventTime=default_event_time.isoformat(),
                eventType=RunState.COMPLETE,
                run=Run(
                    runId=fake_uuid,
                    facets={
                        "externalQuery": ExternalQueryRunFacet(
                            externalQueryId="query1", source="databricks_ns"
                        ),
                        **expected_common_run_facets,
                    },
                ),
                job=Job(
                    namespace=namespace(),
                    name="dag_id.task_id.query.1",
                    facets=expected_common_job_facets,
                ),
            )
        ),
    ]
    assert fake_adapter.emit.call_args_list == expected_calls
@mock.patch("importlib.metadata.version", return_value="3.0.0")
@mock.patch("openlineage.client.uuid.generate_new_uuid")
def test_emit_openlineage_events_for_databricks_queries_without_explicit_query_ids(
    mock_generate_uuid, mock_version, time_machine
):
    """
    Emission when `query_ids` are omitted and taken from `hook.query_ids`.

    The RUNNING task state is treated as success, so the terminal event is COMPLETE,
    and the caller-visible `hook.query_ids` list must not be mutated.
    """
    fake_uuid = "01958e68-03a2-79e3-9ae9-26865cc40e2f"
    mock_generate_uuid.return_value = fake_uuid
    # Freeze time so both emitted events get a predictable eventTime.
    default_event_time = timezone.datetime(2025, 1, 5, 0, 0, 0)
    time_machine.move_to(default_event_time, tick=False)
    query_ids = ["query1"]
    hook = mock.MagicMock()
    hook.query_ids = query_ids  # IDs are discovered via the hook, not passed explicitly.
    original_query_ids = copy.deepcopy(query_ids)
    logical_date = timezone.datetime(2025, 1, 1)
    mock_ti = mock.MagicMock(
        dag_id="dag_id",
        task_id="task_id",
        map_index=1,
        try_number=1,
        logical_date=logical_date,
        state=TaskInstanceState.RUNNING,  # This will be query default state if no metadata found
        dag_run=mock.MagicMock(logical_date=logical_date, clear_number=0),
    )
    mock_ti.get_template_context.return_value = {
        "dag_run": mock.MagicMock(logical_date=logical_date, clear_number=0)
    }
    additional_run_facets = {"custom_run": "value_run"}
    additional_job_facets = {"custom_job": "value_job"}
    # Capture emitted events through a fake listener/adapter pair.
    fake_adapter = mock.MagicMock()
    fake_adapter.emit = mock.MagicMock()
    fake_listener = mock.MagicMock()
    fake_listener.adapter = fake_adapter
    with mock.patch(
        "airflow.providers.openlineage.plugins.listener.get_openlineage_listener",
        return_value=fake_listener,
    ):
        emit_openlineage_events_for_databricks_queries(
            query_source_namespace="databricks_ns",
            task_instance=mock_ti,
            hook=hook,
            # query_for_extra_metadata=False,  # False by default
            additional_run_facets=additional_run_facets,
            additional_job_facets=additional_job_facets,
        )
    assert query_ids == original_query_ids  # Verify that the input query_ids list is unchanged.
    assert fake_adapter.emit.call_count == 2  # Expect two events per query.
    expected_common_job_facets = {
        "jobType": job_type_job.JobTypeJobFacet(
            jobType="QUERY",
            processingType="BATCH",
            integration="DATABRICKS",
        ),
        "custom_job": "value_job",
    }
    expected_common_run_facets = {
        "parent": parent_run.ParentRunFacet(
            run=parent_run.Run(runId="01941f29-7c00-7087-8906-40e512c257bd"),
            job=parent_run.Job(namespace=namespace(), name="dag_id.task_id"),
            root=parent_run.Root(
                run=parent_run.RootRun(runId="01941f29-7c00-743e-b109-28b18d0a19c5"),
                job=parent_run.RootJob(namespace=namespace(), name="dag_id"),
            ),
        ),
        "custom_run": "value_run",
    }
    expected_calls = [
        mock.call(  # Query1: START event (no metadata)
            RunEvent(
                eventTime=default_event_time.isoformat(),
                eventType=RunState.START,
                run=Run(
                    runId=fake_uuid,
                    facets={
                        "externalQuery": ExternalQueryRunFacet(
                            externalQueryId="query1", source="databricks_ns"
                        ),
                        **expected_common_run_facets,
                    },
                ),
                job=Job(
                    namespace=namespace(),
                    name="dag_id.task_id.query.1",
                    facets=expected_common_job_facets,
                ),
            )
        ),
        mock.call(  # Query1: COMPLETE event (no metadata)
            RunEvent(
                eventTime=default_event_time.isoformat(),
                eventType=RunState.COMPLETE,
                run=Run(
                    runId=fake_uuid,
                    facets={
                        "externalQuery": ExternalQueryRunFacet(
                            externalQueryId="query1", source="databricks_ns"
                        ),
                        **expected_common_run_facets,
                    },
                ),
                job=Job(
                    namespace=namespace(),
                    name="dag_id.task_id.query.1",
                    facets=expected_common_job_facets,
                ),
            )
        ),
    ]
    assert fake_adapter.emit.call_args_list == expected_calls
@mock.patch(
    "airflow.providers.openlineage.sqlparser.SQLParser.create_namespace", return_value="databricks_ns"
)
@mock.patch("importlib.metadata.version", return_value="3.0.0")
@mock.patch("openlineage.client.uuid.generate_new_uuid")
def test_emit_openlineage_events_for_databricks_queries_without_explicit_query_ids_and_namespace(
    mock_generate_uuid, mock_version, mock_parser, time_machine
):
    """
    Omitting ``query_ids`` and ``query_source_namespace`` falls back to the hook.

    Query ids are read from ``hook.query_ids`` and the source namespace comes from
    the (mocked) ``SQLParser.create_namespace``.  Two events (START and COMPLETE)
    are expected for the single query.
    """
    fake_uuid = "01958e68-03a2-79e3-9ae9-26865cc40e2f"
    mock_generate_uuid.return_value = fake_uuid
    default_event_time = timezone.datetime(2025, 1, 5, 0, 0, 0)
    # Freeze time so the emitted eventTime is deterministic.
    time_machine.move_to(default_event_time, tick=False)
    query_ids = ["query1"]
    hook = mock.MagicMock()
    hook.query_ids = query_ids  # the function should pick the ids up from the hook
    original_query_ids = copy.deepcopy(query_ids)
    logical_date = timezone.datetime(2025, 1, 1)
    mock_ti = mock.MagicMock(
        dag_id="dag_id",
        task_id="task_id",
        map_index=1,
        try_number=1,
        logical_date=logical_date,
        state=TaskInstanceState.RUNNING,  # This will be query default state if no metadata found
        dag_run=mock.MagicMock(logical_date=logical_date, clear_number=0),
    )
    mock_ti.get_template_context.return_value = {
        "dag_run": mock.MagicMock(logical_date=logical_date, clear_number=0)
    }
    additional_run_facets = {"custom_run": "value_run"}
    additional_job_facets = {"custom_job": "value_job"}
    fake_adapter = mock.MagicMock()
    fake_adapter.emit = mock.MagicMock()
    fake_listener = mock.MagicMock()
    fake_listener.adapter = fake_adapter
    with mock.patch(
        "airflow.providers.openlineage.plugins.listener.get_openlineage_listener",
        return_value=fake_listener,
    ):
        emit_openlineage_events_for_databricks_queries(
            task_instance=mock_ti,
            hook=hook,
            # query_for_extra_metadata=False, # False by default
            additional_run_facets=additional_run_facets,
            additional_job_facets=additional_job_facets,
        )
    assert query_ids == original_query_ids  # Verify that the input query_ids list is unchanged.
    assert fake_adapter.emit.call_count == 2  # Expect two events per query.
    expected_common_job_facets = {
        "jobType": job_type_job.JobTypeJobFacet(
            jobType="QUERY",
            processingType="BATCH",
            integration="DATABRICKS",
        ),
        "custom_job": "value_job",
    }
    expected_common_run_facets = {
        "parent": parent_run.ParentRunFacet(
            run=parent_run.Run(runId="01941f29-7c00-7087-8906-40e512c257bd"),
            job=parent_run.Job(namespace=namespace(), name="dag_id.task_id"),
            root=parent_run.Root(
                run=parent_run.RootRun(runId="01941f29-7c00-743e-b109-28b18d0a19c5"),
                job=parent_run.RootJob(namespace=namespace(), name="dag_id"),
            ),
        ),
        "custom_run": "value_run",
    }
    expected_calls = [
        mock.call(  # Query1: START event (no metadata)
            RunEvent(
                eventTime=default_event_time.isoformat(),
                eventType=RunState.START,
                run=Run(
                    runId=fake_uuid,
                    facets={
                        "externalQuery": ExternalQueryRunFacet(
                            externalQueryId="query1", source="databricks_ns"
                        ),
                        **expected_common_run_facets,
                    },
                ),
                job=Job(
                    namespace=namespace(),
                    name="dag_id.task_id.query.1",
                    facets=expected_common_job_facets,
                ),
            )
        ),
        mock.call(  # Query1: COMPLETE event (no metadata)
            RunEvent(
                eventTime=default_event_time.isoformat(),
                eventType=RunState.COMPLETE,
                run=Run(
                    runId=fake_uuid,
                    facets={
                        "externalQuery": ExternalQueryRunFacet(
                            externalQueryId="query1", source="databricks_ns"
                        ),
                        **expected_common_run_facets,
                    },
                ),
                job=Job(
                    namespace=namespace(),
                    name="dag_id.task_id.query.1",
                    facets=expected_common_job_facets,
                ),
            )
        ),
    ]
    assert fake_adapter.emit.call_args_list == expected_calls
@mock.patch("importlib.metadata.version", return_value="3.0.0")
@mock.patch("openlineage.client.uuid.generate_new_uuid")
def test_emit_openlineage_events_for_databricks_queries_without_explicit_query_ids_and_namespace_raw_ns(
    mock_generate_uuid, mock_version, time_machine
):
    """
    With a real DatabricksHook and no explicit namespace, the namespace is built from the hook host.

    Unlike the mocked-parser variant above, ``SQLParser.create_namespace`` is NOT patched
    here, so the expected facet source is the raw ``databricks://<host>`` value.
    """
    fake_uuid = "01958e68-03a2-79e3-9ae9-26865cc40e2f"
    mock_generate_uuid.return_value = fake_uuid
    default_event_time = timezone.datetime(2025, 1, 5, 0, 0, 0)
    # Freeze time so the emitted eventTime is deterministic.
    time_machine.move_to(default_event_time, tick=False)
    query_ids = ["query1"]
    hook = DatabricksHook()
    hook.query_ids = query_ids
    hook.host = "some_host"  # namespace is expected to resolve to databricks://some_host
    original_query_ids = copy.deepcopy(query_ids)
    logical_date = timezone.datetime(2025, 1, 1)
    mock_ti = mock.MagicMock(
        dag_id="dag_id",
        task_id="task_id",
        map_index=1,
        try_number=1,
        logical_date=logical_date,
        state=TaskInstanceState.RUNNING,  # This will be query default state if no metadata found
        dag_run=mock.MagicMock(logical_date=logical_date, clear_number=0),
    )
    mock_ti.get_template_context.return_value = {
        "dag_run": mock.MagicMock(logical_date=logical_date, clear_number=0)
    }
    additional_run_facets = {"custom_run": "value_run"}
    additional_job_facets = {"custom_job": "value_job"}
    fake_adapter = mock.MagicMock()
    fake_adapter.emit = mock.MagicMock()
    fake_listener = mock.MagicMock()
    fake_listener.adapter = fake_adapter
    with mock.patch(
        "airflow.providers.openlineage.plugins.listener.get_openlineage_listener",
        return_value=fake_listener,
    ):
        emit_openlineage_events_for_databricks_queries(
            task_instance=mock_ti,
            hook=hook,
            # query_for_extra_metadata=False, # False by default
            additional_run_facets=additional_run_facets,
            additional_job_facets=additional_job_facets,
        )
    assert query_ids == original_query_ids  # Verify that the input query_ids list is unchanged.
    assert fake_adapter.emit.call_count == 2  # Expect two events per query.
    expected_common_job_facets = {
        "jobType": job_type_job.JobTypeJobFacet(
            jobType="QUERY",
            processingType="BATCH",
            integration="DATABRICKS",
        ),
        "custom_job": "value_job",
    }
    expected_common_run_facets = {
        "parent": parent_run.ParentRunFacet(
            run=parent_run.Run(runId="01941f29-7c00-7087-8906-40e512c257bd"),
            job=parent_run.Job(namespace=namespace(), name="dag_id.task_id"),
            root=parent_run.Root(
                run=parent_run.RootRun(runId="01941f29-7c00-743e-b109-28b18d0a19c5"),
                job=parent_run.RootJob(namespace=namespace(), name="dag_id"),
            ),
        ),
        "custom_run": "value_run",
    }
    expected_calls = [
        mock.call(  # Query1: START event (no metadata)
            RunEvent(
                eventTime=default_event_time.isoformat(),
                eventType=RunState.START,
                run=Run(
                    runId=fake_uuid,
                    facets={
                        "externalQuery": ExternalQueryRunFacet(
                            externalQueryId="query1", source="databricks://some_host"
                        ),
                        **expected_common_run_facets,
                    },
                ),
                job=Job(
                    namespace=namespace(),
                    name="dag_id.task_id.query.1",
                    facets=expected_common_job_facets,
                ),
            )
        ),
        mock.call(  # Query1: COMPLETE event (no metadata)
            RunEvent(
                eventTime=default_event_time.isoformat(),
                eventType=RunState.COMPLETE,
                run=Run(
                    runId=fake_uuid,
                    facets={
                        "externalQuery": ExternalQueryRunFacet(
                            externalQueryId="query1", source="databricks://some_host"
                        ),
                        **expected_common_run_facets,
                    },
                ),
                job=Job(
                    namespace=namespace(),
                    name="dag_id.task_id.query.1",
                    facets=expected_common_job_facets,
                ),
            )
        ),
    ]
    assert fake_adapter.emit.call_args_list == expected_calls
@mock.patch("importlib.metadata.version", return_value="3.0.0")
@mock.patch("openlineage.client.uuid.generate_new_uuid")
def test_emit_openlineage_events_for_databricks_queries_with_query_ids_and_hook_query_ids(
    mock_generate_uuid, mock_version, time_machine
):
    """
    Explicit ``query_ids`` take precedence over ``hook.query_ids``.

    The hook carries ``["query2", "query3"]`` but ``query_ids=["query1"]`` is passed
    explicitly, so only events for ``query1`` must be emitted (two events: START/COMPLETE).

    NOTE: the function name previously contained a typo ("ith" instead of "with");
    fixed here — pytest discovers tests by the ``test_`` prefix, so no caller breaks.
    """
    fake_uuid = "01958e68-03a2-79e3-9ae9-26865cc40e2f"
    mock_generate_uuid.return_value = fake_uuid
    default_event_time = timezone.datetime(2025, 1, 5, 0, 0, 0)
    # Freeze time so the emitted eventTime is deterministic.
    time_machine.move_to(default_event_time, tick=False)
    hook = DatabricksSqlHook()
    hook.query_ids = ["query2", "query3"]  # must be ignored in favor of the explicit list
    query_ids = ["query1"]
    original_query_ids = copy.deepcopy(query_ids)
    logical_date = timezone.datetime(2025, 1, 1)
    mock_ti = mock.MagicMock(
        dag_id="dag_id",
        task_id="task_id",
        map_index=1,
        try_number=1,
        logical_date=logical_date,
        state=TaskInstanceState.SUCCESS,  # This will be query default state if no metadata found
        dag_run=mock.MagicMock(logical_date=logical_date, clear_number=0),
    )
    mock_ti.get_template_context.return_value = {
        "dag_run": mock.MagicMock(logical_date=logical_date, clear_number=0)
    }
    additional_run_facets = {"custom_run": "value_run"}
    additional_job_facets = {"custom_job": "value_job"}
    fake_adapter = mock.MagicMock()
    fake_adapter.emit = mock.MagicMock()
    fake_listener = mock.MagicMock()
    fake_listener.adapter = fake_adapter
    with mock.patch(
        "airflow.providers.openlineage.plugins.listener.get_openlineage_listener",
        return_value=fake_listener,
    ):
        emit_openlineage_events_for_databricks_queries(
            query_ids=query_ids,
            query_source_namespace="databricks_ns",
            task_instance=mock_ti,
            hook=hook,
            # query_for_extra_metadata=False, # False by default
            additional_run_facets=additional_run_facets,
            additional_job_facets=additional_job_facets,
        )
    assert query_ids == original_query_ids  # Verify that the input query_ids list is unchanged.
    assert fake_adapter.emit.call_count == 2  # Expect two events per query.
    expected_common_job_facets = {
        "jobType": job_type_job.JobTypeJobFacet(
            jobType="QUERY",
            processingType="BATCH",
            integration="DATABRICKS",
        ),
        "custom_job": "value_job",
    }
    expected_common_run_facets = {
        "parent": parent_run.ParentRunFacet(
            run=parent_run.Run(runId="01941f29-7c00-7087-8906-40e512c257bd"),
            job=parent_run.Job(namespace=namespace(), name="dag_id.task_id"),
            root=parent_run.Root(
                run=parent_run.RootRun(runId="01941f29-7c00-743e-b109-28b18d0a19c5"),
                job=parent_run.RootJob(namespace=namespace(), name="dag_id"),
            ),
        ),
        "custom_run": "value_run",
    }
    expected_calls = [
        mock.call(  # Query1: START event (no metadata)
            RunEvent(
                eventTime=default_event_time.isoformat(),
                eventType=RunState.START,
                run=Run(
                    runId=fake_uuid,
                    facets={
                        "externalQuery": ExternalQueryRunFacet(
                            externalQueryId="query1", source="databricks_ns"
                        ),
                        **expected_common_run_facets,
                    },
                ),
                job=Job(
                    namespace=namespace(),
                    name="dag_id.task_id.query.1",
                    facets=expected_common_job_facets,
                ),
            )
        ),
        mock.call(  # Query1: COMPLETE event (no metadata)
            RunEvent(
                eventTime=default_event_time.isoformat(),
                eventType=RunState.COMPLETE,
                run=Run(
                    runId=fake_uuid,
                    facets={
                        "externalQuery": ExternalQueryRunFacet(
                            externalQueryId="query1", source="databricks_ns"
                        ),
                        **expected_common_run_facets,
                    },
                ),
                job=Job(
                    namespace=namespace(),
                    name="dag_id.task_id.query.1",
                    facets=expected_common_job_facets,
                ),
            )
        ),
    ]
    assert fake_adapter.emit.call_args_list == expected_calls
@mock.patch("importlib.metadata.version", return_value="3.0.0")
def test_emit_openlineage_events_for_databricks_queries_missing_query_ids_and_hook(mock_version):
    """Empty ``query_ids`` without a hook must raise ValueError and emit no events."""
    ids = []
    ids_before_call = copy.deepcopy(ids)
    adapter_mock = mock.MagicMock()
    adapter_mock.emit = mock.MagicMock()
    listener_mock = mock.MagicMock()
    listener_mock.adapter = adapter_mock
    listener_target = "airflow.providers.openlineage.plugins.listener.get_openlineage_listener"
    expected_message = "If 'hook' is not provided, 'query_ids' must be set."
    with mock.patch(listener_target, return_value=listener_mock):
        with pytest.raises(ValueError, match=expected_message):
            emit_openlineage_events_for_databricks_queries(
                query_ids=ids,
                query_source_namespace="databricks_ns",
                task_instance=None,
            )
    # The caller-provided list must not be mutated, and nothing may be emitted.
    assert ids == ids_before_call
    adapter_mock.emit.assert_not_called()
@mock.patch("importlib.metadata.version", return_value="3.0.0")
def test_emit_openlineage_events_for_databricks_queries_missing_query_namespace_and_hook(mock_version):
    """Missing ``query_source_namespace`` without a hook must raise ValueError and emit nothing."""
    ids = ["1", "2"]
    ids_before_call = copy.deepcopy(ids)
    adapter_mock = mock.MagicMock()
    adapter_mock.emit = mock.MagicMock()
    listener_mock = mock.MagicMock()
    listener_mock.adapter = adapter_mock
    listener_target = "airflow.providers.openlineage.plugins.listener.get_openlineage_listener"
    expected_message = "If 'hook' is not provided, 'query_source_namespace' must be set."
    with mock.patch(listener_target, return_value=listener_mock):
        with pytest.raises(ValueError, match=expected_message):
            emit_openlineage_events_for_databricks_queries(
                query_ids=ids,
                task_instance=None,
            )
    # The caller-provided list must not be mutated, and nothing may be emitted.
    assert ids == ids_before_call
    adapter_mock.emit.assert_not_called()
@mock.patch("importlib.metadata.version", return_value="3.0.0")
def test_emit_openlineage_events_for_databricks_queries_missing_hook_and_query_for_extra_metadata_true(
    mock_version,
):
    """``query_for_extra_metadata=True`` is invalid without a hook: ValueError, no events."""
    ids = ["1", "2"]
    ids_before_call = copy.deepcopy(ids)
    adapter_mock = mock.MagicMock()
    adapter_mock.emit = mock.MagicMock()
    listener_mock = mock.MagicMock()
    listener_mock.adapter = adapter_mock
    listener_target = "airflow.providers.openlineage.plugins.listener.get_openlineage_listener"
    expected_message = "If 'hook' is not provided, 'query_for_extra_metadata' must be False."
    with mock.patch(listener_target, return_value=listener_mock):
        with pytest.raises(ValueError, match=expected_message):
            emit_openlineage_events_for_databricks_queries(
                query_ids=ids,
                query_source_namespace="databricks_ns",
                task_instance=None,
                query_for_extra_metadata=True,
            )
    # The caller-provided list must not be mutated, and nothing may be emitted.
    assert ids == ids_before_call
    adapter_mock.emit.assert_not_called()
@mock.patch("importlib.metadata.version", return_value="1.99.0")
def test_emit_openlineage_events_with_old_openlineage_provider(mock_version):
    """An OpenLineage provider older than 2.5.0 makes the helper refuse to run."""
    ids = ["q1", "q2"]
    ids_before_call = copy.deepcopy(ids)
    adapter_mock = mock.MagicMock()
    adapter_mock.emit = mock.MagicMock()
    listener_mock = mock.MagicMock()
    listener_mock.adapter = adapter_mock
    listener_target = "airflow.providers.openlineage.plugins.listener.get_openlineage_listener"
    expected_err = (
        "OpenLineage provider version `1.99.0` is lower than required `2.5.0`, "
        "skipping function `emit_openlineage_events_for_databricks_queries` execution"
    )
    with mock.patch(listener_target, return_value=listener_mock):
        with pytest.raises(AirflowOptionalProviderFeatureException, match=expected_err):
            emit_openlineage_events_for_databricks_queries(
                query_ids=ids,
                query_source_namespace="databricks_ns",
                task_instance=None,
            )
    # The caller-provided list must not be mutated, and nothing may be emitted.
    assert ids == ids_before_call
    adapter_mock.emit.assert_not_called()
| {
"repo_id": "apache/airflow",
"file_path": "providers/databricks/tests/unit/databricks/utils/test_openlineage.py",
"license": "Apache License 2.0",
"lines": 1068,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/apache/kafka/src/airflow/providers/apache/kafka/queues/kafka.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import re
from typing import TYPE_CHECKING
from urllib.parse import urlparse
from airflow.providers.apache.kafka.triggers.await_message import AwaitMessageTrigger
from airflow.providers.common.messaging.providers.base_provider import BaseMessageQueueProvider
if TYPE_CHECKING:
from airflow.triggers.base import BaseEventTrigger
# [START queue_regexp]
# Queue URIs handled by this provider must start with the kafka:// scheme.
QUEUE_REGEXP = r"^kafka://"
# [END queue_regexp]
class KafkaMessageQueueProvider(BaseMessageQueueProvider):
    """
    Configuration for Apache Kafka integration with common-messaging.

    [START kafka_message_queue_provider_description]
    * It uses ``kafka`` as scheme for identifying Kafka queues.
    * For parameter definitions take a look at :class:`~airflow.providers.apache.kafka.triggers.await_message.AwaitMessageTrigger`.

    .. code-block:: python

        from airflow.providers.common.messaging.triggers.msg_queue import MessageQueueTrigger
        from airflow.sdk import Asset, AssetWatcher

        trigger = MessageQueueTrigger(
            scheme="kafka",
            # Additional Kafka AwaitMessageTrigger parameters as needed
            topics=["my_topic"],
            apply_function="module.apply_function",
            bootstrap_servers="localhost:9092",
        )

        asset = Asset("kafka_queue_asset", watchers=[AssetWatcher(name="kafka_watcher", trigger=trigger)])

    For a complete example, see:
    :mod:`tests.system.common.messaging.kafka_message_queue_trigger`
    [END kafka_message_queue_provider_description]
    """

    scheme = "kafka"

    def queue_matches(self, queue: str) -> bool:
        """Return True when the queue URI uses the kafka:// scheme."""
        return re.match(QUEUE_REGEXP, queue) is not None

    def trigger_class(self) -> type[BaseEventTrigger]:
        """Return the trigger implementation used for Kafka queues."""
        return AwaitMessageTrigger  # type: ignore[return-value]

    def trigger_kwargs(self, queue: str, **kwargs) -> dict:
        """
        Build extra kwargs for the trigger from the queue URI.

        ``apply_function`` is mandatory; topics may come either from the URI path
        or from explicit ``topics`` kwargs (the latter wins).
        """
        if "apply_function" not in kwargs:
            raise ValueError("apply_function is required in KafkaMessageQueueProvider kwargs")
        # [START extract_topics]
        # Parse the queue URI
        uri_parts = urlparse(queue)
        # Extract topics (after host list)
        # parsed path starts with a '/', so strip it
        topic_path = uri_parts.path.lstrip("/")
        topics = topic_path.split(",") if topic_path else []
        # [END extract_topics]
        if not topics and "topics" not in kwargs:
            raise ValueError(
                "topics is required in KafkaMessageQueueProvider kwargs or provide them in the queue URI"
            )
        # Explicit kwargs already carry the topics; otherwise hand over the URI-derived list.
        if "topics" in kwargs:
            return {}
        return {"topics": topics}
| {
"repo_id": "apache/airflow",
"file_path": "providers/apache/kafka/src/airflow/providers/apache/kafka/queues/kafka.py",
"license": "Apache License 2.0",
"lines": 69,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/apache/kafka/tests/unit/apache/kafka/queues/test_kafka.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow.providers.apache.kafka.triggers.await_message import AwaitMessageTrigger
pytest.importorskip("airflow.providers.common.messaging.providers.base_provider")
# Dotted-path placeholder passed as the mandatory `apply_function` kwarg in tests below.
MOCK_KAFKA_TRIGGER_APPLY_FUNCTION = "mock_kafka_trigger_apply_function"
class TestKafkaMessageQueueProvider:
    """Tests for KafkaMessageQueueProvider."""

    def setup_method(self):
        """Set up the test environment."""
        # Imported lazily so the module-level importorskip on common.messaging
        # can skip the whole module before this import is attempted.
        from airflow.providers.apache.kafka.queues.kafka import KafkaMessageQueueProvider

        self.provider = KafkaMessageQueueProvider()

    def test_queue_create(self):
        """Test the creation of the KafkaMessageQueueProvider."""
        from airflow.providers.common.messaging.providers.base_provider import BaseMessageQueueProvider

        assert isinstance(self.provider, BaseMessageQueueProvider)

    @pytest.mark.parametrize(
        ("queue_uri", "expected_result"),
        [
            pytest.param("kafka://localhost:9092/topic1", True, id="single_broker_single_topic"),
            pytest.param(
                "kafka://broker1:9092,broker2:9092/topic1,topic2", True, id="multiple_brokers_multiple_topics"
            ),
            pytest.param("http://example.com", False, id="http_url"),
            pytest.param("not-a-url", False, id="invalid_url"),
        ],
    )
    def test_queue_matches(self, queue_uri, expected_result):
        """Test the queue_matches method with various URLs."""
        assert self.provider.queue_matches(queue_uri) == expected_result

    @pytest.mark.parametrize(
        ("scheme", "expected_result"),
        [
            pytest.param("kafka", True, id="kafka_scheme"),
            pytest.param("redis+pubsub", False, id="redis_scheme"),
            pytest.param("sqs", False, id="sqs_scheme"),
            pytest.param("unknown", False, id="unknown_scheme"),
        ],
    )
    def test_scheme_matches(self, scheme, expected_result):
        """Test the scheme_matches method with various schemes."""
        # NOTE(review): scheme_matches is not defined on KafkaMessageQueueProvider
        # itself — presumably inherited from BaseMessageQueueProvider; verify there.
        assert self.provider.scheme_matches(scheme) == expected_result

    def test_trigger_class(self):
        """Test the trigger_class method."""
        assert self.provider.trigger_class() == AwaitMessageTrigger

    @pytest.mark.parametrize(
        ("queue_uri", "extra_kwargs", "expected_result"),
        [
            pytest.param(
                "kafka://broker:9092/topic1,topic2",
                {"apply_function": MOCK_KAFKA_TRIGGER_APPLY_FUNCTION},
                {"topics": ["topic1", "topic2"]},
                id="topics_from_uri",
            ),
            pytest.param(
                "kafka://broker:9092/",
                # Explicit topics kwarg suppresses URI extraction: empty dict expected.
                {"topics": ["topic1", "topic2"], "apply_function": MOCK_KAFKA_TRIGGER_APPLY_FUNCTION},
                {},
                id="topics_from_kwargs",
            ),
        ],
    )
    def test_trigger_kwargs_valid_cases(self, queue_uri, extra_kwargs, expected_result):
        """Test the trigger_kwargs method with valid parameters."""
        kwargs = self.provider.trigger_kwargs(queue_uri, **extra_kwargs)
        assert kwargs == expected_result

    @pytest.mark.parametrize(
        ("queue_uri", "extra_kwargs", "expected_error", "error_match"),
        [
            pytest.param(
                "kafka://broker:9092/topic1",
                {},
                ValueError,
                "apply_function is required in KafkaMessageQueueProvider kwargs",
                id="missing_apply_function",
            ),
            pytest.param(
                "kafka://broker:9092/",
                {"apply_function": MOCK_KAFKA_TRIGGER_APPLY_FUNCTION},
                ValueError,
                "topics is required in KafkaMessageQueueProvider kwargs or provide them in the queue URI",
                id="missing_topics",
            ),
        ],
    )
    def test_trigger_kwargs_error_cases(self, queue_uri, extra_kwargs, expected_error, error_match):
        """Test that trigger_kwargs raises appropriate errors with invalid parameters."""
        with pytest.raises(expected_error, match=error_match):
            self.provider.trigger_kwargs(queue_uri, **extra_kwargs)
| {
"repo_id": "apache/airflow",
"file_path": "providers/apache/kafka/tests/unit/apache/kafka/queues/test_kafka.py",
"license": "Apache License 2.0",
"lines": 104,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/tests/unit/api_fastapi/common/test_dagbag.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
import pytest
from airflow.api_fastapi.app import purge_cached_app
from airflow.sdk import BaseOperator
from tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags
pytestmark = pytest.mark.db_test
class TestDagBagSingleton:
    """Tests to ensure that DagBag is instantiated only once per app lifecycle."""

    # Class-level mutable counter so the patched factory (a closure) can record
    # how many times DBDagBag was instantiated.
    dagbag_call_counter = {"count": 0}

    # FIX: the original used nose-style `setup`/`teardown` method names, which
    # pytest no longer invokes after nose support was removed (pytest 8), so the
    # DB cleanup silently stopped running. `setup_method`/`teardown_method` are
    # the supported xunit-style hooks on both pytest 7 and 8.
    def setup_method(self):
        """Reset DB state (runs, dags, serialized dags) before each test."""
        clear_db_runs()
        clear_db_dags()
        clear_db_serialized_dags()

    def teardown_method(self):
        """Clean DB state after each test."""
        clear_db_runs()
        clear_db_dags()
        clear_db_serialized_dags()

    @pytest.fixture(autouse=True)
    def patch_dagbag_once_before_app(self):
        """Patch DagBag once before app is created, and reset counter."""
        self.dagbag_call_counter["count"] = 0
        from airflow.models.dagbag import DBDagBag as RealDagBag

        def factory(*args, **kwargs):
            # Count every instantiation, then delegate to the real class.
            self.dagbag_call_counter["count"] += 1
            return RealDagBag(*args, **kwargs)

        with mock.patch("airflow.api_fastapi.common.dagbag.DBDagBag", side_effect=factory):
            # Drop any cached app so it is rebuilt (and its DagBag created) under the patch.
            purge_cached_app()
            yield

    def test_dagbag_used_as_singleton_in_dependency(self, session, dag_maker, test_client):
        """
        Ensure DagBag is created only once and reused across multiple API requests.

        This test sets up a single DAG, patches the DagBag constructor to track instantiation count,
        and verifies that two calls to the `/api/v2/dags/{dag_id}` endpoint both return 200 OK,
        while only one DagBag instance is created.

        This validates that the FastAPI DagBag dependency correctly resolves to app.state.dag_bag,
        maintaining singleton behavior instead of creating a new DagBag per request.
        """
        dag_id = "dagbag_singleton_test"
        with dag_maker(dag_id=dag_id, session=session, serialized=True):
            BaseOperator(task_id="test_task")
        session.commit()
        resp1 = test_client.get(f"/api/v2/dags/{dag_id}")
        assert resp1.status_code == 200
        resp2 = test_client.get(f"/api/v2/dags/{dag_id}")
        assert resp2.status_code == 200
        assert self.dagbag_call_counter["count"] == 1
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/api_fastapi/common/test_dagbag.py",
"license": "Apache License 2.0",
"lines": 63,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/google/src/airflow/providers/google/cloud/links/cloud_run.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.providers.google.cloud.links.base import BaseGoogleLink
class CloudRunJobLoggingLink(BaseGoogleLink):
    """Helper class for constructing Cloud Run Job Logging link."""

    # Human-readable name for the link.
    name = "Cloud Run Job Logging"
    # Key under which the link payload ({"log_uri": ...}) is stored/retrieved.
    key = "log_uri"
    # The persisted log_uri is already a complete URL, so it is used verbatim.
    format_str = "{log_uri}"
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/src/airflow/providers/google/cloud/links/cloud_run.py",
"license": "Apache License 2.0",
"lines": 23,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/google/tests/unit/google/cloud/links/test_cloud_run.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
import pytest
from airflow.providers.google.cloud.links.cloud_run import CloudRunJobLoggingLink
from airflow.providers.google.cloud.operators.cloud_run import CloudRunExecuteJobOperator
from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
if AIRFLOW_V_3_0_PLUS:
from airflow.sdk.execution_time.comms import XComResult
TEST_LOG_URI = (
"https://console.cloud.google.com/run/jobs/logs?project=test-project®ion=test-region&job=test-job"
)
class TestCloudRunJobLoggingLink:
    """Tests for the CloudRunJobLoggingLink extra-link class."""

    def test_class_attributes(self):
        # The link must expose the stable key/name/format contract.
        assert CloudRunJobLoggingLink.key == "log_uri"
        assert CloudRunJobLoggingLink.name == "Cloud Run Job Logging"
        assert CloudRunJobLoggingLink.format_str == "{log_uri}"

    def test_persist(self):
        """persist() must push the log_uri payload to XCom under the link key."""
        mock_context = mock.MagicMock()
        mock_context["ti"] = mock.MagicMock()
        mock_context["task"] = mock.MagicMock()
        CloudRunJobLoggingLink.persist(
            context=mock_context,
            log_uri=TEST_LOG_URI,
        )
        mock_context["ti"].xcom_push.assert_called_once_with(
            key=CloudRunJobLoggingLink.key,
            value={"log_uri": TEST_LOG_URI},
        )

    @pytest.mark.db_test
    def test_get_link(self, create_task_instance_of_operator, session, mock_supervisor_comms):
        """get_link() must return the persisted log_uri unchanged."""
        link = CloudRunJobLoggingLink()
        ti = create_task_instance_of_operator(
            CloudRunExecuteJobOperator,
            dag_id="test_cloud_run_job_logging_link_dag",
            task_id="test_cloud_run_job_logging_link_task",
            job_name="test-job",
            project_id="test-project",
            region="test-region",
        )
        session.add(ti)
        session.commit()
        link.persist(context={"ti": ti, "task": ti.task}, log_uri=TEST_LOG_URI)
        if mock_supervisor_comms:
            # Airflow 3 task SDK path: XCom reads go through supervisor comms.
            mock_supervisor_comms.send.return_value = XComResult(
                key="key",
                value={"log_uri": TEST_LOG_URI},
            )
        actual_url = link.get_link(operator=ti.task, ti_key=ti.key)
        assert actual_url == TEST_LOG_URI
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/tests/unit/google/cloud/links/test_cloud_run.py",
"license": "Apache License 2.0",
"lines": 66,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_04_28.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from cadwyn import VersionChange, schema
from airflow.api_fastapi.execution_api.datamodels.taskinstance import (
TIDeferredStatePayload,
TIRetryStatePayload,
TISuccessStatePayload,
TITerminalStatePayload,
)
class AddRenderedMapIndexField(VersionChange):
    """Add the `rendered_map_index` field to payload models."""

    description = __doc__
    # Cadwyn downgrade instructions: when converting a payload to the previous
    # API version, the `rendered_map_index` field must not exist on any of these
    # task-instance state payload models.
    instructions_to_migrate_to_previous_version = (
        schema(TITerminalStatePayload).field("rendered_map_index").didnt_exist,
        schema(TISuccessStatePayload).field("rendered_map_index").didnt_exist,
        schema(TIDeferredStatePayload).field("rendered_map_index").didnt_exist,
        schema(TIRetryStatePayload).field("rendered_map_index").didnt_exist,
    )
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_04_28.py",
"license": "Apache License 2.0",
"lines": 33,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:task-sdk/src/airflow/sdk/execution_time/cache.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
import multiprocessing
from airflow.sdk import timezone
from airflow.sdk.configuration import conf
class SecretCache:
    """A static class to manage the global secret cache."""

    # Kept between resets so tests can reuse the (expensive) manager process.
    __manager: multiprocessing.managers.SyncManager | None = None
    # None means "cache disabled or not initialized yet".
    _cache: dict[str, _CacheValue] | None = None
    # Maximum age of an entry before it is considered expired.
    _ttl: datetime.timedelta

    class NotPresentException(Exception):
        """Raised when a key is not present in the cache."""

    class _CacheValue:
        """A cached value together with the timestamp at which it was stored."""

        def __init__(self, value: str | None) -> None:
            self.value = value
            self.date = timezone.utcnow()

        def is_expired(self, ttl: datetime.timedelta) -> bool:
            """Return True when this entry is older than *ttl*."""
            return timezone.utcnow() - self.date > ttl

    _VARIABLE_PREFIX = "__v_"
    _CONNECTION_PREFIX = "__c_"
    _TEAM_PATTERN = "_{}_"

    @classmethod
    def _build_key(cls, key: str, prefix: str, team_name: str | None = None) -> str:
        """Build the internal cache key for *key* under *prefix*, optionally scoped to a team."""
        team_part = cls._TEAM_PATTERN.format(team_name) if team_name else ""
        return f"{prefix}{team_part}{key}"

    @classmethod
    def init(cls):
        """
        Initialize the cache, provided the configuration allows it.

        Safe to call several times.
        """
        if cls._cache is not None:
            return  # already initialized
        if not conf.getboolean(section="secrets", key="use_cache", fallback=False):
            return  # caching disabled by configuration
        if cls.__manager is None:
            # It is not strictly necessary to keep the manager around, but doing
            # so allows reusing it between tests, making them much faster since
            # creating a manager takes ~300ms each time.
            cls.__manager = multiprocessing.Manager()
        cls._cache = cls.__manager.dict()
        cls._ttl = datetime.timedelta(
            seconds=conf.getint(section="secrets", key="cache_ttl_seconds", fallback=15 * 60)
        )

    @classmethod
    def reset(cls):
        """Use for test purposes only."""
        cls._cache = None

    @classmethod
    def get_variable(cls, key: str, team_name: str | None = None) -> str | None:
        """
        Try to get the value associated with the key from the cache.

        :param key: the key to look for
        :param team_name: the team name associated to the variable (if any)
        :return: the saved value (which can be None) if present in cache and not expired
        :raises NotPresentException: if the key is absent, expired, or the cache is disabled
        """
        return cls._get(key, cls._VARIABLE_PREFIX, team_name=team_name)

    @classmethod
    def get_connection_uri(cls, conn_id: str, team_name: str | None = None) -> str:
        """
        Try to get the uri associated with the conn_id from the cache.

        :param conn_id: the connection id to look for
        :param team_name: the team name associated to the connection (if any)
        :return: the saved uri if present in cache and not expired
        :raises NotPresentException: if the conn_id is absent, expired, or the cache is disabled
        """
        uri = cls._get(conn_id, cls._CONNECTION_PREFIX, team_name=team_name)
        # There shouldn't be any empty entries in the connections cache, but we enforce it here.
        if not uri:
            raise cls.NotPresentException
        return uri

    @classmethod
    def _get(cls, key: str, prefix: str, team_name: str | None = None) -> str | None:
        # Using an exception for misses allows meaningfully caching None values.
        if cls._cache is None:
            raise cls.NotPresentException
        entry = cls._cache.get(cls._build_key(key, prefix, team_name))
        if entry is None or entry.is_expired(cls._ttl):
            raise cls.NotPresentException
        return entry.value

    @classmethod
    def save_variable(cls, key: str, value: str | None, team_name: str | None = None):
        """Save the value for that key in the cache, if initialized."""
        cls._save(key, value, cls._VARIABLE_PREFIX, team_name=team_name)

    @classmethod
    def save_connection_uri(cls, conn_id: str, uri: str, team_name: str | None = None):
        """Save the uri representation for that connection in the cache, if initialized."""
        # Connections raise exceptions when absent, so a None uri should never be cached.
        if uri is None:
            return
        cls._save(conn_id, uri, cls._CONNECTION_PREFIX, team_name=team_name)

    @classmethod
    def _save(cls, key: str, value: str | None, prefix: str, team_name: str | None = None):
        if cls._cache is None:
            return  # cache disabled — saving is a no-op
        cls._cache[cls._build_key(key, prefix, team_name)] = cls._CacheValue(value)

    @classmethod
    def invalidate_variable(cls, key: str, team_name: str | None = None):
        """Invalidate (actually removes) the value stored in the cache for that Variable."""
        if cls._cache is None:
            return
        # The default argument keeps pop() from raising if the key is absent.
        cls._cache.pop(cls._build_key(key, cls._VARIABLE_PREFIX, team_name), None)
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/execution_time/cache.py",
"license": "Apache License 2.0",
"lines": 116,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:scripts/ci/airflow_version_check.py | #! /usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# /// script
# requires-python = ">=3.10"
# dependencies = [
# "packaging>=25",
# "requests>=2.28.1",
# "rich>=13.6.0",
# ]
# ///
from __future__ import annotations
import re
import sys
from pathlib import Path
import requests
from packaging.version import Version, parse
from rich.console import Console
console = Console(color_system="standard", stderr=True, width=400)
def check_airflow_version(airflow_version: Version) -> tuple[str, bool]:
    """
    Check if the given version is a valid Airflow version and latest.

    Also verifies that a constraints file exists on GitHub for this version and
    the minimum Python version declared in ``pyproject.toml``; exits with a
    non-zero status on any failure.

    :param airflow_version: The Airflow version to check.
    :return: tuple containing the version string and a boolean indicating if it's latest.
    """
    latest = False
    try:
        response = requests.get(
            "https://pypi.org/pypi/apache-airflow/json", headers={"User-Agent": "Python requests"}
        )
        response.raise_for_status()
        data = response.json()
        latest_version = Version(data["info"]["version"])
        all_versions = sorted(
            (parse(v) for v in data["releases"].keys()),
            reverse=True,
        )
        if airflow_version not in all_versions:
            console.print(f"[red]Version {airflow_version} is not a valid Airflow release version.")
            console.print("[yellow]Available versions (latest 30 shown):")
            console.print([str(v) for v in all_versions[:30]])
            sys.exit(1)
        if airflow_version == latest_version:
            latest = True
        # find requires-python = ">=VERSION" in pyproject.toml file of airflow
        pyproject_toml_content = (Path(__file__).parents[2] / "pyproject.toml").read_text()
        # Raw string with the dot escaped: an unescaped "." would match any character.
        matched_version = re.search(r'requires-python = ">=([0-9]+\.[0-9]+)', pyproject_toml_content)
        if matched_version:
            min_version = matched_version.group(1)
        else:
            console.print("[red]Error: requires-python version not found in pyproject.toml")
            sys.exit(1)
        constraints_url = (
            f"https://raw.githubusercontent.com/apache/airflow/"
            f"constraints-{airflow_version}/constraints-{min_version}.txt"
        )
        console.print(f"[bright_blue]Checking constraints file: {constraints_url}")
        response = requests.head(constraints_url)
        if response.status_code == 404:
            console.print(
                f"[red]Error: Constraints file not found for version {airflow_version}. "
                f"Please set appropriate tag."
            )
            sys.exit(1)
        response.raise_for_status()
        console.print(f"[green]Constraints file found for version {airflow_version}, Python {min_version}")
        return str(airflow_version), latest
    except Exception as e:
        # sys.exit() raises SystemExit, which is not an Exception subclass, so
        # the deliberate exits above are not swallowed by this handler.
        console.print(f"[red]Error fetching latest version: {e}")
        sys.exit(1)
def normalize_version(version: str) -> Version:
    """Parse *version* into a ``Version``, exiting with an error message on failure."""
    try:
        parsed = Version(version)
    except Exception as e:
        console.print(f"[red]Error normalizing version: {e}")
        sys.exit(1)
    return parsed
def print_both_outputs(output: str):
    """Emit *output* both on plain stdout and on the rich (stderr) console."""
    for emit in (print, console.print):
        emit(output)
if __name__ == "__main__":
    # Expect exactly one argument: the Airflow version to validate.
    if len(sys.argv) < 2:
        console.print("[yellow]Usage: python normalize_airflow_version.py <version>")
        sys.exit(1)
    version = sys.argv[1]
    # Parse first so an invalid version string fails fast with a clear message.
    parsed_version = normalize_version(version)
    actual_version, is_latest = check_airflow_version(parsed_version)
    # Emit results as key=value lines on both stdout and the console.
    print_both_outputs(f"airflowVersion={actual_version}")
    skip_latest = "false" if is_latest else "true"
    print_both_outputs(f"skipLatest={skip_latest}")
| {
"repo_id": "apache/airflow",
"file_path": "scripts/ci/airflow_version_check.py",
"license": "Apache License 2.0",
"lines": 103,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/microsoft/azure/tests/system/microsoft/azure/example_powerbi_dataset_list.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from datetime import datetime
from airflow import DAG, settings
try:
from airflow.sdk import task
except ImportError:
# Airflow 2 path
from airflow.decorators import task # type: ignore[attr-defined,no-redef]
from airflow.models import Connection
from airflow.models.baseoperator import chain
from airflow.providers.microsoft.azure.operators.powerbi import PowerBIDatasetListOperator
DAG_ID = "example_powerbi_dataset_list"
CONN_ID = "powerbi_default"

# Before running this system test, you should set following environment variables:
# NOTE(review): the GROUP_ID fallback is the *string* "None", not the value None,
# unlike the other three — confirm this is intentional.
GROUP_ID = os.environ.get("GROUP_ID", "None")
CLIENT_ID = os.environ.get("CLIENT_ID", None)
CLIENT_SECRET = os.environ.get("CLIENT_SECRET", None)
TENANT_ID = os.environ.get("TENANT_ID", None)
@task
def create_connection(conn_id_name: str):
    """Create and persist a Power BI connection built from the environment credentials."""
    powerbi_conn = Connection(
        conn_id=conn_id_name,
        conn_type="powerbi",
        login=CLIENT_ID,
        password=CLIENT_SECRET,
        extra={"tenant_id": TENANT_ID},
    )
    if settings.Session is None:
        raise RuntimeError("Session not configured. Call configure_orm() first.")
    db_session = settings.Session()
    db_session.add(powerbi_conn)
    db_session.commit()
with DAG(
    dag_id=DAG_ID,
    start_date=datetime(2021, 1, 1),
    schedule=None,  # run only when triggered manually / via pytest, never on a schedule
    tags=["example"],
) as dag:
    # TEST SETUP: register the Power BI connection before the operator needs it.
    set_up_connection = create_connection(CONN_ID)

    # [START howto_operator_powerbi_dataset_list_async]
    get_powerbi_dataset_list = PowerBIDatasetListOperator(
        conn_id="powerbi_default",
        task_id="get_powerbi_dataset_list",
        group_id=GROUP_ID,
        timeout=120,  # NOTE(review): presumably seconds — confirm against the operator's docs
    )
    # [END howto_operator_powerbi_dataset_list_async]

    chain(
        # TEST SETUP
        set_up_connection,
        # TEST BODY
        get_powerbi_dataset_list,
    )

    from tests_common.test_utils.watcher import watcher

    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
# pytest discovers `test_run` and executes the DAG defined above as a system test.
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/microsoft/azure/tests/system/microsoft/azure/example_powerbi_dataset_list.py",
"license": "Apache License 2.0",
"lines": 77,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.