sample_id stringlengths 21 196 | text stringlengths 105 936k | metadata dict | category stringclasses 6
values |
|---|---|---|---|
apache/airflow:providers/cncf/kubernetes/tests/unit/cncf/kubernetes/utils/test_container.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from types import SimpleNamespace
from unittest.mock import MagicMock
import pytest
from airflow.providers.cncf.kubernetes.utils.container import (
container_is_running,
container_is_succeeded,
container_is_terminated,
)
@pytest.mark.parametrize(
    ("container_state", "expected_is_terminated"),
    [("waiting", False), ("running", False), ("terminated", True)],
)
def test_container_is_terminated(container_state, expected_is_terminated):
    """Only a truthy ``state.terminated`` should report the container as terminated.

    Renamed from ``test_container_is_terminated_with_waiting_state``: the test is
    parametrized over all three container states, not just "waiting".
    """
    container_status = MagicMock()
    # Exactly one of the three state attributes is truthy for each param
    container_status.configure_mock(
        **{
            "name": "base",
            "state.waiting": True if container_state == "waiting" else None,
            "state.running": True if container_state == "running" else None,
            "state.terminated": True if container_state == "terminated" else None,
        }
    )
    pod_info = MagicMock()
    pod_info.status.container_statuses = [container_status]
    assert container_is_terminated(pod_info, "base") == expected_is_terminated
def params_for_test_container_is_running():
    """Emit params for `test_container_is_running`.

    `container_is_running` must tolerate the assortment of bad objects that
    `read_pod` can return: a None object, an object `e` where `e.status` is
    None, an object `e` where `e.status.container_statuses` is None, and so on.

    The mock classes are deliberately not derived from MagicMock because for a
    MagicMock instance `e`, checks like `e.hello is not None` are always True.
    """

    class RemotePodMock:
        pass

    class ContainerStatusMock:
        def __init__(self, name):
            self.name = name

    def make_status(name, is_running):
        # Build one container status whose `state.running` is truthy or None.
        status = ContainerStatusMock(name)
        status.state = RemotePodMock()
        status.state.running = {"a": "b"} if is_running else None
        return status

    def make_pod(running=None, not_running=None):
        # Build a remote-pod mock; not-running statuses are listed first,
        # matching the original ordering.
        pod = RemotePodMock()
        pod.status = RemotePodMock()
        pod.status.container_statuses = [make_status(n, False) for n in (not_running or [])]
        pod.status.container_statuses += [make_status(n, True) for n in (running or [])]
        pod.status.init_container_statuses = []
        return pod

    no_status = RemotePodMock()
    no_status.status = None

    empty_statuses = RemotePodMock()
    empty_statuses.status = RemotePodMock()
    empty_statuses.status.container_statuses = []
    empty_statuses.status.init_container_statuses = []

    return [
        pytest.param(None, False, id="None remote_pod"),
        pytest.param(no_status, False, id="None remote_pod.status"),
        pytest.param(empty_statuses, False, id="empty remote_pod.status.container_statuses"),
        pytest.param(make_pod(), False, id="filter empty"),
        pytest.param(make_pod(None, ["base"]), False, id="filter 0 running"),
        pytest.param(make_pod(["hello"], ["base"]), False, id="filter 1 not running"),
        pytest.param(make_pod(["base"], ["hello"]), True, id="filter 1 running"),
    ]
@pytest.mark.parametrize(("remote_pod", "result"), params_for_test_container_is_running())
def test_container_is_running(remote_pod, result):
    """Verify `container_is_running` tolerates malformed `read_pod` results.

    E.g. a None object, an object `e` such that `e.status` is None, an object
    `e` such that `e.status.container_statuses` is None, and so on.
    """
    assert container_is_running(remote_pod, "base") is result
def params_for_test_container_is_succeeded():
    """Emit params for `test_container_is_succeeded`.

    `container_is_succeeded` must tolerate the assortment of bad objects that
    `read_pod` can return: a None object, an object `e` where `e.status` is
    None, an object `e` where `e.status.container_statuses` is None, and so on.

    The mock classes are deliberately not derived from MagicMock because for a
    MagicMock instance `e`, checks like `e.hello is not None` are always True.
    """

    class RemotePodMock:
        pass

    class ContainerStatusMock:
        def __init__(self, name):
            self.name = name

    def make_status(name, has_succeeded):
        # A succeeded container carries a terminated state with exit_code 0.
        status = ContainerStatusMock(name)
        status.state = RemotePodMock()
        status.state.terminated = SimpleNamespace(exit_code=0) if has_succeeded else None
        return status

    def make_pod(succeeded=None, not_succeeded=None):
        # Build a remote-pod mock; not-succeeded statuses are listed first,
        # matching the original ordering.
        pod = RemotePodMock()
        pod.status = RemotePodMock()
        pod.status.container_statuses = [make_status(n, False) for n in (not_succeeded or [])]
        pod.status.container_statuses += [make_status(n, True) for n in (succeeded or [])]
        pod.status.init_container_statuses = []
        return pod

    no_status = RemotePodMock()
    no_status.status = None

    none_statuses = RemotePodMock()
    none_statuses.status = RemotePodMock()
    none_statuses.status.container_statuses = None
    none_statuses.status.init_container_statuses = []

    empty_statuses = RemotePodMock()
    empty_statuses.status = RemotePodMock()
    empty_statuses.status.container_statuses = []
    empty_statuses.status.init_container_statuses = []

    return [
        pytest.param(None, False, id="None remote_pod"),
        pytest.param(no_status, False, id="None remote_pod.status"),
        pytest.param(none_statuses, False, id="None remote_pod.status.container_statuses"),
        pytest.param(empty_statuses, False, id="empty remote_pod.status.container_statuses"),
        pytest.param(make_pod(), False, id="filter empty"),
        pytest.param(make_pod(None, ["base"]), False, id="filter 0 succeeded"),
        pytest.param(make_pod(["hello"], ["base"]), False, id="filter 1 not succeeded"),
        pytest.param(make_pod(["base"], ["hello"]), True, id="filter 1 succeeded"),
    ]
@pytest.mark.parametrize(("remote_pod", "result"), params_for_test_container_is_succeeded())
def test_container_is_succeeded(remote_pod, result):
    """Verify `container_is_succeeded` tolerates malformed `read_pod` results.

    E.g. a None object, an object `e` such that `e.status` is None, an object
    `e` such that `e.status.container_statuses` is None, and so on.
    """
    assert container_is_succeeded(remote_pod, "base") is result
| {
"repo_id": "apache/airflow",
"file_path": "providers/cncf/kubernetes/tests/unit/cncf/kubernetes/utils/test_container.py",
"license": "Apache License 2.0",
"lines": 147,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/models/callback.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime
from enum import Enum
from importlib import import_module
from typing import TYPE_CHECKING, Any, Protocol, runtime_checkable
from uuid import UUID
import structlog
import uuid6
from sqlalchemy import ForeignKey, Integer, String, Text, Uuid
from sqlalchemy.orm import Mapped, mapped_column, relationship
from airflow._shared.observability.metrics.stats import Stats
from airflow._shared.timezones import timezone
from airflow.executors.workloads import BaseWorkload
from airflow.executors.workloads.callback import CallbackFetchMethod
from airflow.models import Base
from airflow.utils.sqlalchemy import ExtendedJSON, UtcDateTime
from airflow.utils.state import CallbackState
CallbackKey = str # Callback keys are str(UUID)
if TYPE_CHECKING:
from sqlalchemy.orm import Session
from airflow.callbacks.callback_requests import CallbackRequest
from airflow.triggers.base import TriggerEvent
log = structlog.get_logger(__name__)
# Callback lifecycle groupings: "active" callbacks still need attention;
# "terminal" callbacks have finished (successfully or not).
ACTIVE_STATES = frozenset((CallbackState.PENDING, CallbackState.QUEUED, CallbackState.RUNNING))
TERMINAL_STATES = frozenset((CallbackState.SUCCESS, CallbackState.FAILED))
class CallbackType(str, Enum):
    """
    Types of Callbacks.

    Used for figuring out what class to instantiate during deserialization.
    """

    # These values double as the `type` discriminator column in the callback
    # table (see the subclasses' polymorphic_identity mapper args) — do not
    # change them without a DB migration.
    TRIGGERER = "triggerer"
    EXECUTOR = "executor"
    DAG_PROCESSOR = "dag_processor"
class CallbackDefinitionProtocol(Protocol):
    """Protocol for TaskSDK Callback definition.

    Lets this module type-check SDK callback objects without importing the SDK.
    """

    def serialize(self) -> dict[str, Any]:
        """Serialize to a dictionary."""
        ...
@runtime_checkable
class ImportPathCallbackDefProtocol(CallbackDefinitionProtocol, Protocol):
    """Protocol for callbacks that use the import path fetch method."""

    # Dotted import path of the callable to run
    path: str
    # Keyword arguments recorded for the callback invocation
    kwargs: dict
@runtime_checkable
class ImportPathExecutorCallbackDefProtocol(ImportPathCallbackDefProtocol, Protocol):
    """Protocol for callbacks that use the import path fetch method and have an
    executor attribute to specify the executor to run them on."""

    # Name of the executor to run on; None means the default executor
    executor: str | None
class Callback(Base, BaseWorkload):
    """Base class for callbacks.

    Persisted with SQLAlchemy single-table inheritance: all subclasses share
    the ``callback`` table and are discriminated by the ``type`` column.
    """

    __tablename__ = "callback"

    id: Mapped[UUID] = mapped_column(Uuid(), primary_key=True, default=uuid6.uuid7)
    # This is used by SQLAlchemy to be able to deserialize DB rows to subclasses
    __mapper_args__ = {
        "polymorphic_identity": "callback",
        "polymorphic_on": "type",
    }
    type: Mapped[str] = mapped_column(String(20), nullable=False)
    # Method used to fetch the callback, of type: CallbackFetchMethod
    fetch_method: Mapped[str] = mapped_column(String(20), nullable=False)
    # Used by subclasses to store information about how to run the callback
    data: Mapped[dict] = mapped_column(ExtendedJSON, nullable=False)
    # State of the Callback of type: CallbackState. Can be null for instances of DagProcessorCallback.
    state: Mapped[str | None] = mapped_column(String(10))
    # Return value of the callback if successful, otherwise exception details
    output: Mapped[str | None] = mapped_column(Text, nullable=True)
    # Used for prioritization. Higher weight -> higher priority
    priority_weight: Mapped[int] = mapped_column(Integer, nullable=False)
    # Creation time of the callback
    created_at: Mapped[datetime] = mapped_column(UtcDateTime, default=timezone.utcnow, nullable=False)
    # Used for callbacks of type CallbackType.TRIGGERER
    trigger_id: Mapped[int] = mapped_column(Integer, ForeignKey("trigger.id"), nullable=True)
    trigger = relationship("Trigger", back_populates="callback", uselist=False)

    def __init__(self, priority_weight: int = 1, prefix: str = "", **kwargs):
        """
        Initialize a Callback. This is the base class so it shouldn't usually need to be initialized.

        :param priority_weight: Priority for callback execution (higher value -> higher priority)
        :param prefix: Optional prefix for metric names
        :param kwargs: Additional data emitted in metric tags
        """
        self.state = CallbackState.SCHEDULED
        self.priority_weight = priority_weight
        self.data = kwargs  # kwargs can be used to include additional info in metric tags
        if prefix:
            self.data["prefix"] = prefix

    def queue(self):
        """Mark this callback as queued for execution."""
        self.state = CallbackState.QUEUED

    def get_metric_info(self, status: CallbackState, result: Any) -> dict:
        """
        Build the Stats metric name and tags for this callback reaching *status*.

        :param status: State the metric reports on
        :param result: Callback result, included in the metric tags
        :return: Dict with ``stat`` (metric name) and ``tags`` keys
        """
        tags = {"result": result, **self.data}
        # The prefix belongs in the metric name, not the tags
        tags.pop("prefix", None)
        if "kwargs" in tags:
            # Remove the context (if exists) to keep the tags simple
            tags["kwargs"] = {k: v for k, v in tags["kwargs"].items() if k != "context"}
        prefix = self.data.get("prefix", "")
        name = f"{prefix}.callback_{status}" if prefix else f"callback_{status}"
        return {"stat": name, "tags": tags}

    def get_dag_id(self) -> str | None:
        """Return the DAG ID for scheduler routing."""
        return self.data.get("dag_id")

    def get_executor_name(self) -> str | None:
        """Return the executor name for scheduler routing."""
        return self.data.get("executor")

    @staticmethod
    def create_from_sdk_def(callback_def: CallbackDefinitionProtocol, **kwargs) -> Callback:
        """
        Create the matching Callback subclass for a TaskSDK callback definition.

        :param callback_def: TaskSDK ``AsyncCallback`` or ``SyncCallback`` definition
        :param kwargs: Forwarded to the subclass constructor
        :raises ValueError: If the definition type is not recognized
        """
        # Cannot check actual type using isinstance() because that would require SDK import
        match type(callback_def).__name__:
            case "AsyncCallback":
                if TYPE_CHECKING:
                    assert isinstance(callback_def, ImportPathCallbackDefProtocol)
                return TriggererCallback(callback_def, **kwargs)
            case "SyncCallback":
                if TYPE_CHECKING:
                    assert isinstance(callback_def, ImportPathExecutorCallbackDefProtocol)
                return ExecutorCallback(callback_def, fetch_method=CallbackFetchMethod.IMPORT_PATH, **kwargs)
            case _:
                raise ValueError(f"Cannot handle Callback of type {type(callback_def)}")
class TriggererCallback(Callback):
    """Callbacks that run on the Triggerer (must be async)."""

    __mapper_args__ = {"polymorphic_identity": CallbackType.TRIGGERER}

    def __init__(self, callback_def: ImportPathCallbackDefProtocol, **kwargs):
        """
        Initialize a TriggererCallback from a callback definition.

        :param callback_def: Callback definition with path and kwargs
        :param kwargs: Passed to parent Callback.__init__ (see base class for details)
        """
        super().__init__(**kwargs)
        self.fetch_method = CallbackFetchMethod.IMPORT_PATH
        # Merge the serialized definition into data; queue() reads
        # data["path"] and data["kwargs"] from here.
        self.data |= callback_def.serialize()

    def __repr__(self):
        return f"{self.data['path']}({self.data['kwargs'] or ''}) on a triggerer"

    def queue(self):
        """Create the backing CallbackTrigger row and mark this callback queued."""
        # Imported locally to avoid a circular import at module load time
        from airflow.models.trigger import Trigger
        from airflow.triggers.callback import CallbackTrigger

        self.trigger = Trigger.from_object(
            CallbackTrigger(
                callback_path=self.data["path"],
                callback_kwargs=self.data["kwargs"],
            )
        )
        super().queue()

    def handle_event(self, event: TriggerEvent, session: Session):
        """
        Apply a trigger event to this callback's state.

        A payload status in the known active/terminal states updates ``state``;
        terminal states additionally detach the trigger, record the output, and
        emit a metric. Any other payload is logged as an error and ignored.

        :param event: Event emitted by the CallbackTrigger
        :param session: SQLAlchemy session used to persist the change
        """
        from airflow.triggers.callback import PAYLOAD_BODY_KEY, PAYLOAD_STATUS_KEY

        if (status := event.payload.get(PAYLOAD_STATUS_KEY)) and status in (ACTIVE_STATES | TERMINAL_STATES):
            self.state = status
            if status in TERMINAL_STATES:
                # Finished: release the trigger row and persist the result
                self.trigger = None
                self.output = event.payload.get(PAYLOAD_BODY_KEY)
                Stats.incr(**self.get_metric_info(status, self.output))
            session.add(self)
        else:
            log.error("Unexpected event received: %s", event.payload)
class ExecutorCallback(Callback):
    """Callbacks that run on the executor."""

    __mapper_args__ = {"polymorphic_identity": CallbackType.EXECUTOR}

    def __init__(
        self, callback_def: ImportPathExecutorCallbackDefProtocol, fetch_method: CallbackFetchMethod, **kwargs
    ):
        """
        Initialize an ExecutorCallback from a callback definition and fetch method.

        :param callback_def: Callback definition with path, kwargs, and executor
        :param fetch_method: Method to fetch the callback at runtime
        :param kwargs: Passed to parent Callback.__init__ (see base class for details)
        """
        super().__init__(**kwargs)
        self.fetch_method = fetch_method
        # Merge the serialized definition into data; __repr__ reads
        # path/kwargs/executor from here.
        self.data |= callback_def.serialize()

    def __repr__(self):
        return f"{self.data['path']}({self.data['kwargs'] or ''}) on {self.data.get('executor', 'default')} executor"
class DagProcessorCallback(Callback):
    """Used to store Dag Processor's callback requests in the DB."""

    __mapper_args__ = {"polymorphic_identity": CallbackType.DAG_PROCESSOR}

    def __init__(self, priority_weight: int, callback: CallbackRequest):
        """
        Initialize a DagProcessorCallback from a callback request.

        :param priority_weight: Priority for callback execution (higher value -> higher priority)
        :param callback: The request to persist; its class name and JSON form are stored in ``data``
        """
        super().__init__(priority_weight=priority_weight)
        self.fetch_method = CallbackFetchMethod.DAG_ATTRIBUTE
        # Dag Processor callbacks do not track execution state in the DB
        self.state = None
        self.data |= {"req_class": callback.__class__.__name__, "req_data": callback.to_json()}

    def get_callback_request(self) -> CallbackRequest:
        """
        Reconstruct the original CallbackRequest from the stored class name and JSON.

        :return: An instance of the CallbackRequest subclass recorded at construction time
        """
        module = import_module("airflow.callbacks.callback_requests")
        callback_request_class = getattr(module, self.data["req_class"])
        # Plain attribute access: getattr() with a constant name is redundant (ruff B009)
        return callback_request_class.from_json(self.data["req_data"])
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/models/callback.py",
"license": "Apache License 2.0",
"lines": 198,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/tests/unit/models/test_callback.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from sqlalchemy import select
from airflow.models import Trigger
from airflow.models.callback import (
Callback,
CallbackFetchMethod,
CallbackState,
ExecutorCallback,
TriggererCallback,
)
from airflow.sdk.definitions.callback import AsyncCallback, SyncCallback
from airflow.triggers.base import TriggerEvent
from airflow.triggers.callback import PAYLOAD_BODY_KEY, PAYLOAD_STATUS_KEY
from airflow.utils.session import create_session
from tests_common.test_utils.db import clear_db_callbacks
pytestmark = [pytest.mark.db_test]
async def async_callback():
    """No-op coroutine target used by the async callback fixtures."""
def sync_callback():
    """No-op function target used by the sync callback fixtures."""
# Shared callback definitions reused by the tests below
TEST_CALLBACK_KWARGS = {"arg1": "value1"}
TEST_ASYNC_CALLBACK = AsyncCallback(async_callback, kwargs=TEST_CALLBACK_KWARGS)
TEST_SYNC_CALLBACK = SyncCallback(sync_callback, kwargs=TEST_CALLBACK_KWARGS)
TEST_DAG_ID = "test_dag"
@pytest.fixture
def session():
    """Fixture that provides a SQLAlchemy session via `create_session`."""
    with create_session() as sa_session:
        yield sa_session
@pytest.fixture(scope="module", autouse=True)
def clean_db(request):
    # Let every test in the module run first, then wipe the callback table once
    yield
    clear_db_callbacks()
class TestCallback:
    @pytest.mark.parametrize(
        ("callback_def", "expected_cb_instance"),
        [
            pytest.param(
                TEST_ASYNC_CALLBACK, TriggererCallback(callback_def=TEST_ASYNC_CALLBACK), id="triggerer"
            ),
            pytest.param(
                TEST_SYNC_CALLBACK,
                ExecutorCallback(
                    callback_def=TEST_SYNC_CALLBACK, fetch_method=CallbackFetchMethod.IMPORT_PATH
                ),
                id="executor",
            ),
        ],
    )
    def test_create_from_sdk_def(self, callback_def, expected_cb_instance):
        """Async defs map to TriggererCallback, sync defs to ExecutorCallback."""
        returned_cb = Callback.create_from_sdk_def(callback_def)
        assert isinstance(returned_cb, type(expected_cb_instance))
        assert returned_cb.data == expected_cb_instance.data

    def test_create_from_sdk_def_unknown_type(self):
        """Test that unknown callback type raises ValueError"""

        class UnknownCallback:
            pass

        unknown_callback = UnknownCallback()
        with pytest.raises(ValueError, match="Cannot handle Callback of type"):
            Callback.create_from_sdk_def(unknown_callback)

    def test_get_metric_info(self):
        """The prefix goes into the metric name; tags drop the prefix and the context."""
        callback = TriggererCallback(TEST_ASYNC_CALLBACK, prefix="deadline_alerts", dag_id=TEST_DAG_ID)
        callback.data["kwargs"] = {"context": {"dag_id": TEST_DAG_ID}, "email": "test@example.com"}
        metric_info = callback.get_metric_info(CallbackState.SUCCESS, "0")
        assert metric_info["stat"] == "deadline_alerts.callback_success"
        assert metric_info["tags"] == {
            "result": "0",
            "path": TEST_ASYNC_CALLBACK.path,
            "kwargs": {"email": "test@example.com"},
            "dag_id": TEST_DAG_ID,
        }
class TestTriggererCallback:
    def test_polymorphic_serde(self, session):
        """Test that TriggererCallback can be serialized and deserialized"""
        callback = TriggererCallback(TEST_ASYNC_CALLBACK)
        session.add(callback)
        session.commit()
        retrieved = session.scalar(select(Callback).where(Callback.id == callback.id))
        # The DB round trip must restore the subclass and all default column values
        assert isinstance(retrieved, TriggererCallback)
        assert retrieved.fetch_method == CallbackFetchMethod.IMPORT_PATH
        assert retrieved.data == TEST_ASYNC_CALLBACK.serialize()
        assert retrieved.state == CallbackState.SCHEDULED.value
        assert retrieved.output is None
        assert retrieved.priority_weight == 1
        assert retrieved.created_at is not None
        assert retrieved.trigger_id is None

    def test_queue(self, session):
        """Queueing creates a Trigger wrapping the callback path and kwargs."""
        callback = TriggererCallback(TEST_ASYNC_CALLBACK)
        assert callback.state == CallbackState.SCHEDULED
        assert callback.trigger is None
        callback.queue()
        assert isinstance(callback.trigger, Trigger)
        assert callback.trigger.kwargs["callback_path"] == TEST_ASYNC_CALLBACK.path
        assert callback.trigger.kwargs["callback_kwargs"] == TEST_ASYNC_CALLBACK.kwargs
        assert callback.state == CallbackState.QUEUED

    @pytest.mark.parametrize(
        ("event", "terminal_state"),
        [
            pytest.param(
                TriggerEvent({PAYLOAD_STATUS_KEY: CallbackState.SUCCESS, PAYLOAD_BODY_KEY: "test_result"}),
                True,
                id="success_event",
            ),
            pytest.param(
                TriggerEvent({PAYLOAD_STATUS_KEY: CallbackState.FAILED, PAYLOAD_BODY_KEY: "RuntimeError"}),
                True,
                id="failed_event",
            ),
            pytest.param(
                TriggerEvent({PAYLOAD_STATUS_KEY: CallbackState.RUNNING}),
                False,
                id="running_event",
            ),
            pytest.param(
                TriggerEvent({PAYLOAD_STATUS_KEY: CallbackState.QUEUED, PAYLOAD_BODY_KEY: ""}),
                False,
                id="invalid_event",
            ),
            pytest.param(TriggerEvent({PAYLOAD_STATUS_KEY: "unknown_state"}), False, id="unknown_event"),
        ],
    )
    def test_handle_event(self, session, event, terminal_state):
        """Known statuses update state; terminal ones also clear the trigger and set output."""
        callback = TriggererCallback(TEST_ASYNC_CALLBACK)
        callback.queue()
        callback.handle_event(event, session)
        status = event.payload[PAYLOAD_STATUS_KEY]
        if status in set(CallbackState):
            assert callback.state == status
        else:
            # Unrecognized statuses are ignored; the callback remains queued
            assert callback.state == CallbackState.QUEUED
        if terminal_state:
            assert callback.trigger is None
            assert callback.output == event.payload[PAYLOAD_BODY_KEY]
class TestExecutorCallback:
    def test_polymorphic_serde(self, session):
        """Test that ExecutorCallback can be serialized and deserialized"""
        callback = ExecutorCallback(TEST_SYNC_CALLBACK, fetch_method=CallbackFetchMethod.IMPORT_PATH)
        session.add(callback)
        session.commit()
        retrieved = session.scalar(select(Callback).where(Callback.id == callback.id))
        # The DB round trip must restore the subclass and all default column values
        assert isinstance(retrieved, ExecutorCallback)
        assert retrieved.fetch_method == CallbackFetchMethod.IMPORT_PATH
        assert retrieved.data == TEST_SYNC_CALLBACK.serialize()
        assert retrieved.state == CallbackState.SCHEDULED.value
        assert retrieved.output is None
        assert retrieved.priority_weight == 1
        assert retrieved.created_at is not None
        assert retrieved.trigger_id is None

    def test_queue(self):
        """Queueing only flips the state; no Trigger row is involved."""
        callback = ExecutorCallback(TEST_SYNC_CALLBACK, fetch_method=CallbackFetchMethod.DAG_ATTRIBUTE)
        assert callback.state == CallbackState.SCHEDULED
        callback.queue()
        assert callback.state == CallbackState.QUEUED
# Note: class DagProcessorCallback is tested in airflow-core/tests/unit/dag_processing/test_manager.py
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/models/test_callback.py",
"license": "Apache License 2.0",
"lines": 173,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/common/compat/src/airflow/providers/common/compat/_compat_utils.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Reusable utilities for creating compatibility layers with fallback imports.
This module provides the core machinery used by sdk.py and standard/* modules
to handle import fallbacks between Airflow 3.x and 2.x.
"""
from __future__ import annotations
import importlib
def create_module_getattr(
import_map: dict[str, str | tuple[str, ...]],
module_map: dict[str, str | tuple[str, ...]] | None = None,
rename_map: dict[str, tuple[str, str, str]] | None = None,
):
"""
Create a __getattr__ function for lazy imports with fallback support.
:param import_map: Dictionary mapping attribute names to module paths (single or tuple for fallback)
:param module_map: Dictionary mapping module names to module paths (single or tuple for fallback)
:param rename_map: Dictionary mapping new names to (new_path, old_path, old_name) tuples
:return: A __getattr__ function that can be assigned at module level
"""
module_map = module_map or {}
rename_map = rename_map or {}
def __getattr__(name: str):
# Check renamed imports first
if name in rename_map:
new_path, old_path, old_name = rename_map[name]
rename_error: ImportError | ModuleNotFoundError | AttributeError | None = None
# Try new path with new name first (Airflow 3.x)
try:
module = __import__(new_path, fromlist=[name])
return getattr(module, name)
except (ImportError, ModuleNotFoundError, AttributeError) as e:
rename_error = e
# Fall back to old path with old name (Airflow 2.x)
try:
module = __import__(old_path, fromlist=[old_name])
return getattr(module, old_name)
except (ImportError, ModuleNotFoundError, AttributeError):
if rename_error:
raise ImportError(
f"Could not import {name!r} from {new_path!r} or {old_name!r} from {old_path!r}"
) from rename_error
raise
# Check module imports
if name in module_map:
value = module_map[name]
paths = value if isinstance(value, tuple) else (value,)
module_error: ImportError | ModuleNotFoundError | None = None
for module_path in paths:
try:
return importlib.import_module(module_path)
except (ImportError, ModuleNotFoundError) as e:
module_error = e
continue
if module_error:
raise ImportError(f"Could not import module {name!r} from any of: {paths}") from module_error
# Check regular imports
if name in import_map:
value = import_map[name]
paths = value if isinstance(value, tuple) else (value,)
attr_error: ImportError | ModuleNotFoundError | AttributeError | None = None
for module_path in paths:
try:
module = __import__(module_path, fromlist=[name])
return getattr(module, name)
except (ImportError, ModuleNotFoundError, AttributeError) as e:
attr_error = e
continue
if attr_error:
raise ImportError(f"Could not import {name!r} from any of: {paths}") from attr_error
raise AttributeError(f"module has no attribute {name!r}")
return __getattr__
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/compat/src/airflow/providers/common/compat/_compat_utils.py",
"license": "Apache License 2.0",
"lines": 87,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/common/compat/src/airflow/providers/common/compat/sdk.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Airflow compatibility imports for seamless migration from Airflow 2 to Airflow 3.
This module provides lazy imports that automatically try Airflow 3 paths first,
then fall back to Airflow 2 paths, enabling code to work across both versions.
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
if TYPE_CHECKING:
import airflow.sdk.io as io # noqa: F401
import airflow.sdk.timezone as timezone # noqa: F401
from airflow.models.xcom import XCOM_RETURN_KEY as XCOM_RETURN_KEY
from airflow.sdk import (
DAG as DAG,
Asset as Asset,
AssetAlias as AssetAlias,
AssetAll as AssetAll,
AssetAny as AssetAny,
BaseHook as BaseHook,
BaseNotifier as BaseNotifier,
BaseOperator as BaseOperator,
BaseOperatorLink as BaseOperatorLink,
BaseSensorOperator as BaseSensorOperator,
Connection as Connection,
Context as Context,
DagRunState as DagRunState,
EdgeModifier as EdgeModifier,
Label as Label,
Metadata as Metadata,
ObjectStoragePath as ObjectStoragePath,
Param as Param,
PokeReturnValue as PokeReturnValue,
TaskGroup as TaskGroup,
TaskInstanceState as TaskInstanceState,
TriggerRule as TriggerRule,
Variable as Variable,
WeightRule as WeightRule,
XComArg as XComArg,
chain as chain,
chain_linear as chain_linear,
cross_downstream as cross_downstream,
dag as dag,
get_current_context as get_current_context,
get_parsing_context as get_parsing_context,
setup as setup,
task as task,
task_group as task_group,
teardown as teardown,
)
from airflow.sdk._shared.listeners import hookimpl as hookimpl
from airflow.sdk._shared.observability.metrics.stats import Stats as Stats
from airflow.sdk.bases.decorator import (
DecoratedMappedOperator as DecoratedMappedOperator,
DecoratedOperator as DecoratedOperator,
TaskDecorator as TaskDecorator,
get_unique_task_id as get_unique_task_id,
task_decorator_factory as task_decorator_factory,
)
from airflow.sdk.bases.sensor import poke_mode_only as poke_mode_only
from airflow.sdk.configuration import conf as conf
from airflow.sdk.definitions.context import context_merge as context_merge
from airflow.sdk.definitions.mappedoperator import MappedOperator as MappedOperator
from airflow.sdk.definitions.template import literal as literal
from airflow.sdk.exceptions import (
AirflowConfigException as AirflowConfigException,
AirflowException as AirflowException,
AirflowFailException as AirflowFailException,
AirflowNotFoundException as AirflowNotFoundException,
AirflowOptionalProviderFeatureException as AirflowOptionalProviderFeatureException,
AirflowSensorTimeout as AirflowSensorTimeout,
AirflowSkipException as AirflowSkipException,
AirflowTaskTimeout as AirflowTaskTimeout,
ParamValidationError as ParamValidationError,
TaskDeferred as TaskDeferred,
XComNotFound as XComNotFound,
)
from airflow.sdk.lineage import (
HookLineage as HookLineage,
HookLineageCollector as HookLineageCollector,
HookLineageReader as HookLineageReader,
NoOpCollector as NoOpCollector,
get_hook_lineage_collector as get_hook_lineage_collector,
)
from airflow.sdk.listener import get_listener_manager as get_listener_manager
from airflow.sdk.log import redact as redact
from airflow.sdk.plugins_manager import AirflowPlugin as AirflowPlugin
# Airflow 3-only exceptions (conditionally imported)
if AIRFLOW_V_3_0_PLUS:
from airflow.sdk.exceptions import (
DagRunTriggerException as DagRunTriggerException,
DownstreamTasksSkipped as DownstreamTasksSkipped,
)
from airflow.sdk.execution_time.context import (
AIRFLOW_VAR_NAME_FORMAT_MAPPING as AIRFLOW_VAR_NAME_FORMAT_MAPPING,
context_to_airflow_vars as context_to_airflow_vars,
)
from airflow.sdk.execution_time.timeout import timeout as timeout
from airflow.sdk.execution_time.xcom import XCom as XCom
from airflow.sdk.types import TaskInstanceKey as TaskInstanceKey
from airflow.providers.common.compat._compat_utils import create_module_getattr
# Rename map for classes that changed names between Airflow 2.x and 3.x
# Format: new_name -> (new_path, old_path, old_name)
_RENAME_MAP: dict[str, tuple[str, str, str]] = {
# Assets: Dataset -> Asset rename in Airflow 3.0
"Asset": ("airflow.sdk", "airflow.datasets", "Dataset"),
"AssetAlias": ("airflow.sdk", "airflow.datasets", "DatasetAlias"),
"AssetAll": ("airflow.sdk", "airflow.datasets", "DatasetAll"),
"AssetAny": ("airflow.sdk", "airflow.datasets", "DatasetAny"),
}
# Airflow 3-only renames (not available in Airflow 2)
_AIRFLOW_3_ONLY_RENAMES: dict[str, tuple[str, str, str]] = {}
# Import map for classes/functions/constants
# Format: class_name -> module_path(s)
# - str: single module path (no fallback)
# - tuple[str, ...]: multiple module paths (try in order, newest first)
_IMPORT_MAP: dict[str, str | tuple[str, ...]] = {
# ============================================================================
# Hooks
# ============================================================================
"BaseHook": ("airflow.sdk", "airflow.hooks.base"),
# ============================================================================
# Sensors
# ============================================================================
"BaseSensorOperator": ("airflow.sdk", "airflow.sensors.base"),
"PokeReturnValue": ("airflow.sdk", "airflow.sensors.base"),
"poke_mode_only": ("airflow.sdk.bases.sensor", "airflow.sensors.base"),
# ============================================================================
# Operators
# ============================================================================
"BaseOperator": ("airflow.sdk", "airflow.models.baseoperator"),
# ============================================================================
# Decorators
# ============================================================================
"task": ("airflow.sdk", "airflow.decorators"),
"dag": ("airflow.sdk", "airflow.decorators"),
"task_group": ("airflow.sdk", "airflow.decorators"),
"setup": ("airflow.sdk", "airflow.decorators"),
"teardown": ("airflow.sdk", "airflow.decorators"),
"TaskDecorator": ("airflow.sdk.bases.decorator", "airflow.decorators"),
"task_decorator_factory": ("airflow.sdk.bases.decorator", "airflow.decorators.base"),
"get_unique_task_id": ("airflow.sdk.bases.decorator", "airflow.decorators.base"),
# ============================================================================
# Models
# ============================================================================
"Connection": ("airflow.sdk", "airflow.models.connection"),
"Variable": ("airflow.sdk", "airflow.models.variable"),
"XCom": ("airflow.sdk.execution_time.xcom", "airflow.models.xcom"),
"DAG": ("airflow.sdk", "airflow.models.dag"),
"Param": ("airflow.sdk", "airflow.models.param"),
"XComArg": ("airflow.sdk", "airflow.models.xcom_arg"),
"DecoratedOperator": ("airflow.sdk.bases.decorator", "airflow.decorators.base"),
"DecoratedMappedOperator": ("airflow.sdk.bases.decorator", "airflow.decorators.base"),
"MappedOperator": ("airflow.sdk.definitions.mappedoperator", "airflow.models.mappedoperator"),
# ============================================================================
# Assets (Dataset → Asset rename in Airflow 3.0)
# ============================================================================
# Note: Asset, AssetAlias, AssetAll, AssetAny are handled by _RENAME_MAP
# Metadata moved from airflow.datasets.metadata (2.x) to airflow.sdk (3.x)
"Metadata": ("airflow.sdk", "airflow.datasets.metadata"),
# ============================================================================
# Notifiers
# ============================================================================
"BaseNotifier": ("airflow.sdk", "airflow.notifications.basenotifier"),
# ============================================================================
# Plugins
# ============================================================================
"AirflowPlugin": ("airflow.sdk.plugins_manager", "airflow.plugins_manager"),
# ============================================================================
# Operator Links & Task Groups
# ============================================================================
"BaseOperatorLink": ("airflow.sdk", "airflow.models.baseoperatorlink"),
"TaskInstanceKey": ("airflow.sdk.types", "airflow.models.taskinstancekey"),
"TaskGroup": ("airflow.sdk", "airflow.utils.task_group"),
# ============================================================================
# Operator Utilities (chain, cross_downstream, etc.)
# ============================================================================
"chain": ("airflow.sdk", "airflow.models.baseoperator"),
"chain_linear": ("airflow.sdk", "airflow.models.baseoperator"),
"cross_downstream": ("airflow.sdk", "airflow.models.baseoperator"),
# ============================================================================
# Edge Modifiers & Labels
# ============================================================================
"EdgeModifier": ("airflow.sdk", "airflow.utils.edgemodifier"),
"Label": ("airflow.sdk", "airflow.utils.edgemodifier"),
# ============================================================================
# State Enums
# ============================================================================
"DagRunState": ("airflow.sdk", "airflow.utils.state"),
"TaskInstanceState": ("airflow.sdk", "airflow.utils.state"),
"TriggerRule": ("airflow.sdk", "airflow.utils.trigger_rule"),
"WeightRule": ("airflow.sdk", "airflow.utils.weight_rule"),
# ============================================================================
# IO & Storage
# ============================================================================
"ObjectStoragePath": ("airflow.sdk", "airflow.io.path"),
# ============================================================================
# Template Utilities
# ============================================================================
"literal": ("airflow.sdk.definitions.template", "airflow.utils.template"),
# ============================================================================
# Context & Utilities
# ============================================================================
"Context": ("airflow.sdk", "airflow.utils.context"),
"context_merge": ("airflow.sdk.definitions.context", "airflow.utils.context"),
"context_to_airflow_vars": ("airflow.sdk.execution_time.context", "airflow.utils.operator_helpers"),
"AIRFLOW_VAR_NAME_FORMAT_MAPPING": (
"airflow.sdk.execution_time.context",
"airflow.utils.operator_helpers",
),
"get_current_context": ("airflow.sdk", "airflow.operators.python"),
"get_parsing_context": ("airflow.sdk", "airflow.utils.dag_parsing_context"),
# ============================================================================
# Timeout Utilities
# ============================================================================
"timeout": ("airflow.sdk.execution_time.timeout", "airflow.utils.timeout"),
# ============================================================================
# XCom & Task Communication
# ============================================================================
"XCOM_RETURN_KEY": "airflow.models.xcom",
# ============================================================================
# Lineage
# ============================================================================
"HookLineageCollector": ("airflow.sdk.lineage", "airflow.lineage.hook"),
"HookLineageReader": ("airflow.sdk.lineage", "airflow.lineage.hook"),
"get_hook_lineage_collector": ("airflow.sdk.lineage", "airflow.lineage.hook"),
"HookLineage": ("airflow.sdk.lineage", "airflow.lineage.hook"),
# Note: AssetLineageInfo is handled by _RENAME_MAP (DatasetLineageInfo -> AssetLineageInfo)
"NoOpCollector": ("airflow.sdk.lineage", "airflow.lineage.hook"),
# ============================================================================
# Exceptions (deprecated in airflow.exceptions, prefer SDK)
# ============================================================================
# Note: AirflowException and AirflowNotFoundException are not deprecated, but exposing them
# here keeps provider imports consistent across Airflow 2 and 3.
"AirflowException": ("airflow.sdk.exceptions", "airflow.exceptions"),
"AirflowFailException": ("airflow.sdk.exceptions", "airflow.exceptions"),
"AirflowNotFoundException": ("airflow.sdk.exceptions", "airflow.exceptions"),
"AirflowOptionalProviderFeatureException": ("airflow.sdk.exceptions", "airflow.exceptions"),
"AirflowSkipException": ("airflow.sdk.exceptions", "airflow.exceptions"),
"AirflowTaskTimeout": ("airflow.sdk.exceptions", "airflow.exceptions"),
"AirflowSensorTimeout": ("airflow.sdk.exceptions", "airflow.exceptions"),
"ParamValidationError": ("airflow.sdk.exceptions", "airflow.exceptions"),
"TaskDeferred": ("airflow.sdk.exceptions", "airflow.exceptions"),
"XComNotFound": ("airflow.sdk.exceptions", "airflow.exceptions"),
# ============================================================================
# Observability
# ============================================================================
"Stats": ("airflow.sdk.observability.stats", "airflow.observability.stats", "airflow.stats"),
# ============================================================================
# Secrets Masking
# ============================================================================
"redact": (
"airflow.sdk.log",
"airflow.sdk._shared.secrets_masker",
"airflow.sdk.execution_time.secrets_masker",
"airflow.utils.log.secrets_masker",
),
# ============================================================================
# Listeners
# ============================================================================
"hookimpl": ("airflow.sdk._shared.listeners", "airflow.listeners"),
"get_listener_manager": ("airflow.sdk.listener", "airflow.listeners.listener"),
# Configuration
# ============================================================================
"conf": ("airflow.sdk.configuration", "airflow.configuration"),
"AirflowConfigException": ("airflow.sdk.exceptions", "airflow.exceptions"),
}
# Airflow 3-only exceptions (not available in Airflow 2)
_AIRFLOW_3_ONLY_EXCEPTIONS: dict[str, tuple[str, ...]] = {
"DownstreamTasksSkipped": ("airflow.sdk.exceptions", "airflow.exceptions"),
"DagRunTriggerException": ("airflow.sdk.exceptions", "airflow.exceptions"),
}
# Add Airflow 3-only exceptions and renames to _IMPORT_MAP if running Airflow 3+
if AIRFLOW_V_3_0_PLUS:
_IMPORT_MAP.update(_AIRFLOW_3_ONLY_EXCEPTIONS)
_RENAME_MAP.update(_AIRFLOW_3_ONLY_RENAMES)
# AssetLineageInfo exists in 3.0+ but location changed in 3.2
# 3.0-3.1: airflow.lineage.hook.AssetLineageInfo
# 3.2+: airflow.sdk.lineage.AssetLineageInfo
_IMPORT_MAP["AssetLineageInfo"] = ("airflow.sdk.lineage", "airflow.lineage.hook")
# Module map: module_name -> module_path(s)
# For entire modules that have been moved (e.g., timezone)
# Usage: from airflow.providers.common.compat.lazy_compat import timezone
_MODULE_MAP: dict[str, str | tuple[str, ...]] = {
"timezone": ("airflow.sdk.timezone", "airflow.utils.timezone"),
"io": ("airflow.sdk.io", "airflow.io"),
}
# Use the shared utility to create __getattr__
__getattr__ = create_module_getattr(
import_map=_IMPORT_MAP,
module_map=_MODULE_MAP,
rename_map=_RENAME_MAP,
)
__all__ = list(_RENAME_MAP.keys()) + list(_IMPORT_MAP.keys()) + list(_MODULE_MAP.keys())
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/compat/src/airflow/providers/common/compat/sdk.py",
"license": "Apache License 2.0",
"lines": 309,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/common/compat/tests/unit/common/compat/test__compat_utils.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow.providers.common.compat._compat_utils import create_module_getattr
class TestCreateModuleGetattr:
"""Unit tests for the create_module_getattr utility function."""
@pytest.mark.parametrize(
("name", "import_map", "is_module"),
[
("BaseHook", {"BaseHook": "airflow.hooks.base"}, False),
("timezone", {}, True), # Will be tested with module_map
("utcnow", {"utcnow": "airflow.utils.timezone"}, False),
],
)
def test_single_path_import(self, name, import_map, is_module):
"""Test basic single-path imports work correctly."""
if name == "timezone":
getattr_fn = create_module_getattr(import_map={}, module_map={name: "airflow.utils.timezone"})
else:
getattr_fn = create_module_getattr(import_map=import_map)
result = getattr_fn(name)
if is_module:
# Check if it's a module
import types
assert isinstance(result, types.ModuleType)
else:
# Check if it's a class or callable
assert isinstance(result, type) or callable(result)
@pytest.mark.parametrize(
("name", "paths", "should_succeed"),
[
("BaseHook", ("airflow.sdk", "airflow.hooks.base"), True),
("NonExistent", ("fake.module1", "fake.module2"), False),
("timezone", ("airflow.sdk.timezone", "airflow.utils.timezone"), True),
],
)
def test_fallback_import_mechanism(self, name, paths, should_succeed):
"""Test that fallback paths are tried in order."""
if name == "timezone":
getattr_fn = create_module_getattr(import_map={}, module_map={name: paths})
else:
getattr_fn = create_module_getattr(import_map={name: paths})
if should_succeed:
result = getattr_fn(name)
assert result is not None
else:
with pytest.raises(ImportError, match=f"Could not import {name!r}"):
getattr_fn(name)
def test_rename_map_tries_new_then_old(self):
"""Test that renamed classes try new name first, then fall back to old."""
rename_map = {
"Asset": ("airflow.sdk", "airflow.datasets", "Dataset"),
}
getattr_fn = create_module_getattr(import_map={}, rename_map=rename_map)
# Should successfully import (either Asset from airflow.sdk or Dataset from airflow.datasets)
result = getattr_fn("Asset")
assert result is not None
# In Airflow 3, it's Asset; in Airflow 2, it would be Dataset
assert result.__name__ in ("Asset", "Dataset")
def test_module_map_imports_whole_module(self):
"""Test that module_map imports entire modules, not just attributes."""
module_map = {"timezone": "airflow.utils.timezone"}
getattr_fn = create_module_getattr(import_map={}, module_map=module_map)
result = getattr_fn("timezone")
assert hasattr(result, "utc") # Module should have attributes
assert hasattr(result, "utcnow")
def test_exception_chaining_preserves_context(self):
"""Test that exception chaining with 'from' preserves original error context."""
import_map = {"NonExistent": ("fake.module1", "fake.module2")}
getattr_fn = create_module_getattr(import_map=import_map)
with pytest.raises(ImportError) as exc_info:
getattr_fn("NonExistent")
# Verify exception has __cause__ (exception chaining)
assert exc_info.value.__cause__ is not None
@pytest.mark.parametrize(
("error_scenario", "map_config", "expected_match"),
[
(
"import_error",
{"import_map": {"Fake": ("nonexistent.mod1", "nonexistent.mod2")}},
"Could not import 'Fake' from any of:",
),
(
"module_error",
{"module_map": {"fake_mod": ("nonexistent.module1", "nonexistent.module2")}},
"Could not import module 'fake_mod' from any of:",
),
(
"rename_error",
{"rename_map": {"NewName": ("fake.new", "fake.old", "OldName")}},
"Could not import 'NewName' from 'fake.new' or 'OldName' from 'fake.old'",
),
],
)
def test_error_messages_include_all_paths(self, error_scenario, map_config, expected_match):
"""Test that error messages include all attempted paths for debugging."""
getattr_fn = create_module_getattr(
import_map=map_config.get("import_map", {}),
module_map=map_config.get("module_map"),
rename_map=map_config.get("rename_map"),
)
keys = (
map_config.get("import_map", {}).keys()
or map_config.get("module_map", {}).keys()
or map_config.get("rename_map", {}).keys()
)
name = next(iter(keys))
with pytest.raises(ImportError, match=expected_match):
getattr_fn(name)
def test_attribute_error_for_unknown_name(self):
"""Test that accessing unknown attributes raises AttributeError with correct message."""
getattr_fn = create_module_getattr(import_map={"BaseHook": "airflow.hooks.base"})
with pytest.raises(AttributeError, match="module has no attribute 'UnknownClass'"):
getattr_fn("UnknownClass")
def test_optional_params_default_to_empty(self):
"""Test that module_map and rename_map default to empty dicts when not provided."""
getattr_fn = create_module_getattr(import_map={"BaseHook": "airflow.hooks.base"})
# Should work fine without module_map and rename_map
result = getattr_fn("BaseHook")
assert result is not None
# Should raise AttributeError for names not in any map
with pytest.raises(AttributeError):
getattr_fn("NonExistent")
def test_priority_order_rename_then_module_then_import(self):
"""Test that rename_map has priority over module_map, which has priority over import_map."""
# If a name exists in multiple maps, rename_map should be checked first
import_map = {"test": "airflow.hooks.base"}
module_map = {"test": "airflow.utils.timezone"}
rename_map = {"test": ("airflow.sdk", "airflow.datasets", "Dataset")}
getattr_fn = create_module_getattr(
import_map=import_map,
module_map=module_map,
rename_map=rename_map,
)
# Should use rename_map (which tries to import Asset/Dataset)
result = getattr_fn("test")
# Verify it came from rename_map (Asset or Dataset class, depending on Airflow version)
assert hasattr(result, "__name__")
assert result.__name__ in ("Asset", "Dataset")
def test_module_not_found_error_is_caught(self):
"""Test that ModuleNotFoundError (Python 3.6+) is properly caught."""
import_map = {"Fake": "completely.nonexistent.module.that.does.not.exist"}
getattr_fn = create_module_getattr(import_map=import_map)
# Should catch ModuleNotFoundError and raise ImportError
with pytest.raises(ImportError, match="Could not import 'Fake'"):
getattr_fn("Fake")
@pytest.mark.parametrize(
("map_type", "config"),
[
("import_map", {"BaseHook": "airflow.hooks.base"}),
("module_map", {"timezone": "airflow.utils.timezone"}),
("rename_map", {"Asset": ("airflow.sdk", "airflow.datasets", "Dataset")}),
],
)
def test_each_map_type_works_independently(self, map_type, config):
"""Test that each map type (import, module, rename) works correctly on its own."""
kwargs = {"import_map": {}}
if map_type == "import_map":
kwargs["import_map"] = config
elif map_type == "module_map":
kwargs["module_map"] = config
elif map_type == "rename_map":
kwargs["rename_map"] = config
getattr_fn = create_module_getattr(**kwargs)
name = next(iter(config.keys()))
result = getattr_fn(name)
assert result is not None
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/compat/tests/unit/common/compat/test__compat_utils.py",
"license": "Apache License 2.0",
"lines": 182,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/common/compat/src/airflow/providers/common/compat/sqlalchemy/orm.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
try:
from sqlalchemy.orm import mapped_column
except ImportError:
# fallback for SQLAlchemy < 2.0
def mapped_column(*args, **kwargs): # type: ignore[misc]
from sqlalchemy import Column
return Column(*args, **kwargs)
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/compat/src/airflow/providers/common/compat/sqlalchemy/orm.py",
"license": "Apache License 2.0",
"lines": 24,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/common/compat/tests/unit/common/compat/sqlalchemy/test_orm.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import builtins
import importlib
import sys
import types
from collections.abc import Callable
from typing import Any, cast
import pytest
TARGET = "airflow.providers.common.compat.sqlalchemy.orm"
@pytest.fixture(autouse=True)
def clean_target():
"""Ensure the target module is removed from sys.modules before each test."""
sys.modules.pop(TARGET, None)
yield
sys.modules.pop(TARGET, None)
def reload_target() -> Any:
"""Import the compatibility shim after the monkey‑patched environment is set."""
return importlib.import_module(TARGET)
# ----------------------------------------------------------------------
# Helper factories for the fake sqlalchemy packages
# ----------------------------------------------------------------------
def make_fake_sqlalchemy(
*,
has_mapped_column: bool = False,
column_impl: Callable[..., tuple] | None = None,
) -> tuple[Any, Any]:
"""Return a tuple `(sqlalchemy_pkg, orm_pkg)` that mimics the requested feature set."""
# Cast the ModuleType to Any so static type checkers don't complain when we
# dynamically add attributes like `Column`, `orm` or `mapped_column`.
sqlalchemy_pkg = cast("Any", types.ModuleType("sqlalchemy"))
orm_pkg = cast("Any", types.ModuleType("sqlalchemy.orm"))
# Provide Column implementation (used by the fallback)
if column_impl is None:
column_impl = lambda *a, **kw: ("Column_called", a, kw)
sqlalchemy_pkg.Column = column_impl
if has_mapped_column:
orm_pkg.mapped_column = lambda *a, **kw: ("mapped_column_called", a, kw)
sqlalchemy_pkg.orm = orm_pkg
return sqlalchemy_pkg, orm_pkg
# ----------------------------------------------------------------------
# Parametrised tests
# ----------------------------------------------------------------------
@pytest.mark.parametrize(
("has_mapped", "expect_fallback"),
[
(True, False), # real mapped_column present
(False, True), # fallback to Column
],
)
def test_mapped_column_resolution(monkeypatch, has_mapped, expect_fallback):
sqlalchemy_pkg, orm_pkg = make_fake_sqlalchemy(has_mapped_column=has_mapped)
monkeypatch.setitem(sys.modules, "sqlalchemy", sqlalchemy_pkg)
monkeypatch.setitem(sys.modules, "sqlalchemy.orm", orm_pkg)
mod = reload_target()
# The shim must expose a callable named `mapped_column`
assert callable(mod.mapped_column)
# Verify that the correct implementation is used
result = mod.mapped_column(1, a=2)
if expect_fallback:
assert result == ("Column_called", (1,), {"a": 2})
else:
assert result == ("mapped_column_called", (1,), {"a": 2})
def test_fallback_call_shapes(monkeypatch):
"""Exercise a handful of call signatures on the fallback."""
sqlalchemy_pkg, orm_pkg = make_fake_sqlalchemy(has_mapped_column=False)
monkeypatch.setitem(sys.modules, "sqlalchemy", sqlalchemy_pkg)
monkeypatch.setitem(sys.modules, "sqlalchemy.orm", orm_pkg)
mod = reload_target()
# No‑arg call
assert mod.mapped_column() == ("Column_called", (), {})
# Mixed positional / keyword
assert mod.mapped_column(1, 2, a=3, b=4) == (
"Column_called",
(1, 2),
{"a": 3, "b": 4},
)
def test_importerror_while_importing_sqlalchemy_orm(monkeypatch):
"""Simulate an ImportError raised *during* the import of sqlalchemy.orm."""
sqlalchemy_pkg = cast("Any", types.ModuleType("sqlalchemy"))
sqlalchemy_pkg.Column = lambda *a, **kw: ("Column_called", a, kw)
monkeypatch.setitem(sys.modules, "sqlalchemy", sqlalchemy_pkg)
# Force ImportError for any attempt to import sqlalchemy.orm
real_import = __import__
def fake_import(name, globals=None, locals=None, fromlist=(), level=0):
if name.startswith("sqlalchemy.orm"):
raise ImportError("simulated failure")
return real_import(name, globals, locals, fromlist, level)
monkeypatch.setattr(builtins, "__import__", fake_import)
try:
mod = reload_target()
finally:
# Restore the original import function - pytest's monkeypatch will also
# do this, but we keep the explicit finally for clarity.
monkeypatch.setattr(builtins, "__import__", real_import)
assert callable(mod.mapped_column)
assert mod.mapped_column("abc") == ("Column_called", ("abc",), {})
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/compat/tests/unit/common/compat/sqlalchemy/test_orm.py",
"license": "Apache License 2.0",
"lines": 112,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/presto/tests/unit/presto/hooks/test_presto_sql.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
from collections import namedtuple
from unittest import mock
from unittest.mock import MagicMock, PropertyMock, patch
import pytest
from sqlalchemy.engine.url import make_url
from airflow.models import Connection
from airflow.providers.common.compat.sdk import AirflowException
from airflow.providers.presto.hooks.presto import PrestoHook
SerializableRow = namedtuple("SerializableRow", ["id", "value"])
DEFAULT_CONN_ID = "presto_default"
DEFAULT_HOST = "test_host"
DEFAULT_PORT = 8080
DEFAULT_LOGIN = "test"
DEFAULT_EXTRA_JSON = None
DEFAULT_PASSWORD = "test_pass"
def get_cursor_descriptions(fields: list[str]) -> list[tuple[str]]:
"""Convert field names into cursor.description tuples."""
return [(field,) for field in fields]
@pytest.fixture(autouse=True)
def mock_connection(create_connection_without_db) -> Connection:
"""Create a mocked Airflow connection for Presto."""
conn = Connection(
conn_id=DEFAULT_CONN_ID,
conn_type="presto",
host=DEFAULT_HOST,
login=DEFAULT_LOGIN,
password=DEFAULT_PASSWORD,
port=DEFAULT_PORT,
extra=DEFAULT_EXTRA_JSON,
schema="presto_db",
)
create_connection_without_db(conn)
return conn
@pytest.fixture
def presto_hook() -> PrestoHook:
"""Fixture for PrestoHook with mocked connection."""
return PrestoHook(presto_conn_id=DEFAULT_CONN_ID)
@pytest.fixture
def mock_get_conn():
"""Fixture to mock get_conn method of PrestoHook."""
with patch.object(PrestoHook, "get_conn", autospec=True) as mock:
yield mock
@pytest.fixture
def mock_cursor(mock_get_conn: MagicMock | mock.AsyncMock):
"""Fixture to mock cursor returned by get_conn."""
cursor = MagicMock()
type(cursor).rowcount = PropertyMock(return_value=1)
cursor.fetchall.return_value = [("1", "row1")]
cursor.description = get_cursor_descriptions(["id", "value"])
cursor.nextset.side_effect = [False]
mock_get_conn.return_value.cursor.return_value = cursor
return cursor
@pytest.mark.parametrize(
(
"custom_extra",
"expected_catalog",
"expected_protocol",
"expected_source",
"conn_schema_override",
"expected_schema",
),
[
pytest.param(
{"catalog": "reporting_db", "protocol": "https", "source": "airflow"},
"reporting_db",
"https",
"airflow",
"data_schema",
"data_schema",
id="custom_catalog_and_protocol",
),
pytest.param(
{"source": "my_dag_run"},
None,
"http",
"my_dag_run",
"test_schema",
"test_schema",
id="missing_protocol_should_default_http",
),
pytest.param(
{"protocol": None, "catalog": "logs"},
"logs",
"http",
"airflow",
None,
None,
id="explicit_protocol_none_should_default_http",
),
pytest.param(
{},
"hive",
"http",
"airflow",
"default_schema",
"default_schema",
id="all_defaults",
),
],
)
def test_sqlalchemy_url_property(
presto_hook,
create_connection_without_db,
custom_extra,
expected_catalog,
expected_protocol,
expected_source,
conn_schema_override,
expected_schema,
):
"""Tests various custom configurations passed via the 'extra' field."""
# Create a real Airflow connection
temp_conn = Connection(
conn_id=DEFAULT_CONN_ID,
conn_type="presto",
host=DEFAULT_HOST,
login=DEFAULT_LOGIN,
password=DEFAULT_PASSWORD,
port=DEFAULT_PORT,
schema=conn_schema_override or "",
extra=json.dumps(custom_extra) if custom_extra else None,
)
create_connection_without_db(temp_conn)
with patch.object(presto_hook, "get_connection", return_value=temp_conn):
url = presto_hook.sqlalchemy_url
assert url.host == DEFAULT_HOST
assert url.port == DEFAULT_PORT
assert url.username == DEFAULT_LOGIN
assert url.password == DEFAULT_PASSWORD
assert url.database == custom_extra.get("catalog")
query = url.query
assert query.get("protocol") == custom_extra.get("protocol")
assert query.get("source") == custom_extra.get("source")
assert query.get("schema") == temp_conn.schema
@pytest.mark.parametrize(
("return_last", "split_statements", "sql", "expected_calls", "cursor_results", "expected_result"),
[
pytest.param(
True,
False,
"SELECT * FROM table_A",
["SELECT * FROM table_A"],
[("A", 1), ("B", 2)],
[SerializableRow("A", 1), SerializableRow("B", 2)],
id="single_query_return_all",
),
pytest.param(
True,
True,
"SELECT * FROM table1; SELECT 1;",
["SELECT * FROM table1", "SELECT 1"],
[[("Result1", 1)], [("Result2", 2)]],
[SerializableRow("Result2", 2)],
id="multi_query_return_last",
),
],
)
def test_run_single_query(
presto_hook,
mock_cursor,
return_last,
split_statements,
sql,
expected_calls,
cursor_results,
expected_result,
):
"""Tests various execution paths for PrestoHook.run"""
if split_statements:
mock_cursor.fetchall.side_effect = cursor_results
mock_cursor.nextset.return_value = False
else:
mock_cursor.fetchall.return_value = cursor_results
mock_cursor.nextset.side_effect = lambda: False
result = presto_hook.run(
sql, return_last=return_last, handler=lambda cur: cur.fetchall(), split_statements=split_statements
)
mock_cursor.execute.assert_has_calls([mock.call(sql_statement) for sql_statement in expected_calls])
assert [SerializableRow(*row) for row in result] == expected_result
def test_get_sqlalchemy_engine(presto_hook, mock_connection, mocker):
"""Test that get_sqlalchemy_engine returns a SQLAlchemy engine with the correct URL."""
mock_create_engine = mocker.patch("airflow.providers.common.sql.hooks.sql.create_engine", autospec=True)
mock_engine = MagicMock()
mock_create_engine.return_value = mock_engine
with patch.object(presto_hook, "get_connection") as mock_get_connection:
mock_get_connection.return_value = mock_connection
engine = presto_hook.get_sqlalchemy_engine()
assert engine is mock_engine, "Returned engine does not match the mocked engine."
mock_create_engine.assert_called_once()
call_args = mock_create_engine.call_args[1]
actual_url = call_args["url"]
extra_dict = json.loads(mock_connection.extra or "{}")
assert actual_url.drivername == "presto"
assert actual_url.host == str(DEFAULT_HOST)
assert actual_url.password == (DEFAULT_PASSWORD or "")
assert actual_url.port == DEFAULT_PORT
assert actual_url.username == DEFAULT_LOGIN
assert actual_url.database == extra_dict.get("catalog")
assert actual_url.query.get("protocol") == extra_dict.get("protocol")
assert actual_url.query.get("source") == extra_dict.get("source")
assert actual_url.query.get("schema") == mock_connection.schema
def test_run_with_multiple_statements(presto_hook, mock_cursor, mock_get_conn):
"""Test execution of a single string containing multiple queries."""
mock_cursor.fetchall.side_effect = [[(1,)], [(2,)]]
mock_cursor.nextset.return_value = False
sql = "SELECT 1; SELECT 2;"
results = presto_hook.run(
sql,
return_last=True,
handler=lambda cur: cur.fetchall(),
split_statements=True,
)
mock_cursor.execute.assert_has_calls(
[
mock.call("SELECT 1"),
mock.call("SELECT 2"),
],
any_order=False,
)
mock_get_conn.return_value.cursor.assert_called_once()
assert results == [(2,)]
def test_get_uri(presto_hook, mock_connection):
"""Test that get_uri returns the correct connection URI with debug prints."""
# Ensure all connection attributes are explicitly set
mock_connection.host = DEFAULT_HOST
mock_connection.port = DEFAULT_PORT
mock_connection.login = DEFAULT_LOGIN
mock_connection.password = DEFAULT_PASSWORD
mock_connection.extra = json.dumps({"catalog": "hive", "protocol": "https", "source": "airflow"})
mock_connection.schema = "presto_db"
expected_uri = (
"presto://test:test_pass@test_host:8080/hive?protocol=https&source=airflow&schema=presto_db"
)
with patch.object(presto_hook, "get_connection", return_value=mock_connection):
uri = presto_hook.get_uri()
parsed = make_url(uri)
expected = make_url(expected_uri)
assert parsed.drivername == expected.drivername
assert parsed.username == expected.username
assert parsed.password == expected.password
assert parsed.host == expected.host
assert parsed.port == expected.port
assert parsed.database == expected.database
for key, value in expected.query.items():
assert parsed.query.get(key) == value
@pytest.mark.parametrize("sql", ["", "\n", " "])
def test_run_with_empty_sql(presto_hook, sql):
"""Test that running with empty SQL raises an ValueError."""
with pytest.raises(ValueError, match="List of SQL statements is empty"):
presto_hook.run(sql)
def test_with_invalid_names(presto_hook, mock_cursor):
"""Ensure PrestoHook.run handles queries returning reserved/invalid column names without raising errors."""
invalid_names = ["1_2_3", "select", "from"]
mock_cursor.description = get_cursor_descriptions(invalid_names)
expected_data = [(1, "row1", "bar")]
mock_cursor.fetchall.return_value = expected_data
test_sql = "SELECT 1 as id, 'row1' as value, 'bar' as foo"
result = presto_hook.run(test_sql, handler=lambda cur: cur.fetchall())
mock_cursor.execute.assert_called_once_with(test_sql)
assert result == expected_data
assert result[0][0] == 1
assert result[0][1] == "row1"
assert result[0][2] == "bar"
@pytest.fixture
def presto_hook_with_timeout(create_connection_without_db):
"""Fixture for PrestoHook with a connection that has a timeout set."""
create_connection_without_db(
Connection(
conn_id="presto_with_timeout",
conn_type="presto",
host=DEFAULT_HOST,
login=DEFAULT_LOGIN,
password=DEFAULT_PASSWORD,
port=DEFAULT_PORT,
schema="presto_db",
extra=json.dumps({"catalog": "hive", "protocol": "http", "source": "airflow", "timeout": 10}),
)
)
return PrestoHook(presto_conn_id="presto_with_timeout")
def test_execution_timeout_exceeded(presto_hook_with_timeout):
"""Test that a query exceeding the execution timeout raises an AirflowException."""
test_sql = "SELECT large_data FROM slow_table"
with patch(
"airflow.providers.common.sql.hooks.sql.DbApiHook.run",
side_effect=AirflowException("Query exceeded execution timeout"),
) as mock_parent_run:
with pytest.raises(AirflowException, match="Query exceeded execution timeout"):
presto_hook_with_timeout.run(sql=test_sql)
mock_parent_run.assert_called_once()
| {
"repo_id": "apache/airflow",
"file_path": "providers/presto/tests/unit/presto/hooks/test_presto_sql.py",
"license": "Apache License 2.0",
"lines": 307,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-ctl-tests/tests/airflowctl_tests/constants.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from pathlib import Path
AIRFLOW_ROOT_PATH = Path(__file__).resolve().parents[3]
DEFAULT_PYTHON_MAJOR_MINOR_VERSION = "3.10"
DEFAULT_DOCKER_IMAGE = f"ghcr.io/apache/airflow/main/prod/python{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}:latest"
DOCKER_IMAGE = os.environ.get("DOCKER_IMAGE") or DEFAULT_DOCKER_IMAGE
DOCKER_COMPOSE_HOST_PORT = os.environ.get("HOST_PORT", "localhost:8080")
DOCKER_COMPOSE_FILE_PATH = (
AIRFLOW_ROOT_PATH / "airflow-core" / "docs" / "howto" / "docker-compose" / "docker-compose.yaml"
)
LOGIN_COMMAND = "auth login --username airflow --password airflow"
LOGIN_OUTPUT = "Login successful! Welcome to airflowctl!"
| {
"repo_id": "apache/airflow",
"file_path": "airflow-ctl-tests/tests/airflowctl_tests/constants.py",
"license": "Apache License 2.0",
"lines": 29,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-ctl-tests/tests/airflowctl_tests/test_airflowctl_commands.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
def date_param():
import random
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
# original datetime string
dt_str = "2025-10-25T00:02:00+00:00"
# parse to datetime object
dt = datetime.fromisoformat(dt_str)
# boundaries
start = dt - relativedelta(months=1)
end = dt + relativedelta(months=1)
# pick random time between start and end
delta = end - start
random_seconds = random.randint(0, int(delta.total_seconds()))
random_dt = start + timedelta(seconds=random_seconds)
return random_dt.isoformat()
# Passing password via command line is insecure but acceptable for testing purposes
# Please do not do this in production, it enables possibility of exposing your credentials
LOGIN_COMMAND = "auth login --username airflow --password airflow"
LOGIN_COMMAND_SKIP_KEYRING = "auth login --skip-keyring"
LOGIN_OUTPUT = "Login successful! Welcome to airflowctl!"
TEST_COMMANDS = [
# Assets commands
"assets list",
"assets get --asset-id=1",
"assets create-event --asset-id=1",
# Backfill commands
"backfill list",
# Config commands
"config get --section core --option executor",
"config list",
"config lint",
# Connections commands
"connections create --connection-id=test_con --conn-type=mysql --password=TEST_PASS -o json",
"connections list",
"connections list -o yaml",
"connections list -o table",
"connections get --conn-id=test_con",
"connections get --conn-id=test_con -o json",
"connections update --connection-id=test_con --conn-type=postgres",
"connections import tests/airflowctl_tests/fixtures/test_connections.json",
"connections delete --conn-id=test_con",
"connections delete --conn-id=test_import_conn",
# DAGs commands
"dags list",
"dags get --dag-id=example_bash_operator",
"dags get-details --dag-id=example_bash_operator",
"dags get-stats --dag-ids=example_bash_operator",
"dags get-version --dag-id=example_bash_operator --version-number=1",
"dags list-import-errors",
"dags list-version --dag-id=example_bash_operator",
"dags list-warning",
# Order of trigger and pause/unpause is important for test stability because state checked
"dags trigger --dag-id=example_bash_operator --logical-date={date_param} --run-after={date_param}",
# Test trigger without logical-date (should default to now)
"dags trigger --dag-id=example_bash_operator",
"dags pause example_bash_operator",
"dags unpause example_bash_operator",
# DAG Run commands
'dagrun get --dag-id=example_bash_operator --dag-run-id="manual__{date_param}"',
"dags update --dag-id=example_bash_operator --no-is-paused",
# DAG Run commands
"dagrun list --dag-id example_bash_operator --state success --limit=1",
# XCom commands - need a DAG run with completed tasks
'xcom add --dag-id=example_bash_operator --dag-run-id="manual__{date_param}" --task-id=runme_0 --key=test_xcom_key --value=\'{{"test": "value"}}\'',
'xcom get --dag-id=example_bash_operator --dag-run-id="manual__{date_param}" --task-id=runme_0 --key=test_xcom_key',
'xcom list --dag-id=example_bash_operator --dag-run-id="manual__{date_param}" --task-id=runme_0',
'xcom edit --dag-id=example_bash_operator --dag-run-id="manual__{date_param}" --task-id=runme_0 --key=test_xcom_key --value=\'{{"updated": "value"}}\'',
'xcom delete --dag-id=example_bash_operator --dag-run-id="manual__{date_param}" --task-id=runme_0 --key=test_xcom_key',
# Jobs commands
"jobs list",
# Pools commands
"pools create --name=test_pool --slots=5",
"pools list",
"pools get --pool-name=test_pool",
"pools get --pool-name=test_pool -o yaml",
"pools update --pool=test_pool --slots=10",
"pools import tests/airflowctl_tests/fixtures/test_pools.json",
"pools export tests/airflowctl_tests/fixtures/pools_export.json --output=json",
"pools delete --pool=test_pool",
"pools delete --pool=test_import_pool",
# Providers commands
"providers list",
# Variables commands
"variables create --key=test_key --value=test_value",
"variables list",
"variables get --variable-key=test_key",
"variables get --variable-key=test_key -o table",
"variables update --key=test_key --value=updated_value",
"variables import tests/airflowctl_tests/fixtures/test_variables.json",
"variables delete --variable-key=test_key",
"variables delete --variable-key=test_import_var",
"variables delete --variable-key=test_import_var_with_desc",
# Version command
"version --remote",
]
DATE_PARAM_1 = date_param()
DATE_PARAM_2 = date_param()
TEST_COMMANDS_DEBUG_MODE = [LOGIN_COMMAND] + [test.format(date_param=DATE_PARAM_1) for test in TEST_COMMANDS]
TEST_COMMANDS_SKIP_KEYRING = [LOGIN_COMMAND_SKIP_KEYRING] + [
test.format(date_param=DATE_PARAM_2) for test in TEST_COMMANDS
]
@pytest.mark.flaky(reruns=3, reruns_delay=1)
@pytest.mark.parametrize(
"command",
TEST_COMMANDS_DEBUG_MODE,
ids=[" ".join(command.split(" ", 2)[:2]) for command in TEST_COMMANDS_DEBUG_MODE],
)
def test_airflowctl_commands(command: str, run_command):
"""Test airflowctl commands using docker-compose environment."""
env_vars = {"AIRFLOW_CLI_DEBUG_MODE": "true"}
run_command(command, env_vars, skip_login=True)
@pytest.mark.flaky(reruns=3, reruns_delay=1)
@pytest.mark.parametrize(
"command",
TEST_COMMANDS_SKIP_KEYRING,
ids=[" ".join(command.split(" ", 2)[:2]) for command in TEST_COMMANDS_SKIP_KEYRING],
)
def test_airflowctl_commands_skip_keyring(command: str, api_token: str, run_command):
"""Test airflowctl commands using docker-compose environment without using keyring."""
env_vars = {}
env_vars["AIRFLOW_CLI_TOKEN"] = api_token
env_vars["AIRFLOW_CLI_DEBUG_MODE"] = "false"
env_vars["AIRFLOW_CLI_ENVIRONMENT"] = "nokeyring"
run_command(command, env_vars, skip_login=True)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-ctl-tests/tests/airflowctl_tests/test_airflowctl_commands.py",
"license": "Apache License 2.0",
"lines": 143,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/tests/unit/dags/subdir1/test_explicit_dont_ignore.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime
from airflow.models.dag import DAG
from airflow.providers.standard.operators.bash import BashOperator
DEFAULT_DATE = datetime(2019, 12, 1)
dag = DAG(dag_id="test_dag_explicit_dont_ignore", start_date=DEFAULT_DATE, schedule=None)
task = BashOperator(task_id="task1", bash_command='echo "test dag explicitly dont ignore"', dag=dag)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/dags/subdir1/test_explicit_dont_ignore.py",
"license": "Apache License 2.0",
"lines": 24,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/tests/unit/dags/subdir2/subdir3/should_ignore_this.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/dags/subdir2/subdir3/should_ignore_this.py",
"license": "Apache License 2.0",
"lines": 17,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/tests/unit/dags/subdir2/subdir3/test_explicit_ignore.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/dags/subdir2/subdir3/test_explicit_ignore.py",
"license": "Apache License 2.0",
"lines": 16,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:scripts/ci/prek/check_common_compat_lazy_imports.py | #!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Validate that TYPE_CHECKING block in sdk.py matches runtime import maps.
This pre-commit hook ensures that:
1. All items in _IMPORT_MAP, _RENAME_MAP, and _MODULE_MAP are in TYPE_CHECKING
2. All items in TYPE_CHECKING are in one of the runtime maps
3. No mismatches between type hints and runtime behavior
Usage:
python scripts/ci/prek/check_common_compat_lazy_imports.py
"""
from __future__ import annotations
import ast
import sys
from pathlib import Path
def extract_runtime_maps(py_file: Path) -> tuple[set[str], set[str], set[str]]:
"""
Extract all names from _IMPORT_MAP, _RENAME_MAP, and _MODULE_MAP.
Returns tuple of (import_names, rename_names, module_names)
"""
content = py_file.read_text()
tree = ast.parse(content)
import_map = set()
rename_map = set()
module_map = set()
for node in tree.body:
# Handle both annotated assignments and regular assignments
targets: list[ast.Name | ast.expr] = []
value = None
if isinstance(node, ast.AnnAssign) and isinstance(node.target, ast.Name):
targets = [node.target]
value = node.value
elif isinstance(node, ast.Assign):
targets = node.targets
value = node.value
for target in targets:
if not isinstance(target, ast.Name):
continue
if target.id == "_IMPORT_MAP" and value:
data = ast.literal_eval(value)
import_map = set(data.keys())
elif target.id == "_RENAME_MAP" and value:
data = ast.literal_eval(value)
rename_map = set(data.keys())
elif target.id == "_MODULE_MAP" and value:
data = ast.literal_eval(value)
module_map = set(data.keys())
return import_map, rename_map, module_map
def extract_type_checking_names(py_file: Path) -> set[str]:
"""
Extract all imported names from TYPE_CHECKING block.
"""
content = py_file.read_text()
tree = ast.parse(content)
type_checking_names = set()
for node in ast.walk(tree):
# Find: if TYPE_CHECKING:
if isinstance(node, ast.If):
# Check if condition is comparing TYPE_CHECKING
if isinstance(node.test, ast.Name) and node.test.id == "TYPE_CHECKING":
# Extract all imports in this block
for stmt in node.body:
if isinstance(stmt, ast.ImportFrom):
# from X import Y as Z
for alias in stmt.names:
# Use alias.asname if present, else alias.name
name = alias.asname if alias.asname else alias.name
type_checking_names.add(name)
elif isinstance(stmt, ast.Import):
# import X as Y or import X.Y as Z
for alias in stmt.names:
# For module imports like "import airflow.sdk.io as io"
# Use the alias name (io) not the full module path
name = alias.asname if alias.asname else alias.name.split(".")[-1]
type_checking_names.add(name)
return type_checking_names
def main():
"""Validate TYPE_CHECKING block matches runtime maps."""
sdk_py = (
Path(__file__).parent.parent.parent.parent
/ "providers"
/ "common"
/ "compat"
/ "src"
/ "airflow"
/ "providers"
/ "common"
/ "compat"
/ "sdk.py"
)
if not sdk_py.exists():
print(f"❌ ERROR: {sdk_py} not found")
sys.exit(1)
# Extract runtime maps
import_names, rename_names, module_names = extract_runtime_maps(sdk_py)
runtime_names = import_names | rename_names | module_names
# Extract TYPE_CHECKING imports
type_checking_names = extract_type_checking_names(sdk_py)
# Check for discrepancies
missing_in_type_checking = runtime_names - type_checking_names
extra_in_type_checking = type_checking_names - runtime_names
errors = []
if missing_in_type_checking:
errors.append("\n❌ Items in runtime maps but MISSING in TYPE_CHECKING block:")
for name in sorted(missing_in_type_checking):
# Determine which map it's from
map_name = []
if name in import_names:
map_name.append("_IMPORT_MAP")
if name in rename_names:
map_name.append("_RENAME_MAP")
if name in module_names:
map_name.append("_MODULE_MAP")
errors.append(f" - {name} (in {', '.join(map_name)})")
if extra_in_type_checking:
errors.append("\n❌ Items in TYPE_CHECKING block but NOT in any runtime map:")
for name in sorted(extra_in_type_checking):
errors.append(f" - {name}")
if errors:
print("\n".join(errors))
print("\n❌ FAILED: TYPE_CHECKING block and runtime maps are out of sync!")
print("\nTo fix:")
print(f"1. Add missing items to TYPE_CHECKING block in {sdk_py}")
print("2. Remove extra items from TYPE_CHECKING block")
print(
"3. Ensure every item in _IMPORT_MAP/_RENAME_MAP/_MODULE_MAP has a corresponding TYPE_CHECKING import"
)
sys.exit(1)
print("✅ SUCCESS: TYPE_CHECKING block matches runtime maps")
print(f" - {len(import_names)} items in _IMPORT_MAP")
print(f" - {len(rename_names)} items in _RENAME_MAP")
print(f" - {len(module_names)} items in _MODULE_MAP")
print(f" - {len(type_checking_names)} items in TYPE_CHECKING")
sys.exit(0)
if __name__ == "__main__":
main()
| {
"repo_id": "apache/airflow",
"file_path": "scripts/ci/prek/check_common_compat_lazy_imports.py",
"license": "Apache License 2.0",
"lines": 151,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/src/airflow/api_fastapi/auth/middlewares/refresh_token.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from fastapi import HTTPException, Request
from fastapi.responses import JSONResponse
from starlette.middleware.base import BaseHTTPMiddleware
from airflow.api_fastapi.app import get_auth_manager
from airflow.api_fastapi.auth.managers.base_auth_manager import COOKIE_NAME_JWT_TOKEN
from airflow.api_fastapi.auth.managers.exceptions import AuthManagerRefreshTokenExpiredException
from airflow.api_fastapi.auth.managers.models.base_user import BaseUser
from airflow.api_fastapi.core_api.security import resolve_user_from_token
from airflow.configuration import conf
class JWTRefreshMiddleware(BaseHTTPMiddleware):
"""
Middleware to handle JWT token refresh.
This middleware:
1. Extracts JWT token from cookies and build the user from the token
2. Calls ``refresh_user`` method from auth manager with the user
3. If ``refresh_user`` returns a user, generate a JWT token based upon this user and send it in the
response as cookie
"""
async def dispatch(self, request: Request, call_next):
new_token = None
current_token = request.cookies.get(COOKIE_NAME_JWT_TOKEN)
try:
if current_token is not None:
try:
new_user, current_user = await self._refresh_user(current_token)
if user := (new_user or current_user):
request.state.user = user
if new_user:
# If we created a new user, serialize it and set it as a cookie
new_token = get_auth_manager().generate_jwt(new_user)
except (HTTPException, AuthManagerRefreshTokenExpiredException):
# Receive a HTTPException when the Airflow token is expired
# Receive a AuthManagerRefreshTokenExpiredException when the potential underlying refresh
# token used by the auth manager is expired
new_token = ""
response = await call_next(request)
if new_token is not None:
secure = bool(conf.get("api", "ssl_cert", fallback=""))
response.set_cookie(
COOKIE_NAME_JWT_TOKEN,
new_token,
httponly=True,
secure=secure,
samesite="lax",
max_age=0 if new_token == "" else None,
)
except HTTPException as exc:
# If any HTTPException is raised during user resolution or refresh, return it as response
return JSONResponse(status_code=exc.status_code, content={"detail": exc.detail})
return response
@staticmethod
async def _refresh_user(current_token: str) -> tuple[BaseUser | None, BaseUser | None]:
user = await resolve_user_from_token(current_token)
return get_auth_manager().refresh_user(user=user), user
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/auth/middlewares/refresh_token.py",
"license": "Apache License 2.0",
"lines": 72,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/tests/unit/api_fastapi/auth/middlewares/test_refresh_token.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
from fastapi import HTTPException, Request, Response
from airflow.api_fastapi.auth.managers.base_auth_manager import COOKIE_NAME_JWT_TOKEN
from airflow.api_fastapi.auth.managers.models.base_user import BaseUser
from airflow.api_fastapi.auth.middlewares.refresh_token import JWTRefreshMiddleware
class TestJWTRefreshMiddleware:
@pytest.fixture
def middleware(self):
return JWTRefreshMiddleware(app=MagicMock())
@pytest.fixture
def mock_request(self):
request = MagicMock(spec=Request)
request.cookies = {}
request.state = MagicMock()
return request
@pytest.fixture
def mock_user(self):
return MagicMock(spec=BaseUser)
@patch.object(JWTRefreshMiddleware, "_refresh_user")
@pytest.mark.asyncio
async def test_dispatch_no_token(self, mock_refresh_user, middleware, mock_request):
call_next = AsyncMock(return_value=Response())
await middleware.dispatch(mock_request, call_next)
call_next.assert_called_once_with(mock_request)
mock_refresh_user.assert_not_called()
@patch.object(
JWTRefreshMiddleware,
"_refresh_user",
side_effect=HTTPException(status_code=403, detail="Invalid JWT token"),
)
@pytest.mark.asyncio
async def test_dispatch_invalid_token(self, mock_refresh_user, middleware, mock_request):
mock_request.cookies = {COOKIE_NAME_JWT_TOKEN: "valid_token"}
call_next = AsyncMock(return_value=Response(status_code=401))
response = await middleware.dispatch(mock_request, call_next)
assert response.status_code == 401
assert '_token=""; HttpOnly; Max-Age=0; Path=/; SameSite=lax' in response.headers.get("set-cookie")
@patch("airflow.api_fastapi.auth.middlewares.refresh_token.get_auth_manager")
@patch("airflow.api_fastapi.auth.middlewares.refresh_token.resolve_user_from_token")
@pytest.mark.asyncio
async def test_dispatch_no_refreshed_token(
self, mock_resolve_user_from_token, mock_get_auth_manager, middleware, mock_request, mock_user
):
mock_request.cookies = {COOKIE_NAME_JWT_TOKEN: "valid_token"}
mock_resolve_user_from_token.return_value = mock_user
mock_auth_manager = MagicMock()
mock_get_auth_manager.return_value = mock_auth_manager
mock_auth_manager.refresh_user.return_value = None
call_next = AsyncMock(return_value=Response())
await middleware.dispatch(mock_request, call_next)
call_next.assert_called_once_with(mock_request)
mock_resolve_user_from_token.assert_called_once_with("valid_token")
mock_auth_manager.generate_jwt.assert_not_called()
@patch("airflow.api_fastapi.auth.middlewares.refresh_token.resolve_user_from_token")
@pytest.mark.asyncio
async def test_dispatch_expired_token(self, mock_resolve_user_from_token, middleware, mock_request):
mock_request.cookies = {COOKIE_NAME_JWT_TOKEN: "invalid_token"}
mock_resolve_user_from_token.side_effect = HTTPException(status_code=403)
call_next = AsyncMock(return_value=Response())
await middleware.dispatch(mock_request, call_next)
call_next.assert_called_once_with(mock_request)
mock_resolve_user_from_token.assert_called_once_with("invalid_token")
    @pytest.mark.asyncio
    @patch("airflow.api_fastapi.auth.middlewares.refresh_token.get_auth_manager")
    @patch("airflow.api_fastapi.auth.middlewares.refresh_token.resolve_user_from_token")
    @patch("airflow.api_fastapi.auth.middlewares.refresh_token.conf")
    async def test_dispatch_with_refreshed_user(
        self,
        mock_conf,
        mock_resolve_user_from_token,
        mock_get_auth_manager,
        middleware,
        mock_request,
        mock_user,
    ):
        """A successful refresh swaps the request user and sets a new JWT cookie."""
        refreshed_user = MagicMock(spec=BaseUser)
        mock_request.cookies = {COOKIE_NAME_JWT_TOKEN: "valid_token"}
        mock_resolve_user_from_token.return_value = mock_user
        mock_auth_manager = MagicMock()
        mock_get_auth_manager.return_value = mock_auth_manager
        mock_auth_manager.refresh_user.return_value = refreshed_user
        mock_auth_manager.generate_jwt.return_value = "new_token"
        # conf.get stubbed to "" — presumably a cookie-related setting read by
        # the middleware; confirm against the middleware implementation.
        mock_conf.get.return_value = ""
        call_next = AsyncMock(return_value=Response())
        response = await middleware.dispatch(mock_request, call_next)
        # The refreshed user replaces the resolved one on the request state.
        assert mock_request.state.user == refreshed_user
        call_next.assert_called_once_with(mock_request)
        mock_resolve_user_from_token.assert_called_once_with("valid_token")
        mock_auth_manager.refresh_user.assert_called_once_with(user=mock_user)
        mock_auth_manager.generate_jwt.assert_called_once_with(refreshed_user)
        # The new JWT is sent back to the client via a Set-Cookie header.
        set_cookie_headers = response.headers.get("set-cookie", "")
        assert f"{COOKIE_NAME_JWT_TOKEN}=new_token" in set_cookie_headers
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/api_fastapi/auth/middlewares/test_refresh_token.py",
"license": "Apache License 2.0",
"lines": 111,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/serialization/definitions/notset.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, TypeVar
if TYPE_CHECKING:
from typing_extensions import TypeIs
T = TypeVar("T")
__all__ = ["NOTSET", "ArgNotSet", "is_arg_set"]
class ArgNotSet:
    """Sentinel type for annotations, useful when None is not viable."""


# Shared singleton instance; callers distinguish "argument omitted" from
# "argument explicitly None" by checking against this with ``is_arg_set``.
NOTSET = ArgNotSet()
"""Sentinel value for argument default. See ``ArgNotSet``."""
def is_arg_set(value: T | ArgNotSet) -> TypeIs[T]:
    """Return True when *value* is a real argument rather than the NOTSET sentinel."""
    if isinstance(value, ArgNotSet):
        return False
    return True
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/serialization/definitions/notset.py",
"license": "Apache License 2.0",
"lines": 29,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/src/airflow/serialization/definitions/param.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import collections.abc
import copy
from typing import TYPE_CHECKING, Any, Literal
from airflow.serialization.definitions.notset import NOTSET, is_arg_set
if TYPE_CHECKING:
from collections.abc import Iterator, Mapping
class SerializedParam:
    """Server-side counterpart of the SDK ``Param``, used during deserialization."""

    def __init__(
        self,
        default: Any = NOTSET,
        description: str | None = None,
        source: Literal["dag", "task"] | None = None,
        **schema,
    ):
        # The SDK already validated the default, so it is stored verbatim here.
        self.value = default
        self.description = description
        self.schema = schema
        self.source = source

    def resolve(self, *, raises: bool = False) -> Any:
        """
        Run the validations and return the param's final value.

        Unlike the SDK ``Param``, this never raises by default: *None* is
        returned when validation fails, when no value is set, or when the
        value is not JSON-serializable.

        :param raises: Exceptions raised during validation are suppressed by
            default; set to *True* to propagate them instead.
        """
        import jsonschema

        try:
            value = self.value
            if not is_arg_set(value):
                raise ValueError("No value passed")
            jsonschema.validate(value, self.schema, format_checker=jsonschema.FormatChecker())
        except Exception:
            if raises:
                raise
            return None
        return value

    def dump(self) -> dict[str, Any]:
        """Return the full param spec for API consumers."""
        resolved = self.resolve()
        return {
            "value": resolved,
            "schema": self.schema,
            "description": self.description,
            "source": self.source,
        }
def _coerce_param(v: Any) -> SerializedParam:
    """Wrap *v* in a :class:`SerializedParam` unless it already is one."""
    return v if isinstance(v, SerializedParam) else SerializedParam(v)
def _collect_params(container: Mapping[str, Any] | None) -> Iterator[tuple[str, SerializedParam]]:
    """Yield ``(name, SerializedParam)`` pairs from *container*, coercing raw values."""
    if container:
        for name, raw in container.items():
            yield name, _coerce_param(raw)
class SerializedParamsDict(collections.abc.Mapping[str, Any]):
    """Server-side ParamsDict class for deserialization."""

    # Internal storage: param name -> SerializedParam (name-mangled attribute).
    __dict: dict[str, SerializedParam]

    def __init__(self, d: Mapping[str, Any] | None = None) -> None:
        self.__dict = dict(_collect_params(d))

    def __eq__(self, other: Any) -> bool:
        """Compare params dicts using their dumped content, matching SDK behavior."""
        if hasattr(other, "dump"):  # ParamsDict or SerializedParamsDict
            return self.dump() == other.dump()
        if isinstance(other, collections.abc.Mapping):
            return self.dump() == other
        return NotImplemented

    def __hash__(self):
        # BUGFIX: the previous implementation hashed ``self.dump()``, but
        # ``dump()`` returns a dict, which is unhashable — every ``hash()``
        # call raised TypeError. Hash the sorted key set instead: objects that
        # compare equal have equal dumps, hence equal key sets, so the
        # __eq__/__hash__ contract still holds (collisions are permitted).
        return hash(tuple(sorted(self.__dict)))

    def __contains__(self, key: object) -> bool:
        return key in self.__dict

    def __len__(self) -> int:
        return len(self.__dict)

    def __iter__(self) -> Iterator[str]:
        return iter(self.__dict)

    def __getitem__(self, key: str) -> Any:
        """
        Get the stored value for this key.

        This matches SDK ParamsDict behavior (raw value, no validation).
        """
        return self.__dict[key].value

    def get_param(self, key: str) -> SerializedParam:
        """Get the internal SerializedParam object for this key."""
        return self.__dict[key]

    def items(self):
        return collections.abc.ItemsView(self.__dict)

    def values(self):
        return collections.abc.ValuesView(self.__dict)

    def validate(self) -> dict[str, Any]:
        """Validate & returns all the params stored in the dictionary."""

        def _validate_one(k: str, v: SerializedParam):
            try:
                return v.resolve(raises=True)
            except Exception as e:
                # Surface which key failed; drop the original exception chain.
                raise ValueError(f"Invalid input for param {k}: {e}") from None

        return {k: _validate_one(k, v) for k, v in self.__dict.items()}

    def dump(self) -> Mapping[str, Any]:
        """Dump the resolved values as a mapping."""
        return {k: v.resolve() for k, v in self.__dict.items()}

    def deep_merge(self, data: Mapping[str, Any] | None) -> SerializedParamsDict:
        """Create a new params dict by merging incoming data into this params dict."""
        params = copy.deepcopy(self)
        if not data:
            return params
        for k, v in data.items():
            if k not in params:
                # Unknown key: wrap the raw value in a new param.
                params.__dict[k] = _coerce_param(v)
            elif isinstance(v, SerializedParam):
                # A full param object replaces the existing entry wholesale.
                params.__dict[k] = v
            else:
                # A raw value overrides only the stored value, keeping schema.
                params.__dict[k].value = v
        return params
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/serialization/definitions/param.py",
"license": "Apache License 2.0",
"lines": 131,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-e2e-tests/tests/airflow_e2e_tests/remote_log_tests/test_remote_logging.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import time
from datetime import datetime, timezone
import boto3
import pytest
from airflow_e2e_tests.e2e_test_utils.clients import AirflowClient
class TestRemoteLogging:
    """E2E check that task logs written via remote logging land in S3 (localstack)."""

    airflow_client = AirflowClient()
    dag_id = "example_xcom_test"
    # Number of log objects the example DAG is expected to produce.
    task_count = 6
    retry_interval_in_seconds = 5
    max_retries = 12

    def test_dag_unpause(self):
        """The DAG must be unpausable before it can be triggered."""
        self.airflow_client.un_pause_dag(
            TestRemoteLogging.dag_id,
        )

    def test_remote_logging_s3(self):
        """Test that a DAG using remote logging to S3 completes successfully."""
        self.airflow_client.un_pause_dag(TestRemoteLogging.dag_id)
        resp = self.airflow_client.trigger_dag(
            TestRemoteLogging.dag_id, json={"logical_date": datetime.now(timezone.utc).isoformat()}
        )
        state = self.airflow_client.wait_for_dag_run(
            dag_id=TestRemoteLogging.dag_id,
            run_id=resp["dag_run_id"],
        )
        assert state == "success", (
            f"DAG {TestRemoteLogging.dag_id} did not complete successfully. Final state: {state}"
        )
        # This bucket will be created part of the docker-compose setup in
        bucket_name = "test-airflow-logs"
        s3_client = boto3.client(
            "s3",
            endpoint_url="http://localhost:4566",
            aws_access_key_id="test",
            aws_secret_access_key="test",
            region_name="us-east-1",
        )
        # Wait for logs to be available in S3 before we call `get_task_logs`.
        # Initialize up front so the post-loop check cannot hit a NameError
        # if max_retries is ever configured to 0.
        contents: list = []
        for _ in range(self.max_retries):
            response = s3_client.list_objects_v2(Bucket=bucket_name)
            contents = response.get("Contents", [])
            if len(contents) >= self.task_count:
                break
            print(f"Expected at least {self.task_count} log files, found {len(contents)}. Retrying...")
            time.sleep(self.retry_interval_in_seconds)
        if len(contents) < self.task_count:
            pytest.fail(
                f"Expected at least {self.task_count} log files in S3 bucket {bucket_name}, "
                f"but found {len(contents)} objects: {[obj.get('Key') for obj in contents]}. \n"
                f"List Objects Response: {response}"
            )
        task_logs = self.airflow_client.get_task_logs(
            dag_id=TestRemoteLogging.dag_id,
            task_id="bash_pull",
            run_id=resp["dag_run_id"],
        )
        task_log_sources = [
            source for content in task_logs.get("content", [{}]) for source in content.get("sources", [])
        ]
        response = s3_client.list_objects_v2(Bucket=bucket_name)
        if "Contents" not in response:
            # BUGFIX: pytest.fail takes a single message string; the old
            # %-style call passed bucket_name into the `pytrace` parameter
            # and never formatted the message.
            pytest.fail(f"No objects found in S3 bucket {bucket_name}")
        # s3 key format: dag_id=example_xcom/run_id=manual__2025-09-29T23:32:09.457215+00:00/task_id=bash_pull/attempt=1.log
        log_files = [f"s3://{bucket_name}/{obj['Key']}" for obj in response["Contents"]]
        assert any(source in log_files for source in task_log_sources), (
            f"None of the log sources {task_log_sources} were found in S3 bucket logs {log_files}"
        )
| {
"repo_id": "apache/airflow",
"file_path": "airflow-e2e-tests/tests/airflow_e2e_tests/remote_log_tests/test_remote_logging.py",
"license": "Apache License 2.0",
"lines": 84,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/amazon/src/airflow/providers/amazon/aws/hooks/firehose.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains AWS Firehose hook."""
from __future__ import annotations
from collections.abc import Iterable
from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
class FirehoseHook(AwsBaseHook):
    """
    Interact with Amazon Kinesis Firehose.

    Thick wrapper around :external+boto3:py:class:`boto3.client("firehose") <Firehose.Client>`.

    :param delivery_stream: Name of the delivery stream

    Additional arguments (such as ``aws_conn_id``) may be specified and
    are passed down to the underlying AwsBaseHook.

    .. seealso::
        - :class:`airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook`
    """

    def __init__(self, delivery_stream: str, *args, **kwargs) -> None:
        # Pin the boto3 client type before delegating to the base hook.
        kwargs["client_type"] = "firehose"
        self.delivery_stream = delivery_stream
        super().__init__(*args, **kwargs)

    def put_records(self, records: Iterable) -> dict:
        """
        Write batch records to Kinesis Firehose.

        .. seealso::
            - :external+boto3:py:meth:`Firehose.Client.put_record_batch`

        :param records: list of records
        """
        client = self.get_conn()
        return client.put_record_batch(DeliveryStreamName=self.delivery_stream, Records=records)
| {
"repo_id": "apache/airflow",
"file_path": "providers/amazon/src/airflow/providers/amazon/aws/hooks/firehose.py",
"license": "Apache License 2.0",
"lines": 43,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/amazon/tests/unit/amazon/aws/hooks/test_firehose.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import uuid
import boto3
from moto import mock_aws
from airflow.providers.amazon.aws.hooks.firehose import FirehoseHook
@mock_aws
class TestFirehoseHook:
    """Unit tests for FirehoseHook running against a moto-mocked AWS backend."""

    def test_get_conn_returns_a_boto3_connection(self):
        """The hook should expose a usable boto3 firehose client."""
        hook = FirehoseHook(
            aws_conn_id="aws_default", delivery_stream="test_airflow", region_name="us-east-1"
        )
        assert hook.get_conn() is not None

    def test_insert_batch_records_kinesis_firehose(self):
        """put_records should deliver a 100-record batch with zero failures."""
        # The delivery stream needs an existing S3 destination bucket.
        boto3.client("s3").create_bucket(Bucket="kinesis-test")
        hook = FirehoseHook(
            aws_conn_id="aws_default", delivery_stream="test_airflow", region_name="us-east-1"
        )
        response = hook.get_conn().create_delivery_stream(
            DeliveryStreamName="test_airflow",
            S3DestinationConfiguration={
                "RoleARN": "arn:aws:iam::123456789012:role/firehose_delivery_role",
                "BucketARN": "arn:aws:s3:::kinesis-test",
                "Prefix": "airflow/",
                "BufferingHints": {"SizeInMBs": 123, "IntervalInSeconds": 124},
                "CompressionFormat": "UNCOMPRESSED",
            },
        )
        stream_arn = response["DeliveryStreamARN"]
        # moto's default account id is 123456789012, so the ARN is deterministic.
        assert stream_arn == "arn:aws:firehose:us-east-1:123456789012:deliverystream/test_airflow"
        records = [{"Data": str(uuid.uuid4())} for _ in range(100)]
        response = hook.put_records(records)
        assert response["FailedPutCount"] == 0
        assert response["ResponseMetadata"]["HTTPStatusCode"] == 200
| {
"repo_id": "apache/airflow",
"file_path": "providers/amazon/tests/unit/amazon/aws/hooks/test_firehose.py",
"license": "Apache License 2.0",
"lines": 50,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/api_fastapi/core_api/datamodels/task_instance_history.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime
from typing import Annotated
from pydantic import (
AliasPath,
BeforeValidator,
Field,
)
from airflow.api_fastapi.core_api.base import BaseModel
from airflow.api_fastapi.core_api.datamodels.dag_versions import DagVersionResponse
from airflow.utils.state import TaskInstanceState
class TaskInstanceHistoryResponse(BaseModel):
    """TaskInstanceHistory serializer for responses."""

    task_id: str
    dag_id: str
    # todo: this should not be aliased; it's ambiguous with dag run's "id" - airflow 3.0
    run_id: str = Field(alias="dag_run_id")
    map_index: int
    start_date: datetime | None
    end_date: datetime | None
    duration: float | None
    state: TaskInstanceState | None
    try_number: int
    max_tries: int
    task_display_name: str
    # Pulled from the joined dag_run -> dag_model relationship at validation time.
    dag_display_name: str = Field(validation_alias=AliasPath("dag_run", "dag_model", "dag_display_name"))
    hostname: str | None
    unixname: str | None
    pool: str
    pool_slots: int
    queue: str | None
    priority_weight: int | None
    operator: str | None
    # The next three fields are exposed to API clients under friendlier names
    # than the ORM column names they alias.
    custom_operator_name: str | None = Field(alias="operator_name")
    queued_dttm: datetime | None = Field(alias="queued_when")
    scheduled_dttm: datetime | None = Field(alias="scheduled_when")
    pid: int | None
    executor: str | None
    # executor_config is an arbitrary object on the ORM side; coerce to str
    # before validation so the response stays JSON-friendly.
    executor_config: Annotated[str, BeforeValidator(str)]
    dag_version: DagVersionResponse | None
class TaskInstanceHistoryCollectionResponse(BaseModel):
    """TaskInstanceHistory Collection serializer for responses."""

    task_instances: list[TaskInstanceHistoryResponse]
    # NOTE(review): presumably the overall match count independent of
    # pagination — confirm against the endpoint that builds this response.
    total_entries: int
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/core_api/datamodels/task_instance_history.py",
"license": "Apache License 2.0",
"lines": 60,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/src/airflow/models/hitl_history.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime
from typing import TYPE_CHECKING
from uuid import UUID
import sqlalchemy as sa
from sqlalchemy import Boolean, ForeignKeyConstraint, Text, Uuid
from sqlalchemy.orm import Mapped, mapped_column, relationship
from airflow._shared.timezones import timezone
from airflow.models.base import Base
from airflow.models.hitl import HITLDetailPropertyMixin
from airflow.utils.sqlalchemy import UtcDateTime
if TYPE_CHECKING:
from airflow.models.hitl import HITLDetail
class HITLDetailHistory(Base, HITLDetailPropertyMixin):
    """
    Store HITLDetail for old tries of TaskInstances.

    :meta private:
    """

    __tablename__ = "hitl_detail_history"

    # Primary key; populated from HITLDetail.ti_id in __init__ and linked to
    # task_instance_history.task_instance_id by the FK in __table_args__.
    ti_history_id: Mapped[UUID] = mapped_column(
        Uuid(),
        primary_key=True,
        nullable=False,
    )
    # User Request Detail
    options: Mapped[dict] = mapped_column(sa.JSON(), nullable=False)
    subject: Mapped[str] = mapped_column(Text, nullable=False)
    body: Mapped[str | None] = mapped_column(Text, nullable=True)
    defaults: Mapped[dict | None] = mapped_column(sa.JSON(), nullable=True)
    multiple: Mapped[bool | None] = mapped_column(Boolean, unique=False, default=False, nullable=True)
    params: Mapped[dict] = mapped_column(sa.JSON(), nullable=False, default={})
    assignees: Mapped[list[dict[str, str]] | None] = mapped_column(sa.JSON(), nullable=True)
    created_at: Mapped[datetime] = mapped_column(UtcDateTime, default=timezone.utcnow, nullable=False)
    # Response Content Detail
    responded_at: Mapped[datetime | None] = mapped_column(UtcDateTime, nullable=True)
    responded_by: Mapped[dict | None] = mapped_column(sa.JSON(), nullable=True)
    chosen_options: Mapped[dict | None] = mapped_column(
        sa.JSON(),
        nullable=True,
        default=None,
    )
    params_input: Mapped[dict] = mapped_column(sa.JSON(), nullable=False, default={})
    task_instance = relationship(
        "TaskInstanceHistory",
        lazy="joined",
        back_populates="hitl_detail",
    )

    def __init__(self, hitl_detail: HITLDetail):
        """Snapshot *hitl_detail*: copy every mapped column verbatim, except
        ``ti_history_id`` which is taken from ``hitl_detail.ti_id``."""
        super().__init__()
        for column in self.__table__.columns:
            if column.name == "ti_history_id":
                setattr(self, column.name, hitl_detail.ti_id)
                continue
            setattr(self, column.name, getattr(hitl_detail, column.name))

    __table_args__ = (
        ForeignKeyConstraint(
            (ti_history_id,),
            ["task_instance_history.task_instance_id"],
            name="hitl_detail_history_tih_fkey",
            ondelete="CASCADE",
            onupdate="CASCADE",
        ),
    )
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/models/hitl_history.py",
"license": "Apache License 2.0",
"lines": 79,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:task-sdk/src/airflow/sdk/execution_time/secrets/execution_api.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Secrets backend that routes requests to the Execution API."""
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.sdk.bases.secrets_backend import BaseSecretsBackend
if TYPE_CHECKING:
from airflow.sdk import Connection
class ExecutionAPISecretsBackend(BaseSecretsBackend):
    """
    Secrets backend for client contexts (workers, DAG processors, triggerers).

    Routes connection and variable requests through SUPERVISOR_COMMS to the
    Execution API server. This backend should only be registered in client
    processes, not in API server/scheduler processes.

    All lookup methods swallow communication failures and return *None* so
    that resolution can fall back to other configured secrets backends.
    """

    def get_conn_value(self, conn_id: str, team_name: str | None = None) -> str | None:
        """
        Get connection URI via SUPERVISOR_COMMS.

        Not used since we override get_connection directly.
        """
        raise NotImplementedError("Use get_connection instead")

    def get_connection(self, conn_id: str, team_name: str | None = None) -> Connection | None:  # type: ignore[override]
        """
        Return connection object by routing through SUPERVISOR_COMMS.

        :param conn_id: connection id
        :param team_name: Name of the team associated to the task trying to access the connection.
            Unused here because the team name is inferred from the task ID provided in the execution API JWT token.
        :return: Connection object or None if not found
        """
        # Imports are kept local — presumably to avoid import cycles and
        # module-load cost in client processes; confirm before hoisting.
        from airflow.sdk.execution_time.comms import ErrorResponse, GetConnection
        from airflow.sdk.execution_time.context import _process_connection_result_conn
        from airflow.sdk.execution_time.task_runner import SUPERVISOR_COMMS

        try:
            msg = SUPERVISOR_COMMS.send(GetConnection(conn_id=conn_id))
            if isinstance(msg, ErrorResponse):
                # Connection not found or error occurred
                return None
            # Convert ExecutionAPI response to SDK Connection
            return _process_connection_result_conn(msg)
        except RuntimeError as e:
            # TriggerCommsDecoder.send() uses async_to_sync internally, which raises RuntimeError
            # when called within an async event loop. In greenback portal contexts (triggerer),
            # we catch this and use greenback to call the async version instead.
            if str(e).startswith("You cannot use AsyncToSync in the same thread as an async event loop"):
                import asyncio

                import greenback

                task = asyncio.current_task()
                if greenback.has_portal(task):
                    import warnings

                    warnings.warn(
                        "You should not use sync calls here -- use `await aget_connection` instead",
                        stacklevel=2,
                    )
                    return greenback.await_(self.aget_connection(conn_id))
            # Fall through to the general exception handler for other RuntimeErrors
            return None
        except Exception:
            # If SUPERVISOR_COMMS fails for any reason, return None
            # to allow fallback to other backends
            return None

    def get_variable(self, key: str, team_name: str | None = None) -> str | None:
        """
        Return variable value by routing through SUPERVISOR_COMMS.

        :param key: Variable key
        :param team_name: Name of the team associated to the task trying to access the variable.
            Unused here because the team name is inferred from the task ID provided in the execution API JWT token.
        :return: Variable value or None if not found
        """
        from airflow.sdk.execution_time.comms import ErrorResponse, GetVariable, VariableResult
        from airflow.sdk.execution_time.task_runner import SUPERVISOR_COMMS

        try:
            msg = SUPERVISOR_COMMS.send(GetVariable(key=key))
            if isinstance(msg, ErrorResponse):
                # Variable not found or error occurred
                return None
            # Extract value from VariableResult
            if isinstance(msg, VariableResult):
                return msg.value  # Already a string | None
            # Unexpected message type: treat as not found.
            return None
        except Exception:
            # If SUPERVISOR_COMMS fails for any reason, return None
            # to allow fallback to other backends
            return None

    async def aget_connection(self, conn_id: str) -> Connection | None:  # type: ignore[override]
        """
        Return connection object asynchronously via SUPERVISOR_COMMS.

        :param conn_id: connection id
        :return: Connection object or None if not found
        """
        from airflow.sdk.execution_time.comms import ErrorResponse, GetConnection
        from airflow.sdk.execution_time.context import _process_connection_result_conn
        from airflow.sdk.execution_time.task_runner import SUPERVISOR_COMMS

        try:
            msg = await SUPERVISOR_COMMS.asend(GetConnection(conn_id=conn_id))
            if isinstance(msg, ErrorResponse):
                # Connection not found or error occurred
                return None
            # Convert ExecutionAPI response to SDK Connection
            return _process_connection_result_conn(msg)
        except Exception:
            # If SUPERVISOR_COMMS fails for any reason, return None
            # to allow fallback to other backends
            return None

    async def aget_variable(self, key: str) -> str | None:
        """
        Return variable value asynchronously via SUPERVISOR_COMMS.

        :param key: Variable key
        :return: Variable value or None if not found
        """
        from airflow.sdk.execution_time.comms import ErrorResponse, GetVariable, VariableResult
        from airflow.sdk.execution_time.task_runner import SUPERVISOR_COMMS

        try:
            msg = await SUPERVISOR_COMMS.asend(GetVariable(key=key))
            if isinstance(msg, ErrorResponse):
                # Variable not found or error occurred
                return None
            # Extract value from VariableResult
            if isinstance(msg, VariableResult):
                return msg.value  # Already a string | None
            # Unexpected message type: treat as not found.
            return None
        except Exception:
            # If SUPERVISOR_COMMS fails for any reason, return None
            # to allow fallback to other backends
            return None
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/execution_time/secrets/execution_api.py",
"license": "Apache License 2.0",
"lines": 138,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/tests/task_sdk/execution_time/test_secrets.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow.sdk.definitions.connection import Connection
from airflow.sdk.execution_time.secrets.execution_api import ExecutionAPISecretsBackend
class TestExecutionAPISecretsBackend:
"""Test ExecutionAPISecretsBackend."""
    def test_get_connection_via_supervisor_comms(self, mock_supervisor_comms):
        """Test that connection is retrieved via SUPERVISOR_COMMS."""
        from airflow.sdk.api.datamodels._generated import ConnectionResponse
        from airflow.sdk.execution_time.comms import ConnectionResult

        # Mock connection response
        conn_response = ConnectionResponse(
            conn_id="test_conn",
            conn_type="http",
            host="example.com",
            port=443,
            schema="https",
        )
        conn_result = ConnectionResult.from_conn_response(conn_response)
        mock_supervisor_comms.send.return_value = conn_result
        backend = ExecutionAPISecretsBackend()
        conn = backend.get_connection("test_conn")
        # The backend should translate the comms result into an SDK Connection.
        assert conn is not None
        assert conn.conn_id == "test_conn"
        assert conn.conn_type == "http"
        assert conn.host == "example.com"
        mock_supervisor_comms.send.assert_called_once()
    def test_get_connection_not_found(self, mock_supervisor_comms):
        """Test that None is returned when connection not found."""
        from airflow.sdk.exceptions import ErrorType
        from airflow.sdk.execution_time.comms import ErrorResponse

        # Mock error response
        error_response = ErrorResponse(error=ErrorType.CONNECTION_NOT_FOUND, detail={"message": "Not found"})
        mock_supervisor_comms.send.return_value = error_response
        backend = ExecutionAPISecretsBackend()
        conn = backend.get_connection("nonexistent")
        # ErrorResponse maps to None so other backends can be consulted.
        assert conn is None
        mock_supervisor_comms.send.assert_called_once()
    def test_get_variable_via_supervisor_comms(self, mock_supervisor_comms):
        """Test that variable is retrieved via SUPERVISOR_COMMS."""
        from airflow.sdk.execution_time.comms import VariableResult

        # Mock variable response
        var_result = VariableResult(key="test_var", value="test_value")
        mock_supervisor_comms.send.return_value = var_result
        backend = ExecutionAPISecretsBackend()
        value = backend.get_variable("test_var")
        # VariableResult.value is returned verbatim.
        assert value == "test_value"
        mock_supervisor_comms.send.assert_called_once()
    def test_get_variable_not_found(self, mock_supervisor_comms):
        """Test that None is returned when variable not found."""
        from airflow.sdk.exceptions import ErrorType
        from airflow.sdk.execution_time.comms import ErrorResponse

        # Mock error response
        error_response = ErrorResponse(error=ErrorType.VARIABLE_NOT_FOUND, detail={"message": "Not found"})
        mock_supervisor_comms.send.return_value = error_response
        backend = ExecutionAPISecretsBackend()
        value = backend.get_variable("nonexistent")
        # ErrorResponse maps to None so other backends can be consulted.
        assert value is None
        mock_supervisor_comms.send.assert_called_once()
def test_get_connection_handles_exception(self, mock_supervisor_comms):
    """Comms failures must not propagate out of get_connection."""
    mock_supervisor_comms.send.side_effect = RuntimeError("Connection failed")

    result = ExecutionAPISecretsBackend().get_connection("test_conn")

    # Returning None lets the secrets chain fall through to the next backend.
    assert result is None
def test_get_variable_handles_exception(self, mock_supervisor_comms):
    """Comms failures must not propagate out of get_variable."""
    mock_supervisor_comms.send.side_effect = RuntimeError("Communication failed")

    result = ExecutionAPISecretsBackend().get_variable("test_var")

    # Returning None lets the secrets chain fall through to the next backend.
    assert result is None
def test_get_conn_value_not_implemented(self):
    """get_conn_value is intentionally unsupported on this backend."""
    with pytest.raises(NotImplementedError, match="Use get_connection instead"):
        ExecutionAPISecretsBackend().get_conn_value("test_conn")
def test_runtime_error_triggers_greenback_fallback(self, mocker, mock_supervisor_comms):
    """
    Test that RuntimeError from async_to_sync triggers greenback fallback.

    This test verifies the fix for issue #57145: when SUPERVISOR_COMMS.send()
    raises the specific RuntimeError about async_to_sync in an event loop,
    the backend catches it and uses greenback to call aget_connection().
    """
    # Expected connection to be returned by the async fallback path.
    expected_conn = Connection(
        conn_id="databricks_default",
        conn_type="databricks",
        host="example.databricks.com",
    )
    # Simulate the specific RuntimeError message that triggers greenback fallback.
    mock_supervisor_comms.send.side_effect = RuntimeError(
        "You cannot use AsyncToSync in the same thread as an async event loop"
    )
    # Mock the greenback and asyncio modules that are imported inside the exception handler
    mocker.patch("greenback.has_portal", return_value=True)
    mocker.patch("asyncio.current_task")
    # Mock greenback.await_ to actually await the coroutine it receives.
    # This prevents Python 3.13 RuntimeWarning about unawaited coroutines.
    import asyncio

    def greenback_await_side_effect(coro):
        # Drive the coroutine to completion on a throwaway event loop.
        loop = asyncio.new_event_loop()
        try:
            return loop.run_until_complete(coro)
        finally:
            loop.close()

    mock_greenback_await = mocker.patch("greenback.await_", side_effect=greenback_await_side_effect)

    # Mock aget_connection to return the expected connection directly.
    # We need to mock this because the real aget_connection would try to
    # use SUPERVISOR_COMMS.asend which is not set up for this test.
    async def mock_aget_connection(self, conn_id):
        return expected_conn

    mocker.patch.object(ExecutionAPISecretsBackend, "aget_connection", mock_aget_connection)

    backend = ExecutionAPISecretsBackend()
    conn = backend.get_connection("databricks_default")

    # Verify we got the expected connection
    assert conn is not None
    assert conn.conn_id == "databricks_default"
    # Verify the greenback fallback was called
    mock_greenback_await.assert_called_once()
    # Verify send was attempted first (and raised RuntimeError)
    mock_supervisor_comms.send.assert_called_once()
class TestContextDetection:
    """Test context detection in ensure_secrets_backend_loaded."""

    def test_client_context_with_supervisor_comms(self, mock_supervisor_comms):
        """Client context: SUPERVISOR_COMMS set → uses worker chain."""
        from airflow.sdk.execution_time.supervisor import ensure_secrets_backend_loaded

        loaded = [type(backend).__name__ for backend in ensure_secrets_backend_loaded()]

        assert "ExecutionAPISecretsBackend" in loaded
        assert "MetastoreBackend" not in loaded

    def test_server_context_with_env_var(self, monkeypatch):
        """Server context: env var set → uses server chain."""
        import sys

        from airflow.sdk.execution_time.supervisor import ensure_secrets_backend_loaded

        monkeypatch.setenv("_AIRFLOW_PROCESS_CONTEXT", "server")
        # Ensure SUPERVISOR_COMMS is not available
        if "airflow.sdk.execution_time.task_runner" in sys.modules:
            monkeypatch.delitem(sys.modules, "airflow.sdk.execution_time.task_runner")

        loaded = [type(backend).__name__ for backend in ensure_secrets_backend_loaded()]

        assert "MetastoreBackend" in loaded
        assert "ExecutionAPISecretsBackend" not in loaded

    def test_fallback_context_no_markers(self, monkeypatch):
        """Fallback context: no SUPERVISOR_COMMS, no env var → only env vars + external."""
        import sys

        from airflow.sdk.execution_time.supervisor import ensure_secrets_backend_loaded

        # Ensure no SUPERVISOR_COMMS
        if "airflow.sdk.execution_time.task_runner" in sys.modules:
            monkeypatch.delitem(sys.modules, "airflow.sdk.execution_time.task_runner")
        # Ensure no env var
        monkeypatch.delenv("_AIRFLOW_PROCESS_CONTEXT", raising=False)

        loaded = [type(backend).__name__ for backend in ensure_secrets_backend_loaded()]

        assert "EnvironmentVariablesBackend" in loaded
        assert "MetastoreBackend" not in loaded
        assert "ExecutionAPISecretsBackend" not in loaded
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/tests/task_sdk/execution_time/test_secrets.py",
"license": "Apache License 2.0",
"lines": 174,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/google/src/airflow/providers/google/cloud/bundles/gcs.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from pathlib import Path
import structlog
from google.api_core.exceptions import NotFound
from airflow.dag_processing.bundles.base import BaseDagBundle
from airflow.providers.common.compat.sdk import AirflowException
from airflow.providers.google.cloud.hooks.gcs import GCSHook
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
class GCSDagBundle(BaseDagBundle):
    """
    GCS Dag bundle - exposes a directory in GCS as a Dag bundle.

    This allows Airflow to load Dags directly from a GCS bucket.

    :param gcp_conn_id: Airflow connection ID for GCS. Defaults to GoogleBaseHook.default_conn_name.
    :param bucket_name: The name of the GCS bucket containing the Dag files.
    :param prefix: Optional subdirectory within the GCS bucket where the Dags are stored.
        If None, Dags are assumed to be at the root of the bucket (Optional).
    """

    # Versioned bundles are not supported for GCS; get_current_version() always returns None.
    supports_versioning = False

    def __init__(
        self,
        *,
        gcp_conn_id: str = GoogleBaseHook.default_conn_name,
        bucket_name: str,
        prefix: str = "",
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.gcp_conn_id = gcp_conn_id
        self.bucket_name = bucket_name
        self.prefix = prefix
        # Local path where GCS Dags are downloaded
        self.gcs_dags_dir: Path = self.base_dir

        log = structlog.get_logger(__name__)
        # Bind the bundle's identity so every log line carries this context.
        self._log = log.bind(
            bundle_name=self.name,
            version=self.version,
            bucket_name=self.bucket_name,
            prefix=self.prefix,
            gcp_conn_id=self.gcp_conn_id,
        )
        # Created lazily on first access via the `gcs_hook` property.
        self._gcs_hook: GCSHook | None = None

    def _initialize(self):
        """Prepare the local directory, validate the remote location, then download the Dags.

        Runs under the bundle lock so concurrent initializations cannot race.

        :raises NotADirectoryError: if the local Dags path exists but is not a directory.
        :raises ValueError: if the bucket, or a non-empty ``prefix``, does not exist in GCS.
        """
        with self.lock():
            if not self.gcs_dags_dir.exists():
                self._log.info("Creating local Dags directory: %s", self.gcs_dags_dir)
                os.makedirs(self.gcs_dags_dir)

            if not self.gcs_dags_dir.is_dir():
                raise NotADirectoryError(f"Local Dags path: {self.gcs_dags_dir} is not a directory.")

            try:
                self.gcs_hook.get_bucket(bucket_name=self.bucket_name)
            except NotFound:
                raise ValueError(f"GCS bucket '{self.bucket_name}' does not exist.")

            if self.prefix:
                # don't check when prefix is ""
                if not self.gcs_hook.list(bucket_name=self.bucket_name, prefix=self.prefix):
                    raise ValueError(f"GCS prefix 'gs://{self.bucket_name}/{self.prefix}' does not exist.")
            self.refresh()

    def initialize(self) -> None:
        """Validate the remote location and perform the initial download, then run base setup."""
        self._initialize()
        super().initialize()

    @property
    def gcs_hook(self):
        """Return the lazily created GCSHook for this bundle.

        NOTE(review): if hook creation raises AirflowException, the failure is only
        logged and this property returns None, so later attribute access on the hook
        would raise AttributeError — confirm this best-effort behavior is intended.
        """
        if self._gcs_hook is None:
            try:
                self._gcs_hook: GCSHook = GCSHook(gcp_conn_id=self.gcp_conn_id)  # Initialize GCS hook.
            except AirflowException as e:
                self._log.warning("Could not create GCSHook for connection %s: %s", self.gcp_conn_id, e)
        return self._gcs_hook

    def __repr__(self):
        return (
            f"<GCSDagBundle("
            f"name={self.name!r}, "
            f"bucket_name={self.bucket_name!r}, "
            f"prefix={self.prefix!r}, "
            f"version={self.version!r}"
            f")>"
        )

    def get_current_version(self) -> str | None:
        """Return the current version of the Dag bundle. Currently not supported."""
        return None

    @property
    def path(self) -> Path:
        """Return the local path to the Dag files."""
        return self.gcs_dags_dir  # Path where Dags are downloaded.

    def refresh(self) -> None:
        """Refresh the Dag bundle by re-downloading the Dags from GCS.

        :raises ValueError: if a specific version is set; versioned refresh is unsupported.
        """
        if self.version:
            raise ValueError("Refreshing a specific version is not supported")

        with self.lock():
            self._log.debug(
                "Downloading Dags from gs://%s/%s to %s", self.bucket_name, self.prefix, self.gcs_dags_dir
            )
            # Mirror the remote prefix locally; delete_stale removes files gone from GCS.
            self.gcs_hook.sync_to_local_dir(
                bucket_name=self.bucket_name,
                prefix=self.prefix,
                local_dir=self.gcs_dags_dir,
                delete_stale=True,
            )

    def view_url(self, version: str | None = None) -> str | None:
        """
        Return a URL for viewing the Dags in GCS. Currently, versioning is not supported.

        This method is deprecated and will be removed when the minimum supported Airflow version is 3.1.
        Use `view_url_template` instead.
        """
        # NOTE: the `version` argument is ignored; view_url_template() checks self.version instead.
        return self.view_url_template()

    def view_url_template(self) -> str | None:
        """Return a URL for viewing the Dags in GCS. Currently, versioning is not supported."""
        if self.version:
            raise ValueError("GCS url with version is not supported")
        if hasattr(self, "_view_url_template") and self._view_url_template:
            # Because we use this method in the view_url method, we need to handle
            # backward compatibility for Airflow versions that doesn't have the
            # _view_url_template attribute. Should be removed when we drop support for Airflow 3.0
            return self._view_url_template
        # https://console.cloud.google.com/storage/browser/<bucket-name>/<prefix>
        url = f"https://console.cloud.google.com/storage/browser/{self.bucket_name}"
        if self.prefix:
            url += f"/{self.prefix}"
        return url
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/src/airflow/providers/google/cloud/bundles/gcs.py",
"license": "Apache License 2.0",
"lines": 136,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/google/tests/unit/google/cloud/bundles/test_gcs.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import MagicMock, PropertyMock, call, patch
import pytest
from google.api_core.exceptions import NotFound
import airflow.version
from airflow.models import Connection
from tests_common.test_utils.config import conf_vars
if airflow.version.version.strip().startswith("3"):
from airflow.providers.google.cloud.bundles.gcs import GCSDagBundle
GCP_CONN_ID = "gcs_dags_connection"
GCS_BUCKET_NAME = "my-airflow-dags-bucket"
GCS_BUCKET_PREFIX = "project1/dags"
@pytest.fixture(autouse=True)
def bundle_temp_dir(tmp_path):
    """Point the Dag bundle storage path at a per-test temporary directory."""
    overrides = {("dag_processor", "dag_bundle_storage_path"): str(tmp_path)}
    with conf_vars(overrides):
        yield tmp_path
@pytest.mark.skipif(not airflow.version.version.strip().startswith("3"), reason="Airflow >=3.0.0 test")
class TestGCSDagBundle:
    """Unit tests for GCSDagBundle: URL generation, validation, and refresh behavior."""

    @pytest.fixture(autouse=True)
    def setup_connections(self, create_connection_without_db):
        # Provide the GCP connection the bundle's hook will look up.
        create_connection_without_db(
            Connection(
                conn_id=GCP_CONN_ID,
                conn_type="google_cloud_platform",
            )
        )

    def test_view_url_generates_console_url(self):
        """The deprecated view_url() builds a Cloud Console browser URL."""
        bundle = GCSDagBundle(
            name="test", gcp_conn_id=GCP_CONN_ID, prefix=GCS_BUCKET_PREFIX, bucket_name=GCS_BUCKET_NAME
        )
        url: str = bundle.view_url()
        assert (
            url == f"https://console.cloud.google.com/storage/browser/{GCS_BUCKET_NAME}/{GCS_BUCKET_PREFIX}"
        )

    def test_view_url_template_generates_console_url(self):
        """view_url_template() builds the same Cloud Console browser URL."""
        bundle = GCSDagBundle(
            name="test", gcp_conn_id=GCP_CONN_ID, prefix=GCS_BUCKET_PREFIX, bucket_name=GCS_BUCKET_NAME
        )
        url: str = bundle.view_url_template()
        assert (
            url == f"https://console.cloud.google.com/storage/browser/{GCS_BUCKET_NAME}/{GCS_BUCKET_PREFIX}"
        )

    def test_supports_versioning(self):
        """Versioning is unsupported: refresh and view_url must reject a set version."""
        bundle = GCSDagBundle(
            name="test", gcp_conn_id=GCP_CONN_ID, prefix=GCS_BUCKET_PREFIX, bucket_name=GCS_BUCKET_NAME
        )
        assert GCSDagBundle.supports_versioning is False
        # set version, it's not supported
        bundle.version = "test_version"
        with pytest.raises(ValueError, match="Refreshing a specific version is not supported"):
            bundle.refresh()
        with pytest.raises(ValueError, match="GCS url with version is not supported"):
            bundle.view_url("test_version")

    def test_local_dags_path_is_not_a_directory(self, bundle_temp_dir):
        """Initialization fails when the local bundle path exists but is a file."""
        bundle_name = "test"
        # Create a file where the directory should be
        file_path = bundle_temp_dir / bundle_name
        file_path.touch()
        bundle = GCSDagBundle(
            name=bundle_name,
            gcp_conn_id=GCP_CONN_ID,
            prefix="project1_dags",
            bucket_name="airflow_dags",
        )
        with pytest.raises(NotADirectoryError, match=f"Local Dags path: {file_path} is not a directory."):
            bundle.initialize()

    def test_correct_bundle_path_used(self):
        """The bundle downloads into its base_dir."""
        bundle = GCSDagBundle(
            name="test", gcp_conn_id=GCP_CONN_ID, prefix="project1_dags", bucket_name="airflow_dags"
        )
        assert str(bundle.base_dir) == str(bundle.gcs_dags_dir)

    @patch("airflow.providers.google.cloud.bundles.gcs.GCSDagBundle.gcs_hook", new_callable=PropertyMock)
    def test_gcs_bucket_and_prefix_validated(self, mock_gcs_hook_property):
        """initialize() validates bucket existence, then (non-empty) prefix existence."""
        mock_hook = MagicMock()
        mock_gcs_hook_property.return_value = mock_hook

        # Missing bucket -> ValueError naming the bucket.
        mock_hook.get_bucket.side_effect = NotFound("Bucket not found")
        bundle = GCSDagBundle(
            name="test",
            gcp_conn_id=GCP_CONN_ID,
            prefix="project1_dags",
            bucket_name="non-existing-bucket",
        )
        with pytest.raises(ValueError, match="GCS bucket 'non-existing-bucket' does not exist."):
            bundle.initialize()
        mock_hook.get_bucket.assert_called_once_with(bucket_name="non-existing-bucket")

        # Bucket exists but prefix has no objects -> ValueError naming the gs:// path.
        mock_hook.get_bucket.side_effect = None
        mock_hook.get_bucket.return_value = True
        mock_hook.list.return_value = []
        bundle = GCSDagBundle(
            name="test",
            gcp_conn_id=GCP_CONN_ID,
            prefix="non-existing-prefix",
            bucket_name=GCS_BUCKET_NAME,
        )
        with pytest.raises(
            ValueError,
            match=f"GCS prefix 'gs://{GCS_BUCKET_NAME}/non-existing-prefix' does not exist.",
        ):
            bundle.initialize()
        mock_hook.list.assert_called_once_with(bucket_name=GCS_BUCKET_NAME, prefix="non-existing-prefix")

        mock_hook.list.return_value = ["some/object"]
        bundle = GCSDagBundle(
            name="test",
            gcp_conn_id=GCP_CONN_ID,
            prefix=GCS_BUCKET_PREFIX,
            bucket_name=GCS_BUCKET_NAME,
        )
        # initialize succeeds, with correct prefix and bucket
        bundle.initialize()

        mock_hook.list.reset_mock()
        bundle = GCSDagBundle(
            name="test",
            gcp_conn_id=GCP_CONN_ID,
            prefix="",
            bucket_name=GCS_BUCKET_NAME,
        )
        # initialize succeeds, with empty prefix
        bundle.initialize()
        # Empty prefix skips the existence check entirely.
        mock_hook.list.assert_not_called()

    @patch("airflow.providers.google.cloud.bundles.gcs.GCSDagBundle.gcs_hook", new_callable=PropertyMock)
    def test_refresh(self, mock_gcs_hook_property):
        """initialize() downloads once; each refresh() downloads again with the same args."""
        mock_hook = MagicMock()
        mock_gcs_hook_property.return_value = mock_hook
        bundle = GCSDagBundle(
            name="test",
            gcp_conn_id=GCP_CONN_ID,
            prefix=GCS_BUCKET_PREFIX,
            bucket_name=GCS_BUCKET_NAME,
        )
        bundle._log.debug = MagicMock()
        download_log_call = call(
            "Downloading Dags from gs://%s/%s to %s", GCS_BUCKET_NAME, GCS_BUCKET_PREFIX, bundle.gcs_dags_dir
        )
        sync_call = call(
            bucket_name=GCS_BUCKET_NAME,
            prefix=GCS_BUCKET_PREFIX,
            local_dir=bundle.gcs_dags_dir,
            delete_stale=True,
        )
        bundle.initialize()
        assert bundle._log.debug.call_count == 1
        assert bundle._log.debug.call_args_list == [download_log_call]
        assert mock_hook.sync_to_local_dir.call_count == 1
        assert mock_hook.sync_to_local_dir.call_args_list == [sync_call]
        bundle.refresh()
        assert bundle._log.debug.call_count == 2
        assert bundle._log.debug.call_args_list == [download_log_call, download_log_call]
        assert mock_hook.sync_to_local_dir.call_count == 2
        assert mock_hook.sync_to_local_dir.call_args_list == [sync_call, sync_call]

    @patch("airflow.providers.google.cloud.bundles.gcs.GCSDagBundle.gcs_hook", new_callable=PropertyMock)
    def test_refresh_without_prefix(self, mock_gcs_hook_property):
        """Same as test_refresh but with the default (empty) prefix."""
        mock_hook = MagicMock()
        mock_gcs_hook_property.return_value = mock_hook
        bundle = GCSDagBundle(
            name="test",
            gcp_conn_id=GCP_CONN_ID,
            bucket_name=GCS_BUCKET_NAME,
        )
        bundle._log.debug = MagicMock()
        download_log_call = call(
            "Downloading Dags from gs://%s/%s to %s", GCS_BUCKET_NAME, "", bundle.gcs_dags_dir
        )
        sync_call = call(
            bucket_name=GCS_BUCKET_NAME, prefix="", local_dir=bundle.gcs_dags_dir, delete_stale=True
        )
        assert bundle.prefix == ""
        bundle.initialize()
        assert bundle._log.debug.call_count == 1
        assert bundle._log.debug.call_args_list == [download_log_call]
        assert mock_hook.sync_to_local_dir.call_count == 1
        assert mock_hook.sync_to_local_dir.call_args_list == [sync_call]
        bundle.refresh()
        assert bundle._log.debug.call_count == 2
        assert bundle._log.debug.call_args_list == [download_log_call, download_log_call]
        assert mock_hook.sync_to_local_dir.call_count == 2
        assert mock_hook.sync_to_local_dir.call_args_list == [sync_call, sync_call]
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/tests/unit/google/cloud/bundles/test_gcs.py",
"license": "Apache License 2.0",
"lines": 192,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/tests/unit/api_fastapi/execution_api/versions/v2025_09_23/test_task_instances.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow._shared.timezones import timezone
from airflow.utils.state import DagRunState, State
from tests_common.test_utils.db import clear_db_runs
pytestmark = pytest.mark.db_test
@pytest.fixture
def ver_client(client):
    """Return the shared test client pinned to API version 2025-09-23."""
    pinned = client
    pinned.headers["Airflow-API-Version"] = "2025-09-23"
    return pinned
class TestTIRunStateV20250923:
    """Test that API version 2025-09-23 does NOT include triggering_user_name field."""

    def setup_method(self):
        clear_db_runs()

    def teardown_method(self):
        clear_db_runs()

    def test_ti_run_excludes_triggering_user_name(
        self,
        ver_client,
        session,
        create_task_instance,
        time_machine,
    ):
        """
        Test that the triggering_user_name field is NOT present in API version 2025-09-23.

        This field was added in version 2025-10-10, so older API clients should not
        receive it in the response.
        """
        instant_str = "2024-09-30T12:00:00Z"
        frozen_now = timezone.parse(instant_str)
        time_machine.move_to(frozen_now, tick=False)

        ti = create_task_instance(
            task_id="test_triggering_user_exclusion",
            state=State.QUEUED,
            dagrun_state=DagRunState.RUNNING,
            session=session,
            start_date=frozen_now,
        )
        session.commit()

        payload = {
            "state": "running",
            "hostname": "test-hostname",
            "unixname": "test-user",
            "pid": 12345,
            "start_date": instant_str,
        }
        response = ver_client.patch(f"/execution/task-instances/{ti.id}/run", json=payload)

        assert response.status_code == 200
        body = response.json()
        # Verify the dag_run is present
        assert "dag_run" in body
        run_payload = body["dag_run"]
        # The triggering_user_name field should NOT be present in this API version
        assert "triggering_user_name" not in run_payload, (
            "triggering_user_name should not be present in API version 2025-09-23"
        )
        # Verify other expected fields are still present
        assert run_payload["dag_id"] == ti.dag_id
        assert run_payload["run_id"] == "test"
        assert run_payload["state"] == "running"
        assert run_payload["conf"] == {}
class TestTIRunConfV20250923:
    """Test that API version 2025-09-23 converts NULL conf to empty dict."""

    def setup_method(self):
        clear_db_runs()

    def teardown_method(self):
        clear_db_runs()

    def test_ti_run_null_conf_converted_to_dict(
        self,
        ver_client,
        session,
        create_task_instance,
    ):
        """
        Test that NULL conf is converted to empty dict in API version 2025-09-23.

        In version 2025-10-10, the conf field became nullable to match database schema.
        Older API clients (2025-09-23 and earlier) should receive an empty dict instead
        of None for backward compatibility.
        """
        ti = create_task_instance(
            task_id="test_ti_run_null_conf_v2",
            state=State.QUEUED,
            dagrun_state=DagRunState.RUNNING,
            session=session,
        )
        # Set conf to NULL to simulate Airflow 2.x upgrade or offline migration
        ti.dag_run.conf = None
        session.commit()

        payload = {
            "state": "running",
            "pid": 100,
            "hostname": "test-hostname",
            "unixname": "test-user",
            "start_date": timezone.utcnow().isoformat(),
        }
        response = ver_client.patch(f"/execution/task-instances/{ti.id}/run", json=payload)

        assert response.status_code == 200
        body = response.json()
        assert "dag_run" in body
        run_payload = body["dag_run"]
        # In older API versions, None should be converted to empty dict
        assert run_payload["conf"] == {}, "NULL conf should be converted to empty dict in API version 2025-09-23"
        # Verify other expected fields
        assert run_payload["dag_id"] == ti.dag_id
        assert run_payload["run_id"] == "test"
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/api_fastapi/execution_api/versions/v2025_09_23/test_task_instances.py",
"license": "Apache License 2.0",
"lines": 126,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/tests/unit/dag_processing/test_dagbag.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import contextlib
import inspect
import logging
import os
import pathlib
import re
import sys
import textwrap
import warnings
import zipfile
from copy import deepcopy
from datetime import datetime, timedelta, timezone
from unittest import mock
from unittest.mock import patch
import pytest
from sqlalchemy import select
from airflow import settings
from airflow.dag_processing.dagbag import (
BundleDagBag,
DagBag,
_capture_with_reraise,
_validate_executor_fields,
)
from airflow.exceptions import UnknownExecutorException
from airflow.executors.executor_loader import ExecutorLoader
from airflow.models.dag import DagModel
from airflow.models.dagwarning import DagWarning, DagWarningType
from airflow.models.serialized_dag import SerializedDagModel
from airflow.sdk import DAG, BaseOperator
from tests_common.pytest_plugin import AIRFLOW_ROOT_PATH
from tests_common.test_utils import db
from tests_common.test_utils.config import conf_vars
from unit import cluster_policies
from unit.models import TEST_DAGS_FOLDER
pytestmark = pytest.mark.db_test
example_dags_folder = AIRFLOW_ROOT_PATH / "airflow-core" / "src" / "airflow" / "example_dags" / "standard"
PY311 = sys.version_info >= (3, 11)
PY313 = sys.version_info >= (3, 13)
# Include the words "airflow" and "dag" in the file contents,
# tricking airflow into thinking these
# files contain a DAG (otherwise Airflow will skip them)
INVALID_DAG_WITH_DEPTH_FILE_CONTENTS = "def something():\n return airflow_DAG\nsomething()"
def db_clean_up():
    """Reset all DAG-related database state touched by these tests."""
    for cleaner in (
        db.clear_db_dags,
        db.clear_db_runs,
        db.clear_db_serialized_dags,
        db.clear_dag_specific_permissions,
    ):
        cleaner()
class TestValidateExecutorFields:
"""Comprehensive tests for _validate_executor_fields function."""
@patch.object(ExecutorLoader, "lookup_executor_name_by_str")
def test_multi_team_disabled_ignores_bundle_name(self, mock_lookup):
    """Test that when multi_team is disabled, bundle_name is ignored and no team lookup occurs."""
    with DAG("test-dag", schedule=None) as dag:
        BaseOperator(task_id="t1", executor="test.executor")

    # multi_team disabled by default, no need to add conf_vars
    # A bundle_name is still supplied here to prove it has no effect.
    _validate_executor_fields(dag, bundle_name="some_bundle")

    # Should call ExecutorLoader without team_name (defaults to None)
    mock_lookup.assert_called_once_with("test.executor", team_name=None, validate_teams=False)
@patch("airflow.dag_processing.bundles.manager.DagBundlesManager")
@patch.object(ExecutorLoader, "lookup_executor_name_by_str")
def test_multi_team_enabled_bundle_exists_with_team(self, mock_lookup, mock_manager_class):
    """Test successful team lookup when bundle exists and has team_name."""
    # Setup mock bundle manager: the bundle config maps the bundle to a team.
    mock_bundle_config = mock.MagicMock()
    mock_bundle_config.team_name = "test_team"
    mock_manager = mock_manager_class.return_value
    mock_manager._bundle_config = {"test_bundle": mock_bundle_config}

    with DAG("test-dag", schedule=None) as dag:
        BaseOperator(task_id="t1", executor="team.executor")

    with conf_vars({("core", "multi_team"): "True"}):
        _validate_executor_fields(dag, bundle_name="test_bundle")

    # Should call ExecutorLoader with team from bundle config
    mock_lookup.assert_called_once_with("team.executor", team_name="test_team", validate_teams=False)
@patch("airflow.dag_processing.bundles.manager.DagBundlesManager")
@patch.object(ExecutorLoader, "lookup_executor_name_by_str")
def test_multi_team_enabled_bundle_exists_no_team(self, mock_lookup, mock_manager_class):
    """Test when bundle exists but has no team_name (None or empty)."""
    mock_bundle_config = mock.MagicMock()
    mock_bundle_config.team_name = None  # No team associated
    mock_manager = mock_manager_class.return_value
    mock_manager._bundle_config = {"test_bundle": mock_bundle_config}

    with DAG("test-dag", schedule=None) as dag:
        BaseOperator(task_id="t1", executor="test.executor")

    with conf_vars({("core", "multi_team"): "True"}):
        _validate_executor_fields(dag, bundle_name="test_bundle")

    # A team-less bundle falls through to a plain (team_name=None) lookup.
    mock_lookup.assert_called_once_with("test.executor", team_name=None, validate_teams=False)
@patch.object(ExecutorLoader, "lookup_executor_name_by_str")
def test_multiple_tasks_with_executors(self, mock_lookup):
    """Test that all tasks with executors are validated."""
    with DAG("test-dag", schedule=None) as dag:
        BaseOperator(task_id="t1", executor="executor1")
        BaseOperator(task_id="t2", executor="executor2")
        BaseOperator(task_id="t3")  # No executor, should be skipped

    with conf_vars({("core", "multi_team"): "True"}):
        _validate_executor_fields(dag)

    # Should be called for each task with executor (t3 contributes no call).
    assert mock_lookup.call_count == 2
    mock_lookup.assert_any_call("executor1", team_name=None, validate_teams=False)
    mock_lookup.assert_any_call("executor2", team_name=None, validate_teams=False)
@patch("airflow.dag_processing.bundles.manager.DagBundlesManager")
@patch.object(ExecutorLoader, "lookup_executor_name_by_str")
def test_executor_validation_failure_with_team(self, mock_lookup, mock_manager_class):
    """Test executor validation failure when team is associated (team-specific error)."""
    mock_bundle_config = mock.MagicMock()
    mock_bundle_config.team_name = "test_team"
    mock_manager = mock_manager_class.return_value
    mock_manager._bundle_config = {"test_bundle": mock_bundle_config}

    # ExecutorLoader raises exception (for both the team and any global lookup).
    mock_lookup.side_effect = UnknownExecutorException("Executor not found")

    with DAG("test-dag", schedule=None) as dag:
        BaseOperator(task_id="task1", executor="invalid.executor")

    # The error message must name the team so the user knows which config to fix.
    with conf_vars({("core", "multi_team"): "True"}):
        with pytest.raises(
            UnknownExecutorException,
            match=re.escape(
                "Task 'task1' specifies executor 'invalid.executor', which is not available "
                "for team 'test_team' (the team associated with DAG 'test-dag') or as a global executor. "
                "Make sure 'invalid.executor' is configured for team 'test_team' or globally in your "
                "[core] executors configuration, or update the task's executor to use one of the "
                "configured executors for team 'test_team' or available global executors."
            ),
        ):
            _validate_executor_fields(dag, bundle_name="test_bundle")
@patch.object(ExecutorLoader, "lookup_executor_name_by_str")
def test_executor_validation_failure_no_team(self, mock_lookup):
    """Test executor validation failure when no team is associated (generic error)."""
    mock_lookup.side_effect = UnknownExecutorException("Executor not found")

    with DAG("test-dag", schedule=None) as dag:
        BaseOperator(task_id="task1", executor="invalid.executor")

    # Without a bundle/team, the error message is the generic (non-team) variant.
    with conf_vars({("core", "multi_team"): "True"}):
        with pytest.raises(
            UnknownExecutorException,
            match=re.escape(
                "Task 'task1' specifies executor 'invalid.executor', which is not available. "
                "Make sure it is listed in your [core] executors configuration, or update the task's "
                "executor to use one of the configured executors."
            ),
        ):
            _validate_executor_fields(dag)  # No bundle_name
@patch("airflow.dag_processing.bundles.manager.DagBundlesManager")
@patch.object(ExecutorLoader, "lookup_executor_name_by_str")
def test_global_executor_fallback_success(self, mock_lookup, mock_manager_class):
    """Test that team-specific executor failure falls back to global executor successfully."""
    mock_bundle_config = mock.MagicMock()
    mock_bundle_config.team_name = "test_team"
    mock_manager = mock_manager_class.return_value
    mock_manager._bundle_config = {"test_bundle": mock_bundle_config}

    # First call (team-specific) fails, second call (global) succeeds
    mock_lookup.side_effect = [UnknownExecutorException("Team executor not found"), None]

    with DAG("test-dag", schedule=None) as dag:
        BaseOperator(task_id="task1", executor="global.executor")

    with conf_vars({("core", "multi_team"): "True"}):
        # Should not raise exception due to global fallback
        _validate_executor_fields(dag, bundle_name="test_bundle")

    # Should call lookup twice: first for team, then for global
    assert mock_lookup.call_count == 2
    mock_lookup.assert_any_call("global.executor", team_name="test_team", validate_teams=False)
    mock_lookup.assert_any_call("global.executor", team_name=None, validate_teams=False)
@patch("airflow.dag_processing.bundles.manager.DagBundlesManager")
@patch.object(ExecutorLoader, "lookup_executor_name_by_str")
def test_global_executor_fallback_failure(self, mock_lookup, mock_manager_class):
    """Test that when both team-specific and global executors fail, appropriate error is raised."""
    # Bundle "test_bundle" is associated with team "test_team".
    mock_bundle_config = mock.MagicMock()
    mock_bundle_config.team_name = "test_team"
    mock_manager = mock_manager_class.return_value
    mock_manager._bundle_config = {"test_bundle": mock_bundle_config}
    # Both calls fail
    mock_lookup.side_effect = UnknownExecutorException("Executor not found")
    with DAG("test-dag", schedule=None) as dag:
        BaseOperator(task_id="task1", executor="unknown.executor")
    with conf_vars({("core", "multi_team"): "True"}):
        # The error message must mention both the team and the global fallback.
        with pytest.raises(
            UnknownExecutorException,
            match=re.escape(
                "Task 'task1' specifies executor 'unknown.executor', which is not available "
                "for team 'test_team' (the team associated with DAG 'test-dag') or as a global executor. "
                "Make sure 'unknown.executor' is configured for team 'test_team' or globally in your "
                "[core] executors configuration, or update the task's executor to use one of the "
                "configured executors for team 'test_team' or available global executors."
            ),
        ):
            _validate_executor_fields(dag, bundle_name="test_bundle")
    # Should call lookup twice: first for team, then for global fallback
    assert mock_lookup.call_count == 2
    mock_lookup.assert_any_call("unknown.executor", team_name="test_team", validate_teams=False)
    mock_lookup.assert_any_call("unknown.executor", team_name=None, validate_teams=False)
@patch("airflow.dag_processing.bundles.manager.DagBundlesManager")
@patch.object(ExecutorLoader, "lookup_executor_name_by_str")
def test_team_specific_executor_success_no_fallback(self, mock_lookup, mock_manager_class):
    """Test that when team-specific executor succeeds, global fallback is not attempted."""
    # Bundle "test_bundle" is associated with team "test_team".
    mock_bundle_config = mock.MagicMock()
    mock_bundle_config.team_name = "test_team"
    mock_manager = mock_manager_class.return_value
    mock_manager._bundle_config = {"test_bundle": mock_bundle_config}
    # First call (team-specific) succeeds
    mock_lookup.return_value = None
    with DAG("test-dag", schedule=None) as dag:
        BaseOperator(task_id="task1", executor="team.executor")
    with conf_vars({("core", "multi_team"): "True"}):
        _validate_executor_fields(dag, bundle_name="test_bundle")
    # Should only call lookup once for team-specific executor
    mock_lookup.assert_called_once_with("team.executor", team_name="test_team", validate_teams=False)
@pytest.mark.usefixtures("clean_executor_loader")
def test_validate_executor_fields_does_not_access_database(self):
    """Regression test: executor validation during DAG parsing must not access the database.

    In Airflow 3, DAG parsing happens in isolated subprocesses where database access
    is blocked via block_orm_access(). The _validate_executor_fields function must
    validate executors using only local config (validate_teams=False), without querying
    the database to verify team names exist. If validate_teams were True, the call chain
    would reach Team.get_all_team_names() which does a DB query, raising RuntimeError
    in the parsing subprocess.
    """
    with DAG("test-dag", schedule=None) as dag:
        BaseOperator(task_id="t1", executor="LocalExecutor")
    with conf_vars(
        {
            ("core", "executor"): "LocalExecutor;team1=CeleryExecutor",
            ("core", "multi_team"): "True",
        }
    ):
        # Patch _validate_teams_exist_in_database to raise RuntimeError,
        # simulating what happens in the DAG parsing subprocess where DB is blocked.
        # If the fix is correct, this should never be called.
        with patch.object(
            ExecutorLoader,
            "_validate_teams_exist_in_database",
            side_effect=RuntimeError("Direct database access via the ORM is not allowed in Airflow 3.0"),
        ):
            # Should succeed without hitting the database
            _validate_executor_fields(dag)
def test_validate_executor_field_executor_not_configured():
    """An executor string that is configured nowhere raises UnknownExecutorException."""
    expected_message = (
        "Task 't1' specifies executor 'test.custom.executor', which is not available. "
        "Make sure it is listed in your [core] executors configuration, or update the task's "
        "executor to use one of the configured executors."
    )
    with DAG("test-dag", schedule=None) as dag:
        BaseOperator(task_id="t1", executor="test.custom.executor")
    with pytest.raises(UnknownExecutorException, match=re.escape(expected_message)):
        _validate_executor_fields(dag)
def test_validate_executor_field():
    """Validation passes when the executor lookup succeeds (lookup is mocked out)."""
    with DAG("test-dag", schedule=None) as dag:
        BaseOperator(task_id="t1", executor="test.custom.executor")
    # A non-raising lookup means the executor is considered available.
    mocked_lookup = patch.object(ExecutorLoader, "lookup_executor_name_by_str")
    with mocked_lookup:
        _validate_executor_fields(dag)
class TestDagBag:
def setup_class(self):
    # Start from a clean database so leftover rows from other test modules
    # cannot affect DagBag-related assertions.
    db_clean_up()
def teardown_class(self):
    # Leave the database clean for whatever test class runs next.
    db_clean_up()
def test_dagbag_with_bundle_name(self, tmp_path):
    """DagBag stores the bundle_name it was constructed with (defaulting to None)."""
    folder = os.fspath(tmp_path)
    named_bag = DagBag(dag_folder=folder, include_examples=False, bundle_name="test_bundle")
    assert named_bag.bundle_name == "test_bundle"
    # When bundle_name is omitted, the attribute defaults to None.
    unnamed_bag = DagBag(dag_folder=folder, include_examples=False)
    assert unnamed_bag.bundle_name is None
def test_get_existing_dag(self, tmp_path):
    """
    Test that we're able to parse some example DAGs and retrieve them
    """
    dagbag = DagBag(dag_folder=os.fspath(tmp_path), include_examples=True, bundle_name="test_bundle")
    some_expected_dag_ids = ["example_bash_operator", "example_branch_operator"]
    for dag_id in some_expected_dag_ids:
        dag = dagbag.get_dag(dag_id)
        assert dag is not None
        assert dag_id == dag.dag_id
    # The example folder ships more than these two; a loose lower bound keeps
    # the test robust against examples being added or removed.
    assert dagbag.size() >= 7
def test_get_non_existing_dag(self, tmp_path):
    """Looking up an unknown dag id returns None instead of raising."""
    dagbag = DagBag(dag_folder=os.fspath(tmp_path), include_examples=False)
    assert dagbag.get_dag("non_existing_dag_id") is None
def test_serialized_dag_not_existing_doesnt_raise(self, tmp_path, session):
    """
    Querying SerializedDagModel for a non-existing dag id yields no row
    and does not raise.
    """
    non_existing_dag_id = "non_existing_dag_id"
    assert session.scalar(select(True).where(SerializedDagModel.dag_id == non_existing_dag_id)) is None
def test_dont_load_example(self, tmp_path):
    """
    Example DAGs are not loaded when include_examples=False, so an empty
    folder yields an empty DagBag.
    """
    dagbag = DagBag(dag_folder=os.fspath(tmp_path), include_examples=False)
    assert dagbag.size() == 0
def test_safe_mode_heuristic_match(self, tmp_path):
    """
    With safe mode enabled, a file matching the discovery heuristics
    should be discovered.
    """
    path = tmp_path / "testfile.py"
    # Safe mode looks for both "airflow" and "DAG" substrings in the file.
    path.write_text("# airflow\n# DAG")
    with conf_vars({("core", "dags_folder"): os.fspath(path.parent)}):
        dagbag = DagBag(include_examples=False, safe_mode=True)
    assert len(dagbag.dagbag_stats) == 1
    assert dagbag.dagbag_stats[0].file == path.name
def test_safe_mode_heuristic_mismatch(self, tmp_path):
    """
    With safe mode enabled, a file not matching the discovery heuristics
    should not be discovered.
    """
    path = tmp_path / "testfile.py"
    # An empty file contains neither "airflow" nor "DAG", so safe mode skips it.
    path.write_text("")
    with conf_vars({("core", "dags_folder"): os.fspath(path.parent)}):
        dagbag = DagBag(include_examples=False, safe_mode=True)
    assert len(dagbag.dagbag_stats) == 0
def test_safe_mode_disabled(self, tmp_path):
    """With safe mode disabled, an empty python file should be discovered."""
    path = tmp_path / "testfile.py"
    path.write_text("")
    with conf_vars({("core", "dags_folder"): os.fspath(path.parent)}):
        # safe_mode=False means every .py file is processed regardless of content.
        dagbag = DagBag(include_examples=False, safe_mode=False)
    assert len(dagbag.dagbag_stats) == 1
    assert dagbag.dagbag_stats[0].file == path.name
def test_dagbag_stats_file_is_relative_path_with_mixed_separators(self, tmp_path):
    """
    Test that dagbag_stats.file contains a relative path even when DAGS_FOLDER
    and filepath have different path separators (simulates Windows behavior).

    On Windows, settings.DAGS_FOLDER may use forward slashes (e.g., 'C:/foo/dags')
    while filepath from os.path operations uses backslashes (e.g., 'C:\\foo\\dags\\my_dag.py').
    This test verifies that path normalization works correctly in such cases.
    """
    path = tmp_path / "testfile.py"
    path.write_text("# airflow\n# DAG")
    # Simulate the Windows scenario where DAGS_FOLDER has forward slashes
    # but the filesystem returns paths with backslashes
    dags_folder_with_forward_slashes = path.parent.as_posix()
    with conf_vars({("core", "dags_folder"): dags_folder_with_forward_slashes}):
        dagbag = DagBag(include_examples=False, safe_mode=True)
    assert len(dagbag.dagbag_stats) == 1
    assert dagbag.dagbag_stats[0].file == path.name
def test_dagbag_stats_includes_bundle_info(self, tmp_path):
    """Test that FileLoadStat includes bundle_path and bundle_name from DagBag."""
    path = tmp_path / "testfile.py"
    path.write_text("# airflow\n# DAG")
    # The bundle directory is separate from the dags folder; only its identity
    # should be propagated into the per-file stats.
    bundle_path = tmp_path / "bundle"
    bundle_path.mkdir()
    bundle_name = "test_bundle"
    with conf_vars({("core", "dags_folder"): os.fspath(path.parent)}):
        dagbag = DagBag(
            include_examples=False,
            safe_mode=True,
            bundle_path=bundle_path,
            bundle_name=bundle_name,
        )
    assert len(dagbag.dagbag_stats) == 1
    stat = dagbag.dagbag_stats[0]
    assert stat.bundle_path == bundle_path
    assert stat.bundle_name == bundle_name
def test_dagbag_stats_bundle_info_none_when_not_provided(self, tmp_path):
    """Test that FileLoadStat has None for bundle_path and bundle_name when not provided."""
    path = tmp_path / "testfile.py"
    path.write_text("# airflow\n# DAG")
    with conf_vars({("core", "dags_folder"): os.fspath(path.parent)}):
        dagbag = DagBag(include_examples=False, safe_mode=True)
    assert len(dagbag.dagbag_stats) == 1
    stat = dagbag.dagbag_stats[0]
    # Without bundle arguments, the stats carry no bundle identity.
    assert stat.bundle_path is None
    assert stat.bundle_name is None
def test_process_file_that_contains_multi_bytes_char(self, tmp_path):
    """
    test that we're able to parse file that contains multi-byte char
    """
    path = tmp_path / "testfile.py"
    path.write_text("\u3042")  # write multi-byte char (hiragana)
    dagbag = DagBag(dag_folder=os.fspath(path.parent), include_examples=False)
    # The file defines no DAGs, but parsing it must not crash on the encoding.
    assert dagbag.process_file(os.fspath(path)) == []
def test_process_file_duplicated_dag_id(self, tmp_path):
    """Loading a DAG with ID that already existed in a DAG bag should result in an import error."""
    dagbag = DagBag(dag_folder=os.fspath(tmp_path), include_examples=False)

    def create_dag():
        from airflow.sdk import dag

        @dag(schedule=None, default_args={"owner": "owner1"})
        def my_flow():
            pass

        my_dag = my_flow()  # noqa: F841

    # Strip the enclosing function's indentation so the body is valid
    # top-level module source, then write it to two separate files.
    source_lines = [line[12:] for line in inspect.getsource(create_dag).splitlines(keepends=True)[1:]]
    path1 = tmp_path / "testfile1.py"
    path2 = tmp_path / "testfile2.py"
    path1.write_text("".join(source_lines))
    path2.write_text("".join(source_lines))

    # First file loads cleanly.
    found_1 = dagbag.process_file(os.fspath(path1))
    assert len(found_1) == 1
    assert found_1[0].dag_id == "my_flow"
    assert dagbag.import_errors == {}
    dags_in_bag = dagbag.dags

    # Second file declares the same dag_id and must be rejected.
    found_2 = dagbag.process_file(os.fspath(path2))
    assert len(found_2) == 0
    assert dagbag.import_errors[os.fspath(path2)].startswith(
        "AirflowDagDuplicatedIdException: Ignoring DAG"
    )
    assert dagbag.dags == dags_in_bag  # Should not change.
def test_import_errors_use_relative_path_with_bundle(self, tmp_path):
    """Import errors should use relative paths when bundle_path is set."""
    bundle_path = tmp_path / "bundle"
    bundle_path.mkdir()
    dag_path = bundle_path / "subdir" / "my_dag.py"
    dag_path.parent.mkdir(parents=True)
    # The file raises at import time, guaranteeing an import error entry.
    dag_path.write_text("from airflow.sdk import DAG\nraise ImportError('test error')")
    dagbag = DagBag(
        dag_folder=os.fspath(dag_path),
        include_examples=False,
        bundle_path=bundle_path,
        bundle_name="test_bundle",
    )
    # Error keys are relative to the bundle root, not absolute filesystem paths.
    expected_relative_path = "subdir/my_dag.py"
    assert expected_relative_path in dagbag.import_errors
    # Absolute path should NOT be a key
    assert os.fspath(dag_path) not in dagbag.import_errors
    assert "test error" in dagbag.import_errors[expected_relative_path]
def test_import_errors_use_relative_path_for_bagging_errors(self, tmp_path):
    """Errors during DAG bagging should use relative paths when bundle_path is set."""
    bundle_path = tmp_path / "bundle"
    bundle_path.mkdir()

    def create_dag():
        from airflow.sdk import dag

        @dag(schedule=None, default_args={"owner": "owner1"})
        def my_flow():
            pass

        my_flow()

    # Strip the enclosing function's indentation so the body is valid
    # top-level module source, then duplicate it into two files so one of
    # them fails bagging with a duplicate-DAG-id error.
    source_lines = [line[12:] for line in inspect.getsource(create_dag).splitlines(keepends=True)[1:]]
    path1 = bundle_path / "testfile1.py"
    path2 = bundle_path / "testfile2.py"
    path1.write_text("".join(source_lines))
    path2.write_text("".join(source_lines))

    dagbag = DagBag(
        dag_folder=os.fspath(bundle_path),
        include_examples=False,
        bundle_path=bundle_path,
        bundle_name="test_bundle",
    )
    # The DAG should load successfully from one file
    assert "my_flow" in dagbag.dags
    # One file should have a duplicate DAG error - file order is not guaranteed
    assert len(dagbag.import_errors) == 1
    error_path = next(iter(dagbag.import_errors.keys()))
    # The error key should be a relative path (not absolute)
    # and of any of the two test files
    assert error_path in ("testfile1.py", "testfile2.py")
    # Absolute paths should NOT be keys
    assert os.fspath(path1) not in dagbag.import_errors
    assert os.fspath(path2) not in dagbag.import_errors
    assert "AirflowDagDuplicatedIdException" in dagbag.import_errors[error_path]
def test_zip_skip_log(self, caplog, test_zip_path):
    """
    test the loading of a DAG from within a zip file that skips another file because
    it doesn't have "airflow" and "DAG"
    """
    caplog.set_level(logging.INFO)
    dagbag = DagBag(dag_folder=test_zip_path, include_examples=False)
    # The skip is reported via the DagBag's logger; verify the exact message.
    assert dagbag.has_logged
    assert (
        f"File {test_zip_path}:file_no_airflow_dag.py "
        "assumed to contain no DAGs. Skipping." in caplog.text
    )
def test_zip(self, tmp_path, test_zip_path):
    """
    test the loading of a DAG within a zip file that includes dependencies
    """
    # Snapshot sys.path so we can prove zip processing does not leak entries.
    syspath_before = deepcopy(sys.path)
    dagbag = DagBag(dag_folder=os.fspath(tmp_path), include_examples=False)
    dagbag.process_file(test_zip_path)
    assert dagbag.get_dag("test_zip_dag")
    assert sys.path == syspath_before  # sys.path doesn't change
    assert not dagbag.import_errors
@patch("airflow.dag_processing.importers.python_importer._timeout")
@patch("airflow.dag_processing.dagbag.settings.get_dagbag_import_timeout")
def test_process_dag_file_without_timeout(
    self, mocked_get_dagbag_import_timeout, mocked_timeout, tmp_path
):
    """
    Test dag file parsing without timeout
    """
    # A timeout of 0 disables the timeout guard entirely.
    mocked_get_dagbag_import_timeout.return_value = 0
    dagbag = DagBag(dag_folder=os.fspath(tmp_path), include_examples=False)
    dagbag.process_file(os.path.join(TEST_DAGS_FOLDER, "test_sensor.py"))
    mocked_timeout.assert_not_called()

    # Negative values are likewise treated as "no timeout".
    mocked_get_dagbag_import_timeout.return_value = -1
    dagbag = DagBag(dag_folder=os.fspath(tmp_path), include_examples=False)
    dagbag.process_file(os.path.join(TEST_DAGS_FOLDER, "test_sensor.py"))
    mocked_timeout.assert_not_called()
@patch("airflow.dag_processing.importers.python_importer._timeout")
@patch("airflow.dag_processing.dagbag.settings.get_dagbag_import_timeout")
def test_process_dag_file_with_non_default_timeout(
    self, mocked_get_dagbag_import_timeout, mocked_timeout, tmp_path
):
    """
    Test customized dag file parsing timeout
    """
    timeout_value = 100
    mocked_get_dagbag_import_timeout.return_value = timeout_value

    # ensure the test value is not equal to the default value
    assert timeout_value != settings.conf.getfloat("core", "DAGBAG_IMPORT_TIMEOUT")

    dagbag = DagBag(dag_folder=os.fspath(tmp_path), include_examples=False)
    dagbag.process_file(os.path.join(TEST_DAGS_FOLDER, "test_sensor.py"))
    # The timeout guard must be armed with the customized value.
    mocked_timeout.assert_called_once_with(timeout_value, error_message=mock.ANY)
@patch("airflow.dag_processing.importers.python_importer.settings.get_dagbag_import_timeout")
def test_check_value_type_from_get_dagbag_import_timeout(
    self, mocked_get_dagbag_import_timeout, tmp_path
):
    """
    Test correctness of value from get_dagbag_import_timeout
    """
    # A string timeout is invalid: the importer must reject non-numeric values.
    mocked_get_dagbag_import_timeout.return_value = "1"

    dagbag = DagBag(dag_folder=os.fspath(tmp_path), include_examples=False)
    with pytest.raises(
        TypeError, match=r"Value \(1\) from get_dagbag_import_timeout must be int or float"
    ):
        dagbag.process_file(os.path.join(TEST_DAGS_FOLDER, "test_sensor.py"))
@pytest.fixture
def invalid_cron_dag(self) -> str:
    """Path to a DAG file whose schedule is an invalid cron expression."""
    return os.path.join(TEST_DAGS_FOLDER, "test_invalid_cron.py")
@pytest.fixture
def invalid_cron_zipped_dag(self, invalid_cron_dag: str, tmp_path: pathlib.Path) -> str:
    """Same invalid-cron DAG file, but packaged inside a zip archive."""
    zipped = tmp_path / "test_zip_invalid_cron.zip"
    with zipfile.ZipFile(zipped, "w") as zf:
        zf.write(invalid_cron_dag, os.path.basename(invalid_cron_dag))
    return os.fspath(zipped)
@pytest.mark.parametrize("invalid_dag_name", ["invalid_cron_dag", "invalid_cron_zipped_dag"])
def test_process_file_cron_validity_check(
    self, request: pytest.FixtureRequest, invalid_dag_name: str, tmp_path
):
    """Test if an invalid cron expression as schedule interval can be identified"""
    dagbag = DagBag(dag_folder=os.fspath(tmp_path), include_examples=False)
    assert len(dagbag.import_errors) == 0
    # Resolve the fixture by name so both the plain and the zipped variant
    # are covered by the same parametrized test.
    dagbag.process_file(request.getfixturevalue(invalid_dag_name))
    assert len(dagbag.import_errors) == 1
    assert len(dagbag.dags) == 0
def test_process_file_invalid_param_check(self, tmp_path):
    """DAG files with invalid params are reported as import errors and never bagged."""
    invalid_dag_files = [
        "test_invalid_param.py",
        "test_invalid_param2.py",
        "test_invalid_param3.py",
        "test_invalid_param4.py",
    ]
    dagbag = DagBag(dag_folder=os.fspath(tmp_path), include_examples=False)
    assert len(dagbag.import_errors) == 0

    for dag_path in (os.path.join(TEST_DAGS_FOLDER, name) for name in invalid_dag_files):
        dagbag.process_file(dag_path)

    # Every invalid file contributes exactly one import error; none may load.
    assert len(dagbag.import_errors) == len(invalid_dag_files)
    assert len(dagbag.dags) == 0
def test_process_file_valid_param_check(self, tmp_path):
    """DAG files with valid params load cleanly (positive counterpart of the invalid-param test)."""
    valid_dag_files = [
        "test_valid_param.py",
        "test_valid_param2.py",
    ]
    dagbag = DagBag(dag_folder=os.fspath(tmp_path), include_examples=False)
    assert len(dagbag.import_errors) == 0

    for dag_path in (os.path.join(TEST_DAGS_FOLDER, name) for name in valid_dag_files):
        dagbag.process_file(dag_path)

    # No import errors, and every file yields exactly one bagged DAG.
    assert len(dagbag.import_errors) == 0
    assert len(dagbag.dags) == len(valid_dag_files)
@patch.object(DagModel, "get_current")
def test_get_dag_without_refresh(self, mock_dagmodel):
    """
    Test that, once a DAG is loaded, it doesn't get refreshed again if it
    hasn't been expired.
    """
    dag_id = "example_bash_operator"
    # last_expired=None means the DAG was never expired, so get_dag must not
    # trigger a re-parse of its file.
    mock_dagmodel.return_value = DagModel()
    mock_dagmodel.return_value.last_expired = None
    mock_dagmodel.return_value.fileloc = "foo"

    class _TestDagBag(DagBag):
        # Counts how often the example file is (re)parsed.
        process_file_calls = 0

        def process_file(self, filepath, only_if_updated=True, safe_mode=True):
            if os.path.basename(filepath) == "example_bash_operator.py":
                _TestDagBag.process_file_calls += 1
            # Forward the parsed dags so the override stays a transparent
            # wrapper (consistent with the refresh tests' _TestDagBag helpers).
            return super().process_file(filepath, only_if_updated, safe_mode)

    dagbag = _TestDagBag(include_examples=True)
    # The example file is parsed exactly once during DagBag initialization.
    assert dagbag.process_file_calls == 1
    assert dagbag.get_dag(dag_id) is not None
    # Should not call process_file again, since it's already loaded during init.
    assert dagbag.process_file_calls == 1
@pytest.mark.parametrize(
    ("file_to_load", "expected"),
    (
        pytest.param(
            pathlib.Path(example_dags_folder) / "example_bash_operator.py",
            {
                "example_bash_operator": f"{example_dags_folder.relative_to(AIRFLOW_ROOT_PATH) / 'example_bash_operator.py'}"
            },
            id="example_bash_operator",
        ),
    ),
)
def test_get_dag_registration(self, file_to_load, expected):
    """Processing a file registers each of its DAGs under the expected fileloc."""
    pytest.importorskip("system.standard")
    dagbag = DagBag(dag_folder=os.devnull, include_examples=False)
    dagbag.process_file(os.fspath(file_to_load))
    for dag_id, path in expected.items():
        dag = dagbag.get_dag(dag_id)
        assert dag, f"{dag_id} was bagged"
        assert dag.fileloc.endswith(path)
@pytest.mark.parametrize(
    ("expected"),
    (
        pytest.param(
            {
                "test_zip_dag": "test_zip.zip/test_zip.py",
                "test_zip_autoregister": "test_zip.zip/test_zip.py",
            },
            id="test_zip.zip",
        ),
    ),
)
def test_get_zip_dag_registration(self, test_zip_path, expected):
    """DAGs inside a zip are registered with a '<zip>/<file>' style fileloc."""
    dagbag = DagBag(dag_folder=os.devnull, include_examples=False)
    dagbag.process_file(test_zip_path)
    for dag_id, path in expected.items():
        dag = dagbag.get_dag(dag_id)
        assert dag, f"{dag_id} was bagged"
        assert dag.fileloc.endswith(f"{pathlib.Path(test_zip_path).parent}/{path}")
def test_dag_registration_with_failure(self):
    """A file whose only DAG fails validation (duplicate task ids) yields no DAGs."""
    dagbag = DagBag(dag_folder=os.devnull, include_examples=False)
    found = dagbag.process_file(str(TEST_DAGS_FOLDER / "test_invalid_dup_task.py"))
    assert found == []
@pytest.fixture
def zip_with_valid_dag_and_dup_tasks(self, tmp_path: pathlib.Path) -> str:
    """A zip containing one valid DAG file and one that fails with duplicate task ids."""
    failing_dag_file = TEST_DAGS_FOLDER / "test_invalid_dup_task.py"
    working_dag_file = TEST_DAGS_FOLDER / "test_example_bash_operator.py"
    zipped = tmp_path / "test_zip_invalid_dup_task.zip"
    with zipfile.ZipFile(zipped, "w") as zf:
        zf.write(failing_dag_file, failing_dag_file.name)
        zf.write(working_dag_file, working_dag_file.name)
    return os.fspath(zipped)
def test_dag_registration_with_failure_zipped(self, zip_with_valid_dag_and_dup_tasks):
    """One bad file inside a zip must not prevent the good file's DAG from loading."""
    dagbag = DagBag(dag_folder=os.devnull, include_examples=False)
    found = dagbag.process_file(zip_with_valid_dag_and_dup_tasks)
    assert len(found) == 1
    assert [dag.dag_id for dag in found] == ["test_example_bash_operator"]
@patch.object(DagModel, "get_current")
def test_refresh_py_dag(self, mock_dagmodel, tmp_path):
    """
    Test that we can refresh an ordinary .py DAG
    """
    dag_id = "example_bash_operator"
    fileloc = str(example_dags_folder / "example_bash_operator.py")

    # last_expired set far in the future marks the DAG as expired, forcing
    # get_dag to re-parse its file.
    mock_dagmodel.return_value = DagModel()
    mock_dagmodel.return_value.last_expired = datetime.max.replace(tzinfo=timezone.utc)
    mock_dagmodel.return_value.fileloc = fileloc

    class _TestDagBag(DagBag):
        # Counts how often the target file is parsed.
        process_file_calls = 0

        def process_file(self, filepath, only_if_updated=True, safe_mode=True):
            if filepath == fileloc:
                _TestDagBag.process_file_calls += 1
            return super().process_file(filepath, only_if_updated, safe_mode)

    dagbag = _TestDagBag(dag_folder=os.fspath(tmp_path), include_examples=True)
    # Parsed once during init, then a second time by the expired-DAG refresh.
    assert dagbag.process_file_calls == 1
    dag = dagbag.get_dag(dag_id)
    assert dag is not None
    assert dag_id == dag.dag_id
    assert dagbag.process_file_calls == 2
@patch.object(DagModel, "get_current")
def test_refresh_packaged_dag(self, mock_dagmodel, test_zip_path):
    """
    Test that we can refresh a packaged DAG
    """
    dag_id = "test_zip_dag"
    # fileloc points *inside* the zip: "<zip path>/test_zip.py".
    fileloc = os.path.realpath(os.path.join(test_zip_path, "test_zip.py"))

    # last_expired in the future marks the DAG expired, forcing a refresh.
    mock_dagmodel.return_value = DagModel()
    mock_dagmodel.return_value.last_expired = datetime.max.replace(tzinfo=timezone.utc)
    mock_dagmodel.return_value.fileloc = fileloc

    class _TestDagBag(DagBag):
        # Counts parses of the zip or of the file inside it.
        process_file_calls = 0

        def process_file(self, filepath, only_if_updated=True, safe_mode=True):
            # Substring check on purpose: init passes the zip path (a prefix
            # of fileloc) while the refresh passes fileloc itself.
            if filepath in fileloc:
                _TestDagBag.process_file_calls += 1
            return super().process_file(filepath, only_if_updated, safe_mode)

    dagbag = _TestDagBag(dag_folder=os.path.realpath(test_zip_path), include_examples=False)
    assert dagbag.process_file_calls == 1
    dag = dagbag.get_dag(dag_id)
    assert dag is not None
    assert dag_id == dag.dag_id
    assert dagbag.process_file_calls == 2
def process_dag(self, create_dag, tmp_path):
    """
    Helper method to process a file generated from the input create_dag function.

    Returns a (dagbag, found_dags, file_path) tuple.
    """
    # write source to file: drop the def line, dedent the remaining body so
    # it is valid top-level module source.
    source = textwrap.dedent("".join(inspect.getsource(create_dag).splitlines(True)[1:-1]))
    path = tmp_path / "testfile.py"
    path.write_text(source)

    dagbag = DagBag(dag_folder=os.fspath(path.parent), include_examples=False)
    found_dags = dagbag.process_file(os.fspath(path))
    return dagbag, found_dags, os.fspath(path)
def validate_dags(self, expected_dag, actual_found_dags, actual_dagbag, should_be_found=True):
    """Assert that expected_dag is (or is not) both in the processing result and in the bag."""
    actual_found_dag_ids = [dag.dag_id for dag in actual_found_dags]
    dag_id = expected_dag.dag_id
    actual_dagbag.log.info("validating %s", dag_id)
    assert (dag_id in actual_found_dag_ids) == should_be_found, (
        f'dag "{dag_id}" should {"" if should_be_found else "not "}'
        f'have been found after processing dag "{expected_dag.dag_id}"'
    )
    assert (dag_id in actual_dagbag.dags) == should_be_found, (
        f'dag "{dag_id}" should {"" if should_be_found else "not "}'
        f'be in dagbag.dags after processing dag "{expected_dag.dag_id}"'
    )
def test_skip_cycle_dags(self, tmp_path):
    """
    Don't crash when loading an invalid (contains a cycle) DAG file.
    Don't load the dag into the DagBag either
    """

    # Define Dag to load
    def basic_cycle():
        import datetime

        from airflow.models.dag import DAG
        from airflow.providers.standard.operators.empty import EmptyOperator

        dag_name = "cycle_dag"
        default_args = {"owner": "owner1", "start_date": datetime.datetime(2016, 1, 1)}
        dag = DAG(dag_name, schedule=timedelta(days=1), default_args=default_args)

        # A -> A
        with dag:
            op_a = EmptyOperator(task_id="A")
            op_a.set_downstream(op_a)

        return dag

    test_dag = basic_cycle()

    # Perform processing dag
    dagbag, found_dags, file_path = self.process_dag(basic_cycle, tmp_path)

    # Validate correctness
    # None of the dags should be found
    self.validate_dags(test_dag, found_dags, dagbag, should_be_found=False)
    assert file_path in dagbag.import_errors
def test_process_file_with_none(self, tmp_path):
    """Passing None to process_file yields an empty result rather than an error."""
    dagbag = DagBag(dag_folder=os.fspath(tmp_path), include_examples=False)
    found = dagbag.process_file(None)
    assert found == []
def test_timeout_dag_errors_are_import_errors(self, tmp_path, caplog):
    """
    Test that if the DAG contains Timeout error it will be still loaded to DB as import_errors
    """
    dag_file = tmp_path / "timeout_dag.py"
    dag_file.write_text("""
import datetime
import time
import airflow
from airflow.providers.standard.operators.python import PythonOperator
time.sleep(1)  # Exceeds DAGBAG_IMPORT_TIMEOUT (0.01s), triggers timeout
with airflow.DAG(
    "import_timeout",
    start_date=datetime.datetime(2022, 1, 1),
    schedule=None) as dag:
    pass
""")

    # A 0.01s timeout guarantees the sleep(1) in the DAG file trips it.
    with conf_vars({("core", "DAGBAG_IMPORT_TIMEOUT"): "0.01"}):
        dagbag = DagBag(dag_folder=os.fspath(tmp_path), include_examples=False)
        assert dag_file.as_posix() in dagbag.import_errors
        assert "DagBag import timeout for" in caplog.text
@staticmethod
def _make_test_traceback(unparseable_filename: str, depth=None) -> str:
    """Build the traceback text expected in import_errors for the broken test DAG.

    Accounts for the extra caret/tilde marker lines that CPython 3.11 and 3.13
    add to tracebacks, and for the configured traceback depth (depth=None keeps
    all frames; depth=N keeps only the innermost N frames).
    """
    python_311_marker = "                ^^^^^^^^^^^\n" if PY311 else ""
    python_313_marker = ["    ~~~~~~~~~^^\n"] if PY313 else []
    frames = (
        f'  File "{unparseable_filename}", line 3, in <module>\n    something()\n',
        *python_313_marker,
        f'  File "{unparseable_filename}", line 2, in something\n    return airflow_DAG\n{python_311_marker}',
    )
    # Negative slice keeps the innermost `depth` frames; 0 keeps everything.
    depth = 0 if depth is None else -depth
    return (
        "Traceback (most recent call last):\n"
        + "".join(frames[depth:])
        + "NameError: name 'airflow_DAG' is not defined\n"
    )
@pytest.mark.parametrize("depth", (None, 1))
def test_import_error_tracebacks(self, tmp_path, depth):
    """Import-error tracebacks honor the configured traceback depth for plain .py files."""
    unparseable_filename = tmp_path.joinpath("dag.py").as_posix()
    with open(unparseable_filename, "w") as unparseable_file:
        unparseable_file.writelines(INVALID_DAG_WITH_DEPTH_FILE_CONTENTS)
    with contextlib.ExitStack() as cm:
        # Only set the depth option for the parametrized case that uses it.
        if depth is not None:
            cm.enter_context(conf_vars({("core", "dagbag_import_error_traceback_depth"): str(depth)}))
        dagbag = DagBag(dag_folder=unparseable_filename, include_examples=False)
        import_errors = dagbag.import_errors
        assert unparseable_filename in import_errors
        assert import_errors[unparseable_filename] == self._make_test_traceback(unparseable_filename, depth)
@pytest.mark.parametrize("depth", (None, 1))
def test_import_error_tracebacks_zip(self, tmp_path, depth):
    """Import-error tracebacks honor the configured traceback depth for zipped DAG files."""
    invalid_zip_filename = (tmp_path / "test_zip_invalid.zip").as_posix()
    # The error key uses the "<zip>/<member>" path form.
    invalid_dag_filename = os.path.join(invalid_zip_filename, "dag.py")
    with zipfile.ZipFile(invalid_zip_filename, "w") as invalid_zip_file:
        invalid_zip_file.writestr("dag.py", INVALID_DAG_WITH_DEPTH_FILE_CONTENTS)
    with contextlib.ExitStack() as cm:
        if depth is not None:
            cm.enter_context(conf_vars({("core", "dagbag_import_error_traceback_depth"): str(depth)}))
        dagbag = DagBag(dag_folder=invalid_zip_filename, include_examples=False)
        import_errors = dagbag.import_errors
        assert invalid_dag_filename in import_errors
        assert import_errors[invalid_dag_filename] == self._make_test_traceback(invalid_dag_filename, depth)
@patch("airflow.settings.task_policy", cluster_policies.example_task_policy)
def test_task_cluster_policy_violation(self):
    """
    test that file processing results in import error when task does not
    obey cluster policy.
    """
    dag_file = os.path.join(TEST_DAGS_FOLDER, "test_missing_owner.py")
    dag_id = "test_missing_owner"
    err_cls_name = "AirflowClusterPolicyViolation"

    dagbag = DagBag(dag_folder=dag_file, include_examples=False)
    # The DAG must be rejected entirely, with the policy violation recorded
    # verbatim as an import error.
    assert set() == set(dagbag.dag_ids)
    expected_import_errors = {
        dag_file: (
            f"""{err_cls_name}: DAG policy violation (DAG ID: {dag_id}, Path: {dag_file}):\n"""
            """Notices:\n"""
            """ * Task must have non-None non-default owner. Current value: airflow"""
        )
    }
    assert expected_import_errors == dagbag.import_errors
@patch("airflow.settings.task_policy", cluster_policies.example_task_policy)
def test_task_cluster_policy_nonstring_owner(self):
    """
    test that file processing results in import error when task does not
    obey cluster policy and has owner whose type is not string.
    """
    TEST_DAGS_CORRUPTED_FOLDER = pathlib.Path(__file__).parent.with_name("dags_corrupted")
    dag_file = os.path.join(TEST_DAGS_CORRUPTED_FOLDER, "test_nonstring_owner.py")
    dag_id = "test_nonstring_owner"
    err_cls_name = "AirflowClusterPolicyViolation"

    dagbag = DagBag(dag_folder=dag_file, include_examples=False)
    # The DAG must be rejected, with the policy notice recorded verbatim.
    assert set() == set(dagbag.dag_ids)
    expected_import_errors = {
        dag_file: (
            f"""{err_cls_name}: DAG policy violation (DAG ID: {dag_id}, Path: {dag_file}):\n"""
            """Notices:\n"""
            """ * owner should be a string. Current value: ['a']"""
        )
    }
    assert expected_import_errors == dagbag.import_errors
@patch("airflow.settings.task_policy", cluster_policies.example_task_policy)
def test_task_cluster_policy_obeyed(self):
    """
    test that dag successfully imported without import errors when tasks
    obey cluster policy.
    """
    dag_file = os.path.join(TEST_DAGS_FOLDER, "test_with_non_default_owner.py")

    dagbag = DagBag(dag_folder=dag_file, include_examples=False)
    assert {"test_with_non_default_owner"} == set(dagbag.dag_ids)
    assert dagbag.import_errors == {}
@patch("airflow.settings.dag_policy", cluster_policies.dag_policy)
def test_dag_cluster_policy_obeyed(self):
    """A DAG-level policy violation (no tags) is recorded as an import error."""
    dag_file = os.path.join(TEST_DAGS_FOLDER, "test_dag_with_no_tags.py")

    dagbag = DagBag(dag_folder=dag_file, include_examples=False)
    assert len(dagbag.dag_ids) == 0
    assert "has no tags" in dagbag.import_errors[dag_file]
def test_dagbag_dag_collection(self):
    """collect_dags is deferred when collect_dags=False and eager otherwise."""
    dagbag = DagBag(
        dag_folder=TEST_DAGS_FOLDER,
        include_examples=False,
        collect_dags=False,
        bundle_name="test_collection",
    )
    # since collect_dags is False, dagbag.dags should be empty
    assert not dagbag.dags

    dagbag.collect_dags()
    assert dagbag.dags

    # test that dagbag.dags is not empty if collect_dags is True
    dagbag = DagBag(dag_folder=TEST_DAGS_FOLDER, include_examples=False, bundle_name="test_collection")
    assert dagbag.dags
# NOTE(review): "dabgag" in the test name is a long-standing typo of "dagbag";
# kept as-is because test names are selected by CI tooling.
def test_dabgag_captured_warnings(self):
    """Warnings raised while parsing a DAG file are captured per-file and respect warning filters."""
    dag_file = os.path.join(TEST_DAGS_FOLDER, "test_dag_warnings.py")
    dagbag = DagBag(dag_folder=dag_file, include_examples=False, collect_dags=False)
    assert dag_file not in dagbag.captured_warnings

    dagbag.collect_dags(dag_folder=dagbag.dag_folder, include_examples=False, only_if_updated=False)
    assert dagbag.dagbag_stats[0].warning_num == 2
    assert dagbag.captured_warnings == {
        dag_file: (
            f"{dag_file}:46: DeprecationWarning: Deprecated Parameter",
            f"{dag_file}:48: UserWarning: Some Warning",
        )
    }

    with warnings.catch_warnings():
        # Disable capture DeprecationWarning, and it should be reflected in captured warnings
        warnings.simplefilter("ignore", DeprecationWarning)
        dagbag.collect_dags(dag_folder=dagbag.dag_folder, include_examples=False, only_if_updated=False)
        assert dag_file in dagbag.captured_warnings
        assert len(dagbag.captured_warnings[dag_file]) == 1
        assert dagbag.dagbag_stats[0].warning_num == 1

        # Disable all warnings, no captured warnings expected
        warnings.simplefilter("ignore")
        dagbag.collect_dags(dag_folder=dagbag.dag_folder, include_examples=False, only_if_updated=False)
        assert dag_file not in dagbag.captured_warnings
        assert dagbag.dagbag_stats[0].warning_num == 0
@pytest.fixture
def warning_zipped_dag_path(self, tmp_path: pathlib.Path) -> str:
    """The warnings-producing DAG file packaged inside a zip archive."""
    warnings_dag_file = TEST_DAGS_FOLDER / "test_dag_warnings.py"
    zipped = tmp_path / "test_dag_warnings.zip"
    with zipfile.ZipFile(zipped, "w") as zf:
        zf.write(warnings_dag_file, warnings_dag_file.name)
    return os.fspath(zipped)
    def test_dabgag_captured_warnings_zip(self, warning_zipped_dag_path: str):
        """For zipped DAGs, warnings are keyed by the archive path but reference the inner file."""
        in_zip_dag_file = f"{warning_zipped_dag_path}/test_dag_warnings.py"
        dagbag = DagBag(dag_folder=warning_zipped_dag_path, include_examples=False)
        assert dagbag.dagbag_stats[0].warning_num == 2
        # Key is the zip path; messages point at the file inside the archive.
        assert dagbag.captured_warnings == {
            warning_zipped_dag_path: (
                f"{in_zip_dag_file}:46: DeprecationWarning: Deprecated Parameter",
                f"{in_zip_dag_file}:48: UserWarning: Some Warning",
            )
        }
    @pytest.mark.parametrize(
        ("known_pools", "expected"),
        (
            # known_pools=None disables pool validation entirely.
            pytest.param(None, set(), id="disabled"),
            # Referenced pool missing from the known set -> one warning.
            pytest.param(
                {"default_pool"},
                {
                    DagWarning(
                        "test",
                        DagWarningType.NONEXISTENT_POOL,
                        "Dag 'test' references non-existent pools: ['pool1']",
                    ),
                },
                id="only-default",
            ),
            # All referenced pools known -> no warnings.
            pytest.param(
                {"default_pool", "pool1"},
                set(),
                id="known-pools",
            ),
        ),
    )
    def test_dag_warnings_invalid_pool(self, known_pools, expected):
        """Bagging a dag whose task references an unknown pool records a NONEXISTENT_POOL warning."""
        with DAG(dag_id="test") as dag:
            BaseOperator(task_id="1")
            BaseOperator(task_id="2", pool="pool1")
        dagbag = DagBag(dag_folder="", include_examples=False, collect_dags=False, known_pools=known_pools)
        dagbag.bag_dag(dag)
        assert dagbag.dag_warnings == expected
    def test_sigsegv_handling(self, tmp_path, caplog):
        """
        Test that a SIGSEGV in a DAG file is handled gracefully and does not crash the process.
        """
        # Create a DAG file that will raise a SIGSEGV
        dag_file = tmp_path / "bad_dag.py"
        dag_file.write_text(
            textwrap.dedent(
                """\
                import signal
                from airflow import DAG
                import os
                from airflow.decorators import task
                os.kill(os.getpid(), signal.SIGSEGV)
                with DAG('testbug'):
                    @task
                    def mytask():
                        print(1)
                    mytask()
                """
            )
        )
        # Parsing the crashing file must be logged and recorded — not kill pytest.
        dagbag = DagBag(dag_folder=os.fspath(tmp_path), include_examples=False)
        assert "Received SIGSEGV signal while processing" in caplog.text
        # The crashing file shows up as an import error keyed by its path.
        assert dag_file.as_posix() in dagbag.import_errors
    def test_failed_signal_registration_does_not_crash_the_process(self, tmp_path, caplog):
        """Test that a ValueError raised by a signal setting on child process does not crash the main process.

        This was raised in test_dag_report.py module in api_fastapi/core_api/routes/public tests
        """
        dag_file = tmp_path / "test_dag.py"
        dag_file.write_text(
            textwrap.dedent(
                """\
                from airflow import DAG
                from airflow.decorators import task
                with DAG('testbug'):
                    @task
                    def mytask():
                        print(1)
                    mytask()
                """
            )
        )
        # Force signal.signal() to fail the way it does when called off the main thread.
        with mock.patch("airflow.dag_processing.importers.python_importer.signal.signal") as mock_signal:
            mock_signal.side_effect = ValueError("Invalid signal setting")
            DagBag(dag_folder=os.fspath(tmp_path), include_examples=False)
            # Failure is logged, parsing continues.
            assert "SIGSEGV signal handler registration failed. Not in the main thread" in caplog.text
class TestCaptureWithReraise:
    """Tests for the _capture_with_reraise context manager.

    The manager must record warnings while still re-emitting them so outer
    filters — including ``error`` filters — keep their normal effect.
    """

    @staticmethod
    def raise_warnings():
        # Helper: emit three distinct UserWarnings in a fixed order.
        warnings.warn("Foo", UserWarning, stacklevel=2)
        warnings.warn("Bar", UserWarning, stacklevel=2)
        warnings.warn("Baz", UserWarning, stacklevel=2)

    def test_capture_no_warnings(self):
        """No warnings raised -> capture list stays empty."""
        with warnings.catch_warnings():
            warnings.simplefilter("error")
            with _capture_with_reraise() as cw:
                pass
        assert cw == []

    def test_capture_warnings(self):
        """Every emitted warning is both captured and re-raised to the outer context."""
        with pytest.warns(UserWarning, match="(Foo|Bar|Baz)") as ctx:
            with _capture_with_reraise() as cw:
                self.raise_warnings()
        assert len(cw) == 3
        assert len(ctx.list) == 3

    def test_capture_warnings_with_parent_error_filter(self):
        """An outer 'error' filter turns the matching warning into an exception; only prior ones are seen."""
        with warnings.catch_warnings(record=True) as records:
            warnings.filterwarnings("error", message="Bar")
            with _capture_with_reraise() as cw:
                with pytest.raises(UserWarning, match="Bar"):
                    self.raise_warnings()
            assert len(cw) == 1
        assert len(records) == 1

    def test_capture_warnings_with_parent_ignore_filter(self):
        """An outer 'ignore' filter removes the matching warning from both capture and records."""
        with warnings.catch_warnings(record=True) as records:
            warnings.filterwarnings("ignore", message="Baz")
            with _capture_with_reraise() as cw:
                self.raise_warnings()
            assert len(cw) == 2
        assert len(records) == 2

    def test_capture_warnings_with_filters(self):
        """Filters installed inside the capture block are honoured too."""
        with warnings.catch_warnings(record=True) as records:
            with _capture_with_reraise() as cw:
                warnings.filterwarnings("ignore", message="Foo")
                self.raise_warnings()
            assert len(cw) == 2
        assert len(records) == 2

    def test_capture_warnings_with_error_filters(self):
        """An 'error' filter installed inside the capture block aborts at the match."""
        with warnings.catch_warnings(record=True) as records:
            with _capture_with_reraise() as cw:
                warnings.filterwarnings("error", message="Bar")
                with pytest.raises(UserWarning, match="Bar"):
                    self.raise_warnings()
            assert len(cw) == 1
        assert len(records) == 1
class TestBundlePathSysPath:
    """Tests for bundle_path sys.path handling in BundleDagBag.

    Fix over the previous version: the sys.path cleanup now runs in
    ``finally`` blocks, so a failing assertion can no longer leak the
    bundle path into sys.path and poison unrelated tests.
    """

    def test_bundle_path_added_to_syspath(self, tmp_path):
        """Test that BundleDagBag adds bundle_path to sys.path when provided."""
        util_file = tmp_path / "bundle_util.py"
        util_file.write_text('def get_message(): return "Hello from bundle!"')
        dag_file = tmp_path / "test_dag.py"
        dag_file.write_text(
            textwrap.dedent(
                """\
                from airflow.sdk import DAG
                from airflow.operators.empty import EmptyOperator
                import sys
                import bundle_util
                with DAG('test_import', description=f"DAG with sys.path: {sys.path}"):
                    EmptyOperator(task_id="mytask")
                """
            )
        )
        assert str(tmp_path) not in sys.path
        try:
            dagbag = BundleDagBag(dag_folder=str(dag_file), bundle_path=tmp_path, bundle_name="test-bundle")
            # Check import was successful
            assert len(dagbag.dags) == 1
            assert not dagbag.import_errors
            dag = dagbag.get_dag("test_import")
            assert dag is not None
            assert str(tmp_path) in dag.description  # sys.path was enhanced during parse
            # Path remains in sys.path (no cleanup - intentional for ephemeral processes)
            assert str(tmp_path) in sys.path
        finally:
            # Always restore sys.path so a failed assertion cannot affect other tests.
            if str(tmp_path) in sys.path:
                sys.path.remove(str(tmp_path))

    def test_bundle_path_not_duplicated(self, tmp_path):
        """Test that bundle_path is not added to sys.path if already present."""
        dag_file = tmp_path / "simple_dag.py"
        dag_file.write_text(
            textwrap.dedent(
                """\
                from airflow.sdk import DAG
                from airflow.operators.empty import EmptyOperator
                with DAG("simple_dag"):
                    EmptyOperator(task_id="mytask")
                """
            )
        )
        # Pre-add the path
        sys.path.append(str(tmp_path))
        try:
            count_before = sys.path.count(str(tmp_path))
            BundleDagBag(dag_folder=str(dag_file), bundle_path=tmp_path, bundle_name="test-bundle")
            # Should not add duplicate
            assert sys.path.count(str(tmp_path)) == count_before
        finally:
            # Always restore sys.path, even when the assertion fails.
            sys.path.remove(str(tmp_path))

    def test_dagbag_no_bundle_path_no_syspath_modification(self, tmp_path):
        """Test that no sys.path modification occurs when DagBag is used without bundle_path."""
        dag_file = tmp_path / "simple_dag.py"
        dag_file.write_text(
            textwrap.dedent(
                """\
                from airflow.sdk import DAG
                from airflow.operators.empty import EmptyOperator
                import sys
                with DAG("simple_dag", description=f"DAG with sys.path: {sys.path}") as dag:
                    EmptyOperator(task_id="mytask")
                """
            )
        )
        syspath_before = deepcopy(sys.path)
        dagbag = DagBag(dag_folder=str(dag_file), include_examples=False)
        dag = dagbag.get_dag("simple_dag")
        # The parsed DAG must not have seen tmp_path, and sys.path is untouched.
        assert str(tmp_path) not in dag.description
        assert sys.path == syspath_before
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/dag_processing/test_dagbag.py",
"license": "Apache License 2.0",
"lines": 1123,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/microsoft/azure/src/airflow/providers/microsoft/azure/fs/msgraph.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Any
from airflow.providers.common.compat.sdk import BaseHook
from airflow.providers.microsoft.azure.utils import get_field
if TYPE_CHECKING:
from fsspec import AbstractFileSystem
schemes = ["msgraph", "sharepoint", "onedrive", "msgd"]
def get_fs(conn_id: str | None, storage_options: dict[str, Any] | None = None) -> AbstractFileSystem:
    """Return an ``MSGDriveFS`` fsspec filesystem configured from an Airflow connection.

    :param conn_id: Airflow connection id holding the MS Graph credentials.
        ``None`` yields an unauthenticated filesystem.
    :param storage_options: Extra options that override anything read from the
        connection (applied last).
    """
    # Imported lazily so module import does not require msgraphfs.
    from msgraphfs import MSGDriveFS

    if conn_id is None:
        return MSGDriveFS({})
    conn = BaseHook.get_connection(conn_id)
    extras = conn.extra_dejson
    conn_type = conn.conn_type or "msgraph"
    options: dict[str, Any] = {}
    # Get authentication parameters with fallback handling:
    # standard connection fields (login/password/host) win over prefixed extras.
    client_id = conn.login or get_field(
        conn_id=conn_id, conn_type=conn_type, extras=extras, field_name="client_id"
    )
    client_secret = conn.password or get_field(
        conn_id=conn_id, conn_type=conn_type, extras=extras, field_name="client_secret"
    )
    tenant_id = conn.host or get_field(
        conn_id=conn_id, conn_type=conn_type, extras=extras, field_name="tenant_id"
    )
    if client_id:
        options["client_id"] = client_id
    if client_secret:
        options["client_secret"] = client_secret
    if tenant_id:
        options["tenant_id"] = tenant_id
    # Process additional fields from extras
    fields = [
        "drive_id",
        "scope",
        "token_endpoint",
        "redirect_uri",
        "token_endpoint_auth_method",
        "code_challenge_method",
        "update_token",
        "username",
        "password",
    ]
    for field in fields:
        value = get_field(conn_id=conn_id, conn_type=conn_type, extras=extras, field_name=field)
        if value is not None:
            if value == "":
                # An explicit empty string unsets the option when already present.
                options.pop(field, "")
            else:
                options[field] = value
    # Update with storage options (caller-supplied values take precedence)
    options.update(storage_options or {})
    # Create oauth2 client parameters only when the full credential triple is present.
    oauth2_client_params = {}
    if options.get("client_id") and options.get("client_secret") and options.get("tenant_id"):
        oauth2_client_params = {
            "client_id": options["client_id"],
            "client_secret": options["client_secret"],
            "tenant_id": options["tenant_id"],
        }
    # Add additional oauth2 parameters supported by authlib
    oauth2_params = [
        "scope",
        "token_endpoint",
        "redirect_uri",
        "token_endpoint_auth_method",
        "code_challenge_method",
        "update_token",
        "username",
        "password",
    ]
    for param in oauth2_params:
        if param in options:
            oauth2_client_params[param] = options[param]
    # Determine which filesystem to return based on drive_id
    drive_id = options.get("drive_id")
    return MSGDriveFS(drive_id=drive_id, oauth2_client_params=oauth2_client_params)
| {
"repo_id": "apache/airflow",
"file_path": "providers/microsoft/azure/src/airflow/providers/microsoft/azure/fs/msgraph.py",
"license": "Apache License 2.0",
"lines": 93,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/microsoft/azure/tests/unit/microsoft/azure/fs/test_msgraph.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import MagicMock, patch
import pytest
from airflow.models.connection import Connection
from airflow.providers.microsoft.azure.fs.msgraph import get_fs
@pytest.fixture
def mock_connection():
    """Connection with the full credential triple plus a drive_id in extras."""
    return Connection(
        conn_id="msgraph_default",
        conn_type="msgraph",
        login="test_client_id",
        password="test_client_secret",
        host="test_tenant_id",
        extra={"drive_id": "test_drive_id"},
    )


@pytest.fixture
def mock_connection_minimal():
    """Connection with credentials only — no extras, so no drive_id."""
    return Connection(
        conn_id="msgraph_minimal",
        conn_type="msgraph",
        login="test_client_id",
        password="test_client_secret",
        host="test_tenant_id",
    )
class TestMSGraphFS:
    """Tests for the msgraph fsspec entry point ``get_fs``."""

    @patch("airflow.providers.microsoft.azure.fs.msgraph.BaseHook.get_connection")
    @patch("msgraphfs.MSGDriveFS")
    def test_get_fs_with_drive_id(self, mock_msgdrivefs, mock_get_connection, mock_connection):
        """Full credentials plus drive_id -> drive-scoped filesystem with the oauth2 triple."""
        mock_get_connection.return_value = mock_connection
        mock_fs_instance = MagicMock()
        mock_msgdrivefs.return_value = mock_fs_instance
        result = get_fs("msgraph_default")
        mock_msgdrivefs.assert_called_once_with(
            drive_id="test_drive_id",
            oauth2_client_params={
                "client_id": "test_client_id",
                "client_secret": "test_client_secret",
                "tenant_id": "test_tenant_id",
            },
        )
        assert result == mock_fs_instance

    @patch("msgraphfs.MSGDriveFS")
    def test_get_fs_no_connection(self, mock_msgdrivefs):
        """conn_id=None -> unauthenticated filesystem built from an empty dict."""
        mock_fs_instance = MagicMock()
        mock_msgdrivefs.return_value = mock_fs_instance
        result = get_fs(None)
        mock_msgdrivefs.assert_called_once_with({})
        assert result == mock_fs_instance

    @patch("airflow.providers.microsoft.azure.fs.msgraph.BaseHook.get_connection")
    @patch("msgraphfs.MSGDriveFS")
    def test_get_fs_with_extra_oauth_params(self, mock_msgdrivefs, mock_get_connection):
        """Extra oauth2 fields from the connection extras flow into oauth2_client_params."""
        connection = Connection(
            conn_id="msgraph_extra",
            conn_type="msgraph",
            login="test_client_id",
            password="test_client_secret",
            host="test_tenant_id",
            extra={
                "drive_id": "test_drive_id",
                "scope": "https://graph.microsoft.com/.default",
                "token_endpoint": "https://login.microsoftonline.com/test/oauth2/v2.0/token",
                "redirect_uri": "http://localhost:8080/callback",
            },
        )
        mock_get_connection.return_value = connection
        mock_fs_instance = MagicMock()
        mock_msgdrivefs.return_value = mock_fs_instance
        result = get_fs("msgraph_extra")
        expected_oauth2_params = {
            "client_id": "test_client_id",
            "client_secret": "test_client_secret",
            "tenant_id": "test_tenant_id",
            "scope": "https://graph.microsoft.com/.default",
            "token_endpoint": "https://login.microsoftonline.com/test/oauth2/v2.0/token",
            "redirect_uri": "http://localhost:8080/callback",
        }
        mock_msgdrivefs.assert_called_once_with(
            drive_id="test_drive_id", oauth2_client_params=expected_oauth2_params
        )
        assert result == mock_fs_instance

    @patch("airflow.providers.microsoft.azure.fs.msgraph.BaseHook.get_connection")
    @patch("msgraphfs.MSGDriveFS")
    def test_get_fs_with_storage_options(self, mock_msgdrivefs, mock_get_connection, mock_connection_minimal):
        """storage_options are applied last, overriding values from the connection."""
        mock_get_connection.return_value = mock_connection_minimal
        mock_fs_instance = MagicMock()
        mock_msgdrivefs.return_value = mock_fs_instance
        storage_options = {"drive_id": "storage_drive_id", "scope": "custom.scope"}
        result = get_fs("msgraph_minimal", storage_options=storage_options)
        expected_oauth2_params = {
            "client_id": "test_client_id",
            "client_secret": "test_client_secret",
            "tenant_id": "test_tenant_id",
            "scope": "custom.scope",
        }
        mock_msgdrivefs.assert_called_once_with(
            drive_id="storage_drive_id", oauth2_client_params=expected_oauth2_params
        )
        assert result == mock_fs_instance

    @patch("airflow.providers.microsoft.azure.fs.msgraph.BaseHook.get_connection")
    @patch("msgraphfs.MSGDriveFS")
    def test_get_fs_incomplete_credentials(self, mock_msgdrivefs, mock_get_connection):
        # Connection with missing client_secret
        connection = Connection(
            conn_id="msgraph_incomplete",
            conn_type="msgraph",
            login="test_client_id",
            host="test_tenant_id",
        )
        mock_get_connection.return_value = connection
        mock_fs_instance = MagicMock()
        mock_msgdrivefs.return_value = mock_fs_instance
        result = get_fs("msgraph_incomplete")
        # Should return default filesystem when credentials are incomplete
        mock_msgdrivefs.assert_called_once_with(drive_id=None, oauth2_client_params={})
        assert result == mock_fs_instance
| {
"repo_id": "apache/airflow",
"file_path": "providers/microsoft/azure/tests/unit/microsoft/azure/fs/test_msgraph.py",
"license": "Apache License 2.0",
"lines": 131,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/tests/unit/timetables/test_cron_mixin.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.timetables._cron import CronMixin
SAMPLE_TZ = "UTC"
def test_valid_cron_expression():
    """A parseable cron expression yields a non-empty human-readable description."""
    mixin = CronMixin("* * 1 * *", SAMPLE_TZ)  # every minute on the 1st of each month
    description = mixin.description
    assert isinstance(description, str)
    assert "Every minute" in description or "month" in description
def test_invalid_cron_expression():
    """An unparseable expression degrades to an empty description rather than raising."""
    assert CronMixin("invalid cron", SAMPLE_TZ).description == ""
def test_dom_and_dow_conflict():
    """Day-of-month and day-of-week together are described as alternatives joined by '(or)'."""
    description = CronMixin("* * 1 * 1", SAMPLE_TZ).description  # 1st of month or Monday
    for fragment in (
        "(or)",
        "Every minute, on day 1 of the month",
        "Every minute, only on Monday",
    ):
        assert fragment in description
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/timetables/test_cron_mixin.py",
"license": "Apache License 2.0",
"lines": 32,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/vertica/tests/unit/vertica/hooks/test_vertica_sql.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
from collections import namedtuple
from unittest import mock
from unittest.mock import MagicMock, PropertyMock, patch
import pytest
from sqlalchemy.engine import Engine
from airflow.models import Connection
from airflow.providers.common.compat.sdk import AirflowException
from airflow.providers.vertica.hooks.vertica import VerticaHook
DEFAULT_CONN_ID = "vertica_default"
HOST = "vertica.cloud.com"
PORT = 5433
USER = "user"
PASSWORD = "pass"
DATABASE = "test_db"
SerializableRow = namedtuple("SerializableRow", ["id", "value"])
def get_cursor_descriptions(fields: list[str]) -> list[tuple[str]]:
    """Wrap each field name in a one-element tuple, mimicking DB-API ``cursor.description``."""
    return [(name,) for name in fields]
@pytest.fixture(autouse=True)
def create_connection(create_connection_without_db):
    """Create a mocked Airflow connection for Vertica."""
    # autouse: every test in this module gets the vertica_default connection.
    create_connection_without_db(
        Connection(
            conn_id=DEFAULT_CONN_ID,
            conn_type="vertica",
            host=HOST,
            login=USER,
            password=PASSWORD,
            schema=DATABASE,
        )
    )


@pytest.fixture
def vertica_hook():
    """Hook bound to the default test connection."""
    return VerticaHook(vertica_conn_id=DEFAULT_CONN_ID)


@pytest.fixture
def mock_get_conn():
    """Patch VerticaHook.get_conn for the duration of a test and yield the mock."""
    with patch("airflow.providers.vertica.hooks.vertica.VerticaHook.get_conn") as mock_conn:
        yield mock_conn


@pytest.fixture
def mock_cursor(mock_get_conn):
    """Cursor mock wired into the patched connection.

    Defaults: one (id, value) row, rowcount 1, single result set (nextset -> False).
    Individual tests override fetchall/nextset as needed.
    """
    cursor = MagicMock()
    type(cursor).rowcount = PropertyMock(return_value=1)
    cursor.fetchall.return_value = [("1", "row1")]
    cursor.description = get_cursor_descriptions(["id", "value"])
    cursor.nextset.side_effect = [False]
    mock_get_conn.return_value.cursor.return_value = cursor
    return cursor
def test_sqlalchemy_url_property(vertica_hook):
    """sqlalchemy_url renders as a vertica-python URL built from the connection fields."""
    url = vertica_hook.sqlalchemy_url.render_as_string(hide_password=False)
    expected_url = f"vertica-python://{USER}:{PASSWORD}@{HOST}:{PORT}/{DATABASE}"
    assert url.startswith(expected_url)
@pytest.mark.parametrize(
    ("return_last", "split_statements", "sql", "expected_calls", "cursor_results", "expected_result"),
    [
        pytest.param(
            True,
            False,
            "SELECT * FROM table",
            ["SELECT * FROM table"],
            [("1", "row1"), ("2", "row2")],
            [SerializableRow("1", "row1"), SerializableRow("2", "row2")],
            id="Single query, return_last=True",
        ),
        pytest.param(
            False,
            False,
            "SELECT * FROM table",
            ["SELECT * FROM table"],
            [("1", "row1"), ("2", "row2")],
            [SerializableRow("1", "row1"), SerializableRow("2", "row2")],
            id="Single query, return_last=False",
        ),
        pytest.param(
            True,
            True,
            "SELECT * FROM table1; SELECT * FROM table2;",
            ["SELECT * FROM table1;", "SELECT * FROM table2;"],
            # One result set per statement; only the last is returned.
            [[("1", "row1"), ("2", "row2")], [("3", "row3"), ("4", "row4")]],
            [SerializableRow("3", "row3"), SerializableRow("4", "row4")],
            id="Multiple queries, split_statements=True, return_last=True",
        ),
        pytest.param(
            True,
            False,
            "SELECT * FROM table1; SELECT * FROM table2;",
            # Without splitting, the whole string is executed as one statement.
            ["SELECT * FROM table1; SELECT * FROM table2;"],
            [("1", "row1"), ("2", "row2")],
            [SerializableRow("1", "row1"), SerializableRow("2", "row2")],
            id="Multiple queries, split_statements=False",
        ),
        pytest.param(
            True,
            False,
            "SELECT * FROM empty",
            ["SELECT * FROM empty"],
            [],
            [],
            id="Empty result",
        ),
    ],
)
def test_vertica_run_queries(
    vertica_hook,
    mock_cursor,
    return_last,
    split_statements,
    sql,
    expected_calls,
    cursor_results,
    expected_result,
):
    """Exercise VerticaHook.run across single/multi statement and return_last combinations."""
    if split_statements:
        # One fetchall result per statement; nextset advertises one extra set per statement.
        mock_cursor.fetchall.side_effect = cursor_results
        mock_cursor.nextset.side_effect = [True] * (len(cursor_results) - 1) + [False]
    else:
        mock_cursor.fetchall.return_value = cursor_results
        mock_cursor.nextset.side_effect = lambda: False
    result = vertica_hook.run(
        sql,
        handler=lambda cur: cur.fetchall(),
        split_statements=split_statements,
        return_last=return_last,
    )
    # Every expected statement must have been executed, in order.
    expected_mock_calls = [mock.call(sql_call) for sql_call in expected_calls]
    mock_cursor.execute.assert_has_calls(expected_mock_calls)
    assert [SerializableRow(*row) for row in result] == expected_result
def test_run_with_multiple_statements(vertica_hook, mock_cursor):
    """Splitting a multi-statement string executes each piece and returns the last result set."""
    mock_cursor.fetchall.side_effect = [[(1,)], [(2,)]]
    mock_cursor.nextset.side_effect = [True, False]
    results = vertica_hook.run(
        "SELECT 1; SELECT 2;",
        handler=lambda cur: cur.fetchall(),
        split_statements=True,
    )
    expected_statements = ["SELECT 1;", "SELECT 2;"]
    mock_cursor.execute.assert_has_calls([mock.call(stmt) for stmt in expected_statements])
    assert results == [(2,)]
def test_get_uri(vertica_hook):
    """
    Test that the get_uri() method returns the correct connection string.
    """
    # Built from the module-level connection constants (user/pass/host/port/db).
    assert vertica_hook.get_uri() == "vertica-python://user:pass@vertica.cloud.com:5433/test_db"
def test_get_sqlalchemy_engine(vertica_hook):
    """
    Test that the get_sqlalchemy_engine() method returns a valid SQLAlchemy engine.
    """
    with patch("airflow.providers.common.sql.hooks.sql.create_engine") as mock_create_engine:
        mock_engine = MagicMock(spec=Engine)
        mock_create_engine.return_value = mock_engine
        engine = vertica_hook.get_sqlalchemy_engine()
        assert engine is mock_engine
        mock_create_engine.assert_called_once()
        # Inspect the URL object passed to create_engine, field by field.
        call_args = mock_create_engine.call_args[1]
        assert "url" in call_args
        actual_url = call_args["url"]
        assert actual_url.drivername == "vertica-python"
        assert actual_url.username == "user"
        assert actual_url.password == "pass"
        assert actual_url.host == "vertica.cloud.com"
        assert actual_url.port == 5433
        assert actual_url.database == "test_db"
@pytest.mark.parametrize("sql", ["", "\n", " "])
def test_run_with_no_query(vertica_hook, sql):
    """
    Test that running with no SQL query raises a ValueError.
    """
    # Empty and whitespace-only strings are all rejected the same way.
    with pytest.raises(ValueError, match="List of SQL statements is empty"):
        vertica_hook.run(sql)
def test_run_with_invalid_column_names(vertica_hook, mock_cursor):
    """Rows are still returned (positionally accessible) when column names are not valid identifiers."""
    # Names that are not valid Python identifiers / are SQL keywords.
    invalid_names = [("1_2_3",), ("select",), ("from",)]
    mock_cursor.description = invalid_names
    mock_cursor.fetchall.return_value = [(1, "row1", "bar")]
    result = vertica_hook.run(sql="SELECT * FROM table", handler=lambda cur: cur.fetchall())
    # Access by index must keep working regardless of the column names.
    assert result[0][0] == 1
    assert result[0][1] == "row1"
    assert result[0][2] == "bar"
@pytest.fixture
def vertica_hook_with_timeout(create_connection_without_db):
    """Hook whose connection carries an ``execution_timeout`` of 1 second in extras."""
    create_connection_without_db(
        Connection(
            conn_id="vertica_timeout",
            conn_type="vertica",
            host="vertica.cloud.com",
            login="user",
            password="pass",
            schema="test_db",
            extra=json.dumps({"execution_timeout": 1}),
        )
    )
    return VerticaHook(vertica_conn_id="vertica_timeout")
def test_execution_timeout_exceeded(vertica_hook_with_timeout, mock_cursor):
    """A query exceeding the connection's execution_timeout surfaces as AirflowException.

    Cleanup: removed the unused ``mock_error_response`` MagicMock — it was
    created and configured but never passed to anything.
    """
    # Timeout enforcement lives in the base DbApiHook.run; simulate it raising.
    with patch(
        "airflow.providers.common.sql.hooks.sql.DbApiHook.run",
        side_effect=AirflowException("Query exceeded execution timeout"),
    ):
        with pytest.raises(AirflowException, match="Query exceeded execution timeout"):
            vertica_hook_with_timeout.run(sql="SELECT * FROM table1")
| {
"repo_id": "apache/airflow",
"file_path": "providers/vertica/tests/unit/vertica/hooks/test_vertica_sql.py",
"license": "Apache License 2.0",
"lines": 218,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/amazon/src/airflow/providers/amazon/aws/notifications/ses.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from collections.abc import Iterable, Sequence
from functools import cached_property
from typing import Any
from airflow.providers.amazon.aws.hooks.ses import SesHook
from airflow.providers.amazon.version_compat import AIRFLOW_V_3_1_PLUS
from airflow.providers.common.compat.notifier import BaseNotifier
from airflow.utils.helpers import prune_dict
class SesNotifier(BaseNotifier):
    """
    Amazon Simple Email Service (SES) Notifier.

    :param aws_conn_id: Airflow connection used for AWS credentials
        (defaults to ``SesHook.default_conn_name``)
    :param region_name: AWS region name forwarded to the hook; None uses the
        connection/default region
    :param mail_from: Email address to set as email's from
    :param to: List of email addresses to set as email's to
    :param subject: Email's subject
    :param html_content: Content of email in HTML format
    :param files: List of paths of files to be attached
    :param cc: List of email addresses to set as email's CC
    :param bcc: List of email addresses to set as email's BCC
    :param mime_subtype: Can be used to specify the subtype of the message. Default = mixed
    :param mime_charset: Email's charset. Default = UTF-8.
    :param return_path: The email address to which replies will be sent. By default, replies
        are sent to the original sender's email address.
    :param reply_to: The email address to which message bounces and complaints should be sent.
        "Return-Path" is sometimes called "envelope from", "envelope sender", or "MAIL FROM".
    :param custom_headers: Additional headers to add to the MIME message.
        No validations are run on these values, and they should be able to be encoded.
    """

    # Every email field (plus the AWS connection settings) is Jinja-templatable.
    template_fields: Sequence[str] = (
        "aws_conn_id",
        "region_name",
        "mail_from",
        "to",
        "subject",
        "html_content",
        "files",
        "cc",
        "bcc",
        "mime_subtype",
        "mime_charset",
        "reply_to",
        "return_path",
        "custom_headers",
    )

    def __init__(
        self,
        *,
        aws_conn_id: str | None = SesHook.default_conn_name,
        region_name: str | None = None,
        mail_from: str,
        to: str | Iterable[str],
        subject: str,
        html_content: str,
        files: list[str] | None = None,
        cc: str | Iterable[str] | None = None,
        bcc: str | Iterable[str] | None = None,
        mime_subtype: str = "mixed",
        mime_charset: str = "utf-8",
        reply_to: str | None = None,
        return_path: str | None = None,
        custom_headers: dict[str, Any] | None = None,
        **kwargs,
    ):
        if AIRFLOW_V_3_1_PLUS:
            # Support for passing context was added in 3.1.0
            super().__init__(**kwargs)
        else:
            super().__init__()
        self.aws_conn_id = aws_conn_id
        self.region_name = region_name
        self.mail_from = mail_from
        self.to = to
        self.subject = subject
        self.html_content = html_content
        self.files = files
        self.cc = cc
        self.bcc = bcc
        self.mime_subtype = mime_subtype
        self.mime_charset = mime_charset
        self.reply_to = reply_to
        self.return_path = return_path
        self.custom_headers = custom_headers

    def _build_send_kwargs(self):
        """Assemble kwargs for SesHook.send_email, dropping unset values via prune_dict."""
        # NOTE(review): exact pruning semantics come from airflow.utils.helpers.prune_dict —
        # presumably None/empty values are removed; confirm against that helper.
        return prune_dict(
            {
                "mail_from": self.mail_from,
                "to": self.to,
                "subject": self.subject,
                "html_content": self.html_content,
                "files": self.files,
                "cc": self.cc,
                "bcc": self.bcc,
                "mime_subtype": self.mime_subtype,
                "mime_charset": self.mime_charset,
                "reply_to": self.reply_to,
                "return_path": self.return_path,
                "custom_headers": self.custom_headers,
            }
        )

    @cached_property
    def hook(self) -> SesHook:
        """Amazon Simple Email Service (SES) Hook (cached)."""
        return SesHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)

    def notify(self, context):
        """Send email using Amazon Simple Email Service (SES)."""
        self.hook.send_email(**self._build_send_kwargs())

    async def async_notify(self, context):
        """Send email using Amazon Simple Email Service (SES) (async)."""
        await self.hook.asend_email(**self._build_send_kwargs())


# Callable-style alias exported alongside the class.
send_ses_notification = SesNotifier
| {
"repo_id": "apache/airflow",
"file_path": "providers/amazon/src/airflow/providers/amazon/aws/notifications/ses.py",
"license": "Apache License 2.0",
"lines": 125,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/amazon/tests/unit/amazon/aws/notifications/test_ses.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
import pytest
from airflow.providers.amazon.aws.notifications.ses import SesNotifier, send_ses_notification
from airflow.providers.amazon.version_compat import NOTSET
TEST_EMAIL_PARAMS = {
"mail_from": "from@test.com",
"to": "to@test.com",
"subject": "Test Subject",
"html_content": "<p>Test Content</p>",
}
# The hook sets these default values if they are not provided
HOOK_DEFAULTS = {
"mime_charset": "utf-8",
"mime_subtype": "mixed",
}
class TestSesNotifier:
    """Unit tests for ``SesNotifier`` and its ``send_ses_notification`` alias."""

    def test_class_and_notifier_are_same(self):
        # The module-level alias must point at the class itself, not a wrapper.
        assert send_ses_notification is SesNotifier

    @pytest.mark.parametrize(
        "aws_conn_id",
        [
            pytest.param("aws_test_conn_id", id="custom-conn"),
            pytest.param(None, id="none-conn"),
            pytest.param(NOTSET, id="default-value"),
        ],
    )
    @pytest.mark.parametrize(
        "region_name",
        [
            pytest.param("eu-west-2", id="custom-region"),
            pytest.param(None, id="no-region"),
            pytest.param(NOTSET, id="default-value"),
        ],
    )
    def test_parameters_propagate_to_hook(self, aws_conn_id, region_name):
        """Test notifier attributes propagate to SesHook."""
        # NOTSET means "do not pass the kwarg at all" so the notifier's own
        # defaults are exercised, as opposed to passing an explicit None.
        notifier_kwargs = {}
        if aws_conn_id is not NOTSET:
            notifier_kwargs["aws_conn_id"] = aws_conn_id
        if region_name is not NOTSET:
            notifier_kwargs["region_name"] = region_name
        notifier = SesNotifier(**notifier_kwargs, **TEST_EMAIL_PARAMS)
        with mock.patch("airflow.providers.amazon.aws.notifications.ses.SesHook") as mock_hook:
            hook = notifier.hook
            # Accessing the property twice must return the same (cached) object.
            assert hook is notifier.hook, "Hook property not cached"
            mock_hook.assert_called_once_with(
                aws_conn_id=(aws_conn_id if aws_conn_id is not NOTSET else "aws_default"),
                region_name=(region_name if region_name is not NOTSET else None),
            )
            # Basic check for notifier
            notifier.notify({})
            mock_hook.return_value.send_email.assert_called_once_with(**TEST_EMAIL_PARAMS, **HOOK_DEFAULTS)

    @pytest.mark.asyncio
    async def test_async_notify(self):
        """Test async notification sends correctly."""
        notifier = SesNotifier(**TEST_EMAIL_PARAMS)
        with mock.patch("airflow.providers.amazon.aws.notifications.ses.SesHook") as mock_hook:
            mock_hook.return_value.asend_email = mock.AsyncMock()
            await notifier.async_notify({})
            mock_hook.return_value.asend_email.assert_called_once_with(**TEST_EMAIL_PARAMS, **HOOK_DEFAULTS)

    def test_ses_notifier_with_optional_params(self):
        """Test notifier handles all optional parameters correctly."""
        email_params = {
            **TEST_EMAIL_PARAMS,
            "files": ["test.txt"],
            "cc": ["cc@test.com"],
            "bcc": ["bcc@test.com"],
            "mime_subtype": "alternative",
            "mime_charset": "ascii",
            "reply_to": "reply@test.com",
            "return_path": "bounce@test.com",
            "custom_headers": {"X-Custom": "value"},
        }
        notifier = SesNotifier(**email_params)
        with mock.patch("airflow.providers.amazon.aws.notifications.ses.SesHook") as mock_hook:
            notifier.notify({})
            # mime_* are set explicitly here, so HOOK_DEFAULTS must not be merged in.
            mock_hook.return_value.send_email.assert_called_once_with(**email_params)

    def test_ses_notifier_templated(self, create_dag_without_db):
        """Test template fields are properly rendered."""
        templated_params = {
            "aws_conn_id": "{{ dag.dag_id }}",
            "region_name": "{{ var_region }}",
            "mail_from": "{{ var_from }}",
            "to": "{{ var_to }}",
            "subject": "{{ var_subject }}",
            "html_content": "Hello {{ var_name }}",
            "cc": ["cc@{{ var_domain }}"],
            "bcc": ["bcc@{{ var_domain }}"],
            "reply_to": "reply@{{ var_domain }}",
        }
        notifier = SesNotifier(**templated_params)
        with mock.patch("airflow.providers.amazon.aws.notifications.ses.SesHook") as mock_hook:
            # The render context supplies both the dag (for aws_conn_id) and the
            # ad-hoc template variables referenced above.
            context = {
                "dag": create_dag_without_db("test_ses_notifier_templated"),
                "var_region": "us-west-1",
                "var_from": "from@example.com",
                "var_to": "to@example.com",
                "var_subject": "Test Email",
                "var_name": "John",
                "var_domain": "example.com",
            }
            notifier(context)
            mock_hook.assert_called_once_with(
                aws_conn_id="test_ses_notifier_templated",
                region_name="us-west-1",
            )
            mock_hook.return_value.send_email.assert_called_once_with(
                mail_from="from@example.com",
                to="to@example.com",
                subject="Test Email",
                html_content="Hello John",
                cc=["cc@example.com"],
                bcc=["bcc@example.com"],
                mime_subtype="mixed",
                mime_charset="utf-8",
                reply_to="reply@example.com",
            )
| {
"repo_id": "apache/airflow",
"file_path": "providers/amazon/tests/unit/amazon/aws/notifications/test_ses.py",
"license": "Apache License 2.0",
"lines": 134,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/standard/src/airflow/providers/standard/decorators/stub.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import ast
from collections.abc import Callable
from typing import TYPE_CHECKING, Any
from airflow.providers.common.compat.sdk import (
DecoratedOperator,
TaskDecorator,
task_decorator_factory,
)
if TYPE_CHECKING:
from airflow.providers.common.compat.sdk import Context
class _StubOperator(DecoratedOperator):
    """Operator backing ``@task.stub``.

    At parse time it validates that the decorated callable has an *empty*
    body (only ``pass``, ``...``, and/or a bare string such as a docstring).
    Execution is expected to happen in an external environment, so
    :meth:`execute` always raises.
    """

    custom_operator_name: str = "@task.stub"

    def __init__(
        self,
        *,
        python_callable: Callable,
        task_id: str,
        **kwargs,
    ) -> None:
        super().__init__(
            python_callable=python_callable,
            task_id=task_id,
            **kwargs,
        )
        # Validate python callable: parse its source and reject any real logic.
        module = ast.parse(self.get_python_source())
        if len(module.body) != 1:
            raise RuntimeError("Expected a single statement")
        fn = module.body[0]
        if not isinstance(fn, ast.FunctionDef):
            # Async defs parse to ast.AsyncFunctionDef and are rejected here.
            raise RuntimeError("Expected a single sync function")
        for stmt in fn.body:
            if isinstance(stmt, ast.Pass):
                continue
            if isinstance(stmt, ast.Expr):
                # Allow a bare string (docstring) or a literal Ellipsis.
                if isinstance(stmt.value, ast.Constant) and isinstance(stmt.value.value, (str, type(...))):
                    continue
            # Any other statement (returns, calls, assignments, ...) is real logic.
            raise ValueError(
                f"Functions passed to @task.stub must be an empty function (`pass`, or `...` only) (got {stmt})"
            )

    def execute(self, context: Context) -> Any:
        raise RuntimeError(
            "@task.stub should not be executed directly -- we expected this to go to a remote worker. "
            "Check your pool and worker configs"
        )
def stub(
    python_callable: Callable | None = None,
    queue: str | None = None,
    executor: str | None = None,
    **kwargs,
) -> TaskDecorator:
    """
    Define a stub task in the DAG.

    Stub tasks exist in the Dag graph only, but the execution must happen in an external
    environment via the Task Execution Interface.
    """
    # Thin wrapper: bind our operator class and hand everything else through
    # to the shared decorator factory.
    factory = task_decorator_factory
    return factory(
        python_callable=python_callable,
        decorated_operator_class=_StubOperator,
        queue=queue,
        executor=executor,
        **kwargs,
    )
| {
"repo_id": "apache/airflow",
"file_path": "providers/standard/src/airflow/providers/standard/decorators/stub.py",
"license": "Apache License 2.0",
"lines": 81,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/standard/tests/unit/standard/decorators/test_stub.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import contextlib
import pytest
from airflow.providers.standard.decorators.stub import stub
# Fixture callables covering every body shape the @task.stub validator must
# accept (empty in all its spellings) or reject (real logic).
def fn_ellipsis(): ...


def fn_pass():
    # Fixed: previously this was `...`, which duplicated fn_ellipsis and never
    # exercised the ast.Pass branch its parametrize id ("pass") claims to test.
    pass


def fn_doc():
    """Some string"""


def fn_doc_pass():
    """Some string"""
    pass


def fn_code():
    return None
@pytest.mark.parametrize(
    ("fn", "error"),
    [
        # Empty bodies in all accepted spellings must validate cleanly...
        pytest.param(fn_ellipsis, contextlib.nullcontext(), id="ellipsis"),
        pytest.param(fn_pass, contextlib.nullcontext(), id="pass"),
        pytest.param(fn_doc, contextlib.nullcontext(), id="doc"),
        pytest.param(fn_doc_pass, contextlib.nullcontext(), id="doc-and-pass"),
        # ...while any real statement must be rejected at decoration time.
        pytest.param(fn_code, pytest.raises(ValueError, match="must be an empty function"), id="not-empty"),
    ],
)
def test_stub_signature(fn, error):
    # Calling the decorated callable triggers _StubOperator's parse-time
    # body validation, which is what raises for non-empty functions.
    with error:
        stub(fn)()
| {
"repo_id": "apache/airflow",
"file_path": "providers/standard/tests/unit/standard/decorators/test_stub.py",
"license": "Apache License 2.0",
"lines": 42,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/dag_processing/dagbag.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import contextlib
import os
import sys
import textwrap
import warnings
from collections.abc import Generator
from datetime import datetime, timedelta
from pathlib import Path
from typing import TYPE_CHECKING, NamedTuple
from tabulate import tabulate
from airflow import settings
from airflow._shared.timezones import timezone
from airflow.configuration import conf
from airflow.dag_processing.importers import get_importer_registry
from airflow.exceptions import (
AirflowClusterPolicyError,
AirflowClusterPolicySkipDag,
AirflowClusterPolicyViolation,
AirflowDagDuplicatedIdException,
AirflowException,
UnknownExecutorException,
)
from airflow.executors.executor_loader import ExecutorLoader
from airflow.listeners.listener import get_listener_manager
from airflow.serialization.definitions.notset import NOTSET, ArgNotSet, is_arg_set
from airflow.serialization.serialized_objects import LazyDeserializedDAG
from airflow.utils.file import correct_maybe_zipped
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.session import NEW_SESSION, provide_session
if TYPE_CHECKING:
from sqlalchemy.orm import Session
from airflow import DAG
from airflow.models.dagwarning import DagWarning
@contextlib.contextmanager
def _capture_with_reraise() -> Generator[list[warnings.WarningMessage], None, None]:
    """Record warnings raised inside the context and replay them on exit."""
    recorded: list[warnings.WarningMessage] = []
    try:
        with warnings.catch_warnings(record=True) as recorded:
            yield recorded
    finally:
        # Replay each captured warning with its original location so outer
        # filters/handlers still see them after the context exits.
        for entry in recorded or ():
            warnings.warn_explicit(
                message=entry.message,
                category=entry.category,
                filename=entry.filename,
                lineno=entry.lineno,
                source=entry.source,
            )
class FileLoadStat(NamedTuple):
    """
    Information about single file.

    :param file: Loaded file.
    :param duration: Time spent on process file.
    :param dag_num: Total number of DAGs loaded in this file.
    :param task_num: Total number of Tasks loaded in this file.
    :param dags: DAGs names loaded in this file.
    :param warning_num: Total number of warnings captured from processing this file.
    :param bundle_path: The bundle path from DagBag, if any.
    :param bundle_name: The bundle name from DagBag, if any.
    """

    # Field order is part of the tuple contract — do not reorder.
    file: str
    duration: timedelta
    dag_num: int
    task_num: int
    # Stringified list of the dag_ids found in the file.
    dags: str
    warning_num: int
    bundle_path: Path | None
    bundle_name: str | None
def _executor_exists(executor_name: str, team_name: str | None) -> bool:
    """Check if executor exists, with global fallback for teams."""
    # Try the team-scoped lookup first; when a team was given and that lookup
    # fails, retry once against the global executors (team_name=None).
    candidates: list[str | None] = [team_name]
    if team_name:
        candidates.append(None)
    for candidate in candidates:
        try:
            ExecutorLoader.lookup_executor_name_by_str(
                executor_name, team_name=candidate, validate_teams=False
            )
            return True
        except UnknownExecutorException:
            continue
    return False
def _validate_executor_fields(dag: DAG, bundle_name: str | None = None) -> None:
    """Validate that executors specified in tasks are available and owned by the same team as the dag bundle."""
    import logging

    log = logging.getLogger(__name__)
    dag_team_name = None
    # Check if multi team is available by reading the multi_team configuration (which is boolean)
    if conf.getboolean("core", "multi_team"):
        # Get team name from bundle configuration if available
        if bundle_name:
            from airflow.dag_processing.bundles.manager import DagBundlesManager

            bundle_manager = DagBundlesManager()
            # NOTE(review): reaches into the private _bundle_config mapping and
            # raises KeyError for an unknown bundle_name -- confirm intended.
            bundle_config = bundle_manager._bundle_config[bundle_name]
            dag_team_name = bundle_config.team_name
            if dag_team_name:
                log.debug(
                    "Found team '%s' for DAG '%s' via bundle '%s'", dag_team_name, dag.dag_id, bundle_name
                )
    # Tasks without an explicit executor fall back to the default and need no check.
    for task in dag.tasks:
        if not task.executor:
            continue
        if not _executor_exists(task.executor, dag_team_name):
            if dag_team_name:
                raise UnknownExecutorException(
                    f"Task '{task.task_id}' specifies executor '{task.executor}', which is not available "
                    f"for team '{dag_team_name}' (the team associated with DAG '{dag.dag_id}') or as a global executor. "
                    f"Make sure '{task.executor}' is configured for team '{dag_team_name}' or globally in your "
                    "[core] executors configuration, or update the task's executor to use one of the "
                    f"configured executors for team '{dag_team_name}' or available global executors."
                )
            raise UnknownExecutorException(
                f"Task '{task.task_id}' specifies executor '{task.executor}', which is not available. "
                "Make sure it is listed in your [core] executors configuration, or update the task's "
                "executor to use one of the configured executors."
            )
class DagBag(LoggingMixin):
    """
    A dagbag is a collection of dags, parsed out of a folder tree and has high level configuration settings.

    Some possible setting are database to use as a backend and what executor
    to use to fire off tasks. This makes it easier to run distinct environments
    for say production and development, tests, or for different teams or security
    profiles. What would have been system level settings are now dagbag level so
    that one system can run multiple, independent settings sets.

    :param dag_folder: the folder to scan to find DAGs
    :param include_examples: whether to include the examples that ship
        with airflow or not
    :param safe_mode: when ``False``, scans all python modules for dags.
        When ``True`` uses heuristics (files containing ``DAG`` and ``airflow`` strings)
        to filter python modules to scan for dags.
    :param load_op_links: Should the extra operator link be loaded via plugins when
        de-serializing the DAG? This flag is set to False in Scheduler so that Extra Operator links
        are not loaded to not run User code in Scheduler.
    :param collect_dags: when True, collects dags during class initialization.
    :param known_pools: If not none, then generate warnings if a Task attempts to use an unknown pool.
    """

    def __init__(
        self,
        dag_folder: str | Path | None = None,  # todo AIP-66: rename this to path
        include_examples: bool | ArgNotSet = NOTSET,
        safe_mode: bool | ArgNotSet = NOTSET,
        load_op_links: bool = True,
        collect_dags: bool = True,
        known_pools: set[str] | None = None,
        bundle_path: Path | None = None,
        bundle_name: str | None = None,
    ):
        super().__init__()
        self.bundle_path = bundle_path
        self.bundle_name = bundle_name
        dag_folder = dag_folder or settings.DAGS_FOLDER
        self.dag_folder = dag_folder
        # dag_id -> DAG for everything successfully bagged.
        self.dags: dict[str, DAG] = {}
        # the file's last modified timestamp when we last read it
        self.file_last_changed: dict[str, datetime] = {}
        # Store import errors with relative file paths as keys (relative to bundle_path)
        self.import_errors: dict[str, str] = {}
        # filepath -> formatted warning strings captured while parsing that file.
        self.captured_warnings: dict[str, tuple[str, ...]] = {}
        self.has_logged = False
        # Only used by SchedulerJob to compare the dag_hash to identify change in DAGs
        self.dags_hash: dict[str, str] = {}
        self.known_pools = known_pools
        self.dagbag_import_error_tracebacks = conf.getboolean("core", "dagbag_import_error_tracebacks")
        self.dagbag_import_error_traceback_depth = conf.getint("core", "dagbag_import_error_traceback_depth")
        if collect_dags:
            # NOTSET arguments fall back to the current configuration values.
            self.collect_dags(
                dag_folder=dag_folder,
                include_examples=(
                    include_examples
                    if is_arg_set(include_examples)
                    else conf.getboolean("core", "LOAD_EXAMPLES")
                ),
                safe_mode=(
                    safe_mode if is_arg_set(safe_mode) else conf.getboolean("core", "DAG_DISCOVERY_SAFE_MODE")
                ),
            )
        # Should the extra operator link be loaded via plugins?
        # This flag is set to False in Scheduler so that Extra Operator links are not loaded
        self.load_op_links = load_op_links

    def size(self) -> int:
        """:return: the amount of dags contained in this dagbag"""
        return len(self.dags)

    @property
    def dag_ids(self) -> list[str]:
        """
        Get DAG ids.

        :return: a list of DAG IDs in this bag
        """
        return list(self.dags)

    @provide_session
    def get_dag(self, dag_id, session: Session = NEW_SESSION):
        """
        Get the DAG out of the dictionary, and refreshes it if expired.

        :param dag_id: DAG ID
        """
        # Avoid circular import
        from airflow.models.dag import DagModel

        dag = self.dags.get(dag_id)
        # If DAG Model is absent, we can't check last_expired property. Is the DAG not yet synchronized?
        if (orm_dag := DagModel.get_current(dag_id, session=session)) is None:
            return dag
        is_expired = (
            orm_dag.last_expired and dag and dag.last_loaded and dag.last_loaded < orm_dag.last_expired
        )
        if is_expired:
            # Remove associated dags so we can re-add them.
            self.dags.pop(dag_id, None)
        if dag is None or is_expired:
            # Reprocess source file.
            found_dags = self.process_file(
                filepath=correct_maybe_zipped(orm_dag.fileloc), only_if_updated=False
            )
            # If the source file no longer exports `dag_id`, delete it from self.dags
            if found_dags and dag_id in [found_dag.dag_id for found_dag in found_dags]:
                return self.dags[dag_id]
            self.dags.pop(dag_id, None)
        return self.dags.get(dag_id)

    def process_file(self, filepath, only_if_updated=True, safe_mode=True):
        """Process a DAG file and return found DAGs."""
        if filepath is None or not os.path.isfile(filepath):
            return []
        try:
            # Skip unchanged files when only_if_updated is set (mtime comparison).
            file_last_changed_on_disk = datetime.fromtimestamp(os.path.getmtime(filepath))
            if (
                only_if_updated
                and filepath in self.file_last_changed
                and file_last_changed_on_disk == self.file_last_changed[filepath]
            ):
                return []
        except Exception as e:
            self.log.exception(e)
            return []
        # Drop stale warnings for this file before re-parsing it.
        self.captured_warnings.pop(filepath, None)
        registry = get_importer_registry()
        importer = registry.get_importer(filepath)
        if importer is None:
            self.log.debug("No importer found for file: %s", filepath)
            return []
        result = importer.import_file(
            file_path=filepath,
            bundle_path=self.bundle_path,
            bundle_name=self.bundle_name,
            safe_mode=safe_mode,
        )
        if result.skipped_files:
            for skipped in result.skipped_files:
                # Only log the "skipping" message once per DagBag instance.
                if not self.has_logged:
                    self.has_logged = True
                    self.log.info("File %s assumed to contain no DAGs. Skipping.", skipped)
        if result.errors:
            for error in result.errors:
                # Use the relative file path from error (importer provides relative paths)
                # Fall back to converting filepath to relative if error.file_path is not set
                error_path = error.file_path if error.file_path else self._get_relative_fileloc(filepath)
                error_msg = error.stacktrace if error.stacktrace else error.message
                self.import_errors[error_path] = error_msg
                self.log.error("Error loading DAG from %s: %s", error_path, error.message)
        if result.warnings:
            formatted_warnings = [
                f"{w.file_path}:{w.line_number}: {w.warning_type}: {w.message}" for w in result.warnings
            ]
            self.captured_warnings[filepath] = tuple(formatted_warnings)
            # Re-emit warnings so they can be handled by Python's warning system
            for w in result.warnings:
                warnings.warn_explicit(
                    message=w.message,
                    category=UserWarning,
                    filename=w.file_path,
                    lineno=w.line_number or 0,
                )
        bagged_dags = []
        for dag in result.dags:
            try:
                if dag.fileloc is None:
                    dag.fileloc = filepath
                # Validate before adding to bag (matches original _process_modules behavior)
                dag.validate()
                _validate_executor_fields(dag, self.bundle_name)
                self.bag_dag(dag=dag)
                bagged_dags.append(dag)
            except AirflowClusterPolicySkipDag:
                # Cluster policy deliberately excluded this DAG; not an error.
                self.log.debug("DAG %s skipped by cluster policy", dag.dag_id)
            except Exception as e:
                self.log.exception("Error bagging DAG from %s", filepath)
                relative_path = self._get_relative_fileloc(filepath)
                self.import_errors[relative_path] = f"{type(e).__name__}: {e}"
        self.file_last_changed[filepath] = file_last_changed_on_disk
        return bagged_dags

    @property
    def dag_warnings(self) -> set[DagWarning]:
        """Get the set of DagWarnings for the bagged dags."""
        from airflow.models.dagwarning import DagWarning, DagWarningType

        # None means this feature is not enabled. Empty set means we don't know about any pools at all!
        if self.known_pools is None:
            return set()

        def get_pools(dag) -> dict[str, set[str]]:
            return {dag.dag_id: {task.pool for task in dag.tasks}}

        pool_dict: dict[str, set[str]] = {}
        for dag in self.dags.values():
            pool_dict.update(get_pools(dag))
        # NOTE: this local intentionally shadows the module-level ``warnings`` import.
        warnings: set[DagWarning] = set()
        for dag_id, dag_pools in pool_dict.items():
            nonexistent_pools = dag_pools - self.known_pools
            if nonexistent_pools:
                warnings.add(
                    DagWarning(
                        dag_id,
                        DagWarningType.NONEXISTENT_POOL,
                        f"Dag '{dag_id}' references non-existent pools: {sorted(nonexistent_pools)!r}",
                    )
                )
        return warnings

    def _get_relative_fileloc(self, filepath: str) -> str:
        """
        Get the relative file location for a given filepath.

        :param filepath: Absolute path to the file
        :return: Relative path from bundle_path, or original filepath if no bundle_path
        """
        if self.bundle_path:
            return str(Path(filepath).relative_to(self.bundle_path))
        return filepath

    def bag_dag(self, dag: DAG):
        """
        Add the DAG into the bag.

        :raises: AirflowDagCycleException if a cycle is detected.
        :raises: AirflowDagDuplicatedIdException if this dag already exists in the bag.
        """
        dag.check_cycle()
        dag.resolve_template_files()
        dag.last_loaded = timezone.utcnow()
        try:
            settings.dag_policy(dag)
            for task in dag.tasks:
                if getattr(task, "end_from_trigger", False) and get_listener_manager().has_listeners:
                    # NOTE(review): AirflowException does not %-interpolate; the
                    # task/dag ids are passed as extra args, so the %s placeholders
                    # in the message are never filled in -- confirm intent.
                    raise AirflowException(
                        "Listeners are not supported with end_from_trigger=True for deferrable operators. "
                        "Task %s in DAG %s has end_from_trigger=True with listeners from plugins. "
                        "Set end_from_trigger=False to use listeners.",
                        task.task_id,
                        dag.dag_id,
                    )
                settings.task_policy(task)
        except (AirflowClusterPolicyViolation, AirflowClusterPolicySkipDag):
            # Policy violations/skips propagate unchanged to the caller.
            raise
        except Exception as e:
            self.log.exception(e)
            raise AirflowClusterPolicyError(e)
        from airflow.sdk.exceptions import AirflowDagCycleException

        try:
            # Two files exporting the same dag_id is a duplication error.
            prev_dag = self.dags.get(dag.dag_id)
            if prev_dag and prev_dag.fileloc != dag.fileloc:
                raise AirflowDagDuplicatedIdException(
                    dag_id=dag.dag_id,
                    incoming=dag.fileloc,
                    existing=self.dags[dag.dag_id].fileloc,
                )
            self.dags[dag.dag_id] = dag
            self.log.debug("Loaded DAG %s", dag)
        except (AirflowDagCycleException, AirflowDagDuplicatedIdException):
            # There was an error in bagging the dag. Remove it from the list of dags
            self.log.exception("Exception bagging dag: %s", dag.dag_id)
            raise

    def collect_dags(
        self,
        dag_folder: str | Path | None = None,
        only_if_updated: bool = True,
        include_examples: bool = conf.getboolean("core", "LOAD_EXAMPLES"),
        safe_mode: bool = conf.getboolean("core", "DAG_DISCOVERY_SAFE_MODE"),
    ):
        """
        Look for python modules in a given path, import them, and add them to the dagbag collection.

        Note that if a ``.airflowignore`` file is found while processing
        the directory, it will behave much like a ``.gitignore``,
        ignoring files that match any of the patterns specified
        in the file.

        **Note**: The patterns in ``.airflowignore`` are interpreted as either
        un-anchored regexes or gitignore-like glob expressions, depending on
        the ``DAG_IGNORE_FILE_SYNTAX`` configuration parameter.
        """
        # NOTE(review): the conf.getboolean defaults in the signature above are
        # evaluated once at import time; later config changes are not reflected
        # in the defaults -- confirm this is intended.
        self.log.info("Filling up the DagBag from %s", dag_folder)
        dag_folder = dag_folder or self.dag_folder
        # Used to store stats around DagBag processing
        stats = []
        # Ensure dag_folder is a str -- it may have been a pathlib.Path
        dag_folder = correct_maybe_zipped(str(dag_folder))
        registry = get_importer_registry()
        files_to_parse = registry.list_dag_files(dag_folder, safe_mode=safe_mode)
        if include_examples:
            from airflow import example_dags

            example_dag_folder = next(iter(example_dags.__path__))
            files_to_parse.extend(registry.list_dag_files(example_dag_folder, safe_mode=safe_mode))
        for filepath in files_to_parse:
            try:
                file_parse_start_dttm = timezone.utcnow()
                found_dags = self.process_file(filepath, only_if_updated=only_if_updated, safe_mode=safe_mode)
                file_parse_end_dttm = timezone.utcnow()
                try:
                    relative_file = Path(filepath).relative_to(Path(self.dag_folder)).as_posix()
                except ValueError:
                    # filepath is not under dag_folder (e.g., example DAGs from a different location)
                    relative_file = Path(filepath).as_posix()
                stats.append(
                    FileLoadStat(
                        file=relative_file,
                        duration=file_parse_end_dttm - file_parse_start_dttm,
                        dag_num=len(found_dags),
                        task_num=sum(len(dag.tasks) for dag in found_dags),
                        dags=str([dag.dag_id for dag in found_dags]),
                        warning_num=len(self.captured_warnings.get(filepath, [])),
                        bundle_path=self.bundle_path,
                        bundle_name=self.bundle_name,
                    )
                )
            except Exception as e:
                self.log.exception(e)
        # Slowest files first, for the report.
        self.dagbag_stats = sorted(stats, key=lambda x: x.duration, reverse=True)

    def dagbag_report(self):
        """Print a report around DagBag loading stats."""
        stats = self.dagbag_stats
        dag_folder = self.dag_folder
        duration = sum((o.duration for o in stats), timedelta()).total_seconds()
        dag_num = sum(o.dag_num for o in stats)
        task_num = sum(o.task_num for o in stats)
        table = tabulate(stats, headers="keys")

        report = textwrap.dedent(
            f"""\n
        -------------------------------------------------------------------
        DagBag loading stats for {dag_folder}
        -------------------------------------------------------------------
        Number of DAGs: {dag_num}
        Total task number: {task_num}
        DagBag parsing time: {duration}\n{table}
        """
        )
        return report
class BundleDagBag(DagBag):
    """
    Bundle-aware DagBag that permanently modifies sys.path.

    This class adds the bundle_path to sys.path permanently to allow DAG files
    to import modules from their bundle directory. No cleanup is performed.

    WARNING: Only use for one-off usages like CLI commands. Using this in long-running
    processes will cause sys.path to accumulate entries.

    Same parameters as DagBag, but bundle_path is required and examples are not loaded.
    """

    def __init__(self, *args, bundle_path: Path | None = None, **kwargs):
        if not bundle_path:
            raise ValueError("bundle_path is required for BundleDagBag")
        # Permanent, deliberate sys.path mutation (see class docstring).
        if str(bundle_path) not in sys.path:
            sys.path.append(str(bundle_path))
        # Warn if user explicitly set include_examples=True, since bundles never contain examples
        if kwargs.get("include_examples") is True:
            warnings.warn(
                "include_examples=True is ignored for BundleDagBag. "
                "Bundles do not contain example DAGs, so include_examples is always False.",
                UserWarning,
                stacklevel=2,
            )
        kwargs["bundle_path"] = bundle_path
        kwargs["include_examples"] = False
        super().__init__(*args, **kwargs)
@provide_session
def sync_bag_to_db(
    dagbag: DagBag,
    bundle_name: str,
    bundle_version: str | None,
    *,
    session: Session = NEW_SESSION,
) -> None:
    """Save attributes about list of DAG to the DB."""
    from airflow.dag_processing.collection import update_dag_parsing_results_in_db

    # Import errors are keyed by (bundle, relative path) in the DB layer.
    import_errors = {(bundle_name, rel_path): error for rel_path, error in dagbag.import_errors.items()}
    # Build the set of all files that were parsed and include files with import errors
    # in case they are not in file_last_changed
    files_parsed = set(import_errors)
    if dagbag.bundle_path:
        files_parsed.update(
            (bundle_name, dagbag._get_relative_fileloc(abs_filepath))
            for abs_filepath in dagbag.file_last_changed
        )
    update_dag_parsing_results_in_db(
        bundle_name,
        bundle_version,
        [LazyDeserializedDAG.from_dag(dag) for dag in dagbag.dags.values()],
        import_errors,
        None,  # file parsing duration is not well defined when parsing multiple files / multiple DAGs.
        dagbag.dag_warnings,
        session=session,
        files_parsed=files_parsed,
    )
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/dag_processing/dagbag.py",
"license": "Apache License 2.0",
"lines": 513,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/keycloak/src/airflow/providers/keycloak/auth_manager/services/token.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
from fastapi import HTTPException, status
from keycloak import KeycloakAuthenticationError
from airflow.api_fastapi.app import get_auth_manager
from airflow.providers.common.compat.sdk import conf
from airflow.providers.keycloak.auth_manager.keycloak_auth_manager import KeycloakAuthManager
from airflow.providers.keycloak.auth_manager.user import KeycloakAuthManagerUser
def create_token_for(
    username: str,
    password: str,
    expiration_time_in_seconds: int | None = None,
) -> str:
    """
    Authenticate *username*/*password* against Keycloak and return an Airflow JWT.

    :param username: Keycloak username.
    :param password: Keycloak password.
    :param expiration_time_in_seconds: JWT lifetime. Defaults to the
        ``[api_auth] jwt_expiration_time`` config value, read at call time.
    :raises HTTPException: 403 when Keycloak rejects the credentials.
    """
    if expiration_time_in_seconds is None:
        # Read lazily: the previous default was evaluated once at module import,
        # so configuration changes after import were silently ignored.
        expiration_time_in_seconds = conf.getint("api_auth", "jwt_expiration_time")
    client = KeycloakAuthManager.get_keycloak_client()
    try:
        tokens = client.token(username, password)
    except KeycloakAuthenticationError:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Invalid credentials",
        )
    userinfo_raw: dict | bytes = client.userinfo(tokens["access_token"])
    # Decode bytes to dict if necessary
    userinfo: dict = json.loads(userinfo_raw) if isinstance(userinfo_raw, bytes) else userinfo_raw
    user = KeycloakAuthManagerUser(
        user_id=userinfo["sub"],
        name=userinfo["preferred_username"],
        access_token=tokens["access_token"],
        refresh_token=tokens["refresh_token"],
    )
    return get_auth_manager().generate_jwt(user, expiration_time_in_seconds=expiration_time_in_seconds)
def create_client_credentials_token(
    client_id: str,
    client_secret: str,
    expiration_time_in_seconds: int | None = None,
) -> str:
    """
    Create token using OAuth2 client_credentials grant type.

    This authentication flow uses the provided client_id and client_secret
    to obtain a token for a service account. The Keycloak client must have:
    - Service accounts roles: ON
    - Client Authentication: ON (confidential client)
    The service account must be configured with the appropriate roles/permissions.

    :param client_id: OAuth2 client id of the confidential Keycloak client.
    :param client_secret: OAuth2 client secret.
    :param expiration_time_in_seconds: JWT lifetime. Defaults to the
        ``[api_auth] jwt_expiration_time`` config value, read at call time.
    :raises HTTPException: 403 when the client credentials are rejected.
    """
    if expiration_time_in_seconds is None:
        # Read lazily: the previous default was evaluated once at module import,
        # so configuration changes after import were silently ignored.
        expiration_time_in_seconds = conf.getint("api_auth", "jwt_expiration_time")
    # Get Keycloak client with service account credentials
    client = KeycloakAuthManager.get_keycloak_client(
        client_id=client_id,
        client_secret=client_secret,
    )
    try:
        tokens = client.token(grant_type="client_credentials")
    except KeycloakAuthenticationError:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Client credentials authentication failed",
        )
    # For client_credentials, get the service account user info
    # The token represents the service account associated with the client
    userinfo_raw: dict | bytes = client.userinfo(tokens["access_token"])
    # Decode bytes to dict if necessary
    userinfo: dict = json.loads(userinfo_raw) if isinstance(userinfo_raw, bytes) else userinfo_raw
    user = KeycloakAuthManagerUser(
        user_id=userinfo["sub"],
        name=userinfo.get("preferred_username", userinfo.get("clientId", "service-account")),
        access_token=tokens["access_token"],
        refresh_token=tokens.get(
            "refresh_token"
        ),  # client_credentials may not return refresh_token (RFC6749 section 4.4.3)
    )
    return get_auth_manager().generate_jwt(user, expiration_time_in_seconds=expiration_time_in_seconds)
| {
"repo_id": "apache/airflow",
"file_path": "providers/keycloak/src/airflow/providers/keycloak/auth_manager/services/token.py",
"license": "Apache License 2.0",
"lines": 86,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/keycloak/tests/unit/keycloak/auth_manager/services/test_token.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import Mock, patch
import fastapi
import pytest
from keycloak import KeycloakAuthenticationError
from airflow.providers.common.compat.sdk import conf
from airflow.providers.keycloak.auth_manager.services.token import (
create_client_credentials_token,
create_token_for,
)
from tests_common.test_utils.config import conf_vars
class TestTokenService:
    """Unit tests for the Keycloak auth manager token service functions."""

    # JWT the mocked auth manager is configured to return.
    token = "token"
    # Credentials and Keycloak tokens shared by the test methods below.
    test_username = "test_user"
    test_password = "test_pass"
    test_access_token = "access_token"
    test_refresh_token = "refresh_token"

    @conf_vars(
        {
            ("api_auth", "jwt_expiration_time"): "10",
        }
    )
    @patch("airflow.providers.keycloak.auth_manager.services.token.get_auth_manager")
    @patch("airflow.providers.keycloak.auth_manager.services.token.KeycloakAuthManager.get_keycloak_client")
    def test_create_token(self, mock_get_keycloak_client, mock_get_auth_manager):
        """Valid username/password yields the JWT generated by the auth manager."""
        # NOTE: @patch decorators apply bottom-up, so the keycloak-client mock
        # is the first argument and the auth-manager mock the second.
        mock_keycloak_client = Mock()
        mock_keycloak_client.token.return_value = {
            "access_token": self.test_access_token,
            "refresh_token": self.test_refresh_token,
        }
        mock_keycloak_client.userinfo.return_value = {"sub": "sub", "preferred_username": "username"}
        mock_get_keycloak_client.return_value = mock_keycloak_client
        mock_auth_manager = Mock()
        mock_get_auth_manager.return_value = mock_auth_manager
        mock_auth_manager.generate_jwt.return_value = self.token

        assert create_token_for(username=self.test_username, password=self.test_password) == self.token
        # The password grant must be requested with the raw credentials, and
        # the user info looked up with the access token Keycloak returned.
        mock_keycloak_client.token.assert_called_once_with(self.test_username, self.test_password)
        mock_keycloak_client.userinfo.assert_called_once_with(self.test_access_token)

    @conf_vars(
        {
            ("api_auth", "jwt_cli_expiration_time"): "10",
            ("api_auth", "jwt_expiration_time"): "10",
        }
    )
    @patch("airflow.providers.keycloak.auth_manager.services.token.KeycloakAuthManager.get_keycloak_client")
    def test_create_token_with_invalid_creds(self, mock_get_keycloak_client):
        """A Keycloak authentication failure surfaces as a FastAPI HTTPException."""
        mock_keycloak_client = Mock()
        mock_keycloak_client.token.side_effect = KeycloakAuthenticationError()
        mock_get_keycloak_client.return_value = mock_keycloak_client

        with pytest.raises(fastapi.exceptions.HTTPException):
            create_token_for(
                username=self.test_username,
                password=self.test_password,
                expiration_time_in_seconds=conf.getint("api_auth", "jwt_cli_expiration_time"),
            )

    @conf_vars(
        {
            ("api_auth", "jwt_expiration_time"): "10",
        }
    )
    @patch("airflow.providers.keycloak.auth_manager.services.token.get_auth_manager")
    @patch("airflow.providers.keycloak.auth_manager.services.token.KeycloakAuthManager.get_keycloak_client")
    def test_create_token_client_credentials(self, mock_get_keycloak_client, mock_get_auth_manager):
        """The client_credentials grant returns the JWT for the service account."""
        test_client_id = "test_client"
        test_client_secret = "test_secret"
        test_access_token = "access_token"
        mock_keycloak_client = Mock()
        # client_credentials may not include a refresh_token (RFC 6749 §4.4.3),
        # so the mocked token response deliberately omits it.
        mock_keycloak_client.token.return_value = {
            "access_token": test_access_token,
        }
        mock_keycloak_client.userinfo.return_value = {
            "sub": "service-account-sub",
            "preferred_username": "service-account-test_client",
        }
        mock_get_keycloak_client.return_value = mock_keycloak_client
        mock_auth_manager = Mock()
        mock_get_auth_manager.return_value = mock_auth_manager
        mock_auth_manager.generate_jwt.return_value = self.token

        result = create_client_credentials_token(client_id=test_client_id, client_secret=test_client_secret)

        assert result == self.token
        # The Keycloak client must be constructed with the provided client
        # credentials and queried using the client_credentials grant type.
        mock_get_keycloak_client.assert_called_once_with(
            client_id=test_client_id, client_secret=test_client_secret
        )
        mock_keycloak_client.token.assert_called_once_with(grant_type="client_credentials")
        mock_keycloak_client.userinfo.assert_called_once_with(test_access_token)

    @conf_vars(
        {
            ("api_auth", "jwt_expiration_time"): "10",
        }
    )
    @patch("airflow.providers.keycloak.auth_manager.services.token.KeycloakAuthManager.get_keycloak_client")
    def test_create_token_client_credentials_with_invalid_credentials(self, mock_get_keycloak_client):
        """Invalid client credentials raise a 403 HTTPException with a clear message."""
        test_client_id = "invalid_client"
        test_client_secret = "invalid_secret"
        mock_keycloak_client = Mock()
        mock_keycloak_client.token.side_effect = KeycloakAuthenticationError()
        mock_get_keycloak_client.return_value = mock_keycloak_client

        with pytest.raises(fastapi.exceptions.HTTPException) as exc_info:
            create_client_credentials_token(client_id=test_client_id, client_secret=test_client_secret)
        # Assertions stay outside the `raises` block so they actually execute.
        assert exc_info.value.status_code == 403
        assert "Client credentials authentication failed" in exc_info.value.detail
| {
"repo_id": "apache/airflow",
"file_path": "providers/keycloak/tests/unit/keycloak/auth_manager/services/test_token.py",
"license": "Apache License 2.0",
"lines": 117,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:scripts/ci/prek/check_contextmanager_class_decorators.py | #!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Check for problematic context manager decorators on test classes.
Context managers (ContextDecorator, @contextlib.contextmanager) when used as class decorators
transform the class into a callable wrapper, which prevents pytest from collecting the class.
"""
from __future__ import annotations
import ast
import sys
from pathlib import Path
class ContextManagerClassDecoratorChecker(ast.NodeVisitor):
    """AST visitor that flags context-manager decorators on pytest test classes."""

    # Decorators known to wrap a class in a callable and break pytest collection.
    _PROBLEMATIC = frozenset(
        {
            "conf_vars",
            "env_vars",
            "contextlib.contextmanager",
            "contextmanager",
        }
    )

    def __init__(self, filename: str):
        self.filename = filename
        self.errors: list[str] = []

    def visit_ClassDef(self, node: ast.ClassDef) -> None:
        """Record an error for every problematic decorator on a ``Test*`` class."""
        if node.name.startswith("Test"):
            for dec in node.decorator_list:
                name = self._get_decorator_name(dec)
                if self._is_problematic_decorator(name):
                    self.errors.append(
                        f"{self.filename}:{node.lineno}: Class '{node.name}' uses @{name} "
                        f"decorator which prevents pytest collection. Use @pytest.mark.usefixtures instead."
                    )
        self.generic_visit(node)

    def _get_decorator_name(self, decorator: ast.expr) -> str:
        """Best-effort extraction of a (possibly dotted) decorator name."""
        # A call decorator (e.g. @conf_vars({...})) is named by its callee.
        target = decorator.func if isinstance(decorator, ast.Call) else decorator
        if isinstance(target, ast.Name):
            return target.id
        if isinstance(target, ast.Attribute):
            return self._get_attr_chain(target)
        return "unknown"

    def _get_attr_chain(self, node: ast.Attribute) -> str:
        """Return the dotted path of an attribute access, e.g. 'contextlib.contextmanager'."""
        base = node.value
        if isinstance(base, ast.Name):
            return f"{base.id}.{node.attr}"
        if isinstance(base, ast.Attribute):
            return f"{self._get_attr_chain(base)}.{node.attr}"
        return node.attr

    def _is_problematic_decorator(self, decorator_name: str) -> bool:
        """True if the decorator is known to break pytest class collection."""
        return decorator_name in self._PROBLEMATIC


def check_file(filepath: Path) -> list[str]:
    """Check a single file, returning a list of error messages (possibly empty)."""
    try:
        source = filepath.read_text(encoding="utf-8")
        visitor = ContextManagerClassDecoratorChecker(str(filepath))
        visitor.visit(ast.parse(source, filename=str(filepath)))
        return visitor.errors
    # Broad on purpose: an unreadable or unparsable file is reported, not fatal.
    except Exception as e:
        return [f"{filepath}: Error parsing file: {e}"]
def main() -> int:
    """CLI entry point: exit 1 when any problematic decorator (or usage error) is found."""
    targets = sys.argv[1:]
    if not targets:
        print("Usage: check_contextmanager_class_decorators.py <file_or_directory>...")
        return 1

    findings: list[str] = []
    for raw in targets:
        path = Path(raw)
        if path.is_file() and path.suffix == ".py":
            # Only test files matter; the checker targets pytest collection.
            if "test" in str(path):
                findings.extend(check_file(path))
            else:
                print(f"Skipping non-test file: {path}")
        elif path.is_dir():
            findings.extend(
                err
                for py_file in path.rglob("*.py")
                if "test" in str(py_file)
                for err in check_file(py_file)
            )

    if not findings:
        return 0
    print("Found problematic context manager class decorators:")
    for finding in findings:
        print(f"  {finding}")
    return 1
if __name__ == "__main__":
sys.exit(main())
| {
"repo_id": "apache/airflow",
"file_path": "scripts/ci/prek/check_contextmanager_class_decorators.py",
"license": "Apache License 2.0",
"lines": 108,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/tests/task_sdk/execution_time/test_context_cache.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import AsyncMock, MagicMock, call, patch
import pytest
from airflow.sdk.definitions.connection import Connection
from airflow.sdk.execution_time.cache import SecretCache
from airflow.sdk.execution_time.comms import ConnectionResult, VariableResult
from airflow.sdk.execution_time.context import (
_delete_variable,
_get_connection,
_get_variable,
_set_variable,
)
from airflow.sdk.execution_time.secrets import ExecutionAPISecretsBackend
from tests_common.test_utils.config import conf_vars
class TestConnectionCacheIntegration:
    """Test the integration of SecretCache with connection access."""

    @staticmethod
    @conf_vars({("secrets", "use_cache"): "true"})
    def setup_method():
        # Start every test from an empty cache, re-initialised with caching enabled.
        SecretCache.reset()
        SecretCache.init()

    @staticmethod
    def teardown_method():
        SecretCache.reset()

    @patch("airflow.sdk.execution_time.supervisor.ensure_secrets_backend_loaded")
    def test_get_connection_uses_cache_when_available(self, mock_ensure_backends):
        """Test that _get_connection uses cache when connection is cached."""
        conn_id = "test_conn"
        uri = "postgres://user:pass@host:5432/db"
        SecretCache.save_connection_uri(conn_id, uri)

        result = _get_connection(conn_id)

        # Every field must be reconstructed from the cached URI alone...
        assert result.conn_id == conn_id
        assert result.conn_type == "postgres"
        assert result.host == "host"
        assert result.login == "user"
        assert result.password == "pass"
        assert result.port == 5432
        assert result.schema == "db"
        # ...without ever consulting the secrets backends.
        mock_ensure_backends.assert_not_called()

    @patch("airflow.sdk.execution_time.supervisor.ensure_secrets_backend_loaded")
    def test_get_connection_from_backend_saves_to_cache(self, mock_ensure_backends):
        """Test that connection from secrets backend is retrieved correctly and cached."""
        conn_id = "test_conn"
        conn = Connection(conn_id=conn_id, conn_type="mysql", host="host", port=3306)
        mock_backend = MagicMock(spec=["get_connection"])
        mock_backend.get_connection.return_value = conn
        mock_ensure_backends.return_value = [mock_backend]

        result = _get_connection(conn_id)

        assert result.conn_id == conn_id
        assert result.conn_type == "mysql"
        mock_backend.get_connection.assert_called_once_with(conn_id=conn_id)
        # The connection fetched from the backend must now sit in the cache,
        # stored in URI form.
        cached_uri = SecretCache.get_connection_uri(conn_id)
        cached_conn = Connection.from_uri(cached_uri, conn_id=conn_id)
        assert cached_conn.conn_type == "mysql"
        assert cached_conn.host == "host"

    @patch("airflow.sdk.execution_time.supervisor.ensure_secrets_backend_loaded")
    def test_get_connection_from_api(self, mock_ensure_backends, mock_supervisor_comms):
        """Test that connection from API server works correctly."""
        conn_id = "test_conn"
        conn_result = ConnectionResult(
            conn_id=conn_id,
            conn_type="mysql",
            host="host",
            port=3306,
            login="user",
            password="pass",
        )
        mock_ensure_backends.return_value = [ExecutionAPISecretsBackend()]
        mock_supervisor_comms.send.return_value = conn_result

        result = _get_connection(conn_id)

        assert result.conn_id == conn_id
        assert result.conn_type == "mysql"
        # Called for GetConnection (and possibly MaskSecret)
        assert mock_supervisor_comms.send.call_count >= 1
        # The API-served connection must also be cached as a URI.
        cached_uri = SecretCache.get_connection_uri(conn_id)
        cached_conn = Connection.from_uri(cached_uri, conn_id=conn_id)
        assert cached_conn.conn_type == "mysql"
        assert cached_conn.host == "host"

    @patch("airflow.sdk.execution_time.context.mask_secret")
    def test_get_connection_masks_secrets(self, mock_mask_secret):
        """Test that connection secrets are masked from logs."""
        conn_id = "test_conn"
        conn = Connection(
            conn_id=conn_id, conn_type="mysql", login="user", password="password", extra='{"key": "value"}'
        )
        mock_backend = MagicMock(spec=["get_connection"])
        mock_backend.get_connection.return_value = conn

        with patch(
            "airflow.sdk.execution_time.supervisor.ensure_secrets_backend_loaded", return_value=[mock_backend]
        ):
            result = _get_connection(conn_id)

        assert result.conn_id == conn_id
        # Check that password and extra were masked
        mock_mask_secret.assert_has_calls(
            [
                call("password"),
                call('{"key": "value"}'),
            ],
            any_order=True,
        )
class TestVariableCacheIntegration:
    """Test the integration of SecretCache with variable access."""

    @staticmethod
    @conf_vars({("secrets", "use_cache"): "true"})
    def setup_method():
        # Fresh, enabled cache for every test.
        SecretCache.reset()
        SecretCache.init()

    @staticmethod
    def teardown_method():
        SecretCache.reset()

    @patch("airflow.sdk.execution_time.supervisor.ensure_secrets_backend_loaded")
    def test_get_variable_uses_cache_when_available(self, mock_ensure_backends):
        """Test that _get_variable uses cache when variable is cached."""
        key = "test_key"
        value = "test_value"
        SecretCache.save_variable(key, value)

        result = _get_variable(key, deserialize_json=False)

        assert result == value
        # A cache hit must not consult the secrets backends at all.
        mock_ensure_backends.assert_not_called()

    @patch("airflow.sdk.execution_time.supervisor.ensure_secrets_backend_loaded")
    def test_get_variable_from_backend_saves_to_cache(self, mock_ensure_backends):
        """Test that variable from secrets backend is saved to cache."""
        key = "test_key"
        value = "test_value"
        mock_backend = MagicMock(spec=["get_variable"])
        mock_backend.get_variable.return_value = value
        mock_ensure_backends.return_value = [mock_backend]

        result = _get_variable(key, deserialize_json=False)

        assert result == value
        mock_backend.get_variable.assert_called_once_with(key=key)
        cached_value = SecretCache.get_variable(key)
        assert cached_value == value

    @patch("airflow.sdk.execution_time.supervisor.ensure_secrets_backend_loaded")
    def test_get_variable_from_api_saves_to_cache(self, mock_ensure_backends, mock_supervisor_comms):
        """Test that variable from API server is saved to cache."""
        key = "test_key"
        value = "test_value"
        var_result = VariableResult(key=key, value=value)
        mock_ensure_backends.return_value = [ExecutionAPISecretsBackend()]
        mock_supervisor_comms.send.return_value = var_result

        result = _get_variable(key, deserialize_json=False)

        assert result == value
        cached_value = SecretCache.get_variable(key)
        assert cached_value == value

    @patch("airflow.sdk.execution_time.supervisor.ensure_secrets_backend_loaded")
    def test_get_variable_with_json_deserialization(self, mock_ensure_backends):
        """Test that _get_variable handles JSON deserialization correctly with cache."""
        key = "test_key"
        json_value = '{"key": "value", "number": 42}'
        SecretCache.save_variable(key, json_value)

        result = _get_variable(key, deserialize_json=True)

        assert result == {"key": "value", "number": 42}
        # The cache keeps the raw JSON string; deserialization happens on read.
        cached_value = SecretCache.get_variable(key)
        assert cached_value == json_value

    def test_set_variable_invalidates_cache(self, mock_supervisor_comms):
        """Test that _set_variable invalidates the cache."""
        key = "test_key"
        old_value = "old_value"
        new_value = "new_value"
        SecretCache.save_variable(key, old_value)

        _set_variable(key, new_value)

        mock_supervisor_comms.send.assert_called_once()
        # The stale cached value must be gone after the write.
        with pytest.raises(SecretCache.NotPresentException):
            SecretCache.get_variable(key)

    def test_delete_variable_invalidates_cache(self, mock_supervisor_comms):
        """Test that _delete_variable invalidates the cache."""
        key = "test_key"
        value = "test_value"
        SecretCache.save_variable(key, value)

        from airflow.sdk.execution_time.comms import OKResponse

        mock_supervisor_comms.send.return_value = OKResponse(ok=True)

        _delete_variable(key)

        mock_supervisor_comms.send.assert_called_once()
        with pytest.raises(SecretCache.NotPresentException):
            SecretCache.get_variable(key)
class TestAsyncConnectionCache:
    """Test the integration of SecretCache with async connection access."""

    @staticmethod
    @conf_vars({("secrets", "use_cache"): "true"})
    def setup_method():
        SecretCache.reset()
        SecretCache.init()

    @staticmethod
    def teardown_method():
        SecretCache.reset()

    @pytest.mark.asyncio
    async def test_async_get_connection_uses_cache(self):
        """Test that _async_get_connection uses cache when connection is cached."""
        # Imported locally to avoid any import-time side effects at module load.
        from airflow.sdk.execution_time.context import _async_get_connection

        conn_id = "test_conn"
        uri = "postgres://user:pass@host:5432/db"
        SecretCache.save_connection_uri(conn_id, uri)

        result = await _async_get_connection(conn_id)

        # Every field must be reconstructed from the cached URI.
        assert result.conn_id == conn_id
        assert result.conn_type == "postgres"
        assert result.host == "host"
        assert result.login == "user"
        assert result.password == "pass"
        assert result.port == 5432
        assert result.schema == "db"

    @pytest.mark.asyncio
    async def test_async_get_connection_from_api(self, mock_supervisor_comms):
        """Test that async connection from API server works correctly."""
        from airflow.sdk.execution_time.context import _async_get_connection

        conn_id = "test_conn"
        conn_result = ConnectionResult(
            conn_id=conn_id,
            conn_type="mysql",
            host="host",
            port=3306,
        )
        # Configure asend to return the conn_result when awaited
        mock_supervisor_comms.asend = AsyncMock(return_value=conn_result)

        result = await _async_get_connection(conn_id)

        assert result.conn_id == conn_id
        assert result.conn_type == "mysql"
        mock_supervisor_comms.asend.assert_called_once()
        # The async path must also populate the cache.
        cached_uri = SecretCache.get_connection_uri(conn_id)
        cached_conn = Connection.from_uri(cached_uri, conn_id=conn_id)
        assert cached_conn.conn_type == "mysql"
        assert cached_conn.host == "host"
class TestCacheDisabled:
    """Test behavior when cache is disabled."""

    @staticmethod
    @conf_vars({("secrets", "use_cache"): "false"})
    def setup_method():
        # Re-initialise the cache with caching explicitly turned off.
        SecretCache.reset()
        SecretCache.init()

    @staticmethod
    def teardown_method():
        SecretCache.reset()

    @patch("airflow.sdk.execution_time.supervisor.ensure_secrets_backend_loaded")
    def test_get_connection_no_cache_when_disabled(self, mock_ensure_backends, mock_supervisor_comms):
        """Test that cache is not used when disabled."""
        conn_id = "test_conn"
        conn_result = ConnectionResult(conn_id=conn_id, conn_type="mysql", host="host")
        mock_ensure_backends.return_value = [ExecutionAPISecretsBackend()]
        mock_supervisor_comms.send.return_value = conn_result

        result = _get_connection(conn_id)

        assert result.conn_id == conn_id
        # Called for GetConnection (and possibly MaskSecret)
        assert mock_supervisor_comms.send.call_count >= 1

        _get_connection(conn_id)

        # Called twice since cache is disabled
        assert mock_supervisor_comms.send.call_count >= 2
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/tests/task_sdk/execution_time/test_context_cache.py",
"license": "Apache License 2.0",
"lines": 261,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/tests/unit/models/test_team.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow.models.team import Team
class TestTeam:
    """Unit tests for Team model class methods."""

    @pytest.mark.db_test
    def test_get_name_if_exists_returns_name(self, testing_team):
        """An existing team's name is returned unchanged."""
        assert Team.get_name_if_exists("testing") == "testing"

    @pytest.mark.db_test
    def test_get_name_if_exists_returns_none(self):
        """A missing team yields None rather than raising."""
        assert Team.get_name_if_exists("nonexistent") is None

    @pytest.mark.db_test
    def test_get_all_team_names_with_teams(self, testing_team):
        """All team names come back as a set."""
        names = Team.get_all_team_names()
        assert isinstance(names, set)
        assert names == {"testing"}
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/models/test_team.py",
"license": "Apache License 2.0",
"lines": 32,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-ctl/docs/images/diagrams/airflowctl_api_network_architecture_diagram.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from pathlib import Path
from diagrams import Cluster, Diagram, Edge
from diagrams.onprem.client import User
from diagrams.onprem.compute import Server
from rich.console import Console
MY_DIR = Path(__file__).parent
MY_FILENAME = Path(__file__).with_suffix("").name
console = Console(width=400, color_system="standard")
graph_attr = {
"concentrate": "false",
"splines": "splines",
}
edge_attr = {
"minlen": "1",
}
def generate_airflowctl_api_network_diagram():
    """Render the airflowctl <-> API-server network diagram as a PNG next to this script."""
    image_file = (MY_DIR / MY_FILENAME).with_suffix(".png")
    console.print(f"[bright_blue]Generating network diagram {image_file}")
    # show=False prevents the rendered image from being opened in a viewer;
    # presumably `diagrams` writes <filename>.png on context exit — the final
    # "Generated" message below assumes that. TODO confirm.
    with Diagram(
        name="airflowctl<->API Network Diagram",
        show=False,
        direction="LR",
        filename=MY_FILENAME,
        edge_attr=edge_attr,
        graph_attr=graph_attr,
    ):
        # Machine network with client
        with Cluster("Machine Network", graph_attr={"margin": "30", "width": "10"}):
            client = User("Client\n(The machine/host has the airflowctl installed)")
        # Airflow deployment network with API server
        with Cluster("Apache Airflow Deployment Network", graph_attr={"margin": "30"}):
            api_server = Server("Apache Airflow API Server\n(e.g. DNS: https://airflow.internal.api.com)")
        # Edges representing the flows
        (
            client
            >> Edge(
                color="blue",
                style="solid",
                label="Login Request\n(if not manually used in --api-token or env var. Authentication done with username/password)",
            )
            >> api_server
        )
        (
            api_server
            >> Edge(
                color="darkgreen",
                style="solid",
                label="Returns Token",
            )
            >> client
        )
    console.print(f"[green]Generated network diagram {image_file}")
if __name__ == "__main__":
generate_airflowctl_api_network_diagram()
| {
"repo_id": "apache/airflow",
"file_path": "airflow-ctl/docs/images/diagrams/airflowctl_api_network_architecture_diagram.py",
"license": "Apache License 2.0",
"lines": 71,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:shared/logging/src/airflow_shared/logging/_config.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import structlog.processors
OLD_DEFAULT_LOG_FORMAT = "[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s"
OLD_DEFAULT_COLOR_LOG_FORMAT = (
    "[%(blue)s%(asctime)s%(reset)s] {%(blue)s%(filename)s:%(reset)s%(lineno)d} "
    "%(log_color)s%(levelname)s%(reset)s - %(log_color)s%(message)s%(reset)s"
)


# Deliberately config-free: loading values here would create a cross dependency
# between the shared logging and shared config modules.
def translate_config_values(
    log_format: str, callsite_params: list[str]
) -> tuple[str, tuple[structlog.processors.CallsiteParameter, ...]]:
    """Normalize the configured log format and callsite parameter names.

    The legacy plain default format is upgraded to its colored twin (color
    codes are dropped automatically when not writing to a tty or when colors
    are disabled). Each non-empty callsite parameter name is resolved to a
    ``structlog.processors.CallsiteParameter`` member; names that are not
    attributes are handed to the enum constructor, which raises for invalid
    values.
    """
    if log_format == OLD_DEFAULT_LOG_FORMAT:
        log_format = OLD_DEFAULT_COLOR_LOG_FORMAT

    resolved: list[structlog.processors.CallsiteParameter] = []
    for name in callsite_params:
        if not name:
            # Skip empty entries, mirroring filter(None, ...).
            continue
        member = getattr(structlog.processors.CallsiteParameter, name, None)
        resolved.append(member or structlog.processors.CallsiteParameter(name))
    return log_format, tuple(resolved)
| {
"repo_id": "apache/airflow",
"file_path": "shared/logging/src/airflow_shared/logging/_config.py",
"license": "Apache License 2.0",
"lines": 39,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-e2e-tests/tests/airflow_e2e_tests/basic_tests/test_basic_dag_operations.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime, timezone
from airflow_e2e_tests.e2e_test_utils.clients import AirflowClient, TaskSDKClient
class TestBasicDagFunctionality:
    """Test basic DAG functionality using the Airflow REST API."""

    # Shared REST client; the tests below talk to a live Airflow deployment.
    airflow_client = AirflowClient()

    def test_dag_unpause(self):
        """The example DAG can be un-paused via the API."""
        self.airflow_client.un_pause_dag(
            "example_xcom_test",
        )

    def test_xcom_value(self):
        """Triggering the DAG produces the manually pushed XCom value."""
        # NOTE(review): this appears to rely on test_dag_unpause having run
        # first so the DAG is un-paused — confirm test ordering is guaranteed.
        resp = self.airflow_client.trigger_dag(
            "example_xcom_test", json={"logical_date": datetime.now(timezone.utc).isoformat()}
        )
        # Wait for the triggered run to finish before reading its XComs.
        self.airflow_client.wait_for_dag_run(
            dag_id="example_xcom_test",
            run_id=resp["dag_run_id"],
        )
        xcom_value_resp = self.airflow_client.get_xcom_value(
            dag_id="example_xcom_test",
            task_id="bash_push",
            key="manually_pushed_value",
            run_id=resp["dag_run_id"],
        )
        assert xcom_value_resp["value"] == "manually_pushed_value", xcom_value_resp
class TestTaskSDKBasicFunctionality:
    """Test basic functionality of Task SDK using the Task SDK REST API."""

    # Shared client talking to the Task SDK execution API.
    task_sdk_client = TaskSDKClient()

    def test_task_sdk_health_check(self):
        """The health endpoint must answer HTTP 200."""
        assert self.task_sdk_client.health_check().status_code == 200
| {
"repo_id": "apache/airflow",
"file_path": "airflow-e2e-tests/tests/airflow_e2e_tests/basic_tests/test_basic_dag_operations.py",
"license": "Apache License 2.0",
"lines": 47,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-e2e-tests/tests/airflow_e2e_tests/basic_tests/test_example_dags.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow_e2e_tests.e2e_test_utils.clients import AirflowClient
# Core example DAGs expected to run to completion on a stock deployment.
DAG_IDS = [
    "example_bash_decorator",
    "example_bash_operator",
    "example_branch_datetime_operator",
    "example_branch_datetime_operator_2",
    "example_branch_datetime_operator_3",
    "example_branch_dop_operator_v3",
    "example_branch_labels",
    "example_branch_operator",
    "example_branch_python_operator_decorator",
    "example_complex",
    "example_custom_weight",
    "example_dag_decorator",
    "example_dynamic_task_mapping",
    "example_dynamic_task_mapping_with_no_taskflow_operators",
    "example_external_task_marker_parent",
    "example_nested_branch_dag",
    "example_sensor_decorator",
    "example_setup_teardown",
    "example_setup_teardown_taskflow",
    "example_short_circuit_decorator",
    "example_short_circuit_operator",
    "example_simplest_dag",
    "example_skip_dag",
    "example_task_group",
    "example_task_group_decorator",
    "example_task_mapping_second_order",
    "example_time_delta_sensor_async",
    "example_trigger_controller_dag",
    "example_trigger_target_dag",
    "example_weekday_branch_operator",
    "example_workday_timetable",
    "example_xcom",
    "example_xcom_args",
    "example_xcom_args_with_operators",
    "latest_only",
    "latest_only_with_trigger",
    "tutorial",
    "tutorial_dag",
    "tutorial_taskflow_api",
    "tutorial_taskflow_api_virtualenv",
    "tutorial_taskflow_templates",
]


class TestExampleDags:
    """Test Airflow Core example dags."""

    # Shared REST client; the test talks to a live Airflow deployment.
    airflow_client = AirflowClient()

    @pytest.mark.parametrize(
        "dag_id",
        DAG_IDS,
        # The DAG ids are already strings, so they can be used as test ids
        # directly (was an unnecessary identity comprehension, ruff C416).
        ids=DAG_IDS,
    )
    def test_example_dags(self, dag_id):
        """Test that DAGs can be triggered and complete successfully."""
        state = self.airflow_client.trigger_dag_and_wait(dag_id)
        assert state == "success", f"DAG {dag_id} did not complete successfully. Final state: {state}"
| {
"repo_id": "apache/airflow",
"file_path": "airflow-e2e-tests/tests/airflow_e2e_tests/basic_tests/test_example_dags.py",
"license": "Apache License 2.0",
"lines": 74,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-e2e-tests/tests/airflow_e2e_tests/constants.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from pathlib import Path
# Repository root, resolved relative to this file's location
# (tests/airflow_e2e_tests/constants.py -> three levels up).
AIRFLOW_ROOT_PATH = Path(__file__).resolve().parents[3]
# host:port where the docker-compose Airflow API server is reachable.
DOCKER_COMPOSE_HOST_PORT = os.environ.get("HOST_PORT", "localhost:8080")
DEFAULT_PYTHON_MAJOR_MINOR_VERSION = "3.10"
DEFAULT_DOCKER_IMAGE = f"ghcr.io/apache/airflow/main/prod/python{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}:latest"
# Explicit DOCKER_IMAGE env var wins; empty string also falls back to the default.
DOCKER_IMAGE = os.environ.get("DOCKER_IMAGE") or DEFAULT_DOCKER_IMAGE
# Side effect at import time: docker-compose expects AIRFLOW_UID to match the host user.
os.environ["AIRFLOW_UID"] = str(os.getuid())
DOCKER_COMPOSE_PATH = (
    AIRFLOW_ROOT_PATH / "airflow-core" / "docs" / "howto" / "docker-compose" / "docker-compose.yaml"
)
# Web UI / API credentials, overridable via the standard docker-compose env vars.
AIRFLOW_WWW_USER_USERNAME = os.environ.get("_AIRFLOW_WWW_USER_USERNAME", "airflow")
AIRFLOW_WWW_USER_PASSWORD = os.environ.get("_AIRFLOW_WWW_USER_PASSWORD", "airflow")
E2E_DAGS_FOLDER = AIRFLOW_ROOT_PATH / "airflow-e2e-tests" / "tests" / "airflow_e2e_tests" / "dags"
# The logs folder where the Airflow logs will be copied to and uploaded to github artifacts
LOGS_FOLDER = AIRFLOW_ROOT_PATH / "airflow-e2e-tests" / "logs"
TEST_REPORT_FILE = AIRFLOW_ROOT_PATH / "airflow-e2e-tests" / "_e2e_test_report.json"
LOCALSTACK_PATH = AIRFLOW_ROOT_PATH / "airflow-e2e-tests" / "docker" / "localstack.yml"
E2E_TEST_MODE = os.environ.get("E2E_TEST_MODE", "basic")
AWS_INIT_PATH = AIRFLOW_ROOT_PATH / "airflow-e2e-tests" / "scripts" / "init-aws.sh"
| {
"repo_id": "apache/airflow",
"file_path": "airflow-e2e-tests/tests/airflow_e2e_tests/constants.py",
"license": "Apache License 2.0",
"lines": 37,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-e2e-tests/tests/airflow_e2e_tests/dags/example_xcom_test.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating the usage of XComs."""
from __future__ import annotations
from airflow.models.xcom_arg import XComArg
from airflow.providers.standard.operators.bash import BashOperator
from airflow.sdk import DAG, task
# Payloads exchanged between the push/pull tasks below.
value_1 = [1, 2, 3]
value_2 = {"a": "b"}
@task
def push(ti=None):
    """Pushes an XCom without a specific target."""
    # Explicit push under a custom key; pulled back by key in puller().
    ti.xcom_push(key="value from pusher 1", value=value_1)
@task
def push_by_returning():
    """Pushes an XCom without a specific target, just by returning it."""
    # The return value becomes this task's XCom and is fed to puller() as an argument.
    return value_2
def _compare_values(pulled_value, check_value):
if pulled_value != check_value:
raise ValueError(f"The two values differ {pulled_value} and {check_value}")
@task
def puller(pulled_value_2, ti=None):
    """Pull all previously pushed XComs and check if the pushed values match the pulled values."""
    # pulled_value_2 arrives as a plain argument (wired from push_by_returning());
    # value_1 must be pulled explicitly because push() stored it under a custom key.
    pulled_value_1 = ti.xcom_pull(task_ids="push", key="value from pusher 1")
    _compare_values(pulled_value_1, value_1)
    _compare_values(pulled_value_2, value_2)
@task
def pull_value_from_bash_push(ti=None):
    """Pull and print both XComs produced by the ``bash_push`` task."""
    bash_pushed_via_return_value = ti.xcom_pull(key="return_value", task_ids="bash_push")
    bash_manually_pushed_value = ti.xcom_pull(key="manually_pushed_value", task_ids="bash_push")
    print(f"The xcom value pushed by task push via return value is {bash_pushed_via_return_value}")
    print(f"The xcom value pushed by task push manually is {bash_manually_pushed_value}")
with DAG(
    "example_xcom_test",
    schedule=None,
    catchup=False,
    tags=["example"],
) as dag:
    # Pushes one XCom via the Jinja-templated xcom_push call and one via the
    # command's stdout ("value_by_return").
    bash_push = BashOperator(
        task_id="bash_push",
        bash_command='echo "bash_push demo" && '
        'echo "Manually set xcom value '
        '{{ ti.xcom_push(key="manually_pushed_value", value="manually_pushed_value") }}" && '
        'echo "value_by_return"',
    )
    # Consumes both XComs through XComArg template interpolation; produces none itself.
    bash_pull = BashOperator(
        task_id="bash_pull",
        bash_command='echo "bash pull demo" && '
        f'echo "The xcom pushed manually is {XComArg(bash_push, key="manually_pushed_value")}" && '
        f'echo "The returned_value xcom is {XComArg(bash_push)}" && '
        'echo "finished"',
        do_xcom_push=False,
    )
    python_pull_from_bash = pull_value_from_bash_push()
    # Both consumers run downstream of bash_push.
    [bash_pull, python_pull_from_bash] << bash_push
    # puller depends on both push tasks: push_by_returning via its argument,
    # push via the explicit << dependency.
    puller(push_by_returning()) << push()
| {
"repo_id": "apache/airflow",
"file_path": "airflow-e2e-tests/tests/airflow_e2e_tests/dags/example_xcom_test.py",
"license": "Apache License 2.0",
"lines": 70,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-e2e-tests/tests/airflow_e2e_tests/e2e_test_utils/clients.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import time
from datetime import datetime, timezone
from functools import cached_property
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
from airflow_e2e_tests.constants import (
AIRFLOW_WWW_USER_PASSWORD,
AIRFLOW_WWW_USER_USERNAME,
DOCKER_COMPOSE_HOST_PORT,
)
class AirflowClient:
    """Client for interacting with the Airflow REST API.

    Authenticates once (``token`` is a cached property) against the
    docker-compose deployment configured in ``constants`` and exposes thin
    helpers over the ``/api/v2`` endpoints.
    """

    def __init__(self):
        self.session = requests.Session()

    @cached_property
    def token(self):
        """Fetch a JWT access token from ``/auth/token``, retrying on transient errors.

        Raises RuntimeError if the response contains no ``access_token``.
        """
        # NOTE: this mutates a urllib3 class attribute, so it caps backoff
        # globally for the process, not just for this retry object.
        Retry.DEFAULT_BACKOFF_MAX = 32
        retry = Retry(total=10, backoff_factor=1, status_forcelist=[429, 500, 502, 503, 504])
        session = requests.Session()
        session.mount("http://", HTTPAdapter(max_retries=retry))
        session.mount("https://", HTTPAdapter(max_retries=retry))
        api_server_url = DOCKER_COMPOSE_HOST_PORT
        if not api_server_url.startswith(("http://", "https://")):
            api_server_url = "http://" + DOCKER_COMPOSE_HOST_PORT
        url = f"{api_server_url}/auth/token"
        login_response = session.post(
            url,
            json={"username": AIRFLOW_WWW_USER_USERNAME, "password": AIRFLOW_WWW_USER_PASSWORD},
        )
        access_token = login_response.json().get("access_token")
        if not access_token:
            # Raise instead of assert: asserts are stripped under ``python -O``.
            # Report the actual status code instead of the Response repr.
            raise RuntimeError(
                f"Failed to get JWT token from redirect url {url} with status code "
                f"{login_response.status_code}"
            )
        return access_token

    def _make_request(
        self,
        method: str,
        endpoint: str,
        base_url: str = f"http://{DOCKER_COMPOSE_HOST_PORT}/api/v2",
        **kwargs,
    ):
        """Issue an authenticated request and return the decoded JSON body.

        Raises ``requests.HTTPError`` for non-2xx responses.
        """
        response = requests.request(
            method=method,
            url=f"{base_url}/{endpoint}",
            headers={"Authorization": f"Bearer {self.token}", "Content-Type": "application/json"},
            **kwargs,
        )
        response.raise_for_status()
        return response.json()

    def un_pause_dag(self, dag_id: str):
        """Un-pause *dag_id* so newly triggered runs are scheduled."""
        return self._make_request(
            method="PATCH",
            endpoint=f"dags/{dag_id}",
            json={"is_paused": False},
        )

    def trigger_dag(self, dag_id: str, json=None):
        """Trigger a new run of *dag_id*; *json* is the dagRuns POST payload."""
        if json is None:
            json = {}
        return self._make_request(method="POST", endpoint=f"dags/{dag_id}/dagRuns", json=json)

    def wait_for_dag_run(self, dag_id: str, run_id: str, timeout=300, check_interval=5):
        """Poll the run until it is 'success' or 'failed'; raise TimeoutError otherwise."""
        start_time = time.time()
        while time.time() - start_time < timeout:
            response = self._make_request(
                method="GET",
                endpoint=f"dags/{dag_id}/dagRuns/{run_id}",
            )
            state = response.get("state")
            if state in {"success", "failed"}:
                return state
            time.sleep(check_interval)
        raise TimeoutError(f"DAG run {run_id} for DAG {dag_id} did not complete within {timeout} seconds.")

    def get_xcom_value(self, dag_id: str, task_id: str, run_id: str, key: str, map_index=-1):
        """Fetch a single XCom entry for the given task instance."""
        return self._make_request(
            method="GET",
            endpoint=f"dags/{dag_id}/dagRuns/{run_id}/taskInstances/{task_id}/xcomEntries/{key}?map_index={map_index}",
        )

    def trigger_dag_and_wait(self, dag_id: str, json=None):
        """Trigger a DAG and wait for it to complete."""
        self.un_pause_dag(dag_id)
        resp = self.trigger_dag(dag_id, json=json or {"logical_date": datetime.now(timezone.utc).isoformat()})
        # Wait for the DAG run to complete
        return self.wait_for_dag_run(
            dag_id=dag_id,
            run_id=resp["dag_run_id"],
        )

    def get_task_logs(self, dag_id: str, run_id: str, task_id: str, try_number: int = 1):
        """Get task logs via API."""
        return self._make_request(
            method="GET",
            endpoint=f"dags/{dag_id}/dagRuns/{run_id}/taskInstances/{task_id}/logs/{try_number}",
        )
class TaskSDKClient:
    """Client for interacting with the Task SDK API."""

    def __init__(self):
        pass

    @cached_property
    def client(self):
        """Lazily build (and memoize) the SDK client pointed at the execution API."""
        # Imported here so the module stays importable without the Task SDK installed.
        from airflow.sdk.api.client import Client

        execution_base_url = f"http://{DOCKER_COMPOSE_HOST_PORT}/execution"
        return Client(base_url=execution_base_url, token="not-a-token")

    def health_check(self):
        """Ping the execution API health endpoint and return the raw response."""
        version_headers = {"Airflow-API-Version": "2025-08-10"}
        return self.client.get("health/ping", headers=version_headers)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-e2e-tests/tests/airflow_e2e_tests/e2e_test_utils/clients.py",
"license": "Apache License 2.0",
"lines": 121,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/cli/commands/team_command.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Teams sub-commands."""
from __future__ import annotations
import re
from sqlalchemy import func, select
from sqlalchemy.exc import IntegrityError
from airflow.cli.simple_table import AirflowConsole
from airflow.models.connection import Connection
from airflow.models.pool import Pool
from airflow.models.team import Team, dag_bundle_team_association_table
from airflow.models.variable import Variable
from airflow.utils import cli as cli_utils
from airflow.utils.providers_configuration_loader import providers_configuration_loaded
from airflow.utils.session import NEW_SESSION, provide_session
# Printed by team_list when no teams exist; tests match on this exact string.
NO_TEAMS_LIST_MSG = "No teams found."
def _show_teams(teams, output):
    """Display teams in the specified output format."""

    def _to_row(team):
        # One row per team; only the name column is exposed.
        return {"name": team.name}

    AirflowConsole().print_as(data=teams, output=output, mapper=_to_row)
def _extract_team_name(args):
"""Extract and validate team name from args."""
team_name = args.name.strip()
if not team_name:
raise SystemExit("Team name cannot be empty")
if not re.match(r"^[a-zA-Z0-9_-]{3,50}$", team_name):
raise SystemExit("Invalid team name: must match regex ^[a-zA-Z0-9_-]{3,50}$")
return team_name
@cli_utils.action_cli
@providers_configuration_loaded
@provide_session
def team_create(args, session=NEW_SESSION):
    """Create a new team. Team names must be 3-50 characters long and contain only alphanumeric characters, hyphens, and underscores.

    Exits via SystemExit if the name is invalid, already taken, or the
    insert fails at the database level.
    """
    team_name = _extract_team_name(args)
    # Check if team with this name already exists
    if session.scalar(select(Team).where(Team.name == team_name)):
        raise SystemExit(f"Team with name '{team_name}' already exists")
    # Create new team (UUID will be auto-generated by the database)
    new_team = Team(name=team_name)
    try:
        session.add(new_team)
        session.commit()
        print(f"Team '{team_name}' created successfully.")
    except IntegrityError as e:
        # The existence pre-check above is not race-proof (e.g. a concurrent
        # insert), so a DB constraint failure is still surfaced as a CLI error.
        session.rollback()
        raise SystemExit(f"Failed to create team '{team_name}': {e}")
@cli_utils.action_cli
@providers_configuration_loaded
@provide_session
def team_delete(args, session=NEW_SESSION):
    """Delete a team after checking for associations.

    Refuses deletion while the team is referenced by DAG bundles,
    connections, variables or pools; prompts for confirmation unless
    ``--yes`` is passed.
    """
    team_name = _extract_team_name(args)
    # Find the team
    team = session.scalar(select(Team).where(Team.name == team_name))
    if not team:
        raise SystemExit(f"Team '{team_name}' does not exist")
    # Check for associations
    associations = []
    # Check DAG bundle associations (many-to-many via the association table)
    dag_bundle_count = session.scalar(
        select(func.count())
        .select_from(dag_bundle_team_association_table)
        .where(dag_bundle_team_association_table.c.team_name == team.name)
    )
    if dag_bundle_count:
        associations.append(f"{dag_bundle_count} DAG bundle(s)")
    # Check connection associations
    if connection_count := session.scalar(
        select(func.count(Connection.id)).where(Connection.team_name == team.name)
    ):
        associations.append(f"{connection_count} connection(s)")
    # Check variable associations
    if variable_count := session.scalar(
        select(func.count(Variable.id)).where(Variable.team_name == team.name)
    ):
        associations.append(f"{variable_count} variable(s)")
    # Check pool associations
    if pool_count := session.scalar(select(func.count(Pool.id)).where(Pool.team_name == team.name)):
        associations.append(f"{pool_count} pool(s)")
    # If there are associations, prevent deletion
    if associations:
        association_list = ", ".join(associations)
        raise SystemExit(
            f"Cannot delete team '{team_name}' because it is associated with: {association_list}. "
            f"Please remove these associations first."
        )
    # Confirm deletion if not using --yes flag.
    # Only an exact "y"/"Y" confirms; any other input (including "yes") cancels.
    if not args.yes:
        confirmation = input(f"Are you sure you want to delete team '{team_name}'? (y/N): ")
        if confirmation.upper() != "Y":
            print("Team deletion cancelled")
            return
    # Delete the team
    try:
        session.delete(team)
        session.commit()
        print(f"Team '{team_name}' deleted successfully")
    except Exception as e:
        session.rollback()
        raise SystemExit(f"Failed to delete team '{team_name}': {e}")
@cli_utils.action_cli
@providers_configuration_loaded
@provide_session
def team_list(args, session=NEW_SESSION):
    """List all teams."""
    all_teams = session.scalars(select(Team).order_by(Team.name)).all()
    if all_teams:
        _show_teams(teams=all_teams, output=args.output)
    else:
        print(NO_TEAMS_LIST_MSG)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/cli/commands/team_command.py",
"license": "Apache License 2.0",
"lines": 130,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/tests/unit/cli/commands/test_team_command.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import patch
import pytest
from sqlalchemy import select
from airflow import models, settings
from airflow.cli import cli_parser
from airflow.cli.commands import team_command
from airflow.models import Connection, Pool, Variable
from airflow.models.dagbundle import DagBundleModel
from airflow.models.team import Team, dag_bundle_team_association_table
from airflow.settings import Session
from tests_common.test_utils.db import (
clear_db_connections,
clear_db_dag_bundles,
clear_db_pools,
clear_db_teams,
clear_db_variables,
)
# Every test in this module requires database access.
pytestmark = pytest.mark.db_test
class TestCliTeams:
    """Tests for the CLI ``teams`` sub-commands: create, list and delete.

    Each test runs against a real session (db_test); teardown_method wipes
    connections, variables, pools, DAG bundles and teams between tests.
    """

    @classmethod
    def _cleanup(cls):
        """Remove every row a test in this class may have created."""
        clear_db_connections(add_default_connections_back=False)
        clear_db_variables()
        clear_db_pools()
        clear_db_dag_bundles()
        clear_db_teams()

    @classmethod
    def setup_class(cls):
        cls.dagbag = models.DagBag(include_examples=True)
        cls.parser = cli_parser.get_parser()
        settings.configure_orm()
        cls.session = Session
        # Start from a clean database even if a previous run left rows behind.
        cls._cleanup()

    def teardown_method(self):
        """Called after each test method."""
        self._cleanup()

    def test_team_create_success(self, stdout_capture):
        """Test successful team creation."""
        with stdout_capture as stdout:
            team_command.team_create(self.parser.parse_args(["teams", "create", "test-team"]))
        # Verify team was created in database
        team = self.session.scalar(select(Team).where(Team.name == "test-team"))
        assert team is not None
        assert team.name == "test-team"
        # Verify output message
        output = stdout.getvalue()
        assert "Team 'test-team' created successfully" in output
        assert str(team.name) in output

    def test_team_create_empty_name(self):
        """Test team creation with empty name."""
        with pytest.raises(SystemExit, match="Team name cannot be empty"):
            team_command.team_create(self.parser.parse_args(["teams", "create", ""]))

    def test_team_create_invalid_name(self):
        """Test team creation with a name that fails the regex (contains a space)."""
        with pytest.raises(SystemExit, match="Invalid team name"):
            team_command.team_create(self.parser.parse_args(["teams", "create", "test with space"]))

    def test_team_create_whitespace_name(self):
        """Test team creation with whitespace-only name."""
        with pytest.raises(SystemExit, match="Team name cannot be empty"):
            team_command.team_create(self.parser.parse_args(["teams", "create", "   "]))

    def test_team_create_duplicate_name(self):
        """Test team creation with duplicate name."""
        # Create first team
        team_command.team_create(self.parser.parse_args(["teams", "create", "duplicate-team"]))
        # Try to create team with same name
        with pytest.raises(SystemExit, match="Team with name 'duplicate-team' already exists"):
            team_command.team_create(self.parser.parse_args(["teams", "create", "duplicate-team"]))

    def test_team_list_empty(self, stdout_capture):
        """Test listing teams when none exist."""
        with stdout_capture as stdout:
            team_command.team_list(self.parser.parse_args(["teams", "list"]))
        # Should not error, just show empty result
        output = stdout.getvalue()
        # The exact output format depends on the AirflowConsole implementation
        # but it should not contain any team names
        assert team_command.NO_TEAMS_LIST_MSG in output

    def test_team_list_with_teams(self, stdout_capture):
        """Test listing teams when teams exist."""
        # Create test teams
        team_command.team_create(self.parser.parse_args(["teams", "create", "team-alpha"]))
        team_command.team_create(self.parser.parse_args(["teams", "create", "team-beta"]))
        with stdout_capture as stdout:
            team_command.team_list(self.parser.parse_args(["teams", "list"]))
        output = stdout.getvalue()
        assert "team-alpha" in output
        assert "team-beta" in output

    def test_team_list_with_output_format(self):
        """Test listing teams with different output formats."""
        # Create a test team
        team_command.team_create(self.parser.parse_args(["teams", "create", "format-test"]))
        # Test different output formats; only checks that none of them raise.
        team_command.team_list(self.parser.parse_args(["teams", "list", "--output", "json"]))
        team_command.team_list(self.parser.parse_args(["teams", "list", "--output", "yaml"]))
        team_command.team_list(self.parser.parse_args(["teams", "list", "--output", "plain"]))

    def test_team_delete_success(self, stdout_capture):
        """Test successful team deletion."""
        # Create team first
        team_command.team_create(self.parser.parse_args(["teams", "create", "delete-me"]))
        # Verify team exists
        team = self.session.scalar(select(Team).where(Team.name == "delete-me"))
        assert team is not None
        # Delete team with --yes flag
        with stdout_capture as stdout:
            team_command.team_delete(self.parser.parse_args(["teams", "delete", "delete-me", "--yes"]))
        # Verify team was deleted
        team = self.session.scalar(select(Team).where(Team.name == "delete-me"))
        assert team is None
        # Verify output message
        output = stdout.getvalue()
        assert "Team 'delete-me' deleted successfully" in output

    def test_team_delete_nonexistent(self):
        """Test deleting a team that doesn't exist."""
        with pytest.raises(SystemExit, match="Team 'nonexistent' does not exist"):
            team_command.team_delete(self.parser.parse_args(["teams", "delete", "nonexistent", "--yes"]))

    def test_team_delete_empty_name(self):
        """Test deleting team with empty name."""
        with pytest.raises(SystemExit, match="Team name cannot be empty"):
            team_command.team_delete(self.parser.parse_args(["teams", "delete", "", "--yes"]))

    def test_team_delete_with_dag_bundle_association(self):
        """Test deleting team that has DAG bundle associations."""
        # Create team
        team_command.team_create(self.parser.parse_args(["teams", "create", "bundle-team"]))
        team = self.session.scalar(select(Team).where(Team.name == "bundle-team"))
        # Create a DAG bundle first
        dag_bundle = DagBundleModel(name="test-bundle")
        self.session.add(dag_bundle)
        self.session.commit()
        # Create a DAG bundle association
        self.session.execute(
            dag_bundle_team_association_table.insert().values(
                dag_bundle_name="test-bundle", team_name=team.name
            )
        )
        self.session.commit()
        # Try to delete team
        with pytest.raises(
            SystemExit,
            match="Cannot delete team 'bundle-team' because it is associated with: 1 DAG bundle\\(s\\)",
        ):
            team_command.team_delete(self.parser.parse_args(["teams", "delete", "bundle-team", "--yes"]))

    def test_team_delete_with_connection_association(self):
        """Test deleting team that has connection associations."""
        # Create team
        team_command.team_create(self.parser.parse_args(["teams", "create", "conn-team"]))
        team = self.session.scalar(select(Team).where(Team.name == "conn-team"))
        # Create connection associated with team
        conn = Connection(conn_id="test-conn", conn_type="http", team_name=team.name)
        self.session.add(conn)
        self.session.commit()
        # Try to delete team
        with pytest.raises(
            SystemExit,
            match="Cannot delete team 'conn-team' because it is associated with: 1 connection\\(s\\)",
        ):
            team_command.team_delete(self.parser.parse_args(["teams", "delete", "conn-team", "--yes"]))

    def test_team_delete_with_variable_association(self):
        """Test deleting team that has variable associations."""
        # Create team
        team_command.team_create(self.parser.parse_args(["teams", "create", "var-team"]))
        team = self.session.scalar(select(Team).where(Team.name == "var-team"))
        # Create variable associated with team
        var = Variable(key="test-var", val="test-value", team_name=team.name)
        self.session.add(var)
        self.session.commit()
        # Try to delete team
        with pytest.raises(
            SystemExit, match="Cannot delete team 'var-team' because it is associated with: 1 variable\\(s\\)"
        ):
            team_command.team_delete(self.parser.parse_args(["teams", "delete", "var-team", "--yes"]))

    def test_team_delete_with_pool_association(self):
        """Test deleting team that has pool associations."""
        # Create team
        team_command.team_create(self.parser.parse_args(["teams", "create", "pool-team"]))
        team = self.session.scalar(select(Team).where(Team.name == "pool-team"))
        # Create pool associated with team
        pool = Pool(
            pool="test-pool", slots=5, description="Test pool", include_deferred=False, team_name=team.name
        )
        self.session.add(pool)
        self.session.commit()
        # Try to delete team
        with pytest.raises(
            SystemExit, match="Cannot delete team 'pool-team' because it is associated with: 1 pool\\(s\\)"
        ):
            team_command.team_delete(self.parser.parse_args(["teams", "delete", "pool-team", "--yes"]))

    def test_team_delete_with_multiple_associations(self):
        """Test deleting team that has multiple types of associations."""
        # Create team
        team_command.team_create(self.parser.parse_args(["teams", "create", "multi-team"]))
        team = self.session.scalar(select(Team).where(Team.name == "multi-team"))
        # Create a DAG bundle first
        dag_bundle = DagBundleModel(name="multi-bundle")
        self.session.add(dag_bundle)
        self.session.commit()
        # Create multiple associations
        conn = Connection(conn_id="multi-conn", conn_type="http", team_name=team.name)
        var = Variable(key="multi-var", val="value", team_name=team.name)
        pool = Pool(
            pool="multi-pool", slots=3, description="Multi pool", include_deferred=False, team_name=team.name
        )
        self.session.add_all([conn, var, pool])
        self.session.execute(
            dag_bundle_team_association_table.insert().values(
                dag_bundle_name="multi-bundle", team_name=team.name
            )
        )
        self.session.commit()
        # Try to delete team
        with pytest.raises(SystemExit) as exc_info:
            team_command.team_delete(self.parser.parse_args(["teams", "delete", "multi-team", "--yes"]))
        # All four association types must be reported in one message.
        error_msg = str(exc_info.value)
        assert "Cannot delete team 'multi-team' because it is associated with:" in error_msg
        assert "1 DAG bundle(s)" in error_msg
        assert "1 connection(s)" in error_msg
        assert "1 variable(s)" in error_msg
        assert "1 pool(s)" in error_msg

    @patch("builtins.input", return_value="Y")
    def test_team_delete_with_confirmation_yes(self, mock_input, stdout_capture):
        """Test team deletion with user confirmation (Yes)."""
        # Create team
        team_command.team_create(self.parser.parse_args(["teams", "create", "confirm-yes"]))
        # Delete without --yes flag (should prompt for confirmation)
        with stdout_capture as stdout:
            team_command.team_delete(self.parser.parse_args(["teams", "delete", "confirm-yes"]))
        # Verify team was deleted
        team = self.session.scalar(select(Team).where(Team.name == "confirm-yes"))
        assert team is None
        output = stdout.getvalue()
        assert "Team 'confirm-yes' deleted successfully" in output

    @patch("builtins.input", return_value="N")
    def test_team_delete_with_confirmation_no(self, mock_input, stdout_capture):
        """Test team deletion with user confirmation (No)."""
        # Create team
        team_command.team_create(self.parser.parse_args(["teams", "create", "confirm-no"]))
        # Delete without --yes flag (should prompt for confirmation)
        with stdout_capture as stdout:
            team_command.team_delete(self.parser.parse_args(["teams", "delete", "confirm-no"]))
        # Verify team was NOT deleted
        team = self.session.scalar(select(Team).where(Team.name == "confirm-no"))
        assert team is not None
        output = stdout.getvalue()
        assert "Team deletion cancelled" in output

    @patch("builtins.input", return_value="invalid")
    def test_team_delete_with_confirmation_invalid(self, mock_input, stdout_capture):
        """Test team deletion with invalid confirmation input."""
        # Create team
        team_command.team_create(self.parser.parse_args(["teams", "create", "confirm-invalid"]))
        # Delete without --yes flag (should prompt for confirmation)
        with stdout_capture as stdout:
            team_command.team_delete(self.parser.parse_args(["teams", "delete", "confirm-invalid"]))
        # Verify team was NOT deleted (invalid input treated as No)
        team = self.session.scalar(select(Team).where(Team.name == "confirm-invalid"))
        assert team is not None
        output = stdout.getvalue()
        assert "Team deletion cancelled" in output

    def test_team_operations_integration(self):
        """Test integration of create, list, and delete operations."""
        # Start with empty state
        teams = self.session.scalars(select(Team)).all()
        assert len(teams) == 0
        # Create multiple teams
        team_command.team_create(self.parser.parse_args(["teams", "create", "integration-1"]))
        team_command.team_create(self.parser.parse_args(["teams", "create", "integration-2"]))
        team_command.team_create(self.parser.parse_args(["teams", "create", "integration-3"]))
        # Verify all teams exist
        teams = self.session.scalars(select(Team)).all()
        assert len(teams) == 3
        team_names = [team.name for team in teams]
        assert "integration-1" in team_names
        assert "integration-2" in team_names
        assert "integration-3" in team_names
        # Delete one team
        team_command.team_delete(self.parser.parse_args(["teams", "delete", "integration-2", "--yes"]))
        # Verify correct team was deleted
        teams = self.session.scalars(select(Team)).all()
        assert len(teams) == 2
        team_names = [team.name for team in teams]
        assert "integration-1" in team_names
        assert "integration-2" not in team_names
        assert "integration-3" in team_names
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/cli/commands/test_team_command.py",
"license": "Apache License 2.0",
"lines": 297,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/amazon/tests/unit/amazon/aws/executors/batch/test_utils.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
from unittest import mock
import pytest
from airflow.models.taskinstancekey import TaskInstanceKey
from airflow.providers.amazon.aws.executors.batch.utils import (
CONFIG_DEFAULTS,
CONFIG_GROUP_NAME,
AllBatchConfigKeys,
BatchExecutorException,
BatchJob,
BatchJobCollection,
BatchJobInfo,
BatchQueuedJob,
BatchSubmitJobKwargsConfigKeys,
)
from airflow.utils.state import State
class TestBatchQueuedJob:
    """Tests for the BatchQueuedJob dataclass."""

    def test_batch_queued_job_creation(self):
        """Test BatchQueuedJob object creation."""
        ti_key = mock.Mock(spec=TaskInstanceKey)
        cmd = ["airflow", "tasks", "run"]
        job_queue = "default_queue"
        config = {"key": "value"}
        attempt = 1
        retry_at = datetime.datetime.now()

        queued = BatchQueuedJob(
            key=ti_key,
            command=cmd,
            queue=job_queue,
            executor_config=config,
            attempt_number=attempt,
            next_attempt_time=retry_at,
        )

        # Identity for the mock/time fields, equality for the plain containers.
        assert queued.key is ti_key
        assert queued.command == cmd
        assert queued.queue == job_queue
        assert queued.executor_config == config
        assert queued.attempt_number == attempt
        assert queued.next_attempt_time is retry_at
class TestBatchJobInfo:
    """Tests for the BatchJobInfo dataclass."""

    def test_batch_job_info_creation(self):
        """cmd, queue and config are stored verbatim on the dataclass."""
        run_cmd = ["airflow", "tasks", "run"]
        info = BatchJobInfo(cmd=run_cmd, queue="default_queue", config={"key": "value"})
        assert info.cmd == run_cmd
        assert info.queue == "default_queue"
        assert info.config == {"key": "value"}
class TestBatchJob:
    """Tests for the BatchJob class."""

    @pytest.mark.parametrize(
        ("batch_status", "expected_airflow_state"),
        [
            ("SUBMITTED", State.QUEUED),
            ("PENDING", State.QUEUED),
            ("RUNNABLE", State.QUEUED),
            ("STARTING", State.QUEUED),
            ("RUNNING", State.RUNNING),
            ("SUCCEEDED", State.SUCCESS),
            ("FAILED", State.FAILED),
            ("UNKNOWN_STATUS", State.QUEUED),  # Default case
        ],
    )
    def test_get_job_state_mappings(self, batch_status, expected_airflow_state):
        """Each AWS Batch status string maps onto the expected Airflow state."""
        job = BatchJob(job_id="job_id_123", status=batch_status)
        assert job.get_job_state() == expected_airflow_state

    def test_repr_method(self):
        """__repr__ renders as "(<job id> -> <batch status>, <airflow state>)"."""
        job = BatchJob(job_id="test-job-123", status="RUNNING")
        assert repr(job) == f"(test-job-123 -> RUNNING, {State.RUNNING})"

    def test_status_reason_initialization(self):
        """An explicitly supplied status_reason is stored on the job."""
        job = BatchJob(
            job_id="job-456",
            status="FAILED",
            status_reason="Insufficient resources",
        )
        assert job.job_id == "job-456"
        assert job.status == "FAILED"
        assert job.status_reason == "Insufficient resources"

    def test_status_reason_default(self):
        """status_reason defaults to None when not supplied."""
        job = BatchJob(job_id="job-789", status="SUCCEEDED")
        assert job.job_id == "job-789"
        assert job.status == "SUCCEEDED"
        assert job.status_reason is None
class TestBatchJobCollection:
    """Tests for the BatchJobCollection class."""

    @pytest.fixture(autouse=True)
    def _setup_collection(self):
        """Create an empty collection plus two complete sets of job fixtures."""
        self.collection = BatchJobCollection()
        self.key1 = mock.Mock(spec=TaskInstanceKey)
        self.key2 = mock.Mock(spec=TaskInstanceKey)
        self.job_id1 = "batch-job-001"
        self.job_id2 = "batch-job-002"
        self.cmd1 = ["command1"]
        self.cmd2 = ["command2"]
        self.queue1 = "queue1"
        self.queue2 = "queue2"
        self.config1 = {"conf1": "val1"}
        self.config2 = {"conf2": "val2"}

    def _add_first_job(self, attempt_number=1):
        """Register the first fixture job with the collection."""
        self.collection.add_job(
            job_id=self.job_id1,
            airflow_task_key=self.key1,
            airflow_cmd=self.cmd1,
            queue=self.queue1,
            exec_config=self.config1,
            attempt_number=attempt_number,
        )

    def _add_second_job(self, attempt_number=1):
        """Register the second fixture job with the collection."""
        self.collection.add_job(
            job_id=self.job_id2,
            airflow_task_key=self.key2,
            airflow_cmd=self.cmd2,
            queue=self.queue2,
            exec_config=self.config2,
            attempt_number=attempt_number,
        )

    def test_add_job(self):
        """A single added job is indexed by both key and id, with its info stored."""
        self._add_first_job()
        assert len(self.collection) == 1
        assert self.collection.key_to_id[self.key1] == self.job_id1
        assert self.collection.id_to_key[self.job_id1] == self.key1
        assert self.collection.failure_count_by_id(self.job_id1) == 1
        info = self.collection.id_to_job_info[self.job_id1]
        assert info.cmd == self.cmd1
        assert info.queue == self.queue1
        assert info.config == self.config1

    def test_add_multiple_jobs(self):
        """Two jobs coexist with independent mappings and failure counts."""
        self._add_first_job(attempt_number=1)
        self._add_second_job(attempt_number=2)
        assert len(self.collection) == 2
        assert self.collection.key_to_id[self.key1] == self.job_id1
        assert self.collection.key_to_id[self.key2] == self.job_id2
        assert self.collection.failure_count_by_id(self.job_id1) == 1
        assert self.collection.failure_count_by_id(self.job_id2) == 2

    def test_pop_by_id(self):
        """pop_by_id removes the key/id mappings but leaves job info behind."""
        self._add_first_job()
        assert len(self.collection) == 1
        assert self.collection.pop_by_id(self.job_id1) is self.key1
        assert len(self.collection) == 0
        assert self.job_id1 not in self.collection.id_to_key
        assert self.key1 not in self.collection.key_to_id
        # id_to_job_info is NOT removed by pop_by_id in the current implementation.
        assert self.job_id1 in self.collection.id_to_job_info
        assert self.collection.id_to_job_info[self.job_id1].cmd == self.cmd1
        # id_to_failure_counts is a defaultdict, so accessing a removed key returns 0.
        assert self.collection.id_to_failure_counts[self.job_id1] == 0

    def test_pop_non_existent_job_id(self):
        """Popping an unknown job id raises KeyError."""
        with pytest.raises(KeyError):
            self.collection.pop_by_id("non-existent-job-id")

    def test_failure_count_by_id(self):
        """The initial failure count equals the attempt number passed to add_job."""
        self._add_first_job(attempt_number=5)
        assert self.collection.failure_count_by_id(self.job_id1) == 5

    def test_failure_count_non_existent_job_id(self):
        """Unknown job ids report a failure count of 0 (defaultdict behaviour)."""
        assert self.collection.failure_count_by_id("non-existent-job-id") == 0

    def test_increment_failure_count(self):
        """increment_failure_count bumps the stored count by exactly one."""
        self._add_first_job(attempt_number=1)
        assert self.collection.failure_count_by_id(self.job_id1) == 1
        self.collection.increment_failure_count(self.job_id1)
        assert self.collection.failure_count_by_id(self.job_id1) == 2

    def test_increment_failure_count_non_existent_job_id(self):
        """Incrementing an unknown job id creates it with a count of 1."""
        # id_to_failure_counts is a defaultdict, so incrementing a missing key sets it to 1.
        self.collection.increment_failure_count("non-existent-job-id")
        assert self.collection.failure_count_by_id("non-existent-job-id") == 1

    def test_get_all_jobs_empty(self):
        """An empty collection yields an empty job-id list."""
        assert self.collection.get_all_jobs() == []

    def test_get_all_jobs_with_jobs(self):
        """get_all_jobs returns the ids of every registered job."""
        self._add_first_job()
        self._add_second_job()
        all_jobs = self.collection.get_all_jobs()
        assert len(all_jobs) == 2
        assert self.job_id1 in all_jobs
        assert self.job_id2 in all_jobs

    def test_len_method(self):
        """__len__ tracks additions and removals."""
        assert len(self.collection) == 0
        self._add_first_job()
        assert len(self.collection) == 1
        self._add_second_job()
        assert len(self.collection) == 2
        self.collection.pop_by_id(self.job_id1)
        assert len(self.collection) == 1
class TestConfigKeys:
    """Tests for configuration key constants."""

    # Expected attribute -> value pairs declared on BatchSubmitJobKwargsConfigKeys.
    SUBMIT_JOB_KWARGS_KEYS = {
        "JOB_NAME": "job_name",
        "JOB_QUEUE": "job_queue",
        "JOB_DEFINITION": "job_definition",
        "EKS_PROPERTIES_OVERRIDE": "eks_properties_override",
        "NODE_OVERRIDE": "node_override",
    }

    def test_batch_submit_job_kwargs_config_keys_values(self):
        """BatchSubmitJobKwargsConfigKeys expose the expected string values."""
        for attr, expected in self.SUBMIT_JOB_KWARGS_KEYS.items():
            assert getattr(BatchSubmitJobKwargsConfigKeys, attr) == expected

    def test_all_batch_config_keys_values(self):
        """AllBatchConfigKeys expose their own keys plus every inherited one."""
        own_keys = {
            "MAX_SUBMIT_JOB_ATTEMPTS": "max_submit_job_attempts",
            "AWS_CONN_ID": "conn_id",
            "SUBMIT_JOB_KWARGS": "submit_job_kwargs",
            "REGION_NAME": "region_name",
            "CHECK_HEALTH_ON_STARTUP": "check_health_on_startup",
        }
        # Keys inherited from BatchSubmitJobKwargsConfigKeys must also be present.
        for attr, expected in {**own_keys, **self.SUBMIT_JOB_KWARGS_KEYS}.items():
            assert getattr(AllBatchConfigKeys, attr) == expected

    def test_config_defaults(self):
        """CONFIG_DEFAULTS is a dict carrying the documented default values."""
        assert isinstance(CONFIG_DEFAULTS, dict)
        expected_defaults = {
            "conn_id": "aws_default",
            "max_submit_job_attempts": "3",
            "check_health_on_startup": "True",
        }
        for key, expected in expected_defaults.items():
            assert key in CONFIG_DEFAULTS
            assert CONFIG_DEFAULTS[key] == expected

    def test_config_group_name(self):
        """CONFIG_GROUP_NAME is the expected configuration section name."""
        assert isinstance(CONFIG_GROUP_NAME, str)
        assert CONFIG_GROUP_NAME == "aws_batch_executor"
class TestBatchExecutorException:
    """Tests for the BatchExecutorException class."""

    def test_exception_inheritance(self):
        """BatchExecutorException derives from Exception."""
        assert issubclass(BatchExecutorException, Exception)

    def test_exception_can_be_raised_and_caught(self):
        """The exception can be raised and caught like any other."""
        with pytest.raises(BatchExecutorException):
            raise BatchExecutorException("Test exception message")

    def test_exception_message(self):
        """str() of the exception is exactly the message it was built with."""
        message = "An unexpected error occurred in the AWS Batch ecosystem."
        assert str(BatchExecutorException(message)) == message
| {
"repo_id": "apache/airflow",
"file_path": "providers/amazon/tests/unit/amazon/aws/executors/batch/test_utils.py",
"license": "Apache License 2.0",
"lines": 325,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:shared/logging/src/airflow_shared/logging/_noncaching.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from typing import BinaryIO, TextIO, TypeVar
__all__ = [
"make_file_io_non_caching",
]
_IO = TypeVar("_IO", TextIO, BinaryIO)
def make_file_io_non_caching(io: _IO) -> _IO:
    """Advise the kernel not to cache pages for *io*'s file, then return *io*.

    This is strictly best-effort: if the object has no usable file descriptor,
    or the platform does not provide ``posix_fadvise``, *io* is returned
    untouched.
    """
    try:
        os.posix_fadvise(io.fileno(), 0, 0, os.POSIX_FADV_DONTNEED)
    except Exception:
        # The advice to the kernel is only an advice; if we cannot give it
        # (no real fd, or fadvise unavailable), just hand back the original
        # wrapper unchanged.
        pass
    return io
| {
"repo_id": "apache/airflow",
"file_path": "shared/logging/src/airflow_shared/logging/_noncaching.py",
"license": "Apache License 2.0",
"lines": 33,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:shared/logging/src/airflow_shared/logging/percent_formatter.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import collections.abc
import datetime
import operator
import re
import sys
from io import StringIO
from typing import TYPE_CHECKING, ClassVar
import structlog.dev
from structlog.dev import ConsoleRenderer
from structlog.processors import CallsiteParameter
if TYPE_CHECKING:
from structlog.dev import ColumnStyles
from structlog.typing import EventDict, WrappedLogger
class _LazyLogRecordDict(collections.abc.Mapping):
    """Read-only mapping exposing a structlog event dict under stdlib ``LogRecord`` names.

    Used as the right-hand side of ``fmt % params`` (see ``PercentFormatRender.__call__``)
    so a percent-style format string (``%(asctime)s``, ``%(levelname)s``, ...) can be
    applied directly to an event dict without building a real ``logging.LogRecord``.
    """

    __slots__ = ("event", "styles", "level_styles", "method_name", "no_colors")

    def __init__(
        self, event: EventDict, method_name: str, level_styles: dict[str, str], styles: ColumnStyles
    ):
        # event: the structlog event dict; method_name: the log method used
        # ("info", "error", ...), the fallback when no "level" key is present.
        self.event = event
        self.method_name = method_name
        self.level_styles = level_styles
        self.styles = styles
        # An empty reset escape code means styling is disabled entirely.
        self.no_colors = self.styles.reset == ""

    def __getitem__(self, key):
        # NOTE: branch order matters — "lineno"/"filename"/"funcName" must be handled
        # before the generic callsite_parameters lookup below, as they use different
        # fallback values.
        # Roughly compatible with names from https://github.com/python/cpython/blob/v3.13.7/Lib/logging/__init__.py#L571
        # Plus with ColoredLog added in
        # If there is no callsite info (often for stdout/stderr), show the same sort of thing that stdlib
        # logging would
        # https://github.com/python/cpython/blob/d3c888b4ec15dbd7d6b6ef4f15b558af77c228af/Lib/logging/__init__.py#L1652C34-L1652C48
        if key == "lineno":
            return self.event.get("lineno") or 0
        if key == "filename":
            return self.event.get("filename", "(unknown file)")
        if key == "funcName":
            return self.event.get("funcName", "(unknown function)")
        # Any other stdlib callsite name maps through to its structlog CallsiteParameter key.
        if key in PercentFormatRender.callsite_parameters:
            return self.event.get(PercentFormatRender.callsite_parameters[key].value, "(unknown)")
        if key == "name":
            return self.event.get("logger") or self.event.get("logger_name", "(unknown)")
        if key == "levelname":
            return self.event.get("level", self.method_name).upper()
        if key == "asctime" or key == "created":
            # Fall back to "now" (UTC, ISO format) when the event carries no timestamp.
            return (
                self.event.get("timestamp", None)
                or datetime.datetime.now(tz=datetime.timezone.utc).isoformat()
            )
        if key == "message":
            return self.event["event"]
        # Raw color escape codes usable directly in the format string ("ColoredLog" compat).
        if key in ("red", "green", "yellow", "blue", "purple", "cyan"):
            if self.no_colors:
                return ""
            return getattr(structlog.dev, key.upper(), "")
        if key == "reset":
            return self.styles.reset
        if key == "log_color":
            if self.no_colors:
                return ""
            # Style for the event's level ("" for unknown levels).
            return self.level_styles.get(self.event.get("level", self.method_name), "")
        # Fall through: expose any other event-dict key as-is.
        return self.event.get(key)

    def __iter__(self):
        return self.event.__iter__()

    def __len__(self):
        return len(self.event)
class PercentFormatRender(ConsoleRenderer):
    """A Structlog processor that uses a stdlib-like percent based format string."""

    # The percent-style format string, e.g. "%(asctime)s %(levelname)s - %(message)s".
    _fmt: str

    # Mapping of stdlib LogRecord attribute name -> structlog CallsiteParameter.
    # From https://github.com/python/cpython/blob/v3.12.11/Lib/logging/__init__.py#L563-L587
    callsite_parameters: ClassVar[dict[str, CallsiteParameter]] = {
        "pathname": CallsiteParameter.PATHNAME,
        "filename": CallsiteParameter.FILENAME,
        "module": CallsiteParameter.MODULE,
        "lineno": CallsiteParameter.LINENO,
        "funcName": CallsiteParameter.FUNC_NAME,
        "thread": CallsiteParameter.THREAD,
        "threadName": CallsiteParameter.THREAD_NAME,
        "process": CallsiteParameter.PROCESS,
        # This one isn't listed in the docs until 3.14, but it's worked for a long time
        "processName": CallsiteParameter.PROCESS_NAME,
    }

    # Event-dict keys consumed by the format string machinery itself; anything
    # NOT in this set is rendered as a trailing "key=value" column in __call__.
    special_keys = {
        "event",
        "name",
        "logger",
        "logger_name",
        "timestamp",
        "level",
    } | set(map(operator.attrgetter("value"), callsite_parameters.values()))

    @classmethod
    def callsite_params_from_fmt_string(cls, fmt: str) -> collections.abc.Iterable[CallsiteParameter]:
        """Yield every ``CallsiteParameter`` referenced by *fmt* (e.g. ``%(lineno)d``)."""
        # Pattern based on https://github.com/python/cpython/blob/v3.12.11/Lib/logging/__init__.py#L441, but
        # with added grouping, and comments to aid clarity, even if we don't care about anything beyond the
        # mapping key
        pattern = re.compile(
            r"""
            %\( (?P<key> \w+ ) \) # The mapping key (in parenthesis. The bit we care about)
            [#0+ -]* # Conversion flags
            (?: \*|\d+ )? # Minimum field width
            (?: \. (?: \* | \d+ ) )? # Precision (floating point)
            [diouxefgcrsa%] # Conversion type
            """,
            re.I | re.X,
        )
        for match in pattern.finditer(fmt):
            # Only keys that correspond to a callsite parameter are of interest.
            if param := cls.callsite_parameters.get(match["key"]):
                yield param

    def __init__(self, fmt: str, **kwargs):
        # Store the format string; all remaining kwargs go to ConsoleRenderer.
        super().__init__(**kwargs)
        self._fmt = fmt

    def __call__(self, logger: WrappedLogger, method_name: str, event_dict: EventDict):
        """Render *event_dict* into a single string: percent-format line, extra
        key=value columns, then any stack/exception information."""
        # Exception/stack data is popped so it doesn't show up as a column and
        # is rendered separately after the main line.
        exc = event_dict.pop("exception", None)
        exc_info = event_dict.pop("exc_info", None)
        stack = event_dict.pop("stack", None)
        params = _LazyLogRecordDict(
            event_dict,
            method_name,
            # To maintain compat with old log levels, we don't want to color info, just everything else
            {**ConsoleRenderer.get_default_level_styles(), "info": ""},
            self._styles,
        )
        sio = StringIO()
        sio.write(self._fmt % params)
        # Append any event keys the format string didn't consume as "key=value" columns.
        sio.write(
            "".join(
                " " + self._default_column_formatter(key, val)
                for key, val in event_dict.items()
                if key not in self.special_keys
            ).rstrip(" ")
        )
        if stack is not None:
            sio.write("\n" + stack)
        if exc_info or exc is not None:
            # Visual separator before traceback output.
            sio.write("\n\n" + "=" * 79 + "\n")
        if exc_info:
            # Normalize exc_info: a bare exception instance becomes a
            # (type, value, traceback) triple; any other truthy non-tuple value
            # means "use the currently handled exception", if there is one.
            if isinstance(exc_info, BaseException):
                exc_info = (exc_info.__class__, exc_info, exc_info.__traceback__)
            if not isinstance(exc_info, tuple):
                if (exc_info := sys.exc_info()) == (None, None, None):
                    exc_info = None
            if exc_info:
                self._exception_formatter(sio, exc_info)
        elif exc is not None:
            # Pre-formatted exception text — presumably produced by an upstream
            # processor; written verbatim.
            sio.write("\n" + exc)
        return sio.getvalue()
| {
"repo_id": "apache/airflow",
"file_path": "shared/logging/src/airflow_shared/logging/percent_formatter.py",
"license": "Apache License 2.0",
"lines": 161,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:shared/logging/src/airflow_shared/logging/structlog.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import codecs
import io
import itertools
import logging
import os
import re
import sys
from collections.abc import Callable, Iterable, Mapping, Sequence
from functools import cache, cached_property, partial
from pathlib import Path
from types import ModuleType
from typing import TYPE_CHECKING, Any, BinaryIO, Generic, TextIO, TypeVar, cast
import pygtrie
import structlog
import structlog.processors
from structlog.processors import NAME_TO_LEVEL, CallsiteParameter
from ._noncaching import make_file_io_non_caching
from .percent_formatter import PercentFormatRender
if TYPE_CHECKING:
from structlog.typing import (
BindableLogger,
EventDict,
Processor,
WrappedLogger,
)
from .types import Logger
log = logging.getLogger(__name__)
__all__ = [
"configure_logging",
"structlog_processors",
]
JWT_PATTERN = re.compile(r"eyJ[\.A-Za-z0-9-_]*")
LEVEL_TO_FILTERING_LOGGER: dict[int, type[Logger]] = {}
def _make_airflow_structlogger(min_level):
    """Create and register a structlog bound-logger class filtering below *min_level*.

    The generated class subclasses the one produced by
    ``structlog.make_filtering_bound_logger`` and is additionally dressed up to
    look enough like a stdlib ``logging.Logger`` (``level``, ``name``,
    ``handlers``, ``isEnabledFor``, ...) for libraries that insist on one.
    The class is stored in ``LEVEL_TO_FILTERING_LOGGER`` keyed by *min_level*.
    """
    # This uses https://github.com/hynek/structlog/blob/2f0cc42d/src/structlog/_native.py#L126
    # as inspiration
    LEVEL_TO_NAME = {v: k for k, v in NAME_TO_LEVEL.items()}

    # A few things, namely paramiko _really_ wants this to be a stdlib logger. These fns pretends it is enough
    # like it to function.
    @cached_property
    def handlers(self):
        return [logging.NullHandler()]

    @property
    def level(self):
        return min_level

    @property
    def name(self):
        return self._logger.name

    def _nop(self: Any, event: str, *args: Any, **kw: Any) -> Any:
        # Shared no-op body for every level below min_level: a filtered-out
        # call costs just one function call.
        return None

    # Work around an issue in structlog https://github.com/hynek/structlog/issues/745
    def make_method(
        level: int,
    ) -> Callable[..., Any]:
        name = LEVEL_TO_NAME[level]
        if level < min_level:
            return _nop

        def meth(self: Any, event: str, *args: Any, **kw: Any) -> Any:
            if not args:
                return self._proxy_to_logger(name, event, **kw)
            # See https://github.com/python/cpython/blob/3.13/Lib/logging/__init__.py#L307-L326 for reason
            if args and len(args) == 1 and isinstance(args[0], Mapping) and args[0]:
                return self._proxy_to_logger(name, event % args[0], **kw)
            return self._proxy_to_logger(name, event % args, **kw)

        meth.__name__ = name
        return meth

    base = structlog.make_filtering_bound_logger(min_level)
    # Build the class dynamically: stdlib-compat attributes first, then one
    # level method per known level (no-ops below min_level).
    cls = type(
        f"AirflowBoundLoggerFilteringAt{LEVEL_TO_NAME.get(min_level, 'Notset').capitalize()}",
        (base,),
        {
            "isEnabledFor": base.is_enabled_for,
            "getEffectiveLevel": base.get_effective_level,
            "level": level,
            "name": name,
            "handlers": handlers,
        }
        | {name: make_method(lvl) for lvl, name in LEVEL_TO_NAME.items()},
    )
    LEVEL_TO_FILTERING_LOGGER[min_level] = cls
    return cls
# Pre-build one bound-logger class per standard level, so picking a logger at
# bind time is a plain dict lookup in LEVEL_TO_FILTERING_LOGGER.
AirflowBoundLoggerFilteringAtNotset = _make_airflow_structlogger(NAME_TO_LEVEL["notset"])
AirflowBoundLoggerFilteringAtDebug = _make_airflow_structlogger(NAME_TO_LEVEL["debug"])
AirflowBoundLoggerFilteringAtInfo = _make_airflow_structlogger(NAME_TO_LEVEL["info"])
AirflowBoundLoggerFilteringAtWarning = _make_airflow_structlogger(NAME_TO_LEVEL["warning"])
AirflowBoundLoggerFilteringAtError = _make_airflow_structlogger(NAME_TO_LEVEL["error"])
AirflowBoundLoggerFilteringAtCritical = _make_airflow_structlogger(NAME_TO_LEVEL["critical"])
# We use a trie structure (sometimes also called a "prefix tree") so that we can easily and quickly find the
# most suitable log level to apply. This mirrors the logging level cascade behavior from stdlib logging,
# without the complexity of multiple handlers etc
PER_LOGGER_LEVELS = pygtrie.StringTrie(separator=".")
PER_LOGGER_LEVELS.update(
    {
        # Top level logging default - changed to respect config in `configure_logging`
        "": NAME_TO_LEVEL["info"],
    }
)
def make_filtering_logger() -> Callable[..., BindableLogger]:
    """Return a factory usable as structlog's ``wrapper_class``.

    On each bind it resolves the effective level for the logger's name via the
    ``PER_LOGGER_LEVELS`` trie (longest dotted prefix wins, mirroring the
    stdlib cascade) and instantiates the matching pre-built filtering
    bound-logger class from ``LEVEL_TO_FILTERING_LOGGER``.
    """

    def maker(logger: WrappedLogger, *args, **kwargs):
        # If the logger is a NamedBytesLogger/NamedWriteLogger (an Airflow specific subclass) then
        # look up the global per-logger config and redirect to a new class.
        logger_name = kwargs.get("context", {}).get("logger_name")
        if not logger_name and isinstance(logger, (NamedWriteLogger, NamedBytesLogger)):
            logger_name = logger.name
        # A one-off "__level_override" in the bound context beats any configured
        # level; it is popped so it never leaks into the rendered event dict.
        if (level_override := kwargs.get("context", {}).pop("__level_override", None)) is not None:
            level = level_override
        elif logger_name:
            # Most specific dotted prefix wins; fall back to the root ("") level.
            level = PER_LOGGER_LEVELS.longest_prefix(logger_name).get(PER_LOGGER_LEVELS[""])
        else:
            level = PER_LOGGER_LEVELS[""]
        return LEVEL_TO_FILTERING_LOGGER[level](logger, *args, **kwargs)  # type: ignore[call-arg]

    return maker
class NamedBytesLogger(structlog.BytesLogger):
    """``structlog.BytesLogger`` that also carries the logger's dotted name.

    ``make_filtering_logger`` reads :attr:`name` to resolve per-logger level
    overrides when no explicit ``logger_name`` is bound in the context.
    """

    __slots__ = ("name",)

    def __init__(self, name: str | None = None, file: BinaryIO | None = None):
        self.name = name
        # Advise the kernel not to cache the output file's pages, best effort.
        super().__init__(None if file is None else make_file_io_non_caching(file))
class NamedWriteLogger(structlog.WriteLogger):
    """``structlog.WriteLogger`` that also carries the logger's dotted name.

    ``make_filtering_logger`` reads :attr:`name` to resolve per-logger level
    overrides when no explicit ``logger_name`` is bound in the context.
    """

    __slots__ = ("name",)

    def __init__(self, name: str | None = None, file: TextIO | None = None):
        self.name = name
        # Advise the kernel not to cache the output file's pages, best effort.
        super().__init__(None if file is None else make_file_io_non_caching(file))
# The stream type a logger writes to: text or binary.
LogOutputType = TypeVar("LogOutputType", bound=TextIO | BinaryIO)


class LoggerFactory(Generic[LogOutputType]):
    """Structlog ``logger_factory`` that builds ``cls`` instances writing to ``io``."""

    def __init__(
        self,
        cls: type[WrappedLogger],
        io: LogOutputType | None = None,
    ):
        # cls: the wrapped-logger class to instantiate (e.g. NamedBytesLogger).
        # io: the stream every created logger writes to (None -> the class default).
        self.cls = cls
        self.io = io

    def __call__(self, logger_name: str | None = None, *args: Any) -> WrappedLogger:
        # Extra positional args (passed by structlog) are intentionally ignored.
        return self.cls(logger_name, self.io)  # type: ignore[call-arg]
def logger_name(logger: Any, method_name: Any, event_dict: EventDict) -> EventDict:
    """Expose the logger's name in the event dict under the ``logger`` key.

    Prefers an explicit ``logger_name`` entry (always removed from the dict),
    falling back to the wrapped logger's ``name`` attribute. An existing
    ``logger`` key is never overwritten.
    """
    name = event_dict.pop("logger_name", None) or getattr(logger, "name", None)
    if name:
        event_dict.setdefault("logger", name)
    return event_dict
# `eyJ` is `{"` in base64 encoding -- and any value that starts like that is very
# likely a JWT token. Better safe than sorry
def redact_jwt(logger: Any, method_name: str, event_dict: EventDict) -> EventDict:
    """Mask anything that looks like a JWT in the event dict's string values."""
    for key, value in event_dict.items():
        if isinstance(value, str):
            event_dict[key] = re.sub(r"eyJ[\.A-Za-z0-9-_]*", "eyJ***", value)
    return event_dict
def drop_positional_args(logger: Any, method_name: Any, event_dict: EventDict) -> EventDict:
    """Strip the stdlib-style ``positional_args`` key left over after formatting."""
    if "positional_args" in event_dict:
        del event_dict["positional_args"]
    return event_dict
# This is a placeholder fn, that is "edited" in place via the `suppress_logs_and_warning` decorator
# The reason we need to do it this way is that structlog caches loggers on first use, and those include the
# configured processors, so we can't get away with changing the config as it won't have any effect once the
# logger obj is created and has been used once
def respect_stdlib_disable(logger: Any, method_name: Any, event_dict: EventDict) -> EventDict:
    """Pass-through placeholder; patched in place at runtime (see note above)."""
    return event_dict
@cache
def _structlog_processors_cached(
    json_output: bool,
    log_format: str = "",
    colors: bool = True,
    callsite_parameters: tuple[CallsiteParameter, ...] = (),
):
    """
    Build the processor chain for one unique configuration (cached).

    Returns ``(shared, for_stdlib, for_structlog)`` where ``shared`` is an
    immutable tuple — callers must never receive (and mutate) the cached list
    directly; see :func:`structlog_processors`.

    :meta private:
    """
    timestamper = structlog.processors.MaybeTimeStamper(fmt="iso")
    # Processors shared between stdlib handlers and structlog processors
    shared_processors: list[structlog.typing.Processor] = [
        respect_stdlib_disable,
        timestamper,
        structlog.contextvars.merge_contextvars,
        structlog.processors.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        logger_name,
        redact_jwt,
        structlog.processors.StackInfoRenderer(),
    ]
    if log_format:
        # Maintain the order if any params that are given explicitly, then add on anything needed for the
        # format string (so use a dict with None as the values as set doesn't preserve order)
        params = {
            param: None
            for param in itertools.chain(
                callsite_parameters or [], PercentFormatRender.callsite_params_from_fmt_string(log_format)
            )
        }
        shared_processors.append(
            structlog.processors.CallsiteParameterAdder(list(params.keys()), additional_ignores=[__name__])
        )
    elif callsite_parameters:
        shared_processors.append(
            structlog.processors.CallsiteParameterAdder(callsite_parameters, additional_ignores=[__name__])
        )
    # Imports to suppress showing code from these modules. We need the import to get the filepath for
    # structlog to ignore.
    import contextlib

    import click

    suppress: tuple[ModuleType, ...] = (click, contextlib)
    try:
        import httpcore

        suppress = (*suppress, httpcore)
    except ImportError:
        pass
    try:
        import httpx

        suppress = (*suppress, httpx)
    except ImportError:
        pass
    if json_output:
        dict_exc_formatter = structlog.tracebacks.ExceptionDictTransformer(
            use_rich=False, show_locals=False, suppress=suppress
        )
        dict_tracebacks = structlog.processors.ExceptionRenderer(dict_exc_formatter)
        import msgspec

        def json_dumps(msg, default):
            # Note: this is likely an "expensive" step, but lets massage the dict order for nice
            # viewing of the raw JSON logs.
            # Maybe we don't need this once the UI renders the JSON instead of displaying the raw text
            msg = {
                "timestamp": msg.pop("timestamp"),
                "level": msg.pop("level"),
                "event": msg.pop("event"),
                **msg,
            }
            return msgspec.json.encode(msg, enc_hook=default)

        json = structlog.processors.JSONRenderer(serializer=json_dumps)

        def json_processor(logger: Any, method_name: Any, event_dict: EventDict) -> str:
            # stdlib's ProcessorFormatter needs a str, not the bytes msgspec emits.
            result = json(logger, method_name, event_dict)
            return result.decode("utf-8") if isinstance(result, bytes) else result

        shared_processors.extend(
            (
                dict_tracebacks,
                structlog.processors.UnicodeDecoder(),
            ),
        )
        return tuple(shared_processors), json_processor, json
    exc_formatter: structlog.dev.RichTracebackFormatter | structlog.typing.ExceptionRenderer
    if os.getenv("DEV", "") != "":
        # Only use Rich in dev -- otherwise for "production" deployments it makes the logs harder to read as
        # it uses lots of ANSI escapes and non ASCII characters. Simpler is better for non-dev non-JSON
        exc_formatter = structlog.dev.RichTracebackFormatter(
            # These values are picked somewhat arbitrarily to produce useful-but-compact tracebacks. If
            # we ever need to change these then they should be configurable.
            extra_lines=0,
            max_frames=30,
            indent_guides=False,
            suppress=suppress,
        )
    else:
        exc_formatter = structlog.dev.plain_traceback
    my_styles = structlog.dev.ConsoleRenderer.get_default_level_styles(colors=colors)
    if colors:
        my_styles["debug"] = structlog.dev.CYAN
    console: PercentFormatRender | structlog.dev.ConsoleRenderer
    if log_format:
        console = PercentFormatRender(
            fmt=log_format,
            exception_formatter=exc_formatter,
            level_styles=my_styles,
            colors=colors,
        )
    else:
        if callsite_parameters == (CallsiteParameter.FILENAME, CallsiteParameter.LINENO):
            # Nicer formatting of the default callsite config
            def log_loc(logger: Any, method_name: Any, event_dict: EventDict) -> EventDict:
                if (
                    event_dict.get("logger") != "py.warnings"
                    and "filename" in event_dict
                    and "lineno" in event_dict
                ):
                    event_dict["loc"] = f"{event_dict.pop('filename')}:{event_dict.pop('lineno')}"
                return event_dict

            shared_processors.append(log_loc)
        console = structlog.dev.ConsoleRenderer(
            exception_formatter=exc_formatter,
            level_styles=my_styles,
            colors=colors,
        )
    return tuple(shared_processors), console, console


def structlog_processors(
    json_output: bool,
    log_format: str = "",
    colors: bool = True,
    callsite_parameters: tuple[CallsiteParameter, ...] = (),
):
    """
    Create the correct list of structlog processors for the given config.

    Return value is a tuple of three elements:

    1. A list of processors shared for structlog and stdlib
    2. The final processor/renderer (one that outputs a string) for use with
       structlog.stdlib.ProcessorFormatter
    3. The final renderer for the pure-structlog pipeline (same object as 2,
       except for JSON output where it emits bytes)

    ``callsite_parameters`` specifies the keys to add to the log event dict. If ``log_format`` is specified
    then anything callsite related will be added to this list

    :meta private:
    """
    shared, for_stdlib, for_structlog = _structlog_processors_cached(
        json_output, log_format=log_format, colors=colors, callsite_parameters=callsite_parameters
    )
    # Hand each caller a *fresh* list. Callers extend the shared chain in place
    # (``shared_pre_chain += ...`` in configure_logging); returning the @cache'd
    # list directly let those mutations corrupt the cache, so repeated
    # configure_logging calls accumulated duplicate processors.
    return list(shared), for_stdlib, for_structlog
def configure_logging(
*,
json_output: bool = False,
log_level: str = "DEBUG",
log_format: str = "",
stdlib_config: dict | None = None,
extra_processors: Sequence[Processor] | None = None,
callsite_parameters: Iterable[CallsiteParameter] | None = None,
colors: bool = True,
output: LogOutputType | None = None,
namespace_log_levels: str | dict[str, str] | None = None,
cache_logger_on_first_use: bool = True,
):
"""
Configure structlog (and stbilb's logging to send via structlog processors too).
If percent_log_format is passed then it will be handled in a similar mode to stdlib, including
interpolations such as ``%(asctime)s`` etc.
:param json_output: Set to true to write all logs as JSON (one per line)
:param log_level: The default log level to use for most logs
:param log_format: A percent-style log format to write non JSON logs with.
:param output: Where to write the logs to. If ``json_output`` is true this must be a binary stream
:param colors: Whether to use colors for non-JSON logs. This only works if standard out is a TTY (that is,
an interactive session), unless overridden by environment variables described below.
Please note that disabling colors also disables all styling, including bold and italics.
The following environment variables control color behavior (set to any non-empty value to activate):
* ``NO_COLOR`` - Disables colors completely. This takes precedence over all other settings,
including ``FORCE_COLOR``.
* ``FORCE_COLOR`` - Forces colors to be enabled, even when output is not going to a TTY. This only
takes effect if ``NO_COLOR`` is not set.
:param callsite_parameters: A list parameters about the callsite (line number, function name etc) to
include in the logs.
If ``log_format`` is specified, then anything required to populate that (such as ``%(lineno)d``) will
be automatically included.
:param namespace_log_levels: Levels of extra loggers to configure.
To make this easier to use, this can be a string consisting of pairs of ``<logger>=<level>`` (either
string, or space delimited) which will set the level for that specific logger.
For example::
``sqlalchemy=INFO sqlalchemy.engine=DEBUG``
"""
if "fatal" not in NAME_TO_LEVEL:
NAME_TO_LEVEL["fatal"] = NAME_TO_LEVEL["critical"]
def is_atty():
return sys.stdout is not None and hasattr(sys.stdout, "isatty") and sys.stdout.isatty()
colors = os.environ.get("NO_COLOR", "") == "" and (
os.environ.get("FORCE_COLOR", "") != "" or (colors and is_atty())
)
stdlib_config = stdlib_config or {}
extra_processors = extra_processors or ()
PER_LOGGER_LEVELS[""] = NAME_TO_LEVEL[log_level.lower()]
# Extract per-logger-tree levels and set them
if isinstance(namespace_log_levels, str):
log_from_level = partial(re.compile(r"\s*=\s*").split, maxsplit=2)
namespace_log_levels = {
log: level for log, level in map(log_from_level, re.split(r"[\s,]+", namespace_log_levels))
}
if namespace_log_levels:
for log, level in namespace_log_levels.items():
try:
loglevel = NAME_TO_LEVEL[level.lower()]
except KeyError:
raise ValueError(f"Invalid log level for logger {log!r}: {level!r}") from None
else:
PER_LOGGER_LEVELS[log] = loglevel
shared_pre_chain, for_stdlib, for_structlog = structlog_processors(
json_output,
log_format=log_format,
colors=colors,
callsite_parameters=tuple(callsite_parameters or ()),
)
shared_pre_chain += list(extra_processors)
pre_chain: list[structlog.typing.Processor] = [structlog.stdlib.add_logger_name] + shared_pre_chain
# Don't cache the loggers during tests, it makes it hard to capture them
if "PYTEST_VERSION" in os.environ:
cache_logger_on_first_use = False
std_lib_formatter: list[Processor] = [
# TODO: Don't include this if we are using PercentFormatter -- it'll delete something we
# just have to recreated!
structlog.stdlib.ProcessorFormatter.remove_processors_meta,
drop_positional_args,
for_stdlib,
]
wrapper_class = cast("type[BindableLogger]", make_filtering_logger())
if json_output:
logger_factory: LoggerFactory[Any] = LoggerFactory(NamedBytesLogger, io=output)
else:
# There is no universal way of telling if a file-like-object is binary (and needs bytes) or text that
# works for files, sockets and io.StringIO/BytesIO.
# If given a binary object, wrap it in a text mode wrapper
text_output: TextIO | None = None
if output is not None and not hasattr(output, "encoding"):
text_output = io.TextIOWrapper(cast("BinaryIO", output), line_buffering=True)
elif output is not None:
text_output = cast("TextIO", output)
logger_factory = LoggerFactory(NamedWriteLogger, io=text_output)
structlog.configure(
processors=shared_pre_chain + [for_structlog],
cache_logger_on_first_use=cache_logger_on_first_use,
wrapper_class=wrapper_class,
logger_factory=logger_factory,
)
import logging.config
config = {**stdlib_config}
config.setdefault("version", 1)
config.setdefault("disable_existing_loggers", False)
config["formatters"] = {**config.get("formatters", {})}
config["handlers"] = {**config.get("handlers", {})}
config["loggers"] = {**config.get("loggers", {})}
config["formatters"].update(
{
"structlog": {
"()": structlog.stdlib.ProcessorFormatter,
"use_get_message": False,
"processors": std_lib_formatter,
"foreign_pre_chain": pre_chain,
"pass_foreign_args": True,
},
}
)
for section in (config["loggers"], config["handlers"]):
for log_config in section.values():
# We want everything to go via structlog, remove whatever the user might have configured
log_config.pop("stream", None)
log_config.pop("formatter", None)
# log_config.pop("handlers", None)
if output and not hasattr(output, "encoding"):
# This is a BinaryIO, we need to give logging.StreamHandler a TextIO
output = codecs.lookup("utf-8").streamwriter(output) # type: ignore
config["handlers"].update(
{
"default": {
"level": log_level.upper(),
"class": "logging.StreamHandler",
"formatter": "structlog",
"stream": output,
},
}
)
config["loggers"].update(
{
# Set Airflow logging to the level requested, but most everything else at "INFO"
"airflow": {"level": log_level.upper()},
# These ones are too chatty even at info
"httpx": {"level": "WARN"},
"sqlalchemy.engine": {"level": "WARN"},
}
)
config["root"] = {
"handlers": ["default"],
"level": log_level.upper(),
"propagate": True,
}
logging.config.dictConfig(config)
def init_log_folder(directory: str | os.PathLike[str], new_folder_permissions: int):
"""
Prepare the log folder and ensure its mode is as configured.
To handle log writing when tasks are impersonated, the log files need to
be writable by the user that runs the Airflow command and the user
that is impersonated. This is mainly to handle corner cases with the
SubDagOperator. When the SubDagOperator is run, all of the operators
run under the impersonated user and create appropriate log files
as the impersonated user. However, if the user manually runs tasks
of the SubDagOperator through the UI, then the log files are created
by the user that runs the Airflow command. For example, the Airflow
run command may be run by the `airflow_sudoable` user, but the Airflow
tasks may be run by the `airflow` user. If the log files are not
writable by both users, then it's possible that re-running a task
via the UI (or vice versa) results in a permission error as the task
tries to write to a log file created by the other user.
We leave it up to the user to manage their permissions by exposing configuration for both
new folders and new log files. Default is to make new log folders and files group-writeable
to handle most common impersonation use cases. The requirement in this case will be to make
sure that the same group is set as default group for both - impersonated user and main airflow
user.
"""
directory = Path(directory)
for parent in reversed(Path(directory).parents):
parent.mkdir(mode=new_folder_permissions, exist_ok=True)
directory.mkdir(mode=new_folder_permissions, exist_ok=True)
def init_log_file(
base_log_folder: str | os.PathLike[str],
local_relative_path: str | os.PathLike[str],
*,
new_folder_permissions: int = 0o775,
new_file_permissions: int = 0o664,
) -> Path:
"""
Ensure log file and parent directories are created with the correct permissions.
Any directories that are missing are created with the right permission bits.
See above ``init_log_folder`` method for more detailed explanation.
"""
full_path = Path(base_log_folder, local_relative_path)
init_log_folder(full_path.parent, new_folder_permissions)
try:
full_path.touch(new_file_permissions)
except OSError as e:
log = structlog.get_logger(__name__)
log.warning("OSError while changing ownership of the log file. %s", e)
return full_path
def reconfigure_logger(
    logger: WrappedLogger, without_processor_type: type, level_override: int | None = None
):
    """Rebuild ``logger`` without any processor of ``without_processor_type``, optionally overriding its level."""
    # Prefer the processor chain already bound to this logger; fall back to
    # the globally configured chain when the logger does not carry its own.
    chain = getattr(logger, "_processors", None)
    if chain is None:
        chain = structlog.get_config()["processors"]
    kept = [candidate for candidate in chain if not isinstance(candidate, without_processor_type)]
    return structlog.wrap_logger(
        getattr(logger, "_logger", None),
        processors=kept,
        **getattr(logger, "_context", {}),
        __level_override=level_override,
    )
if __name__ == "__main__":
    # Manual smoke test: run this module directly to eyeball the rendered
    # output of both stdlib and structlog loggers, including exception rendering.
    configure_logging(
        # json_output=True,
        log_format="[%(blue)s%(asctime)s%(reset)s] {%(blue)s%(filename)s:%(reset)s%(lineno)d} %(log_color)s%(levelname)s%(reset)s - %(log_color)s%(message)s%(reset)s",
    )
    log = logging.getLogger("testing.stlib")
    log2 = structlog.get_logger(logger_name="testing.structlog")
    def raises():
        # Exercise exception formatting via both logging front-ends.
        try:
            1 / 0
        except ZeroDivisionError:
            log.exception("str")
        try:
            1 / 0
        except ZeroDivisionError:
            log2.exception("std")
    def main():
        log.info("in main")
        log2.info("in main", key="value")
        raises()
    main()
| {
"repo_id": "apache/airflow",
"file_path": "shared/logging/src/airflow_shared/logging/structlog.py",
"license": "Apache License 2.0",
"lines": 537,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:shared/logging/src/airflow_shared/logging/types.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any, Protocol
from structlog.typing import FilteringBoundLogger
__all__ = [
"Logger",
]
class Logger(FilteringBoundLogger, Protocol):
    """Structural type of the bound loggers handed out to Airflow code.

    Extends structlog's ``FilteringBoundLogger`` protocol with the
    stdlib-``logging``-style members Airflow also relies on.
    """

    # Dotted logger name, mirroring ``logging.Logger.name``.
    name: str

    def isEnabledFor(self, level: int): ...
    def getEffectiveLevel(self) -> int: ...
    # FilteringBoundLogger defines these methods with `event: str` -- in a few places in Airflow we do
    # `self.log.exception(e)` or `self.log.info(rule_results_df)` so we correct the types to allow for this
    # (as the code already did)
    def debug(self, event: Any, *args: Any, **kw: Any) -> Any: ...
    def info(self, event: Any, *args: Any, **kw: Any) -> Any: ...
    def warning(self, event: Any, *args: Any, **kw: Any) -> Any: ...
    def error(self, event: Any, *args: Any, **kw: Any) -> Any: ...
    def exception(self, event: Any, *args: Any, **kw: Any) -> Any: ...
    def log(self, level: int, event: Any, *args: Any, **kw: Any) -> Any: ...
| {
"repo_id": "apache/airflow",
"file_path": "shared/logging/src/airflow_shared/logging/types.py",
"license": "Apache License 2.0",
"lines": 36,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:shared/logging/tests/logging/test_structlog.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import contextlib
import io
import json
import logging
import os
import sys
import textwrap
from datetime import datetime, timezone
from unittest import mock
import pytest
import structlog
from structlog.dev import BLUE, BRIGHT, CYAN, DIM, GREEN, MAGENTA, RESET_ALL as RESET
from structlog.processors import CallsiteParameter
from airflow_shared.logging import structlog as structlog_module
from airflow_shared.logging.structlog import configure_logging
# We don't want to use the caplog fixture in this test, as the main purpose of this file is to capture the
# _rendered_ output of the tests to make sure it is correct.
PY_3_11 = sys.version_info >= (3, 11)
@pytest.fixture(autouse=True)
def set_time(time_machine):
    # Freeze the clock (tick=False) so every rendered timestamp in these tests
    # is exactly 1985-10-26T00:00:00.000001Z.
    time_machine.move_to(datetime(1985, 10, 26, microsecond=1, tzinfo=timezone.utc), tick=False)
@pytest.fixture
def structlog_config():
    """Return a context manager that configures logging into an in-memory buffer.

    The buffer is BytesIO for JSON output (binary stream required) and
    StringIO otherwise; it is rewound after the caller's block so tests can
    read the captured output, and the previous structlog config is restored.
    """

    @contextlib.contextmanager
    def configurer(**kwargs):
        prev_config = structlog.get_config()
        try:
            if kwargs.get("json_output"):
                buff = io.BytesIO()
            else:
                buff = io.StringIO()
            # Pretend stdout is a TTY so color decisions behave as if interactive.
            with mock.patch("sys.stdout") as mock_stdout:
                mock_stdout.isatty.return_value = True
                configure_logging(**kwargs, output=buff)
            yield buff
            # Rewind so the test can json.load()/read the captured output.
            buff.seek(0)
        finally:
            structlog.configure(**prev_config)

    return configurer
@pytest.mark.parametrize(
    ("get_logger", "config_kwargs", "extra_kwargs", "extra_output"),
    [
        pytest.param(
            structlog.get_logger,
            {},
            {"key1": "value1"},
            f" {CYAN}key1{RESET}={MAGENTA}value1{RESET}",
            id="structlog",
        ),
        pytest.param(
            structlog.get_logger,
            {"callsite_parameters": [CallsiteParameter.PROCESS]},
            {"key1": "value1"},
            f" {CYAN}key1{RESET}={MAGENTA}value1{RESET} {CYAN}process{RESET}={MAGENTA}{os.getpid()}{RESET}",
            id="structlog-callsite",
        ),
        pytest.param(
            logging.getLogger,
            {},
            {},
            "",
            id="stdlib",
        ),
        pytest.param(
            logging.getLogger,
            {"callsite_parameters": [CallsiteParameter.PROCESS]},
            {},
            f" {CYAN}process{RESET}={MAGENTA}{os.getpid()}{RESET}",
            id="stdlib-callsite",
        ),
    ],
)
def test_colorful(structlog_config, get_logger, config_kwargs, extra_kwargs, extra_output):
    """Colored console output renders identically for stdlib and structlog loggers."""
    with structlog_config(colors=True, **config_kwargs) as sio:
        logger = get_logger("my.logger")
        # Test that interpolations work too
        x = "world"
        logger.info("Hello %s", x, **extra_kwargs)
    written = sio.getvalue()
    # This _might_ be a little bit too specific to structlog's ConsoleRender format
    assert (
        written == f"{DIM}1985-10-26T00:00:00.000001Z{RESET} [{GREEN}{BRIGHT}info {RESET}]"
        f" {BRIGHT}Hello world {RESET}"
        f" [{RESET}{BRIGHT}{BLUE}my.logger{RESET}]{RESET}" + extra_output + "\n"
    )
@pytest.mark.parametrize(
    ("no_color", "force_color", "is_tty", "colors_param", "expected_colors"),
    [
        # NO_COLOR takes precedence over everything
        pytest.param("1", "", True, True, False, id="no_color_set_tty_colors_true"),
        pytest.param("1", "", True, False, False, id="no_color_set_tty_colors_false"),
        pytest.param("1", "", False, True, False, id="no_color_set_no_tty_colors_true"),
        pytest.param("1", "", False, False, False, id="no_color_set_no_tty_colors_false"),
        pytest.param("1", "1", True, True, False, id="no_color_and_force_color_tty_colors_true"),
        pytest.param("1", "1", True, False, False, id="no_color_and_force_color_tty_colors_false"),
        pytest.param("1", "1", False, True, False, id="no_color_and_force_color_no_tty_colors_true"),
        pytest.param("1", "1", False, False, False, id="no_color_and_force_color_no_tty_colors_false"),
        # FORCE_COLOR takes precedence when NO_COLOR is not set
        pytest.param("", "1", True, True, True, id="force_color_tty_colors_true"),
        pytest.param("", "1", True, False, True, id="force_color_tty_colors_false"),
        pytest.param("", "1", False, True, True, id="force_color_no_tty_colors_true"),
        pytest.param("", "1", False, False, True, id="force_color_no_tty_colors_false"),
        # When neither NO_COLOR nor FORCE_COLOR is set, check TTY and colors param
        pytest.param("", "", True, True, True, id="tty_colors_true"),
        pytest.param("", "", True, False, False, id="tty_colors_false"),
        pytest.param("", "", False, True, False, id="no_tty_colors_true"),
        pytest.param("", "", False, False, False, id="no_tty_colors_false"),
    ],
)
def test_color_config(monkeypatch, no_color, force_color, is_tty, colors_param, expected_colors):
    """Test all combinations of NO_COLOR, FORCE_COLOR, is_atty(), and colors parameter."""
    monkeypatch.setenv("NO_COLOR", no_color)
    monkeypatch.setenv("FORCE_COLOR", force_color)
    with mock.patch("sys.stdout") as mock_stdout:
        mock_stdout.isatty.return_value = is_tty
        # Stub the processor factory so we only observe the resolved `colors` flag,
        # without actually reconfiguring real logging output.
        with mock.patch.object(structlog_module, "structlog_processors") as mock_processors:
            mock_processors.return_value = ([], None, None)
            structlog_module.configure_logging(colors=colors_param)
    mock_processors.assert_called_once()
    assert mock_processors.call_args.kwargs["colors"] == expected_colors
@pytest.mark.parametrize(
    ("get_logger", "extra_kwargs", "extra_output"),
    [
        pytest.param(
            structlog.get_logger,
            {"key1": "value1"},
            f" {CYAN}key1{RESET}={MAGENTA}value1{RESET}",
            id="structlog",
        ),
        pytest.param(
            logging.getLogger,
            {},
            "",
            id="stdlib",
        ),
    ],
)
def test_precent_fmt(structlog_config, get_logger, extra_kwargs, extra_output):
    """Percent-style ``log_format`` renders color placeholders for both logger kinds."""
    with structlog_config(colors=True, log_format="%(blue)s[%(asctime)s]%(reset)s %(message)s") as sio:
        logger = get_logger("my.logger")
        logger.info("Hello", **extra_kwargs)
    written = sio.getvalue()
    # (leftover debug print removed)
    assert written == f"{BLUE}[1985-10-26T00:00:00.000001Z]{RESET} Hello" + extra_output + "\n"
def test_precent_fmt_force_no_colors(
    structlog_config,
):
    """With ``colors=False`` the percent format renders without any ANSI codes."""
    with structlog_config(
        colors=False,
        log_format="%(blue)s[%(asctime)s]%(reset)s {%(filename)s:%(lineno)d} %(log_color)s%(levelname)s - %(message)s",
    ) as sio:
        logger = structlog.get_logger("my.logger")
        logger.info("Hello", key1="value1")
        # Callsite of the .info() call two lines up, for the %(lineno)d check.
        lineno = sys._getframe().f_lineno - 2
    written = sio.getvalue()
    assert (
        written == f"[1985-10-26T00:00:00.000001Z] {{test_structlog.py:{lineno}}} INFO - Hello key1=value1\n"
    )
@pytest.mark.parametrize(
    ("get_logger", "config_kwargs", "log_kwargs", "expected_kwargs"),
    [
        pytest.param(
            structlog.get_logger,
            {},
            {"key1": "value1"},
            {"key1": "value1"},
            id="structlog",
        ),
        pytest.param(
            structlog.get_logger,
            {"callsite_parameters": [CallsiteParameter.PROCESS]},
            {"key1": "value1"},
            {"key1": "value1", "process": os.getpid()},
            id="structlog-callsite",
        ),
        pytest.param(
            logging.getLogger,
            {},
            {},
            {},
            id="stdlib",
        ),
        pytest.param(
            logging.getLogger,
            {"callsite_parameters": [CallsiteParameter.PROCESS]},
            {},
            {"process": os.getpid()},
            id="stdlib-callsite",
        ),
    ],
)
def test_json(structlog_config, get_logger, config_kwargs, log_kwargs, expected_kwargs):
    """JSON mode writes one parseable object per record with the expected fields."""
    with structlog_config(json_output=True, **(config_kwargs or {})) as bio:
        logger = get_logger("my.logger")
        logger.info("Hello", **log_kwargs)
    written = json.load(bio)
    assert written == {
        "event": "Hello",
        "level": "info",
        **expected_kwargs,
        "logger": "my.logger",
        "timestamp": "1985-10-26T00:00:00.000001Z",
    }
@pytest.mark.parametrize(
    ("get_logger"),
    [
        pytest.param(
            structlog.get_logger,
            id="structlog",
        ),
        pytest.param(
            logging.getLogger,
            id="stdlib",
        ),
    ],
)
def test_precent_fmt_exc(structlog_config, get_logger, monkeypatch):
    """``.exception()`` renders a plain stdlib-style traceback in percent-format mode."""
    # Clear DEV so no "rich"/dev traceback renderer is used — TODO confirm which
    # behavior the DEV env var toggles in configure_logging.
    monkeypatch.setenv("DEV", "")
    with structlog_config(
        log_format="%(message)s",
        colors=False,
    ) as sio:
        # Line number of the `1 / 0` two lines below, for the expected traceback.
        lineno = sys._getframe().f_lineno + 2
        try:
            1 / 0
        except ZeroDivisionError:
            get_logger("logger").exception("Error")
    written = sio.getvalue()
    expected = textwrap.dedent(f"""\
        Error
        Traceback (most recent call last):
          File "{__file__}", line {lineno}, in test_precent_fmt_exc
            1 / 0
        """)
    if PY_3_11:
        # 3.11+ adds fine-grained error location markers under the failing expression.
        expected += "          ~~^~~\n"
    expected += "ZeroDivisionError: division by zero\n"
    assert written == expected
@pytest.mark.parametrize(
    ("get_logger"),
    [
        pytest.param(
            structlog.get_logger,
            id="structlog",
        ),
        pytest.param(
            logging.getLogger,
            id="stdlib",
        ),
    ],
)
def test_json_exc(structlog_config, get_logger, monkeypatch):
    """``.exception()`` in JSON mode emits a structured exception payload."""
    with structlog_config(json_output=True) as bio:
        # Line number of the `1 / 0` two lines below, for the expected frame info.
        lineno = sys._getframe().f_lineno + 2
        try:
            1 / 0
        except ZeroDivisionError:
            get_logger("logger").exception("Error")
    # (dead `written = bio.getvalue()` removed — it was immediately overwritten)
    written = json.load(bio)
    assert written == {
        "event": "Error",
        "exception": [
            {
                "exc_notes": [],
                "exc_type": "ZeroDivisionError",
                "exc_value": "division by zero",
                "exceptions": [],
                "frames": [
                    {
                        "filename": __file__,
                        "lineno": lineno,
                        "name": "test_json_exc",
                    },
                ],
                "is_cause": False,
                "is_group": False,
                "syntax_error": None,
            },
        ],
        "level": "error",
        "logger": "logger",
        "timestamp": "1985-10-26T00:00:00.000001Z",
    }
@pytest.mark.parametrize(
    "levels",
    (
        pytest.param("my.logger=warn", id="str"),
        pytest.param({"my.logger": "warn"}, id="dict"),
    ),
)
def test_logger_filtering(structlog_config, levels):
    """Per-namespace levels suppress sub-loggers while unrelated loggers stay at DEBUG."""
    with structlog_config(
        colors=False,
        log_format="[%(name)s] %(message)s",
        log_level="DEBUG",
        namespace_log_levels=levels,
    ) as sio:
        # "my" is unaffected; "my.logger" (and children) are raised to WARN,
        # so only the warning from the subtree should appear.
        structlog.get_logger("my").info("Hello", key1="value1")
        structlog.get_logger("my.logger").info("Hello", key1="value2")
        structlog.get_logger("my.logger.sub").info("Hello", key1="value3")
        structlog.get_logger("other.logger").info("Hello", key1="value4")
        structlog.get_logger("my.logger.sub").warning("Hello", key1="value5")
    written = sio.getvalue()
    assert written == textwrap.dedent("""\
        [my] Hello key1=value1
        [other.logger] Hello key1=value4
        [my.logger.sub] Hello key1=value5
        """)
def test_logger_respects_configured_level(structlog_config):
    """A stdlib logger emits DEBUG records once the global level is DEBUG."""
    config_kwargs = {
        "colors": False,
        "log_format": "[%(name)s] %(message)s",
        "log_level": "DEBUG",
    }
    with structlog_config(**config_kwargs) as captured:
        logging.getLogger("my_logger").debug("Debug message")
    assert "[my_logger] Debug message\n" in captured.getvalue()
| {
"repo_id": "apache/airflow",
"file_path": "shared/logging/tests/logging/test_structlog.py",
"license": "Apache License 2.0",
"lines": 337,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/fab/src/airflow/providers/fab/auth_manager/cli_commands/permissions_command.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Permissions cleanup command."""
from __future__ import annotations
import logging
from typing import TYPE_CHECKING
from airflow.utils import cli as cli_utils
from airflow.utils.providers_configuration_loader import providers_configuration_loaded
from airflow.utils.session import NEW_SESSION, provide_session
from airflow.utils.strings import to_boolean
if TYPE_CHECKING:
from sqlalchemy.orm import Session
log = logging.getLogger(__name__)
@provide_session
def cleanup_dag_permissions(dag_id: str, session: Session = NEW_SESSION) -> None:
    """
    Clean up DAG-specific permissions from Flask-AppBuilder tables.

    When a DAG is deleted, we need to clean up the corresponding permissions
    to prevent orphaned entries in the ab_view_menu table.
    This addresses issue #50905: Deleted DAGs not removed from ab_view_menu table
    and show up in permissions.

    Rows are removed child-first (role associations, then permissions, then
    resources) to satisfy foreign-key constraints. The caller owns the
    transaction; nothing is committed here.

    :param dag_id: Specific DAG ID to clean up.
    :param session: Database session.
    """
    # Imports are local to avoid importing FAB models at module import time.
    from sqlalchemy import delete, select
    from airflow.providers.fab.auth_manager.models import Permission, Resource, assoc_permission_role
    from airflow.providers.fab.www.security.permissions import (
        RESOURCE_DAG_PREFIX,
        RESOURCE_DAG_RUN,
        RESOURCE_DETAILS_MAP,
    )
    # Clean up specific DAG permissions
    dag_resources = session.scalars(
        select(Resource).filter(
            Resource.name.in_(
                [
                    f"{RESOURCE_DAG_PREFIX}{dag_id}",  # DAG:dag_id
                    f"{RESOURCE_DETAILS_MAP[RESOURCE_DAG_RUN]['prefix']}{dag_id}",  # DAG_RUN:dag_id
                ]
            )
        )
    ).all()
    log.info("Cleaning up DAG-specific permissions for dag_id: %s", dag_id)
    if not dag_resources:
        return
    dag_resource_ids = [resource.id for resource in dag_resources]
    # Find all permissions associated with these resources
    dag_permissions = session.scalars(
        select(Permission).filter(Permission.resource_id.in_(dag_resource_ids))
    ).all()
    if not dag_permissions:
        # Delete resources even if no permissions exist
        session.execute(delete(Resource).where(Resource.id.in_(dag_resource_ids)))
        return
    dag_permission_ids = [permission.id for permission in dag_permissions]
    # Delete permission-role associations first (foreign key constraint)
    session.execute(
        delete(assoc_permission_role).where(
            assoc_permission_role.c.permission_view_id.in_(dag_permission_ids)
        )
    )
    # Delete permissions
    session.execute(delete(Permission).where(Permission.resource_id.in_(dag_resource_ids)))
    # Delete resources (ab_view_menu entries)
    session.execute(delete(Resource).where(Resource.id.in_(dag_resource_ids)))
    log.info("Cleaned up %d DAG-specific permissions", len(dag_permissions))
@cli_utils.action_cli
@providers_configuration_loaded
def permissions_cleanup(args):
    """Clean up DAG permissions in Flask-AppBuilder tables.

    Finds FAB resources whose names refer to DAGs that no longer exist in
    ``DagModel`` and removes their permissions. Supports ``--dag-id`` to
    target one DAG, ``--dry-run`` to only report, and ``--yes`` to skip the
    interactive confirmation prompt.
    """
    # Imports are local so the CLI module stays cheap to import.
    from sqlalchemy import select
    from airflow.models import DagModel
    from airflow.providers.fab.auth_manager.cli_commands.utils import get_application_builder
    from airflow.providers.fab.auth_manager.models import Resource
    from airflow.providers.fab.www.security.permissions import (
        RESOURCE_DAG_PREFIX,
        RESOURCE_DAG_RUN,
        RESOURCE_DETAILS_MAP,
    )
    from airflow.utils.session import create_session
    with get_application_builder() as _:
        with create_session() as session:
            # Get all existing DAG IDs from DagModel
            existing_dag_ids = {dag.dag_id for dag in session.scalars(select(DagModel)).all()}
            # Get all DAG-related resources from FAB tables
            dag_resources = session.scalars(
                select(Resource).filter(
                    Resource.name.like(f"{RESOURCE_DAG_PREFIX}%")
                    | Resource.name.like(f"{RESOURCE_DETAILS_MAP[RESOURCE_DAG_RUN]['prefix']}%")
                )
            ).all()
            orphaned_resources = []
            orphaned_dag_ids = set()
            for resource in dag_resources:
                # Extract DAG ID from resource name
                dag_id = None
                if resource.name.startswith(RESOURCE_DAG_PREFIX):
                    dag_id = resource.name[len(RESOURCE_DAG_PREFIX) :]
                elif resource.name.startswith(RESOURCE_DETAILS_MAP[RESOURCE_DAG_RUN]["prefix"]):
                    dag_id = resource.name[len(RESOURCE_DETAILS_MAP[RESOURCE_DAG_RUN]["prefix"]) :]
                # Check if this DAG ID still exists
                if dag_id and dag_id not in existing_dag_ids:
                    orphaned_resources.append(resource)
                    orphaned_dag_ids.add(dag_id)
            # Filter by specific DAG ID if provided
            if args.dag_id:
                if args.dag_id in orphaned_dag_ids:
                    orphaned_dag_ids = {args.dag_id}
                    print(f"Filtering to clean up permissions for DAG: {args.dag_id}")
                else:
                    print(
                        f"DAG '{args.dag_id}' not found in orphaned permissions or still exists in database."
                    )
                    return
            if not orphaned_dag_ids:
                if args.dag_id:
                    print(f"No orphaned permissions found for DAG: {args.dag_id}")
                else:
                    print("No orphaned DAG permissions found.")
                return
            print(f"Found orphaned permissions for {len(orphaned_dag_ids)} deleted DAG(s):")
            for dag_id in sorted(orphaned_dag_ids):
                print(f"  - {dag_id}")
            if args.dry_run:
                print("\nDry run mode: No changes will be made.")
                print(f"Would clean up permissions for {len(orphaned_dag_ids)} orphaned DAG(s).")
                return
            # Perform cleanup if not in dry run mode
            if not args.yes:
                action = (
                    f"clean up permissions for {len(orphaned_dag_ids)} DAG(s)"
                    if not args.dag_id
                    else f"clean up permissions for DAG '{args.dag_id}'"
                )
                confirm = input(f"\nDo you want to {action}? [y/N]: ")
                if not to_boolean(confirm):
                    print("Cleanup cancelled.")
                    return
            # Perform the actual cleanup. Best-effort per DAG: one failure is
            # reported but does not abort the remaining DAGs.
            cleanup_count = 0
            for dag_id in orphaned_dag_ids:
                try:
                    cleanup_dag_permissions(dag_id, session)
                    cleanup_count += 1
                    if args.verbose:
                        print(f"Cleaned up permissions for DAG: {dag_id}")
                except Exception as e:
                    print(f"Failed to clean up permissions for DAG {dag_id}: {e}")
            print(f"\nSuccessfully cleaned up permissions for {cleanup_count} DAG(s).")
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/src/airflow/providers/fab/auth_manager/cli_commands/permissions_command.py",
"license": "Apache License 2.0",
"lines": 164,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/fab/tests/unit/fab/auth_manager/cli_commands/test_permissions_command.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Test permissions command."""
from __future__ import annotations
import argparse
from contextlib import redirect_stdout
from importlib import reload
from io import StringIO
from unittest.mock import MagicMock, patch
import pytest
from airflow.cli import cli_parser
from airflow.providers.fab.auth_manager.cli_commands import permissions_command
from airflow.providers.fab.auth_manager.cli_commands.utils import get_application_builder
from tests_common.test_utils.config import conf_vars
pytestmark = pytest.mark.db_test
class TestPermissionsCommand:
"""Test permissions cleanup CLI commands."""
    @pytest.fixture(autouse=True)
    def _set_attrs(self):
        # Force the FAB auth manager for the duration of each test, rebuild the
        # CLI parser against it, and expose parser/appbuilder on the instance.
        with conf_vars(
            {
                (
                    "core",
                    "auth_manager",
                ): "airflow.providers.fab.auth_manager.fab_auth_manager.FabAuthManager",
            }
        ):
            # Reload the module to use FAB auth manager
            reload(cli_parser)
            # Clearing the cache before calling it
            cli_parser.get_parser.cache_clear()
            self.parser = cli_parser.get_parser()
            with get_application_builder() as appbuilder:
                self.appbuilder = appbuilder
            yield
    @patch("airflow.providers.fab.auth_manager.cli_commands.permissions_command.cleanup_dag_permissions")
    @patch("airflow.providers.fab.auth_manager.models.Resource")
    def test_permissions_cleanup_success(self, mock_resource, mock_cleanup_dag_permissions):
        """Test successful cleanup of DAG permissions."""
        # Mock args
        args = argparse.Namespace()
        args.dag_id = None
        args.dry_run = False
        args.yes = True
        args.verbose = True
        # Mock orphaned resources
        mock_orphaned_resource = MagicMock()
        mock_orphaned_resource.name = "DAG:orphaned_dag"
        with (
            patch("airflow.providers.fab.auth_manager.cli_commands.utils.get_application_builder"),
            patch("airflow.utils.session.create_session") as mock_session_ctx,
            patch("sqlalchemy.select"),
            redirect_stdout(StringIO()),
        ):
            mock_session = MagicMock()
            mock_session_ctx.return_value.__enter__.return_value = mock_session
            # Mock DagModel query - return existing DAGs
            mock_dag_result = MagicMock()
            mock_dag_result.all.return_value = [MagicMock(dag_id="existing_dag")]
            # Mock Resource query - return orphaned resources
            mock_resource_result = MagicMock()
            mock_resource_result.all.return_value = [mock_orphaned_resource]
            # Setup session.scalars to return different results for different queries
            # (order matters: first call reads DagModel, second reads Resource).
            mock_session.scalars.side_effect = [mock_dag_result, mock_resource_result]
            permissions_command.permissions_cleanup(args)
        # Verify function calls - it should be called exactly once for the orphaned DAG
        mock_cleanup_dag_permissions.assert_called_once_with("orphaned_dag", mock_session)
    @patch("airflow.providers.fab.auth_manager.cli_commands.permissions_command.cleanup_dag_permissions")
    @patch("airflow.providers.fab.auth_manager.models.Resource")
    def test_permissions_cleanup_dry_run(self, mock_resource, mock_cleanup_dag_permissions):
        """Test dry run mode for permissions cleanup."""
        # Mock args (no `yes` needed: dry-run returns before the confirmation prompt)
        args = argparse.Namespace()
        args.dag_id = None
        args.dry_run = True
        args.verbose = True
        # Mock orphaned resources
        mock_orphaned_resource = MagicMock()
        mock_orphaned_resource.name = "DAG:orphaned_dag"
        with (
            patch("airflow.providers.fab.auth_manager.cli_commands.utils.get_application_builder"),
            patch("airflow.utils.session.create_session") as mock_session_ctx,
            patch("sqlalchemy.select"),
            redirect_stdout(StringIO()) as stdout,
        ):
            mock_session = MagicMock()
            mock_session_ctx.return_value.__enter__.return_value = mock_session
            # Mock DagModel query - return existing DAGs
            mock_dag_result = MagicMock()
            mock_dag_result.all.return_value = [MagicMock(dag_id="existing_dag")]
            # Mock Resource query - return orphaned resources
            mock_resource_result = MagicMock()
            mock_resource_result.all.return_value = [mock_orphaned_resource]
            # Setup session.scalars to return different results for different queries
            mock_session.scalars.side_effect = [mock_dag_result, mock_resource_result]
            permissions_command.permissions_cleanup(args)
        output = stdout.getvalue()
        assert "Dry run mode" in output or "No orphaned DAG permissions found" in output
        # In dry run mode, cleanup_dag_permissions should NOT be called
        mock_cleanup_dag_permissions.assert_not_called()
    @patch("airflow.providers.fab.auth_manager.cli_commands.permissions_command.cleanup_dag_permissions")
    @patch("airflow.providers.fab.auth_manager.models.Resource")
    def test_permissions_cleanup_specific_dag(self, mock_resource, mock_cleanup_dag_permissions):
        """Test cleanup for a specific DAG."""
        # Mock args
        args = argparse.Namespace()
        args.dag_id = "test_dag"
        args.dry_run = False
        args.yes = True
        args.verbose = True
        # Mock orphaned resource for the specific DAG
        mock_orphaned_resource = MagicMock()
        mock_orphaned_resource.name = "DAG:test_dag"
        with (
            patch("airflow.providers.fab.auth_manager.cli_commands.utils.get_application_builder"),
            patch("airflow.utils.session.create_session") as mock_session_ctx,
            patch("sqlalchemy.select"),
            redirect_stdout(StringIO()),
        ):
            mock_session = MagicMock()
            mock_session_ctx.return_value.__enter__.return_value = mock_session
            # Mock DagModel query - return existing DAGs (NOT including the target DAG)
            mock_dag_result = MagicMock()
            mock_dag_result.all.return_value = [
                MagicMock(dag_id="existing_dag"),
                MagicMock(dag_id="another_existing_dag"),
            ]
            # Mock Resource query - return orphaned resources
            mock_resource_result = MagicMock()
            mock_resource_result.all.return_value = [mock_orphaned_resource]
            # Setup session.scalars to return different results for different queries
            mock_session.scalars.side_effect = [mock_dag_result, mock_resource_result]
            permissions_command.permissions_cleanup(args)
        # Should call cleanup_dag_permissions specifically for test_dag
        mock_cleanup_dag_permissions.assert_called_once_with("test_dag", mock_session)
    @patch("airflow.providers.fab.auth_manager.cli_commands.permissions_command.cleanup_dag_permissions")
    @patch("airflow.providers.fab.auth_manager.models.Resource")
    @patch("builtins.input", return_value="n")
    def test_permissions_cleanup_no_confirmation(
        self, mock_input, mock_resource, mock_cleanup_dag_permissions
    ):
        """Answering 'n' at the confirmation prompt must cancel the cleanup."""
        # CLI-args equivalent: real run without --yes, so the command must prompt.
        args = argparse.Namespace()
        args.dag_id = None
        args.dry_run = False
        args.yes = False
        args.verbose = False
        # A resource whose DAG no longer exists -> candidate for cleanup.
        mock_orphaned_resource = MagicMock()
        mock_orphaned_resource.name = "DAG:orphaned_dag"
        with (
            patch("airflow.providers.fab.auth_manager.cli_commands.utils.get_application_builder"),
            patch("airflow.utils.session.create_session") as mock_session_ctx,
            patch("sqlalchemy.select"),
            redirect_stdout(StringIO()) as stdout,
        ):
            mock_session = MagicMock()
            mock_session_ctx.return_value.__enter__.return_value = mock_session
            # Mock DagModel query - return existing DAGs
            mock_dag_result = MagicMock()
            mock_dag_result.all.return_value = [MagicMock(dag_id="existing_dag")]
            # Mock Resource query - return orphaned resources
            mock_resource_result = MagicMock()
            mock_resource_result.all.return_value = [mock_orphaned_resource]
            # side_effect order matters: DagModel query first, Resource query second.
            mock_session.scalars.side_effect = [mock_dag_result, mock_resource_result]
            permissions_command.permissions_cleanup(args)
            output = stdout.getvalue()
            # Should not call cleanup if user declines or no orphaned permissions found
            assert "Cleanup cancelled" in output or "No orphaned DAG permissions found" in output
            # cleanup_dag_permissions should NOT be called when user cancels
            if "Cleanup cancelled" in output:
                mock_cleanup_dag_permissions.assert_not_called()
class TestDagPermissions:
    """Test cases for cleanup_dag_permissions function with real database operations."""

    @pytest.fixture(autouse=True)
    def _setup_fab_test(self):
        """Run every test with the FAB auth manager configured and an app builder active."""
        with conf_vars(
            {
                (
                    "core",
                    "auth_manager",
                ): "airflow.providers.fab.auth_manager.fab_auth_manager.FabAuthManager",
            }
        ):
            with get_application_builder():
                yield

    def test_cleanup_dag_permissions_removes_specific_dag_resources(self):
        """Test that cleanup_dag_permissions removes only the specified DAG resources."""
        from sqlalchemy import select

        from airflow.providers.fab.auth_manager.cli_commands.permissions_command import (
            cleanup_dag_permissions,
        )
        from airflow.providers.fab.auth_manager.models import Action, Permission, Resource
        from airflow.providers.fab.www.security.permissions import RESOURCE_DAG_PREFIX
        from airflow.utils.session import create_session

        with create_session() as session:
            # Create resources for two different DAGs
            target_resource = Resource(name=f"{RESOURCE_DAG_PREFIX}target_dag")
            keep_resource = Resource(name=f"{RESOURCE_DAG_PREFIX}keep_dag")
            session.add_all([target_resource, keep_resource])
            session.flush()
            # Get or create action ("can_read" may already exist from other tests)
            read_action = session.scalars(select(Action).where(Action.name == "can_read")).first()
            if not read_action:
                read_action = Action(name="can_read")
                session.add(read_action)
                session.flush()
            # Create permissions linking the action to each resource
            target_perm = Permission(action=read_action, resource=target_resource)
            keep_perm = Permission(action=read_action, resource=keep_resource)
            session.add_all([target_perm, keep_perm])
            session.commit()
            # Execute cleanup
            cleanup_dag_permissions("target_dag", session)
            # Verify: target resource deleted, keep resource remains
            assert not session.get(Resource, target_resource.id)
            assert session.get(Resource, keep_resource.id)
            assert not session.get(Permission, target_perm.id)
            assert session.get(Permission, keep_perm.id)

    def test_cleanup_dag_permissions_handles_no_matching_resources(self):
        """Test that cleanup_dag_permissions handles DAGs with no matching resources gracefully."""
        from sqlalchemy import func, select

        from airflow.providers.fab.auth_manager.cli_commands.permissions_command import (
            cleanup_dag_permissions,
        )
        from airflow.providers.fab.auth_manager.models import Resource
        from airflow.utils.session import create_session

        with create_session() as session:
            # Cleanup of an unknown DAG must leave the resource count untouched.
            initial_count = session.scalar(select(func.count(Resource.id)))
            cleanup_dag_permissions("non_existent_dag", session)
            assert session.scalar(select(func.count(Resource.id))) == initial_count

    def test_cleanup_dag_permissions_handles_resources_without_permissions(self):
        """Test cleanup when resources exist but have no permissions."""
        from airflow.providers.fab.auth_manager.cli_commands.permissions_command import (
            cleanup_dag_permissions,
        )
        from airflow.providers.fab.auth_manager.models import Resource
        from airflow.providers.fab.www.security.permissions import RESOURCE_DAG_PREFIX
        from airflow.utils.session import create_session

        with create_session() as session:
            # Create resource without permissions
            resource = Resource(name=f"{RESOURCE_DAG_PREFIX}test_dag")
            session.add(resource)
            session.commit()
            # Capture the id before cleanup deletes (and expires) the object.
            resource_id = resource.id
            cleanup_dag_permissions("test_dag", session)
            assert not session.get(Resource, resource_id)

    def test_cleanup_dag_permissions_with_default_session(self):
        """Test cleanup_dag_permissions when no session is provided (uses default)."""
        from sqlalchemy import func, select

        from airflow.providers.fab.auth_manager.cli_commands.permissions_command import (
            cleanup_dag_permissions,
        )
        from airflow.providers.fab.auth_manager.models import Resource
        from airflow.providers.fab.www.security.permissions import RESOURCE_DAG_PREFIX
        from airflow.utils.session import create_session

        # Setup test data
        with create_session() as session:
            resource = Resource(name=f"{RESOURCE_DAG_PREFIX}test_dag")
            session.add(resource)
            session.commit()
        # Call cleanup without session parameter
        cleanup_dag_permissions("test_dag")
        # Verify deletion
        with create_session() as session:
            count = session.scalar(
                select(func.count(Resource.id)).where(Resource.name == f"{RESOURCE_DAG_PREFIX}test_dag")
            )
            assert count == 0
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/tests/unit/fab/auth_manager/cli_commands/test_permissions_command.py",
"license": "Apache License 2.0",
"lines": 287,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/serialization/definitions/taskgroup.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import copy
import functools
import operator
import weakref
from collections import deque
from typing import TYPE_CHECKING
import attrs
import methodtools
from airflow.serialization.definitions.node import DAGNode
if TYPE_CHECKING:
from collections.abc import Generator, Iterator
from typing import Any, ClassVar
from airflow.models.expandinput import SchedulerExpandInput
from airflow.serialization.definitions.dag import SerializedDAG, SerializedOperator
@attrs.define(eq=False, hash=False, kw_only=True)
class SerializedTaskGroup(DAGNode):
    """Serialized representation of a TaskGroup used in protected processes."""

    # Raw group id as authored; ``None`` marks the root group of a DAG.
    _group_id: str | None = attrs.field(alias="group_id")
    # Optional display label; ``label`` falls back to the raw group id.
    group_display_name: str | None = attrs.field()
    # When True, ``child_id`` prefixes children with this group's id.
    prefix_group_id: bool = attrs.field()
    # Enclosing task group; ``None`` for the DAG's root group.
    parent_group: SerializedTaskGroup | None = attrs.field()
    dag: SerializedDAG = attrs.field()
    tooltip: str = attrs.field()
    default_args: dict[str, Any] = attrs.field(factory=dict)
    # TODO: Are these actually useful?
    ui_color: str = attrs.field(default="CornflowerBlue")
    ui_fgcolor: str = attrs.field(default="#000")
    # Direct children (tasks and nested groups) keyed by node_id; filled via add().
    children: dict[str, DAGNode] = attrs.field(factory=dict, init=False)
    upstream_group_ids: set[str | None] = attrs.field(factory=set, init=False)
    downstream_group_ids: set[str | None] = attrs.field(factory=set, init=False)
    upstream_task_ids: set[str] = attrs.field(factory=set, init=False)
    downstream_task_ids: set[str] = attrs.field(factory=set, init=False)

    is_mapped: ClassVar[bool] = False

    def __repr__(self) -> str:
        return f"<SerializedTaskGroup: {self.group_id}>"

    @staticmethod
    def _iter_child(child):
        """Yield the tasks under *child*: a nested group is expanded recursively, any other node is yielded as-is."""
        if isinstance(child, SerializedTaskGroup):
            yield from child
        else:
            yield child

    def __iter__(self):
        # Iterating a group yields its tasks recursively (nested groups are expanded).
        for child in self.children.values():
            yield from self._iter_child(child)

    @property
    def group_id(self) -> str | None:
        """Group id including parent-group prefixes; ``None`` for the root group."""
        if (
            self._group_id
            and self.parent_group
            and self.parent_group.prefix_group_id
            and self.parent_group._group_id
        ):
            return self.parent_group.child_id(self._group_id)
        return self._group_id

    @property
    def label(self) -> str:
        """group_id excluding parent's group_id used as the node label in UI."""
        return self.group_display_name or self._group_id or ""

    @property
    def node_id(self) -> str:
        return self.group_id or ""

    @property
    def is_root(self) -> bool:
        """True if this is a DAG's root group (it has no group id)."""
        return not self._group_id

    # TODO (GH-52141): This shouldn't need to be writable after serialization,
    # but DAGNode defines the property as writable.
    @property
    def task_group(self) -> SerializedTaskGroup | None:  # type: ignore[override]
        return self.parent_group

    def child_id(self, label: str) -> str:
        """Prefix *label* with this group's id when prefixing is enabled."""
        if self.prefix_group_id and (group_id := self.group_id):
            return f"{group_id}.{label}"
        return label

    @property
    def upstream_join_id(self) -> str:
        # Synthetic join-node id derived from the group id.
        return f"{self.group_id}.upstream_join_id"

    @property
    def downstream_join_id(self) -> str:
        # Synthetic join-node id derived from the group id.
        return f"{self.group_id}.downstream_join_id"

    @property
    def roots(self) -> list[DAGNode]:
        return list(self.get_roots())

    @property
    def leaves(self) -> list[DAGNode]:
        return list(self.get_leaves())

    def get_roots(self) -> Generator[SerializedOperator, None, None]:
        """Return a generator of tasks with no upstream dependencies within the TaskGroup."""
        tasks = list(self)
        ids = {x.task_id for x in tasks}
        for task in tasks:
            # A root has no upstream task that is itself part of this group.
            if task.upstream_task_ids.isdisjoint(ids):
                yield task

    def get_leaves(self) -> Generator[SerializedOperator, None, None]:
        """Return a generator of tasks with no downstream dependencies within the TaskGroup."""
        tasks = list(self)
        ids = {x.task_id for x in tasks}

        def has_non_teardown_downstream(task, exclude: str):
            # True if *task* has an in-group, non-teardown downstream other than *exclude*.
            for down_task in task.downstream_list:
                if down_task.task_id == exclude:
                    continue
                if down_task.task_id not in ids:
                    continue
                if not down_task.is_teardown:
                    return True
            return False

        def recurse_for_first_non_teardown(task):
            # Walk upstream past teardown tasks to yield the non-teardown tasks they guard.
            for upstream_task in task.upstream_list:
                if upstream_task.task_id not in ids:
                    # upstream task is not in task group
                    continue
                elif upstream_task.is_teardown:
                    yield from recurse_for_first_non_teardown(upstream_task)
                elif task.is_teardown and upstream_task.is_setup:
                    # don't go through the teardown-to-setup path
                    continue
                # return unless upstream task already has non-teardown downstream in group
                elif not has_non_teardown_downstream(upstream_task, exclude=task.task_id):
                    yield upstream_task

        for task in tasks:
            if task.downstream_task_ids.isdisjoint(ids):
                if not task.is_teardown:
                    yield task
                else:
                    # A teardown-only leaf delegates to its nearest non-teardown ancestors.
                    yield from recurse_for_first_non_teardown(task)

    def get_task_group_dict(self) -> dict[str | None, SerializedTaskGroup]:
        """Create a flat dict of group_id: TaskGroup."""

        def build_map(node: DAGNode) -> Generator[tuple[str | None, SerializedTaskGroup]]:
            # Depth-first walk that emits only group nodes; plain tasks are skipped.
            if not isinstance(node, SerializedTaskGroup):
                return
            yield node.group_id, node
            for child in node.children.values():
                yield from build_map(child)

        return dict(build_map(self))

    def iter_tasks(self) -> Iterator[SerializedOperator]:
        """Return an iterator of the child tasks."""
        from airflow.serialization.definitions.baseoperator import SerializedBaseOperator
        from airflow.serialization.definitions.mappedoperator import SerializedMappedOperator

        # Breadth-first walk over nested groups, yielding only concrete operators.
        groups_to_visit = deque([self])
        while groups_to_visit:
            for child in groups_to_visit.popleft().children.values():
                if isinstance(child, (SerializedMappedOperator, SerializedBaseOperator)):
                    yield child
                elif isinstance(child, SerializedTaskGroup):
                    groups_to_visit.append(child)
                else:
                    raise ValueError(
                        f"Encountered a DAGNode that is not a task or task "
                        f"group: {type(child).__module__}.{type(child)}"
                    )

    def iter_mapped_task_groups(self) -> Iterator[SerializedMappedTaskGroup]:
        """
        Find mapped task groups in the hierarchy.

        Groups are returned from the closest to the outmost. If *self* is a
        mapped task group, it is returned first.
        """
        group: SerializedTaskGroup | None = self
        while group is not None:
            if isinstance(group, SerializedMappedTaskGroup):
                yield group
            group = group.parent_group

    def topological_sort(self) -> list[DAGNode]:
        """
        Sorts children in topographical order.

        A task in the result would come after any of its upstream dependencies.
        """
        # This uses a modified version of Kahn's Topological Sort algorithm to
        # not have to pre-compute the "in-degree" of the nodes.
        # Shallow copy: the dict is consumed below, the nodes themselves are shared.
        graph_unsorted = copy.copy(self.children)
        graph_sorted: list[DAGNode] = []
        if not self.children:
            return graph_sorted
        # NOTE(review): if the children contained a dependency cycle, no node would
        # ever be deleted and this loop would not terminate -- serialized DAGs are
        # presumed acyclic; confirm validation happens upstream.
        while graph_unsorted:
            for node in list(graph_unsorted.values()):
                for edge in node.upstream_list:
                    if edge.node_id in graph_unsorted:
                        break
                    # Check for task's group is a child (or grand child) of this TG,
                    tg = edge.task_group
                    while tg:
                        if tg.node_id in graph_unsorted:
                            break
                        tg = tg.parent_group
                    if tg:
                        # We are already going to visit that TG
                        break
                else:
                    # for-else: no unsorted upstream dependency remains, emit this node.
                    del graph_unsorted[node.node_id]
                    graph_sorted.append(node)
        return graph_sorted

    def add(self, node: DAGNode) -> DAGNode:
        """Attach *node* as a child of this group and return it."""
        # Set the TG first, as setting it might change the return value of node_id!
        # The parent is held via a weak proxy, so children do not keep it alive.
        node.task_group = weakref.proxy(self)
        if isinstance(node, SerializedTaskGroup):
            if self.dag:
                node.dag = self.dag
        self.children[node.node_id] = node
        return node
@attrs.define(kw_only=True, repr=False)
class SerializedMappedTaskGroup(SerializedTaskGroup):
    """Serialized representation of a MappedTaskGroup used in protected processes."""

    _expand_input: SchedulerExpandInput = attrs.field(alias="expand_input")

    is_mapped: ClassVar[bool] = True

    def __repr__(self) -> str:
        return "<SerializedMappedTaskGroup: %s>" % self.group_id

    @methodtools.lru_cache(maxsize=None)
    def get_parse_time_mapped_ti_count(self) -> int:
        """
        Return the number of instances a task in this group should be mapped to.

        Only literal mapped arguments can be counted at parse time. If this
        group is nested inside other mapped task groups, the counts of all
        enclosing groups are multiplied together.

        :raise NotFullyPopulated: If any non-literal mapped arguments are encountered.
        :return: The total number of mapped instances each task should have.
        """
        # One factor per enclosing mapped group, innermost (self) first.
        per_group_counts = (
            group._expand_input.get_parse_time_mapped_ti_count()
            for group in self.iter_mapped_task_groups()
        )
        return functools.reduce(operator.mul, per_group_counts)

    def iter_mapped_dependencies(self) -> Iterator[SerializedOperator]:
        """Upstream dependencies that provide XComs used by this mapped task group."""
        from airflow.serialization.definitions.xcom_arg import SchedulerXComArg

        yield from (
            op for op, _ in SchedulerXComArg.iter_xcom_references(self._expand_input)
        )
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/serialization/definitions/taskgroup.py",
"license": "Apache License 2.0",
"lines": 243,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/edge3/src/airflow/providers/edge3/worker_api/datamodels_ui.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime
from typing import (
Annotated,
)
from pydantic import Field
from airflow.api_fastapi.core_api.base import BaseModel
from airflow.providers.edge3.worker_api.datamodels import EdgeJobBase, WorkerStateBody
from airflow.utils.state import TaskInstanceState # noqa: TC001
class Worker(WorkerStateBody):
    """Details of the worker state sent to the scheduler."""

    worker_name: Annotated[str, Field(description="Name of the worker.")]
    # Timestamps default to None for workers that have not reported them yet.
    first_online: Annotated[datetime | None, Field(description="When the worker was first online.")] = None
    last_heartbeat: Annotated[
        datetime | None, Field(description="When the worker last sent a heartbeat.")
    ] = None
class WorkerCollectionResponse(BaseModel):
    """Worker Collection serializer."""

    workers: list[Worker]
    # Total number of matching workers (here always len(workers); no pagination).
    total_entries: int
class Job(EdgeJobBase):
    """Details of the job sent to the scheduler."""

    # Extends EdgeJobBase (dag/task/run identity) with execution metadata.
    state: Annotated[TaskInstanceState, Field(description="State of the job from the view of the executor.")]
    queue: Annotated[
        str,
        Field(description="Queue for which the task is scheduled/running."),
    ]
    queued_dttm: Annotated[datetime | None, Field(description="When the job was queued.")] = None
    edge_worker: Annotated[
        str | None, Field(description="The worker processing the job during execution.")
    ] = None
    last_update: Annotated[datetime | None, Field(description="Last heartbeat of the job.")] = None
class JobCollectionResponse(BaseModel):
    """Job Collection serializer."""

    jobs: list[Job]
    # Total number of matching jobs (here always len(jobs); no pagination).
    total_entries: int
class MaintenanceRequest(BaseModel):
    """Request body for maintenance operations."""

    maintenance_comment: Annotated[str, Field(description="Comment describing the maintenance reason.")]
class QueueUpdateRequest(BaseModel):
    """Request body for queue operations."""

    queue_name: Annotated[str, Field(description="Name of the queue to add or remove.")]
| {
"repo_id": "apache/airflow",
"file_path": "providers/edge3/src/airflow/providers/edge3/worker_api/datamodels_ui.py",
"license": "Apache License 2.0",
"lines": 58,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/edge3/src/airflow/providers/edge3/worker_api/routes/ui.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime
from typing import TYPE_CHECKING, Annotated
from fastapi import Depends, HTTPException, Query, status
from sqlalchemy import select
from airflow.api_fastapi.auth.managers.models.resource_details import AccessView
from airflow.api_fastapi.common.db.common import SessionDep # noqa: TC001
from airflow.api_fastapi.common.router import AirflowRouter
from airflow.api_fastapi.core_api.security import GetUserDep, requires_access_view
from airflow.providers.edge3.models.edge_job import EdgeJobModel
from airflow.providers.edge3.models.edge_worker import (
EdgeWorkerModel,
EdgeWorkerState,
add_worker_queues,
change_maintenance_comment,
exit_maintenance,
remove_worker,
remove_worker_queues,
request_maintenance,
request_shutdown,
)
from airflow.providers.edge3.worker_api.datamodels_ui import (
Job,
JobCollectionResponse,
MaintenanceRequest,
Worker,
WorkerCollectionResponse,
)
from airflow.utils.state import TaskInstanceState
if TYPE_CHECKING:
from sqlalchemy.engine import ScalarResult
# Router for the Edge-provider endpoints consumed by the UI.
ui_router = AirflowRouter(tags=["UI"])
@ui_router.get(
    "/worker",
    dependencies=[
        Depends(requires_access_view(access_view=AccessView.JOBS)),
    ],
)
def worker(
    session: SessionDep,
    worker_name_pattern: str | None = None,
    queue_name_pattern: str | None = None,
    state: Annotated[list[EdgeWorkerState] | None, Query()] = None,
) -> WorkerCollectionResponse:
    """
    Return Edge Workers.

    ``worker_name_pattern`` and ``queue_name_pattern`` are case-insensitive
    substring filters; ``state`` restricts results to the given worker states.
    Results are ordered by worker name.
    """
    query = select(EdgeWorkerModel)
    if worker_name_pattern:
        query = query.where(EdgeWorkerModel.worker_name.ilike(f"%{worker_name_pattern}%"))
    if queue_name_pattern:
        # NOTE(review): the pattern embeds a literal single quote ("%'...%"),
        # presumably to anchor on the opening quote of an entry in the serialized
        # ``_queues`` column -- confirm against the model's storage format.
        query = query.where(EdgeWorkerModel._queues.ilike(f"%'{queue_name_pattern}%"))
    if state:
        query = query.where(EdgeWorkerModel.state.in_(state))
    query = query.order_by(EdgeWorkerModel.worker_name)
    workers: ScalarResult[EdgeWorkerModel] = session.scalars(query)
    result = [
        Worker(
            worker_name=w.worker_name,
            queues=w.queues,
            state=w.state,
            jobs_active=w.jobs_active,
            sysinfo=w.sysinfo_json or {},
            maintenance_comments=w.maintenance_comment,
            first_online=w.first_online,
            last_heartbeat=w.last_update,
        )
        for w in workers
    ]
    return WorkerCollectionResponse(
        workers=result,
        total_entries=len(result),
    )
@ui_router.get(
    "/jobs",
    dependencies=[
        Depends(requires_access_view(access_view=AccessView.JOBS)),
    ],
)
def jobs(
    session: SessionDep,
    dag_id_pattern: str | None = None,
    run_id_pattern: str | None = None,
    task_id_pattern: str | None = None,
    state: Annotated[list[TaskInstanceState] | None, Query()] = None,
    queue_pattern: str | None = None,
    worker_name_pattern: str | None = None,
) -> JobCollectionResponse:
    """Return Edge Jobs."""
    stmt = select(EdgeJobModel)
    # Case-insensitive substring filters, applied in a fixed order.
    for pattern, column in (
        (dag_id_pattern, EdgeJobModel.dag_id),
        (run_id_pattern, EdgeJobModel.run_id),
        (task_id_pattern, EdgeJobModel.task_id),
    ):
        if pattern:
            stmt = stmt.where(column.ilike(f"%{pattern}%"))
    if state:
        # States are stored as raw values in the job table.
        stmt = stmt.where(EdgeJobModel.state.in_([item.value for item in state]))
    for pattern, column in (
        (queue_pattern, EdgeJobModel.queue),
        (worker_name_pattern, EdgeJobModel.edge_worker),
    ):
        if pattern:
            stmt = stmt.where(column.ilike(f"%{pattern}%"))
    stmt = stmt.order_by(EdgeJobModel.queued_dttm)
    rows: ScalarResult[EdgeJobModel] = session.scalars(stmt)
    collected: list[Job] = []
    for row in rows:
        collected.append(
            Job(
                dag_id=row.dag_id,
                task_id=row.task_id,
                run_id=row.run_id,
                map_index=row.map_index,
                try_number=row.try_number,
                state=TaskInstanceState(row.state),
                queue=row.queue,
                queued_dttm=row.queued_dttm,
                edge_worker=row.edge_worker,
                last_update=row.last_update,
            )
        )
    return JobCollectionResponse(
        jobs=collected,
        total_entries=len(collected),
    )
@ui_router.post(
    "/worker/{worker_name}/maintenance",
    dependencies=[
        Depends(requires_access_view(access_view=AccessView.JOBS)),
    ],
)
def request_worker_maintenance(
    worker_name: str,
    maintenance_request: MaintenanceRequest,
    session: SessionDep,
    user: GetUserDep,
) -> None:
    """
    Put a worker into maintenance mode.

    :param worker_name: Name of the worker to put into maintenance.
    :param maintenance_request: Body carrying the mandatory maintenance comment.
    :raises HTTPException: 404 if the worker does not exist; 400 if the comment
        is missing or the state transition is rejected.
    """
    # Check if worker exists first
    worker_query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name)
    worker = session.scalar(worker_query)
    if not worker:
        raise HTTPException(status.HTTP_404_NOT_FOUND, detail=f"Worker {worker_name} not found")
    if not maintenance_request.maintenance_comment:
        raise HTTPException(status.HTTP_400_BAD_REQUEST, detail="Maintenance comment is required")
    # Prefix the comment with a timestamp and the requesting user's name for the audit trail.
    formatted_comment = f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {user.get_name()} put node into maintenance mode\nComment: {maintenance_request.maintenance_comment}"
    try:
        request_maintenance(worker_name, formatted_comment, session=session)
    except Exception as e:
        # Chain the original exception so its traceback is preserved (B904).
        raise HTTPException(status.HTTP_400_BAD_REQUEST, detail=str(e)) from e
@ui_router.patch(
    "/worker/{worker_name}/maintenance",
    dependencies=[
        Depends(requires_access_view(access_view=AccessView.JOBS)),
    ],
)
def update_worker_maintenance(
    worker_name: str,
    maintenance_request: MaintenanceRequest,
    session: SessionDep,
    user: GetUserDep,
) -> None:
    """
    Update maintenance comments for a worker.

    Keeps the first line of the stored comment (the original "entered
    maintenance" header) and appends a timestamped update entry.

    :raises HTTPException: 404 if the worker does not exist; 400 if the comment
        is missing or the update is rejected.
    """
    # Check if worker exists first
    worker_query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name)
    worker = session.scalar(worker_query)
    if not worker:
        raise HTTPException(status.HTTP_404_NOT_FOUND, detail=f"Worker {worker_name} not found")
    if not maintenance_request.maintenance_comment:
        raise HTTPException(status.HTTP_400_BAD_REQUEST, detail="Maintenance comment is required")
    # Preserve the existing header line, then record who updated the comment and when.
    first_line = worker.maintenance_comment.split("\n", 1)[0] if worker.maintenance_comment else ""
    formatted_comment = f"{first_line}\n[{datetime.now().strftime('%Y-%m-%d %H:%M')}] - {user.get_name()} updated comment:\n{maintenance_request.maintenance_comment}"
    try:
        change_maintenance_comment(worker_name, formatted_comment, session=session)
    except Exception as e:
        # Chain the original exception so its traceback is preserved (B904).
        raise HTTPException(status.HTTP_400_BAD_REQUEST, detail=str(e)) from e
@ui_router.delete(
    "/worker/{worker_name}/maintenance",
    dependencies=[
        Depends(requires_access_view(access_view=AccessView.JOBS)),
    ],
)
def exit_worker_maintenance(
    worker_name: str,
    session: SessionDep,
) -> None:
    """
    Exit a worker from maintenance mode.

    :raises HTTPException: 404 if the worker does not exist; 400 if the state
        transition is rejected.
    """
    # Check if worker exists first
    worker_query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name)
    worker = session.scalar(worker_query)
    if not worker:
        raise HTTPException(status.HTTP_404_NOT_FOUND, detail=f"Worker {worker_name} not found")
    try:
        exit_maintenance(worker_name, session=session)
    except Exception as e:
        # Chain the original exception so its traceback is preserved (B904).
        raise HTTPException(status.HTTP_400_BAD_REQUEST, detail=str(e)) from e
@ui_router.post(
    "/worker/{worker_name}/shutdown",
    dependencies=[
        Depends(requires_access_view(access_view=AccessView.JOBS)),
    ],
)
def request_worker_shutdown(
    worker_name: str,
    session: SessionDep,
) -> None:
    """
    Request shutdown of a worker.

    :raises HTTPException: 404 if the worker does not exist; 400 if the
        shutdown request is rejected.
    """
    # Check if worker exists first
    worker_query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name)
    worker = session.scalar(worker_query)
    if not worker:
        raise HTTPException(status.HTTP_404_NOT_FOUND, detail=f"Worker {worker_name} not found")
    try:
        request_shutdown(worker_name, session=session)
    except Exception as e:
        # Chain the original exception so its traceback is preserved (B904).
        raise HTTPException(status.HTTP_400_BAD_REQUEST, detail=str(e)) from e
@ui_router.delete(
    "/worker/{worker_name}",
    dependencies=[
        Depends(requires_access_view(access_view=AccessView.JOBS)),
    ],
)
def delete_worker(
    worker_name: str,
    session: SessionDep,
) -> None:
    """
    Delete a worker record from the system.

    :raises HTTPException: 404 if the worker does not exist; 400 if removal
        is rejected.
    """
    # Check if worker exists first
    worker_query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name)
    worker = session.scalar(worker_query)
    if not worker:
        raise HTTPException(status.HTTP_404_NOT_FOUND, detail=f"Worker {worker_name} not found")
    try:
        remove_worker(worker_name, session=session)
    except Exception as e:
        # Chain the original exception so its traceback is preserved (B904).
        raise HTTPException(status.HTTP_400_BAD_REQUEST, detail=str(e)) from e
@ui_router.put(
    "/worker/{worker_name}/queues/{queue_name}",
    dependencies=[
        Depends(requires_access_view(access_view=AccessView.JOBS)),
    ],
)
def add_worker_queue(
    worker_name: str,
    queue_name: str,
    session: SessionDep,
) -> None:
    """
    Add a queue to a worker.

    :raises HTTPException: 404 if the worker does not exist; 400 if the queue
        update is rejected.
    """
    # Check if worker exists first
    worker_query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name)
    worker = session.scalar(worker_query)
    if not worker:
        raise HTTPException(status.HTTP_404_NOT_FOUND, detail=f"Worker {worker_name} not found")
    try:
        add_worker_queues(worker_name, [queue_name], session=session)
    except Exception as e:
        # Chain the original exception so its traceback is preserved (B904).
        raise HTTPException(status.HTTP_400_BAD_REQUEST, detail=str(e)) from e
@ui_router.delete(
    "/worker/{worker_name}/queues/{queue_name}",
    dependencies=[
        Depends(requires_access_view(access_view=AccessView.JOBS)),
    ],
)
def remove_worker_queue(
    worker_name: str,
    queue_name: str,
    session: SessionDep,
) -> None:
    """
    Remove a queue from a worker.

    :raises HTTPException: 404 if the worker does not exist; 400 if the queue
        update is rejected.
    """
    # Check if worker exists first
    worker_query = select(EdgeWorkerModel).where(EdgeWorkerModel.worker_name == worker_name)
    worker = session.scalar(worker_query)
    if not worker:
        raise HTTPException(status.HTTP_404_NOT_FOUND, detail=f"Worker {worker_name} not found")
    try:
        remove_worker_queues(worker_name, [queue_name], session=session)
    except Exception as e:
        # Chain the original exception so its traceback is preserved (B904).
        raise HTTPException(status.HTTP_400_BAD_REQUEST, detail=str(e)) from e
| {
"repo_id": "apache/airflow",
"file_path": "providers/edge3/src/airflow/providers/edge3/worker_api/routes/ui.py",
"license": "Apache License 2.0",
"lines": 292,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/edge3/tests/unit/edge3/worker_api/routes/test_ui.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
import pytest
from sqlalchemy import delete
from airflow.providers.edge3.models.edge_worker import EdgeWorkerModel, EdgeWorkerState
from tests_common.test_utils.version_compat import AIRFLOW_V_3_1_PLUS
if TYPE_CHECKING:
from sqlalchemy.orm import Session
# All tests in this module require database access.
pytestmark = pytest.mark.db_test
@pytest.mark.skipif(not AIRFLOW_V_3_1_PLUS, reason="Plugin endpoint is not used in Airflow 3.0+")
class TestUiApiRoutes:
    """DB-backed tests for the Edge worker UI API routes."""

    @pytest.fixture(autouse=True)
    def setup_test_cases(self, session: Session):
        """Start each test from a clean table holding exactly one running worker."""
        session.execute(delete(EdgeWorkerModel))
        session.add(EdgeWorkerModel(worker_name="worker1", queues=["default"], state=EdgeWorkerState.RUNNING))
        session.commit()

    def test_worker(self, session: Session):
        """The worker endpoint returns the single registered worker."""
        from airflow.providers.edge3.worker_api.routes.ui import worker

        worker_response = worker(session=session)
        assert worker_response is not None
        assert worker_response.total_entries == 1
        assert len(worker_response.workers) == 1
        assert worker_response.workers[0].worker_name == "worker1"
| {
"repo_id": "apache/airflow",
"file_path": "providers/edge3/tests/unit/edge3/worker_api/routes/test_ui.py",
"license": "Apache License 2.0",
"lines": 39,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:scripts/ci/prek/ts_compile_lint_edge.py | #!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.resolve())) # make sure common_prek_utils is imported
from common_prek_utils import (
AIRFLOW_PROVIDERS_ROOT_PATH,
AIRFLOW_ROOT_PATH,
run_command,
temporary_tsc_project,
)
if __name__ not in ("__main__", "__mp_main__"):
raise SystemExit(
"This file is intended to be executed as an executable program. You cannot use it as a module."
f"To run this script, run the ./{__file__} command"
)
if __name__ == "__main__":
original_files = sys.argv[1:]
print("Original files:", original_files)
dir = (
AIRFLOW_PROVIDERS_ROOT_PATH / "edge3" / "src" / "airflow" / "providers" / "edge3" / "plugins" / "www"
)
relative_dir = Path(dir).relative_to(AIRFLOW_ROOT_PATH)
files = [
file[len(relative_dir.as_posix()) + 1 :]
for file in original_files
if Path(file).is_relative_to(relative_dir) and "openapi-gen/" not in file
]
all_non_yaml_files = [file for file in files if not file.endswith(".yaml")]
print("All non-YAML files:", all_non_yaml_files)
all_ts_files = [file for file in files if file.endswith(".ts") or file.endswith(".tsx")]
print("All TypeScript files:", all_ts_files)
run_command(["pnpm", "config", "set", "store-dir", ".pnpm-store"], cwd=dir)
run_command(["pnpm", "install", "--frozen-lockfile", "--config.confirmModulesPurge=false"], cwd=dir)
if any("/openapi/" in file for file in original_files):
run_command(["pnpm", "codegen"], cwd=dir)
if all_non_yaml_files:
run_command(["pnpm", "eslint", "--fix", *all_non_yaml_files], cwd=dir)
run_command(["pnpm", "prettier", "--write", *all_non_yaml_files], cwd=dir)
if all_ts_files:
with temporary_tsc_project(dir / "tsconfig.app.json", all_ts_files) as tsc_project:
run_command(["pnpm", "tsc", "--p", tsc_project.name], cwd=dir)
| {
"repo_id": "apache/airflow",
"file_path": "scripts/ci/prek/ts_compile_lint_edge.py",
"license": "Apache License 2.0",
"lines": 58,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:devel-common/src/tests_common/test_utils/dag.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from collections.abc import Collection, Sequence
from typing import TYPE_CHECKING
from airflow.utils.session import NEW_SESSION, provide_session
from tests_common.test_utils.compat import DagSerialization, SerializedDAG
if TYPE_CHECKING:
from sqlalchemy.orm import Session
from airflow.sdk import DAG
def create_scheduler_dag(dag: DAG | SerializedDAG) -> SerializedDAG:
if isinstance(dag, SerializedDAG):
return dag
return DagSerialization.deserialize_dag(DagSerialization.serialize_dag(dag))
@provide_session
def sync_dag_to_db(
dag: DAG,
bundle_name: str = "testing",
session: Session = NEW_SESSION,
) -> SerializedDAG:
return sync_dags_to_db([dag], bundle_name=bundle_name, session=session)[0]
@provide_session
def sync_dags_to_db(
dags: Collection[DAG],
bundle_name: str = "testing",
session: Session = NEW_SESSION,
) -> Sequence[SerializedDAG]:
"""
Sync dags into the database.
This serializes dags and saves the results to the database. The serialized
(scheduler-oeirnted) dags are returned. If the input is ordered (e.g. a list),
the returned sequence is guaranteed to be in the same order.
"""
from airflow.models.dagbundle import DagBundleModel
from airflow.models.serialized_dag import SerializedDagModel
from airflow.serialization.serialized_objects import LazyDeserializedDAG
session.merge(DagBundleModel(name=bundle_name))
session.flush()
def _write_dag(dag: DAG) -> SerializedDAG:
data = DagSerialization.to_dict(dag)
SerializedDagModel.write_dag(LazyDeserializedDAG(data=data), bundle_name, session=session)
return DagSerialization.from_dict(data)
SerializedDAG.bulk_write_to_db(bundle_name, None, dags, session=session)
scheduler_dags = [_write_dag(dag) for dag in dags]
session.flush()
return scheduler_dags
| {
"repo_id": "apache/airflow",
"file_path": "devel-common/src/tests_common/test_utils/dag.py",
"license": "Apache License 2.0",
"lines": 61,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/openlineage/tests/system/openlineage/example_openlineage_setup_teardown_dag.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Simple DAG with setup and teardown operators.
It checks:
- if setup and teardown information is included in DAG event for all tasks
- if setup and teardown information is included in task events for AF2
"""
from __future__ import annotations
from datetime import datetime
from airflow import DAG
from airflow.providers.standard.operators.python import PythonOperator
from system.openlineage.expected_events import get_expected_event_file_path
from system.openlineage.operator import OpenLineageTestOperator
def do_nothing():
pass
DAG_ID = "openlineage_setup_teardown_dag"
with DAG(
dag_id=DAG_ID,
start_date=datetime(2021, 1, 1),
schedule=None,
catchup=False,
default_args={"retries": 0},
) as dag:
create_cluster = PythonOperator(task_id="create_cluster", python_callable=do_nothing)
run_query = PythonOperator(task_id="run_query", python_callable=do_nothing)
run_query2 = PythonOperator(task_id="run_query2", python_callable=do_nothing)
delete_cluster = PythonOperator(task_id="delete_cluster", python_callable=do_nothing)
check_events = OpenLineageTestOperator(
task_id="check_events", file_path=get_expected_event_file_path(DAG_ID)
)
(
create_cluster
>> run_query
>> run_query2
>> delete_cluster.as_teardown(setups=create_cluster)
>> check_events
)
from tests_common.test_utils.system_tests import get_test_run # noqa: E402
# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/tests/system/openlineage/example_openlineage_setup_teardown_dag.py",
"license": "Apache License 2.0",
"lines": 55,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/openlineage/tests/system/openlineage/example_openlineage_short_circuit_dag.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Simple DAG with Short Circuit Operator.
It checks:
- if events that should be emitted are there
- if events for skipped tasks are not emitted
"""
from __future__ import annotations
from datetime import datetime
from airflow import DAG
from airflow.models import Variable
from airflow.providers.standard.operators.python import PythonOperator, ShortCircuitOperator
from system.openlineage.expected_events import get_expected_event_file_path
from system.openlineage.operator import OpenLineageTestOperator
def do_nothing():
pass
def check_events_number_func():
try:
from airflow.sdk.exceptions import AirflowRuntimeError as ExpectedError # AF3
except ImportError:
ExpectedError = KeyError # AF2
for event_type in ("start", "complete"):
try:
events = Variable.get(
key=f"openlineage_short_circuit_dag.should_be_skipped.event.{event_type}",
deserialize_json=True,
)
except ExpectedError:
pass
else:
raise ValueError(
f"Expected no {event_type.upper()} events for task `should_be_skipped`, got {events}"
)
DAG_ID = "openlineage_short_circuit_dag"
with DAG(
dag_id=DAG_ID,
start_date=datetime(2021, 1, 1),
schedule=None,
catchup=False,
default_args={"retries": 0},
) as dag:
do_nothing_task = PythonOperator(task_id="do_nothing_task", python_callable=do_nothing)
skip_tasks = ShortCircuitOperator(
task_id="skip_tasks", python_callable=lambda: False, ignore_downstream_trigger_rules=False
)
should_be_skipped = PythonOperator(task_id="should_be_skipped", python_callable=do_nothing)
check_events_number = PythonOperator(
task_id="check_events_number", python_callable=check_events_number_func, trigger_rule="none_failed"
)
check_events = OpenLineageTestOperator(
task_id="check_events", file_path=get_expected_event_file_path(DAG_ID)
)
do_nothing_task >> skip_tasks >> should_be_skipped >> check_events_number >> check_events
from tests_common.test_utils.system_tests import get_test_run # noqa: E402
# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/tests/system/openlineage/example_openlineage_short_circuit_dag.py",
"license": "Apache License 2.0",
"lines": 71,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/openlineage/tests/system/openlineage/example_openlineage_taskflow_simple_dag.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Simple DAG to verify that DAG defined with taskflow API emits OL events.
It checks:
- required keys
- field formats and types
- number of task events (one start, one complete)
"""
from __future__ import annotations
from datetime import datetime
from system.openlineage.expected_events import get_expected_event_file_path
from system.openlineage.operator import OpenLineageTestOperator
try:
from airflow.sdk import dag, task
except ImportError: # Airflow 2
from airflow.decorators import dag, task # type: ignore[no-redef, attr-defined]
DAG_ID = "openlineage_taskflow_simple_dag"
@dag(schedule=None, start_date=datetime(2021, 1, 1), catchup=False, default_args={"retries": 0})
def openlineage_taskflow_simple_dag():
@task
def do_nothing_task(**context):
return None
do_nothing = do_nothing_task()
check_events = OpenLineageTestOperator(
task_id="check_events", file_path=get_expected_event_file_path(DAG_ID)
)
do_nothing >> check_events
openlineage_taskflow_simple_dag()
from tests_common.test_utils.system_tests import get_test_run # noqa: E402
# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/tests/system/openlineage/example_openlineage_taskflow_simple_dag.py",
"license": "Apache License 2.0",
"lines": 46,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/common/sql/tests/system/common/sql/example_sql_insert_rows.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow import DAG
from airflow.providers.common.sql.operators.sql import SQLInsertRowsOperator
from airflow.utils.timezone import datetime
AIRFLOW_DB_METADATA_TABLE = "ab_user"
connection_args = {
"conn_id": "airflow_db",
"conn_type": "Postgres",
"host": "postgres",
"schema": "postgres",
"login": "postgres",
"password": "postgres",
"port": 5432,
}
with DAG(
"example_sql_insert_rows",
description="Example DAG for SQLInsertRowsOperator.",
default_args=connection_args,
start_date=datetime(2021, 1, 1),
schedule=None,
catchup=False,
) as dag:
"""
### Example SQL insert rows DAG
Runs the SQLInsertRowsOperator against the Airflow metadata DB.
"""
# [START howto_operator_sql_insert_rows]
insert_rows = SQLInsertRowsOperator(
task_id="insert_rows",
table_name="actors",
columns=[
"name",
"firstname",
"age",
],
rows=[
("Stallone", "Sylvester", 78),
("Statham", "Jason", 57),
("Li", "Jet", 61),
("Lundgren", "Dolph", 66),
("Norris", "Chuck", 84),
],
preoperator=[
"""
CREATE TABLE IF NOT EXISTS actors (
index BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
name TEXT NOT NULL,
firstname TEXT NOT NULL,
age BIGINT NOT NULL
);
""",
"TRUNCATE TABLE actors;",
],
postoperator="DROP TABLE IF EXISTS actors;",
insert_args={
"commit_every": 1000,
"autocommit": False,
"executemany": True,
"fast_executemany": True,
},
)
# [END howto_operator_sql_insert_rows]
from tests_common.test_utils.system_tests import get_test_run # noqa: E402
# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/sql/tests/system/common/sql/example_sql_insert_rows.py",
"license": "Apache License 2.0",
"lines": 82,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:devel-common/src/tests_common/test_utils/common_msg_queue.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow.exceptions import AirflowProviderDeprecationWarning
from tests_common.test_utils.version_compat import get_base_airflow_version_tuple
@pytest.fixture
def collect_queue_param_deprecation_warning():
"""Collect deprecation warnings for queue parameter."""
with pytest.warns(
AirflowProviderDeprecationWarning,
match="The `queue` parameter is deprecated and will be removed in future versions. Use the `scheme` parameter instead and pass configuration as keyword arguments to `MessageQueueTrigger`.",
):
yield
mark_common_msg_queue_test = pytest.mark.skipif(
get_base_airflow_version_tuple() < (3, 0, 1), reason="CommonMessageQueueTrigger Requires Airflow 3.0.1+"
)
| {
"repo_id": "apache/airflow",
"file_path": "devel-common/src/tests_common/test_utils/common_msg_queue.py",
"license": "Apache License 2.0",
"lines": 31,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/tests/unit/utils/test_strings.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow.utils.strings import to_boolean
@pytest.mark.parametrize(
("input_string", "expected_result"),
[
(" yes ", True),
(" 1\n", True),
("\tON", True),
(" no ", False),
(" 0\n", False),
("\tOFF", False),
],
)
def test_to_boolean_strips_whitespace(input_string: str, expected_result: bool) -> None:
assert to_boolean(input_string) is expected_result
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/utils/test_strings.py",
"license": "Apache License 2.0",
"lines": 32,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:scripts/ci/prek/check_schema_defaults.py | #!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# /// script
# requires-python = ">=3.10,<3.11"
# dependencies = [
# "packaging>=25",
# ]
# ///
from __future__ import annotations
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.resolve()))
from common_prek_utils import (
initialize_breeze_prek,
run_command_via_breeze_shell,
validate_cmd_result,
)
initialize_breeze_prek(__name__, __file__)
cmd_result = run_command_via_breeze_shell(
["python3", "/opt/airflow/scripts/in_container/run_schema_defaults_check.py"],
backend="sqlite",
warn_image_upgrade_needed=True,
)
validate_cmd_result(cmd_result, include_ci_env_check=True)
| {
"repo_id": "apache/airflow",
"file_path": "scripts/ci/prek/check_schema_defaults.py",
"license": "Apache License 2.0",
"lines": 39,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:scripts/in_container/run_schema_defaults_check.py | #!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Check that defaults in Schema JSON match the server-side SerializedBaseOperator defaults.
This ensures that the schema accurately reflects the actual default values
used by the server-side serialization layer.
"""
from __future__ import annotations
import json
import sys
import traceback
from datetime import timedelta
from pathlib import Path
from typing import Any
def load_schema_defaults(object_type: str = "operator") -> dict[str, Any]:
"""Load default values from the JSON schema."""
schema_path = Path("airflow-core/src/airflow/serialization/schema.json")
if not schema_path.exists():
print(f"Error: Schema file not found at {schema_path}")
sys.exit(1)
with open(schema_path) as f:
schema = json.load(f)
# Extract defaults from the specified object type definition
object_def = schema.get("definitions", {}).get(object_type, {})
properties = object_def.get("properties", {})
defaults = {}
for field_name, field_def in properties.items():
if "default" in field_def:
defaults[field_name] = field_def["default"]
return defaults
def get_server_side_operator_defaults() -> dict[str, Any]:
"""Get default values from server-side SerializedBaseOperator class."""
try:
from airflow.serialization.definitions.baseoperator import SerializedBaseOperator
# Get all serializable fields
serialized_fields = SerializedBaseOperator.get_serialized_fields()
# Field name mappings from external API names to internal class attribute names
field_mappings = {
"weight_rule": "_weight_rule",
}
server_defaults = {}
for field_name in serialized_fields:
# Use the mapped internal name if it exists, otherwise use the field name
attr_name = field_mappings.get(field_name, field_name)
if hasattr(SerializedBaseOperator, attr_name):
default_value = getattr(SerializedBaseOperator, attr_name)
# Only include actual default values, not methods/properties/descriptors
if not callable(default_value) and not isinstance(default_value, (property, type)):
if isinstance(default_value, (set, tuple)):
# Convert to list since schema.json is pure JSON
default_value = list(default_value)
elif isinstance(default_value, timedelta):
default_value = default_value.total_seconds()
server_defaults[field_name] = default_value
return server_defaults
except ImportError as e:
print(f"Error importing SerializedBaseOperator: {e}")
traceback.print_exc()
sys.exit(1)
except Exception as e:
print(f"Error getting server-side defaults: {e}")
traceback.print_exc()
sys.exit(1)
def get_server_side_dag_defaults() -> dict[str, Any]:
"""Get default values from server-side SerializedDAG class."""
try:
from airflow.serialization.definitions.dag import SerializedDAG
# DAG defaults are set in __init__, so we create a temporary instance
temp_dag = SerializedDAG(dag_id="temp")
# Get all serializable DAG fields from the server-side class
serialized_fields = SerializedDAG.get_serialized_fields()
server_defaults = {}
for field_name in serialized_fields:
if hasattr(temp_dag, field_name):
default_value = getattr(temp_dag, field_name)
# Only include actual default values that are not None, callables, or descriptors
if not callable(default_value) and not isinstance(default_value, (property, type)):
if isinstance(default_value, (set, tuple)):
# Convert to list since schema.json is pure JSON
default_value = list(default_value)
server_defaults[field_name] = default_value
return server_defaults
except ImportError as e:
print(f"Error importing SerializedDAG: {e}")
traceback.print_exc()
sys.exit(1)
except Exception as e:
print(f"Error getting server-side DAG defaults: {e}")
traceback.print_exc()
sys.exit(1)
def compare_operator_defaults() -> list[str]:
"""Compare operator schema defaults with server-side defaults and return discrepancies."""
schema_defaults = load_schema_defaults("operator")
server_defaults = get_server_side_operator_defaults()
errors = []
print(f"Found {len(schema_defaults)} operator schema defaults")
print(f"Found {len(server_defaults)} operator server-side defaults")
# Check each server default against schema
for field_name, server_value in server_defaults.items():
schema_value = schema_defaults.get(field_name)
# Check if field exists in schema
if field_name not in schema_defaults:
# Some server fields might not need defaults in schema (like None values)
if server_value is not None and server_value not in [[], {}, (), set()]:
errors.append(
f"Server field '{field_name}' has default {server_value!r} but no schema default"
)
continue
# Direct comparison - no complex normalization needed
if schema_value != server_value:
errors.append(
f"Field '{field_name}': schema default is {schema_value!r}, "
f"server default is {server_value!r}"
)
# Check for schema defaults that don't have corresponding server defaults
for field_name, schema_value in schema_defaults.items():
if field_name not in server_defaults:
# Some schema fields are structural and don't need server defaults
schema_only_fields = {
"task_type",
"_task_module",
"task_id",
"_task_display_name",
"_is_mapped",
"_is_sensor",
}
if field_name not in schema_only_fields:
errors.append(
f"Schema has default for '{field_name}' = {schema_value!r} but no corresponding server default"
)
return errors
def compare_dag_defaults() -> list[str]:
"""Compare DAG schema defaults with server-side defaults and return discrepancies."""
schema_defaults = load_schema_defaults("dag")
server_defaults = get_server_side_dag_defaults()
errors = []
print(f"Found {len(schema_defaults)} DAG schema defaults")
print(f"Found {len(server_defaults)} DAG server-side defaults")
# Check each server default against schema
for field_name, server_value in server_defaults.items():
schema_value = schema_defaults.get(field_name)
# Check if field exists in schema
if field_name not in schema_defaults:
# Some server fields don't need defaults in schema (like None values, empty collections, or computed fields)
if (
server_value is not None
and server_value not in [[], {}, (), set()]
and field_name not in ["dag_id", "dag_display_name"]
):
errors.append(
f"DAG server field '{field_name}' has default {server_value!r} but no schema default"
)
continue
# Direct comparison
if schema_value != server_value:
errors.append(
f"DAG field '{field_name}': schema default is {schema_value!r}, "
f"server default is {server_value!r}"
)
# Check for schema defaults that don't have corresponding server defaults
for field_name, schema_value in schema_defaults.items():
if field_name not in server_defaults:
# Some schema fields are computed properties (like has_on_*_callback)
computed_properties = {
"has_on_success_callback",
"has_on_failure_callback",
}
if field_name not in computed_properties:
errors.append(
f"DAG schema has default for '{field_name}' = {schema_value!r} but no corresponding server default"
)
return errors
def main():
"""Main function to run the schema defaults check."""
print("Checking schema defaults against server-side serialization classes...")
# Check Operator defaults
print("\n1. Checking Operator defaults...")
operator_errors = compare_operator_defaults()
# Check Dag defaults
print("\n2. Checking Dag defaults...")
dag_errors = compare_dag_defaults()
all_errors = operator_errors + dag_errors
if all_errors:
print("\n❌ Found discrepancies between schema and server defaults:")
for error in all_errors:
print(f" • {error}")
print()
print("To fix these issues:")
print("1. Update airflow-core/src/airflow/serialization/schema.json to match server defaults, OR")
print(
"2. Update airflow-core/src/airflow/serialization/serialized_objects.py class/init defaults to match schema"
)
sys.exit(1)
else:
print("\n✅ All schema defaults match server-side defaults!")
if __name__ == "__main__":
main()
| {
"repo_id": "apache/airflow",
"file_path": "scripts/in_container/run_schema_defaults_check.py",
"license": "Apache License 2.0",
"lines": 212,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/tests/unit/api_fastapi/core_api/services/public/test_task_instances.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow.api_fastapi.core_api.datamodels.common import BulkActionResponse, BulkBody
from airflow.api_fastapi.core_api.datamodels.task_instances import BulkTaskInstanceBody
from airflow.api_fastapi.core_api.services.public.task_instances import BulkTaskInstanceService
from airflow.providers.standard.operators.bash import BashOperator
from tests_common.test_utils.db import (
clear_db_runs,
)
pytestmark = pytest.mark.db_test
DAG_ID_1 = "TEST_DAG_1"
DAG_ID_2 = "TEST_DAG_2"
DAG_RUN_ID_1 = "TEST_DAG_RUN_1"
DAG_RUN_ID_2 = "TEST_DAG_RUN_2"
TASK_ID_1 = "TEST_TASK_1"
TASK_ID_2 = "TEST_TASK_2"
class TestTaskInstanceEndpoint:
    """Common base for the bulk task-instance service tests.

    Provides a database-cleanup helper used by subclasses' setup/teardown hooks.
    """

    @staticmethod
    def clear_db():
        """Remove all DAG runs (and their task instances) from the test database."""
        clear_db_runs()
class TestCategorizeTaskInstances(TestTaskInstanceEndpoint):
    """Tests for the categorize_task_instances method in BulkTaskInstanceService."""
    def setup_method(self):
        self.clear_db()
    def teardown_method(self):
        self.clear_db()
    # Minimal stand-in for the authenticated user object the service requires.
    class MockUser:
        def get_id(self) -> str:
            return "test_user"
        def get_name(self) -> str:
            return "test_user"
    # Each task key is a (dag_id, dag_run_id, task_id, map_index) tuple; "~" in
    # dag_id/dag_run_id is the API's wildcard selector.
    @pytest.mark.parametrize(
        (
            "dags_to_create",
            "dag_id",
            "dag_run_id",
            "task_keys",
            "expected_matched_keys",
            "expected_not_found_keys",
        ),
        [
            pytest.param(
                [(DAG_ID_1, DAG_RUN_ID_1, [TASK_ID_1, TASK_ID_2])],
                DAG_ID_1,
                DAG_RUN_ID_1,
                {(DAG_ID_1, DAG_RUN_ID_1, TASK_ID_1, -1), (DAG_ID_1, DAG_RUN_ID_1, TASK_ID_2, -1)},
                {(DAG_ID_1, DAG_RUN_ID_1, TASK_ID_1, -1), (DAG_ID_1, DAG_RUN_ID_1, TASK_ID_2, -1)},
                set(),
                id="single_dag_run_all_found",
            ),
            pytest.param(
                [(DAG_ID_1, DAG_RUN_ID_1, [TASK_ID_1, TASK_ID_2])],
                DAG_ID_1,
                DAG_RUN_ID_1,
                {(DAG_ID_1, DAG_RUN_ID_1, "nonexistent_task", -1)},
                set(),
                {(DAG_ID_1, DAG_RUN_ID_1, "nonexistent_task", -1)},
                id="single_dag_run_not_found",
            ),
            pytest.param(
                [(DAG_ID_1, DAG_RUN_ID_1, [TASK_ID_1, TASK_ID_2])],
                DAG_ID_1,
                DAG_RUN_ID_1,
                {(DAG_ID_1, DAG_RUN_ID_1, TASK_ID_1, -1), (DAG_ID_1, DAG_RUN_ID_1, TASK_ID_1, 0)},
                {(DAG_ID_1, DAG_RUN_ID_1, TASK_ID_1, -1)},
                {(DAG_ID_1, DAG_RUN_ID_1, TASK_ID_1, 0)},
                id="single_dag_run_mixed_map_index",
            ),
            pytest.param(
                [(DAG_ID_1, DAG_RUN_ID_1, [TASK_ID_1]), (DAG_ID_2, DAG_RUN_ID_2, [TASK_ID_1])],
                "~",
                "~",
                {(DAG_ID_1, DAG_RUN_ID_1, TASK_ID_1, -1), (DAG_ID_2, DAG_RUN_ID_2, TASK_ID_1, -1)},
                {(DAG_ID_1, DAG_RUN_ID_1, TASK_ID_1, -1), (DAG_ID_2, DAG_RUN_ID_2, TASK_ID_1, -1)},
                set(),
                id="wildcard_multiple_dags_all_found",
            ),
            pytest.param(
                [(DAG_ID_1, DAG_RUN_ID_1, [TASK_ID_1]), (DAG_ID_2, DAG_RUN_ID_2, [TASK_ID_1])],
                "~",
                "~",
                {
                    (DAG_ID_1, DAG_RUN_ID_1, TASK_ID_1, -1),
                    (DAG_ID_2, DAG_RUN_ID_2, "nonexistent_task", -1),
                },
                {(DAG_ID_1, DAG_RUN_ID_1, TASK_ID_1, -1)},
                {(DAG_ID_2, DAG_RUN_ID_2, "nonexistent_task", -1)},
                id="wildcard_multiple_dags_mixed",
            ),
            pytest.param(
                [(DAG_ID_1, DAG_RUN_ID_1, [TASK_ID_1, TASK_ID_2]), (DAG_ID_2, DAG_RUN_ID_2, [TASK_ID_1])],
                "~",
                "~",
                {
                    (DAG_ID_1, DAG_RUN_ID_1, TASK_ID_1, -1),
                    (DAG_ID_1, DAG_RUN_ID_1, TASK_ID_2, -1),
                    (DAG_ID_2, DAG_RUN_ID_2, TASK_ID_1, -1),
                    (DAG_ID_2, DAG_RUN_ID_2, TASK_ID_2, -1),
                },
                {
                    (DAG_ID_1, DAG_RUN_ID_1, TASK_ID_1, -1),
                    (DAG_ID_1, DAG_RUN_ID_1, TASK_ID_2, -1),
                    (DAG_ID_2, DAG_RUN_ID_2, TASK_ID_1, -1),
                },
                {(DAG_ID_2, DAG_RUN_ID_2, TASK_ID_2, -1)},
                id="wildcard_partial_match_across_dags",
            ),
        ],
    )
    def test_categorize_task_instances(
        self,
        session,
        dag_maker,
        dags_to_create,
        dag_id,
        dag_run_id,
        task_keys,
        expected_matched_keys,
        expected_not_found_keys,
    ):
        """Test categorize_task_instances with various scenarios."""
        # Create each requested DAG with its tasks and one DAG run.
        for dag_id_to_create, run_id_to_create, task_ids in dags_to_create:
            with dag_maker(dag_id=dag_id_to_create, session=session):
                for task_id in task_ids:
                    BashOperator(task_id=task_id, bash_command=f"echo {task_id}")
            dag_maker.create_dagrun(run_id=run_id_to_create)
        session.commit()
        # Build the service under test with an empty bulk request; only the
        # categorization helper is exercised here.
        user = self.MockUser()
        bulk_request = BulkBody(actions=[])
        service = BulkTaskInstanceService(
            session=session,
            request=bulk_request,
            dag_id=dag_id,
            dag_run_id=dag_run_id,
            dag_bag=dag_maker.dagbag,
            user=user,
        )
        _, matched_task_keys, not_found_task_keys = service._categorize_task_instances(task_keys)
        assert matched_task_keys == expected_matched_keys
        assert not_found_task_keys == expected_not_found_keys
class TestExtractTaskIdentifiers(TestTaskInstanceEndpoint):
    """Tests for the _extract_task_identifiers method in BulkTaskInstanceService."""
    def setup_method(self):
        self.clear_db()
    def teardown_method(self):
        self.clear_db()
    # Minimal stand-in for the authenticated user object the service requires.
    class MockUser:
        def get_id(self) -> str:
            return "test_user"
        def get_name(self) -> str:
            return "test_user"
    # Entities with dag_id/dag_run_id left as None are expected to fall back to
    # the service's path parameters (DAG_ID_1 / DAG_RUN_ID_1 below).
    # NOTE(review): the first and third cases exercise the same fallback path
    # with a different task_id only — consider consolidating.
    @pytest.mark.parametrize(
        ("entity", "expected_dag_id", "expected_dag_run_id", "expected_task_id", "expected_map_index"),
        [
            pytest.param(
                BulkTaskInstanceBody(task_id="task_1", dag_id=None, dag_run_id=None, map_index=None),
                DAG_ID_1,
                DAG_RUN_ID_1,
                "task_1",
                None,
                id="object_entity_with_none_fields",
            ),
            pytest.param(
                BulkTaskInstanceBody(task_id="task_2", dag_id=DAG_ID_2, dag_run_id=DAG_RUN_ID_2, map_index=5),
                DAG_ID_2,
                DAG_RUN_ID_2,
                "task_2",
                5,
                id="object_entity_with_all_fields",
            ),
            pytest.param(
                BulkTaskInstanceBody(task_id="task_3", dag_id=None, dag_run_id=None, map_index=None),
                DAG_ID_1,
                DAG_RUN_ID_1,
                "task_3",
                None,
                id="object_entity_fallback_to_path_params",
            ),
        ],
    )
    def test_extract_task_identifiers(
        self,
        session,
        dag_maker,
        entity,
        expected_dag_id,
        expected_dag_run_id,
        expected_task_id,
        expected_map_index,
    ):
        """Test _extract_task_identifiers with different entity configurations."""
        user = self.MockUser()
        bulk_request = BulkBody(actions=[])
        service = BulkTaskInstanceService(
            session=session,
            request=bulk_request,
            dag_id=DAG_ID_1,
            dag_run_id=DAG_RUN_ID_1,
            dag_bag=dag_maker.dagbag,
            user=user,
        )
        dag_id, dag_run_id, task_id, map_index = service._extract_task_identifiers(entity)
        assert dag_id == expected_dag_id
        assert dag_run_id == expected_dag_run_id
        assert task_id == expected_task_id
        assert map_index == expected_map_index
class TestCategorizeEntities(TestTaskInstanceEndpoint):
    """Tests for the _categorize_entities method in BulkTaskInstanceService."""
    def setup_method(self):
        self.clear_db()
    def teardown_method(self):
        self.clear_db()
    # Minimal stand-in for the authenticated user object the service requires.
    class MockUser:
        def get_id(self) -> str:
            return "test_user"
        def get_name(self) -> str:
            return "test_user"
    # Entities with an explicit map_index land in the "specific" key set
    # (4-tuples); those with map_index=None land in the "all map indexes" set
    # (3-tuples). A "~" wildcard inside an entity is rejected and counted as
    # an error rather than categorized.
    @pytest.mark.parametrize(
        (
            "entities",
            "service_dag_id",
            "service_dag_run_id",
            "expected_specific_keys",
            "expected_all_keys",
            "expected_error_count",
        ),
        [
            pytest.param(
                [
                    BulkTaskInstanceBody(
                        task_id="task_1", dag_id=DAG_ID_1, dag_run_id=DAG_RUN_ID_1, map_index=5
                    )
                ],
                DAG_ID_1,
                DAG_RUN_ID_1,
                {(DAG_ID_1, DAG_RUN_ID_1, "task_1", 5)},
                set(),
                0,
                id="single_entity_with_map_index",
            ),
            pytest.param(
                [
                    BulkTaskInstanceBody(
                        task_id="task_1", dag_id=DAG_ID_1, dag_run_id=DAG_RUN_ID_1, map_index=None
                    )
                ],
                DAG_ID_1,
                DAG_RUN_ID_1,
                set(),
                {(DAG_ID_1, DAG_RUN_ID_1, "task_1")},
                0,
                id="single_entity_without_map_index",
            ),
            pytest.param(
                [
                    BulkTaskInstanceBody(
                        task_id="task_1", dag_id=DAG_ID_1, dag_run_id=DAG_RUN_ID_1, map_index=5
                    ),
                    BulkTaskInstanceBody(
                        task_id="task_2", dag_id=DAG_ID_1, dag_run_id=DAG_RUN_ID_1, map_index=None
                    ),
                ],
                DAG_ID_1,
                DAG_RUN_ID_1,
                {(DAG_ID_1, DAG_RUN_ID_1, "task_1", 5)},
                {(DAG_ID_1, DAG_RUN_ID_1, "task_2")},
                0,
                id="mixed_entities_with_and_without_map_index",
            ),
            pytest.param(
                [
                    BulkTaskInstanceBody(
                        task_id="task_1", dag_id=DAG_ID_1, dag_run_id=DAG_RUN_ID_1, map_index=5
                    ),
                    BulkTaskInstanceBody(
                        task_id="task_1", dag_id=DAG_ID_2, dag_run_id=DAG_RUN_ID_2, map_index=10
                    ),
                ],
                DAG_ID_1,
                DAG_RUN_ID_1,
                {(DAG_ID_1, DAG_RUN_ID_1, "task_1", 5), (DAG_ID_2, DAG_RUN_ID_2, "task_1", 10)},
                set(),
                0,
                id="multiple_entities_different_dags",
            ),
            pytest.param(
                [BulkTaskInstanceBody(task_id="task_1", dag_id="~", dag_run_id=DAG_RUN_ID_1, map_index=None)],
                "~",
                "~",
                set(),
                set(),
                1,
                id="wildcard_in_dag_id_with_none_fields",
            ),
            pytest.param(
                [BulkTaskInstanceBody(task_id="task_1", dag_id=DAG_ID_1, dag_run_id="~", map_index=None)],
                "~",
                "~",
                set(),
                set(),
                1,
                id="wildcard_in_dag_run_id_with_none_fields",
            ),
            pytest.param(
                [
                    BulkTaskInstanceBody(task_id="task_1", dag_id="~", dag_run_id="~", map_index=None),
                    BulkTaskInstanceBody(
                        task_id="task_2", dag_id=DAG_ID_1, dag_run_id=DAG_RUN_ID_1, map_index=5
                    ),
                ],
                "~",
                "~",
                {(DAG_ID_1, DAG_RUN_ID_1, "task_2", 5)},
                set(),
                1,
                id="wildcard_error_and_valid_entity",
            ),
        ],
    )
    def test_categorize_entities(
        self,
        session,
        dag_maker,
        entities,
        service_dag_id,
        service_dag_run_id,
        expected_specific_keys,
        expected_all_keys,
        expected_error_count,
    ):
        """Test _categorize_entities with different entity configurations and wildcard validation."""
        user = self.MockUser()
        bulk_request = BulkBody(actions=[])
        service = BulkTaskInstanceService(
            session=session,
            request=bulk_request,
            dag_id=service_dag_id,
            dag_run_id=service_dag_run_id,
            dag_bag=dag_maker.dagbag,
            user=user,
        )
        results = BulkActionResponse()
        specific_map_index_task_keys, all_map_index_task_keys = service._categorize_entities(
            entities, results
        )
        assert specific_map_index_task_keys == expected_specific_keys
        assert all_map_index_task_keys == expected_all_keys
        assert len(results.errors) == expected_error_count
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/api_fastapi/core_api/services/public/test_task_instances.py",
"license": "Apache License 2.0",
"lines": 364,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:task-sdk/tests/task_sdk/bases/test_xcom.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
import pytest
from airflow.sdk.bases.xcom import BaseXCom
from airflow.sdk.execution_time.comms import DeleteXCom, XComResult
class TestBaseXCom:
    """Unit tests for BaseXCom's supervisor-communication behavior."""
    @pytest.mark.parametrize(
        "map_index",
        [
            pytest.param(None, id="map_index_none"),
            pytest.param(-1, id="map_index_negative_one"),
            pytest.param(0, id="map_index_zero"),
            pytest.param(5, id="map_index_positive"),
        ],
    )
    def test_delete_includes_map_index_in_delete_xcom_message(self, map_index, mock_supervisor_comms):
        """Test that BaseXCom.delete properly passes map_index to the DeleteXCom message."""
        # Stub the DB-ref lookup and purge so only the messaging path executes.
        with mock.patch.object(
            BaseXCom, "_get_xcom_db_ref", return_value=XComResult(key="test_key", value="test_value")
        ) as mock_get_ref:
            with mock.patch.object(BaseXCom, "purge") as mock_purge:
                BaseXCom.delete(
                    key="test_key",
                    task_id="test_task",
                    dag_id="test_dag",
                    run_id="test_run",
                    map_index=map_index,
                )
                mock_get_ref.assert_called_once_with(
                    key="test_key",
                    dag_id="test_dag",
                    task_id="test_task",
                    run_id="test_run",
                    map_index=map_index,
                )
                # Verify purge was called
                mock_purge.assert_called_once()
                # Verify DeleteXCom message was sent with map_index
                mock_supervisor_comms.send.assert_called_once()
                sent_message = mock_supervisor_comms.send.call_args[0][0]
                assert isinstance(sent_message, DeleteXCom)
                assert sent_message.key == "test_key"
                assert sent_message.dag_id == "test_dag"
                assert sent_message.task_id == "test_task"
                assert sent_message.run_id == "test_run"
                assert sent_message.map_index == map_index
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/tests/task_sdk/bases/test_xcom.py",
"license": "Apache License 2.0",
"lines": 62,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/api_fastapi/core_api/services/ui/task_group.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Task group utilities for UI API services."""
from __future__ import annotations
from collections.abc import Callable
from functools import cache
from operator import methodcaller
from airflow.configuration import conf
from airflow.serialization.definitions.baseoperator import SerializedBaseOperator
from airflow.serialization.definitions.mappedoperator import SerializedMappedOperator, is_mapped
@cache
def get_task_group_children_getter() -> Callable:
    """Return a callable that yields a task group's children in the configured sort order.

    The order comes from the ``[api] grid_view_sorting_order`` setting; the
    result is cached for the life of the process.
    """
    configured_order = conf.get("api", "grid_view_sorting_order")
    sort_method = (
        "topological_sort" if configured_order == "topological" else "hierarchical_alphabetical_sort"
    )
    return methodcaller(sort_method)
def task_group_to_dict(task_item_or_group, parent_group_is_mapped=False):
    """Create a nested dict representation of this TaskGroup and its children used to construct the Graph."""
    node = task_item_or_group
    if isinstance(node, (SerializedBaseOperator, SerializedMappedOperator)):
        # Use the short task ID here, not the full dotted notation if in a group.
        if node.task_display_name != node.task_id:
            label = node.task_display_name
        else:
            label = node.label
        task_node = {
            "id": node.task_id,
            "label": label,
            "operator": node.operator_name,
            "type": "task",
        }
        if node.is_setup:
            task_node["setup_teardown_type"] = "setup"
        elif node.is_teardown:
            task_node["setup_teardown_type"] = "teardown"
        if is_mapped(node) or parent_group_is_mapped:
            task_node["is_mapped"] = True
        return task_node

    group = task_item_or_group
    group_is_mapped = is_mapped(group)
    children_sorter = get_task_group_children_getter()
    children = [
        task_group_to_dict(child, parent_group_is_mapped=parent_group_is_mapped or group_is_mapped)
        for child in children_sorter(group)
    ]
    # Join nodes collapse the many edges between two TaskGroups into one.
    if group.upstream_group_ids or group.upstream_task_ids:
        children.append({"id": group.upstream_join_id, "label": "", "type": "join"})
    if group.downstream_group_ids or group.downstream_task_ids:
        children.append({"id": group.downstream_join_id, "label": "", "type": "join"})
    return {
        "id": group.group_id,
        "label": group.group_display_name or group.label,
        "tooltip": group.tooltip,
        "is_mapped": group_is_mapped,
        "children": children,
        "type": "task",
    }
def task_group_to_dict_grid(task_item_or_group, parent_group_is_mapped=False):
    """Create a nested dict representation of this TaskGroup and its children used to construct the Grid."""
    node = task_item_or_group
    if isinstance(node, (SerializedMappedOperator, SerializedBaseOperator)):
        # Grid nodes carry True or None (never False) for is_mapped.
        node_is_mapped = True if (parent_group_is_mapped or is_mapped(node)) else None
        if node.is_setup is True:
            setup_teardown_type = "setup"
        elif node.is_teardown is True:
            setup_teardown_type = "teardown"
        else:
            setup_teardown_type = None
        # Use the short task ID here, not the full dotted notation if in a group.
        if node.task_display_name != node.task_id:
            label = node.task_display_name
        else:
            label = node.label
        return {
            "id": node.task_id,
            "label": label,
            "is_mapped": node_is_mapped,
            "children": None,
            "setup_teardown_type": setup_teardown_type,
        }

    group = task_item_or_group
    group_is_mapped = is_mapped(group)
    sort_children = get_task_group_children_getter()
    children = [
        task_group_to_dict_grid(child, parent_group_is_mapped=parent_group_is_mapped or group_is_mapped)
        for child in sort_children(group)
    ]
    return {
        "id": group.group_id,
        "label": group.group_display_name or group.label,
        "is_mapped": group_is_mapped or None,
        "children": children or None,
    }
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/core_api/services/ui/task_group.py",
"license": "Apache License 2.0",
"lines": 103,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/amazon/tests/unit/amazon/aws/executors/ecs/test_boto_schema.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime
from airflow.providers.amazon.aws.executors.ecs.boto_schema import (
BotoContainerSchema,
BotoDescribeTasksSchema,
BotoFailureSchema,
BotoRunTaskSchema,
BotoTaskSchema,
)
from airflow.providers.amazon.aws.executors.ecs.utils import EcsExecutorTask
class TestBotoSchema:
    """Unit tests for the schemas that deserialize boto3 ECS API responses.

    Each schema maps the camelCase ECS fields to snake_case keys and is
    expected to silently exclude unknown fields.
    """
    def test_boto_container_schema_load(self):
        # camelCase ECS container fields are renamed to snake_case keys.
        schema = BotoContainerSchema()
        data = {
            "exitCode": 0,
            "lastStatus": "STOPPED",
            "name": "test_container",
            "reason": "Essential container in task exited",
            "containerArn": "arn:aws:ecs:us-east-1:123456789012:container/test-cluster/1234567890abcdef0",
        }
        result = schema.load(data)
        assert result["exit_code"] == 0
        assert result["last_status"] == "STOPPED"
        assert result["name"] == "test_container"
        assert result["reason"] == "Essential container in task exited"
        assert (
            result["container_arn"]
            == "arn:aws:ecs:us-east-1:123456789012:container/test-cluster/1234567890abcdef0"
        )
    def test_boto_container_schema_load_minimal(self):
        # Only "name" is provided; all optional fields come back absent/None.
        schema = BotoContainerSchema()
        data = {"name": "minimal_container"}
        result = schema.load(data)
        assert result["name"] == "minimal_container"
        assert result.get("exit_code") is None
        assert result.get("last_status") is None
        assert result.get("reason") is None
        assert result.get("container_arn") is None
    def test_boto_container_schema_exclude_unknown(self):
        # Unknown input fields must be dropped, not raise or pass through.
        schema = BotoContainerSchema()
        data = {"name": "test_container", "unknownField": "should_be_ignored"}
        result = schema.load(data)
        assert "unknownField" not in result
    def test_boto_task_schema_load(self):
        # A full task payload deserializes into an EcsExecutorTask instance.
        schema = BotoTaskSchema()
        container_data = {
            "exitCode": 0,
            "lastStatus": "STOPPED",
            "name": "test_container",
            "reason": "Essential container in task exited",
            "containerArn": "arn:aws:ecs:us-east-1:123456789012:container/test-cluster/1234567890abcdef0",
        }
        data = {
            "taskArn": "arn:aws:ecs:us-east-1:123456789012:task/test-cluster/1234567890abcdef0",
            "lastStatus": "STOPPED",
            "desiredStatus": "STOPPED",
            "containers": [container_data],
            "startedAt": datetime(2023, 1, 1),
            "stoppedReason": "Task failed to start",
        }
        result = schema.load(data)
        assert isinstance(result, EcsExecutorTask)
        assert result.task_arn == "arn:aws:ecs:us-east-1:123456789012:task/test-cluster/1234567890abcdef0"
        assert result.last_status == "STOPPED"
        assert result.desired_status == "STOPPED"
        assert len(result.containers) == 1
        assert result.containers[0]["name"] == "test_container"
        assert result.started_at == datetime(2023, 1, 1)
        assert result.stopped_reason == "Task failed to start"
    def test_boto_task_schema_load_minimal(self):
        # Optional task fields (startedAt, stoppedReason) default to None.
        schema = BotoTaskSchema()
        container_data = {"name": "minimal_container_in_task"}
        data = {
            "taskArn": "arn:aws:ecs:us-east-1:123456789012:task/test-cluster/1234567890abcdef0",
            "lastStatus": "RUNNING",
            "desiredStatus": "RUNNING",
            "containers": [container_data],
        }
        result = schema.load(data)
        assert isinstance(result, EcsExecutorTask)
        assert result.task_arn == "arn:aws:ecs:us-east-1:123456789012:task/test-cluster/1234567890abcdef0"
        assert result.last_status == "RUNNING"
        assert result.desired_status == "RUNNING"
        assert len(result.containers) == 1
        assert result.containers[0]["name"] == "minimal_container_in_task"
        assert result.started_at is None
        assert result.stopped_reason is None
    def test_boto_task_schema_exclude_unknown(self):
        schema = BotoTaskSchema()
        container_data = {"name": "test_container"}
        data = {
            "taskArn": "arn:aws:ecs:us-east-1:123456789012:task/test-cluster/1234567890abcdef0",
            "lastStatus": "RUNNING",
            "desiredStatus": "RUNNING",
            "containers": [container_data],
            "unknownTaskField": "should_be_ignored",
        }
        result = schema.load(data)
        # EcsExecutorTask doesn't store unknown fields, so we check the deserialized dict before object creation
        # by checking the raw data passed to EcsExecutorTask constructor if possible,
        # or simply ensure no error occurs and the object is created.
        # A more direct way would be to mock EcsExecutorTask and inspect its kwargs.
        # For now, we just ensure it loads without error and produces the correct type.
        assert isinstance(result, EcsExecutorTask)
    def test_boto_failure_schema_load(self):
        schema = BotoFailureSchema()
        data = {
            "arn": "arn:aws:ecs:us-east-1:123456789012:container/test-cluster/1234567890abcdef0",
            "reason": "MISSING",
        }
        result = schema.load(data)
        assert result["arn"] == "arn:aws:ecs:us-east-1:123456789012:container/test-cluster/1234567890abcdef0"
        assert result["reason"] == "MISSING"
    def test_boto_failure_schema_load_minimal(self):
        # An entirely empty failure payload loads to an empty/None-valued dict.
        schema = BotoFailureSchema()
        data = {}
        result = schema.load(data)
        assert result.get("arn") is None
        assert result.get("reason") is None
    def test_boto_failure_schema_exclude_unknown(self):
        schema = BotoFailureSchema()
        data = {"arn": "test_arn", "unknownField": "should_be_ignored"}
        result = schema.load(data)
        assert "unknownField" not in result
    def test_boto_run_task_schema_load(self):
        # run_task responses carry both successfully started tasks and failures.
        schema = BotoRunTaskSchema()
        task_data = {
            "taskArn": "arn:aws:ecs:us-east-1:123456789012:task/test-cluster/1234567890abcdef0",
            "lastStatus": "RUNNING",
            "desiredStatus": "RUNNING",
            "containers": [{"name": "test_container"}],
        }
        failure_data = {
            "arn": "arn:aws:ecs:us-east-1:123456789012:container/test-cluster/badabcdef0",
            "reason": "MISSING",
        }
        data = {"tasks": [task_data], "failures": [failure_data]}
        result = schema.load(data)
        assert len(result["tasks"]) == 1
        assert isinstance(result["tasks"][0], EcsExecutorTask)
        assert (
            result["tasks"][0].task_arn
            == "arn:aws:ecs:us-east-1:123456789012:task/test-cluster/1234567890abcdef0"
        )
        assert len(result["failures"]) == 1
        assert (
            result["failures"][0]["arn"]
            == "arn:aws:ecs:us-east-1:123456789012:container/test-cluster/badabcdef0"
        )
        assert result["failures"][0]["reason"] == "MISSING"
    def test_boto_run_task_schema_exclude_unknown(self):
        schema = BotoRunTaskSchema()
        data = {
            "tasks": [],
            "failures": [],
            "unknownRunTaskField": "should_be_ignored",
        }
        result = schema.load(data)
        assert "unknownRunTaskField" not in result
    def test_boto_describe_tasks_schema_load(self):
        # describe_tasks responses have the same tasks/failures shape.
        schema = BotoDescribeTasksSchema()
        task_data = {
            "taskArn": "arn:aws:ecs:us-east-1:123456789012:task/test-cluster/1234567890abcdef1",
            "lastStatus": "STOPPED",
            "desiredStatus": "STOPPED",
            "containers": [{"name": "another_container", "exitCode": 1}],
        }
        failure_data = {
            "arn": "arn:aws:ecs:us-east-1:123456789012:container/test-cluster/anotherbad",
            "reason": "UNABLE",
        }
        data = {"tasks": [task_data], "failures": [failure_data]}
        result = schema.load(data)
        assert len(result["tasks"]) == 1
        assert isinstance(result["tasks"][0], EcsExecutorTask)
        assert (
            result["tasks"][0].task_arn
            == "arn:aws:ecs:us-east-1:123456789012:task/test-cluster/1234567890abcdef1"
        )
        assert result["tasks"][0].containers[0]["exit_code"] == 1
        assert len(result["failures"]) == 1
        assert (
            result["failures"][0]["arn"]
            == "arn:aws:ecs:us-east-1:123456789012:container/test-cluster/anotherbad"
        )
        assert result["failures"][0]["reason"] == "UNABLE"
    def test_boto_describe_tasks_schema_exclude_unknown(self):
        schema = BotoDescribeTasksSchema()
        data = {
            "tasks": [],
            "failures": [],
            "unknownDescribeTasksField": "should_be_ignored",
        }
        result = schema.load(data)
        assert "unknownDescribeTasksField" not in result
| {
"repo_id": "apache/airflow",
"file_path": "providers/amazon/tests/unit/amazon/aws/executors/ecs/test_boto_schema.py",
"license": "Apache License 2.0",
"lines": 211,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/fab/tests/unit/fab/auth_manager/models/test_user_model.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow.providers.fab.auth_manager.models import User
pytestmark = pytest.mark.db_test
@pytest.mark.parametrize(
    ("user_id", "expected_id"),
    [(999, "999")],
)
def test_get_id_returns_str(user_id: int, expected_id: str) -> None:
    """Verify that User.get_id() returns the numeric id as its string form."""
    # Assign the id directly on a fresh model instance; no DB round-trip needed.
    user = User()
    user.id = user_id

    result = user.get_id()

    assert isinstance(result, str), f"Expected str, got {type(result)}"
    assert result == expected_id
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/tests/unit/fab/auth_manager/models/test_user_model.py",
"license": "Apache License 2.0",
"lines": 33,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/models/team.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from sqlalchemy import Column, ForeignKey, Index, String, Table, select
from sqlalchemy.orm import Mapped, mapped_column, relationship
from airflow.models.base import Base, StringID
from airflow.utils.session import NEW_SESSION, provide_session
if TYPE_CHECKING:
from sqlalchemy.orm import Session
# Association table linking DAG bundles to teams. The unique index on
# dag_bundle_name means a bundle can be associated with at most one team,
# while a team may own many bundles. Rows are removed automatically when
# either side is deleted (ondelete="CASCADE").
dag_bundle_team_association_table = Table(
    "dag_bundle_team",
    Base.metadata,
    Column(
        "dag_bundle_name",
        StringID(length=250),
        ForeignKey("dag_bundle.name", ondelete="CASCADE"),
        primary_key=True,
    ),
    Column("team_name", String(50), ForeignKey("team.name", ondelete="CASCADE"), primary_key=True),
    Index("idx_dag_bundle_team_dag_bundle_name", "dag_bundle_name", unique=True),
    Index("idx_dag_bundle_team_team_name", "team_name"),
)
class Team(Base):
    """
    Contains the list of teams defined in the environment.
    This table is only used when Airflow is run in multi-team mode.
    """
    __tablename__ = "team"
    name: Mapped[str] = mapped_column(String(50), primary_key=True)
    # Bundles owned by this team, via the dag_bundle_team association table.
    dag_bundles = relationship(
        "DagBundleModel", secondary=dag_bundle_team_association_table, back_populates="teams"
    )
    def __repr__(self):
        return f"Team(name={self.name})"
    @classmethod
    @provide_session
    def get_name_if_exists(cls, name: str, *, session: Session = NEW_SESSION) -> str | None:
        """Return name if a Team row with that name exists, otherwise None."""
        return session.scalar(select(cls.name).where(cls.name == name))
    @classmethod
    @provide_session
    def get_all_team_names(cls, session: Session = NEW_SESSION) -> set[str]:
        """
        Return a set of all team names from the database.
        This method provides a convenient way to get just the team names for validation
        purposes, such as verifying team names in executor configurations.
        :return: Set of all team names
        """
        # Use ``cls`` rather than the hard-coded class for consistency with
        # ``get_name_if_exists`` (and correct behavior in any subclass).
        return set(session.scalars(select(cls.name)).all())
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/models/team.py",
"license": "Apache License 2.0",
"lines": 65,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/tests/unit/executors/test_local_executor_check_workers.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import MagicMock
import pytest
from airflow.executors.local_executor import LocalExecutor
# Fixture providing a LocalExecutor whose multiprocessing internals are mocked
# so _check_workers can be driven without spawning real worker processes.
# NOTE(review): autouse=True looks redundant — every test in this module
# requests the fixture explicitly; confirm before removing.
@pytest.fixture(autouse=True)
def setup_executor(monkeypatch):
    executor = LocalExecutor(parallelism=2)
    executor.workers = {}
    # Replace the shared counter and queue with mocks the tests can configure.
    executor._unread_messages = MagicMock()
    executor.activity_queue = MagicMock()
    # Stub worker spawning so tests can assert whether it was requested.
    monkeypatch.setattr(executor, "_spawn_worker", MagicMock())
    return executor
def test_no_workers_on_no_work(setup_executor):
    """With no unread messages and an empty queue, no worker is spawned."""
    executor = setup_executor
    executor._unread_messages.value = 0
    executor.activity_queue.empty.return_value = True

    executor._check_workers()

    executor._spawn_worker.assert_not_called()
    assert executor.workers == {}
def test_all_workers_alive(setup_executor):
    """Live workers are left untouched when there is nothing to do."""
    executor = setup_executor
    pool = {}
    for worker_id in (1, 2):
        worker = MagicMock()
        worker.is_alive.return_value = True
        pool[worker_id] = worker
    executor.workers = pool
    executor._unread_messages.value = 0
    executor.activity_queue.empty.return_value = True

    executor._check_workers()

    for worker in pool.values():
        worker.close.assert_not_called()
    assert len(executor.workers) == 2
def test_some_workers_dead(setup_executor):
    """A dead worker is closed and pruned while a live one is kept."""
    executor = setup_executor
    dead_worker = MagicMock()
    dead_worker.is_alive.return_value = False
    live_worker = MagicMock()
    live_worker.is_alive.return_value = True
    executor.workers = {1: dead_worker, 2: live_worker}
    executor._unread_messages.value = 0
    executor.activity_queue.empty.return_value = True

    executor._check_workers()

    dead_worker.close.assert_called_once()
    live_worker.close.assert_not_called()
    assert executor.workers == {2: live_worker}
def test_all_workers_dead(setup_executor):
    """When every worker has died, all are closed and the registry empties."""
    ex = setup_executor
    dead_procs = []
    for _ in range(2):
        proc = MagicMock()
        proc.is_alive.return_value = False
        dead_procs.append(proc)
    ex.workers = dict(enumerate(dead_procs, start=1))
    ex._unread_messages.value = 0
    ex.activity_queue.empty.return_value = True

    ex._check_workers()

    for proc in dead_procs:
        proc.close.assert_called_once()
    assert ex.workers == {}
def test_outstanding_messages_and_empty_queue(setup_executor):
    """Unread messages alone (with an empty activity queue) must not trigger a spawn."""
    ex = setup_executor
    ex._unread_messages.value = 1
    ex.activity_queue.empty.return_value = True

    ex._check_workers()

    ex._spawn_worker.assert_not_called()
def test_spawn_worker_when_needed(setup_executor):
    """Pending work with no workers at all must spawn one."""
    ex = setup_executor
    ex._unread_messages.value = 1
    ex.activity_queue.empty.return_value = False
    ex.workers = {}

    ex._check_workers()

    ex._spawn_worker.assert_called()
def test_no_spawn_if_parallelism_reached(setup_executor):
    """At the parallelism cap (2 live workers), no additional worker is spawned."""
    ex = setup_executor
    ex._unread_messages.value = 2
    ex.activity_queue.empty.return_value = False
    busy_workers = {}
    for worker_id in (1, 2):
        proc = MagicMock()
        proc.is_alive.return_value = True
        busy_workers[worker_id] = proc
    ex.workers = busy_workers

    ex._check_workers()

    ex._spawn_worker.assert_not_called()
def test_spawn_worker_when_we_have_parallelism_left(setup_executor):
    """With spare parallelism and pending work, another worker is spawned."""
    ex = setup_executor
    # Simulate 4 running workers
    ex.workers = {}
    for worker_id in range(4):
        proc = MagicMock()
        proc.is_alive.return_value = True
        ex.workers[worker_id] = proc
    ex.parallelism = 5  # Allow more workers if needed
    # Simulate 4 pending tasks (equal to running workers)
    ex._unread_messages.value = 4
    ex.activity_queue.empty.return_value = False
    ex._spawn_worker.reset_mock()

    ex._check_workers()

    ex._spawn_worker.assert_called()
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/executors/test_local_executor_check_workers.py",
"license": "Apache License 2.0",
"lines": 114,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/sftp/src/airflow/providers/sftp/exceptions.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.providers.common.compat.sdk import AirflowException
class ConnectionNotOpenedException(AirflowException):
    """Raised when a connection is used before it has been opened."""
| {
"repo_id": "apache/airflow",
"file_path": "providers/sftp/src/airflow/providers/sftp/exceptions.py",
"license": "Apache License 2.0",
"lines": 20,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:scripts/in_container/run_generate_openapi_spec_providers.py | #!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import argparse
import sys
from pathlib import Path
from typing import TYPE_CHECKING, NamedTuple
from in_container_utils import console, generate_openapi_file, validate_openapi_file
from airflow.providers_manager import ProvidersManager
if TYPE_CHECKING:
from fastapi import FastAPI
class ProviderDef(NamedTuple):
    """Where a provider's OpenAPI spec lives, plus the app and prefix used to generate it."""

    # Destination path for the generated YAML spec file.
    openapi_spec_file: Path
    # FastAPI app to introspect; None when the provider exposes no app.
    app: FastAPI | None
    # URL prefix under which the app's routes are mounted (e.g. "/auth").
    prefix: str
# Make sibling in-container scripts importable when this file is run directly.
# NOTE(review): this runs after the `in_container_utils` import above — confirm
# the script is always invoked with that module already importable.
sys.path.insert(0, str(Path(__file__).parent.resolve()))
# Initialize provider configuration up front; get_providers_defs() below imports
# provider modules that presumably depend on it — verify if this ordering changes.
ProvidersManager().initialize_providers_configuration()
def get_providers_defs(provider_name: str) -> ProviderDef:
    """
    Return the spec path, FastAPI app and URL prefix for *provider_name*.

    Provider modules are imported lazily inside each branch so that only the
    selected provider's dependencies need to be installed.

    :param provider_name: one of ``"fab"``, ``"edge"`` or ``"keycloak"``.
    :raises NotImplementedError: if the provider is not supported.
    """
    if provider_name == "fab":
        from airflow.providers.fab.auth_manager.api_fastapi import __file__ as FAB_AUTHMGR_API_PATH
        from airflow.providers.fab.auth_manager.fab_auth_manager import FabAuthManager

        return ProviderDef(
            openapi_spec_file=Path(FAB_AUTHMGR_API_PATH).parent
            / "openapi"
            / "v2-fab-auth-manager-generated.yaml",
            app=FabAuthManager().get_fastapi_app(),
            prefix="/auth",
        )
    if provider_name == "edge":
        from airflow.providers.edge3 import __file__ as EDGE_PATH
        from airflow.providers.edge3.worker_api.app import create_edge_worker_api_app

        # Ensure dist exists on a clean git checkout...
        (Path(EDGE_PATH).parent / "plugins" / "www" / "dist").mkdir(parents=True, exist_ok=True)
        return ProviderDef(
            openapi_spec_file=Path(EDGE_PATH).parent / "worker_api" / "v2-edge-generated.yaml",
            app=create_edge_worker_api_app(),
            prefix="/edge_worker",
        )
    if provider_name == "keycloak":
        from airflow.providers.keycloak.auth_manager import __file__ as KEYCLOAK_AUTHMGR_PATH
        from airflow.providers.keycloak.auth_manager.keycloak_auth_manager import KeycloakAuthManager

        return ProviderDef(
            openapi_spec_file=Path(KEYCLOAK_AUTHMGR_PATH).parent
            / "openapi"
            / "v2-keycloak-auth-manager-generated.yaml",
            app=KeycloakAuthManager().get_fastapi_app(),
            prefix="/auth",
        )
    raise NotImplementedError(f"Provider '{provider_name}' is not supported.")
# Generate the OpenAPI spec for the selected provider
def generate_openapi_specs(provider_name: str):
    """
    Generate and validate the OpenAPI spec file for *provider_name*.

    Exits the process with status 1 when the provider exposes no FastAPI app.

    :param provider_name: name of the provider (``"fab"``, ``"edge"`` or ``"keycloak"``).
    :raises NotImplementedError: propagated from get_providers_defs() for unknown providers.
    """
    # get_providers_defs() either returns a ProviderDef or raises NotImplementedError,
    # so the previous `if provider_def is None` branch was unreachable and is removed.
    provider_def = get_providers_defs(provider_name)
    app = provider_def.app
    openapi_spec_file = provider_def.openapi_spec_file
    if app:
        generate_openapi_file(app=app, file_path=openapi_spec_file, prefix=provider_def.prefix)
        validate_openapi_file(openapi_spec_file)
    else:
        console.print(
            f"[red]Provider '{provider_name}' has no FastAPI app. Skipping OpenAPI spec generation.[/]"
        )
        sys.exit(1)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Generate openapi-spec for the specified provider.")
parser.add_argument(
"provider",
type=str,
help="The name of the provider whose openapi-spec should be compiled.",
choices=["fab", "edge", "keycloak"],
)
args = parser.parse_args()
generate_openapi_specs(args.provider)
| {
"repo_id": "apache/airflow",
"file_path": "scripts/in_container/run_generate_openapi_spec_providers.py",
"license": "Apache License 2.0",
"lines": 90,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/src/airflow/sdk/module_loading.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.sdk._shared.module_loading import ( # noqa: F401
import_string,
is_valid_dotpath,
iter_namespace,
qualname,
)
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/module_loading.py",
"license": "Apache License 2.0",
"lines": 23,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:task-sdk/tests/task_sdk/definitions/test_module_loading.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/tests/task_sdk/definitions/test_module_loading.py",
"license": "Apache License 2.0",
"lines": 17,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_09_23.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from cadwyn import VersionChange, schema
from airflow.api_fastapi.execution_api.datamodels.taskinstance import TaskInstance
class AddDagVersionIdField(VersionChange):
    """Add the `dag_version_id` field to the TaskInstance model."""

    # The class docstring doubles as the human-readable change description.
    description = __doc__
    # Downgrade instruction: in the previous API version the field did not exist.
    instructions_to_migrate_to_previous_version = (schema(TaskInstance).field("dag_version_id").didnt_exist,)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_09_23.py",
"license": "Apache License 2.0",
"lines": 23,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/src/airflow/models/referencemixin.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol, runtime_checkable
if TYPE_CHECKING:
from collections.abc import Iterable
from airflow.serialization.definitions.mappedoperator import Operator
# runtime_checkable allows isinstance() checks against this Protocol (such checks
# only verify method presence, not signatures).
@runtime_checkable
class ReferenceMixin(Protocol):
    """
    Mixin for things that references a task.
    This should be implemented by things that reference operators and use them
    to lazily resolve values at runtime. The most prominent examples are XCom
    references (XComArg).
    This is a partial interface to the SDK's ResolveMixin with the resolve()
    method removed since the scheduler should not need to resolve the reference.
    """

    def iter_references(self) -> Iterable[tuple[Operator, str]]:
        """
        Find underlying XCom references this contains.
        This is used by the DAG parser to recursively find task dependencies.
        :meta private:
        """
        raise NotImplementedError
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/models/referencemixin.py",
"license": "Apache License 2.0",
"lines": 38,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:dev/breeze/src/airflow_breeze/utils/workflow_status.py | #!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# /// script
# requires-python = ">=3.10"
# dependencies = [
# "rich>=13.6.0",
# ]
# ///
from __future__ import annotations
import json
import os
import subprocess
import sys
from rich.console import Console
console = Console(width=400, color_system="standard")
def workflow_status(
    branch: str,
    workflow_id: str,
) -> list[dict]:
    """
    Return the most recent GitHub Actions run for *workflow_id* on *branch*.

    Shells out to the ``gh`` CLI; exits with status 1 when the call fails or no
    runs are found.

    :param branch: branch the workflow ran on.
    :param workflow_id: workflow file name or numeric workflow id.
    :return: list with a single ``{"conclusion": ..., "url": ...}`` entry.
    """
    cmd = [
        "gh",
        "run",
        "list",
        "--workflow",
        workflow_id,
        "--branch",
        branch,
        "--limit",
        "1",  # Limit to the most recent run
        "--repo",
        "apache/airflow",
        "--json",
        "conclusion,url",
    ]
    # text=True decodes stdout/stderr to str; previously the error path printed
    # str() of a bytes object, rendering an unreadable b'...' repr.
    result = subprocess.run(
        cmd,
        capture_output=True,
        text=True,
        check=False,
    )
    if result.returncode != 0:
        console.print(f"[red]Error fetching workflow run ID: {result.stderr}[/red]")
        sys.exit(1)
    runs_data = result.stdout.strip()
    if not runs_data:
        console.print("[red]No workflow runs found.[/red]")
        sys.exit(1)
    run_info = json.loads(runs_data)
    console.print(f"[blue]Workflow status for {workflow_id}: {run_info}[/blue]")
    return run_info
if __name__ == "__main__":
branch = os.environ.get("workflow_branch")
workflow_id = os.environ.get("workflow_id")
if not branch or not workflow_id:
console.print("[red]Both workflow-branch and workflow-id environment variables must be set.[/red]")
sys.exit(1)
console.print(f"[blue]Checking workflow status for branch: {branch}, workflow_id: {workflow_id}[/blue]")
data: list[dict] = workflow_status(branch, workflow_id)
conclusion = data[0].get("conclusion")
url = data[0].get("url")
if os.environ.get("GITHUB_OUTPUT") is None:
console.print("[red]GITHUB_OUTPUT environment variable is not set. Cannot write output.[/red]")
sys.exit(1)
with open(os.environ["GITHUB_OUTPUT"], "a") as f:
f.write(f"conclusion={conclusion}\n")
f.write(f"run-url={url}\n")
| {
"repo_id": "apache/airflow",
"file_path": "dev/breeze/src/airflow_breeze/utils/workflow_status.py",
"license": "Apache License 2.0",
"lines": 81,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:dev/breeze/src/airflow_breeze/utils/docs_version_validation.py | #!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import os
import sys
from pathlib import Path
from rich.console import Console
from airflow_breeze.global_constants import PACKAGES_METADATA_EXCLUDE_NAMES
console = Console(color_system="standard")
# Root of the published docs archive, taken from the environment and normalized
# below so it always points at the docs-archive/ subdirectory.
AIRFLOW_SITE_DIRECTORY = os.environ.get("AIRFLOW_SITE_DIRECTORY")
# Accumulated validation errors; populated by get_all_versions(), reported and
# cleared-by-tests via validate_docs_version().
error_versions: list[str] = []
if AIRFLOW_SITE_DIRECTORY and "docs-archive" not in AIRFLOW_SITE_DIRECTORY:
    AIRFLOW_SITE_DIRECTORY = os.path.join(Path(AIRFLOW_SITE_DIRECTORY), "docs-archive")
def validate_docs_version() -> None:
    """
    Validate the versions of documentation packages in the specified directory.
    This script checks the versions of documentation packages in the published directory
    when we publish and add back-references to the documentation. The directory is expected to be structured like:
    docs-archive/
        apache-airflow/
            1.10.0/
            stable/
            stable.txt
        apache-airflow-providers-standard/
            2.0.0/
            stable/
            stable.txt
    If anything is found apart from the expected structure, it will cause errors in redirect URLs or in
    publishing the documentation to S3.
    """
    # NOTE: unit tests patch os.listdir/os.path.join with positional side_effect
    # sequences — keep the order and number of these calls stable.
    doc_packages = os.listdir(AIRFLOW_SITE_DIRECTORY)
    if not doc_packages:
        console.print("[red]No documentation packages found in the specified directory.[/red]")
        return
    package_version_map = {}
    for package in doc_packages:
        if package in PACKAGES_METADATA_EXCLUDE_NAMES:
            console.print(f"[yellow]Skipping excluded package: {package}[/yellow]")
            continue
        package_path = os.path.join(str(AIRFLOW_SITE_DIRECTORY), package)
        # "stable"/"stable.txt" are pointers to the latest release, not version folders.
        versions = [v for v in os.listdir(package_path) if v != "stable" and v != "stable.txt"]
        if versions:
            package_version_map[package] = get_all_versions(package, versions)
    # get_all_versions() records unparseable version folders in the module-level
    # error_versions list; any entry there fails the validation run.
    if error_versions:
        console.print("[red]Errors found in version validation:[/red]")
        for error in error_versions:
            console.print(f"[red]{error}[/red]")
        console.print(
            "[blue]These errors could be due to invalid redirects present in the doc packages.[/blue]"
        )
        sys.exit(1)
    console.print("[green]All versions validated successfully![/green]")
    console.print(f"[blue] {json.dumps(package_version_map, indent=2)} [/blue]")
def get_all_versions(package_name: str, versions: list[str]) -> list[str]:
    """Return the parseable versions of *package_name* sorted ascending.

    Folder names that are not valid versions are recorded in the module-level
    ``error_versions`` list instead of raising.
    """
    from packaging.version import Version

    parseable: list[str] = []
    for candidate in versions:
        try:
            Version(candidate)
        except ValueError as exc:
            error_versions.append(f"{exc} found under doc folder {package_name}")
        else:
            parseable.append(candidate)
    return sorted(parseable, key=Version)
if __name__ == "__main__":
console.print("[blue]Validating documentation versions...[/blue]")
if AIRFLOW_SITE_DIRECTORY is None:
console.print(
"[red]AIRFLOW_SITE_DIRECTORY environment variable is not set. "
"Please set it to the directory containing the Airflow site files.[red]"
)
sys.exit(1)
validate_docs_version()
| {
"repo_id": "apache/airflow",
"file_path": "dev/breeze/src/airflow_breeze/utils/docs_version_validation.py",
"license": "Apache License 2.0",
"lines": 90,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:dev/breeze/tests/test_docs_version_validation.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from unittest.mock import patch
import pytest
from airflow_breeze.utils.docs_version_validation import error_versions, validate_docs_version
class TestValidateDocsVersion:
    """Exercise validate_docs_version() against mocked directory listings."""

    def setup_method(self):
        # Point the validator at a fake archive and reset accumulated errors.
        os.environ["AIRFLOW_SITE_DIRECTORY"] = "/path/to/docs-archive"
        error_versions.clear()

    def test_validate_docs_version_with_invalid_versions(self):
        with patch("os.listdir") as mock_listdir, patch("os.path.join") as mock_path_join:
            mock_listdir.side_effect = [
                ["apache-airflow", "apache-airflow-providers-google"],
                ["1.10.0", "stable", "invalid_version"],
                ["2.0.0", "stable", "stable.txt"],
            ]
            mock_path_join.side_effect = lambda *parts: "/".join(parts)
            with pytest.raises(SystemExit):
                validate_docs_version()
        assert "Invalid version: 'invalid_version' found under doc folder apache-airflow" in error_versions

    def test_validate_docs_version_with_valid_versions(self):
        with patch("os.listdir") as mock_listdir, patch("os.path.join") as mock_path_join:
            mock_listdir.side_effect = [
                ["apache-airflow", "apache-airflow-providers-standard"],
                ["1.10.0", "stable"],
                ["2.0.0", "stable", "stable.txt"],
            ]
            mock_path_join.side_effect = lambda *parts: "/".join(parts)
            validate_docs_version()
        assert not error_versions, f"No errors should be found for valid versions, {error_versions}"
| {
"repo_id": "apache/airflow",
"file_path": "dev/breeze/tests/test_docs_version_validation.py",
"license": "Apache License 2.0",
"lines": 48,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:devel-common/src/tests_common/test_utils/stream_capture_manager.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from contextlib import ExitStack, redirect_stderr, redirect_stdout
class StreamCaptureManager(ExitStack):
    """Context manager class for capturing stdout and/or stderr while isolating from Logger output.

    Subclasses ExitStack so that stream redirection, logging-handler restoration
    and final-value snapshots can all be registered as exit callbacks (which run
    LIFO on exit).
    """

    def __init__(self, capture_stdout=True, capture_stderr=False):
        """Create a capture manager.

        :param capture_stdout: capture text written to ``sys.stdout``.
        :param capture_stderr: capture text written to ``sys.stderr``.
        """
        super().__init__()
        from io import StringIO

        self.capture_stdout = capture_stdout
        self.capture_stderr = capture_stderr
        # Live buffers used while inside the context; None for uncaptured streams.
        self._stdout_buffer = StringIO() if capture_stdout else None
        self._stderr_buffer = StringIO() if capture_stderr else None
        # Snapshots taken on exit so the captured text remains readable after
        # the context has closed (even if the buffers become unusable).
        self._stdout_final = ""
        self._stderr_final = ""
        self._in_context = False
        # Store original streams
        self._original_stdout = None
        self._original_stderr = None

    @property
    def stdout(self) -> str:
        """Get captured stdout content."""
        # Inside the context read the live buffer; afterwards fall back to the
        # snapshot taken during exit.
        if self._in_context and self._stdout_buffer and not self._stdout_buffer.closed:
            return self._stdout_buffer.getvalue()
        return self._stdout_final

    @property
    def stderr(self) -> str:
        """Get captured stderr content."""
        if self._in_context and self._stderr_buffer and not self._stderr_buffer.closed:
            return self._stderr_buffer.getvalue()
        return self._stderr_final

    def getvalue(self) -> str:
        """Get captured content. For backward compatibility, returns stdout by default."""
        return self.stdout if self.capture_stdout else self.stderr

    def get_combined(self) -> str:
        """Get combined stdout and stderr content."""
        parts = []
        if self.capture_stdout:
            parts.append(self.stdout)
        if self.capture_stderr:
            parts.append(self.stderr)
        return "".join(parts)

    def splitlines(self) -> list[str]:
        """Split captured content into lines."""
        content = self.getvalue()
        if not content:
            return [""]  # Return list with empty string to avoid IndexError
        return content.splitlines()

    def __enter__(self):
        import logging
        import sys

        # Set up context managers for redirection
        # NOTE(review): this extra ExitStack appears unused — self (already an
        # ExitStack) is what the enter_context/callback calls below use.
        self._context_manager = ExitStack()
        self._in_context = True
        # Setup logging isolation
        root_logger = logging.getLogger()
        original_handlers = list(root_logger.handlers)

        def reset_handlers():
            # Restore the root logger's original handler list on exit.
            # NOTE(review): relies on the private logging._lock internal —
            # confirm it exists on all supported Python versions.
            from logging import _lock

            root_logger = logging.getLogger()
            with _lock:
                root_logger.handlers = original_handlers

        self.callback(reset_handlers)
        # Remove stream handlers that would interfere with capture
        handlers_to_remove = []
        for handler in original_handlers:
            if isinstance(handler, logging.StreamHandler):
                if self.capture_stdout and handler.stream == sys.stdout:
                    handlers_to_remove.append(handler)
                elif self.capture_stderr and handler.stream == sys.stderr:
                    handlers_to_remove.append(handler)
        for handler in handlers_to_remove:
            root_logger.removeHandler(handler)

        def final_value(buffer, attrname):
            # Snapshot the buffer contents into the *_final attribute; tolerate
            # buffers that were closed or replaced in the meantime.
            if buffer:
                try:
                    val = buffer.getvalue()
                except (ValueError, AttributeError):
                    val = ""
                setattr(self, attrname, val)

        # Exit order (LIFO): _in_context flips to False first, then each buffer
        # is snapshotted before its redirect is unwound, and the root logger's
        # handlers are restored last.
        if self.capture_stdout:
            self._stdout_redirect = redirect_stdout(self._stdout_buffer)
            self.enter_context(self._stdout_redirect)
            self.callback(final_value, buffer=self._stdout_buffer, attrname="_stdout_final")
        if self.capture_stderr:
            self._stderr_redirect = redirect_stderr(self._stderr_buffer)
            self.enter_context(self._stderr_redirect)
            self.callback(final_value, buffer=self._stderr_buffer, attrname="_stderr_final")
        self.callback(setattr, self, "_in_context", False)
        return super().__enter__()
# Convenience classes
class StdoutCaptureManager(StreamCaptureManager):
    """Capture stdout only; stderr passes through untouched."""

    def __init__(self):
        super().__init__(capture_stdout=True, capture_stderr=False)
class StderrCaptureManager(StreamCaptureManager):
    """Capture stderr only; stdout passes through untouched."""

    def __init__(self):
        super().__init__(capture_stdout=False, capture_stderr=True)
class CombinedCaptureManager(StreamCaptureManager):
    """Capture stdout and stderr simultaneously."""

    def __init__(self):
        super().__init__(capture_stdout=True, capture_stderr=True)
| {
"repo_id": "apache/airflow",
"file_path": "devel-common/src/tests_common/test_utils/stream_capture_manager.py",
"license": "Apache License 2.0",
"lines": 118,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:devel-common/tests/unit/tests_common/test_utils/test_stream_capture_manager.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Unit tests for the StreamCaptureManager class used in Airflow CLI tests."""
from __future__ import annotations
import logging
import sys
import pytest
def test_stdout_only(stdout_capture):
"""Test capturing stdout only."""
with stdout_capture as capture:
print("Hello stdout")
print("Error message", file=sys.stderr)
# Access during context
assert "Hello stdout" in capture.getvalue()
assert "Error message" not in capture.getvalue()
def test_stderr_only(stderr_capture):
"""Test capturing stderr only."""
with stderr_capture as capture:
print("Hello stdout")
print("Error message", file=sys.stderr)
assert "Error message" in capture.getvalue()
assert "Hello stdout" not in capture.getvalue()
def test_combined(combined_capture):
"""Test capturing both streams."""
with combined_capture as capture:
print("Hello stdout")
print("Error message", file=sys.stderr)
assert "Hello stdout" in capture.stdout
assert "Error message" in capture.stderr
assert "Hello stdout" in capture.get_combined()
assert "Error message" in capture.get_combined()
def test_configurable(stream_capture):
"""Test with configurable capture."""
# Capture both
with stream_capture(stdout=True, stderr=True) as capture:
print("stdout message")
print("stderr message", file=sys.stderr)
assert "stdout message" in capture.stdout
assert "stderr message" in capture.stderr
# Capture stderr only
with stream_capture(stdout=False, stderr=True) as capture:
print("stdout message")
print("stderr message", file=sys.stderr)
assert capture.stdout == ""
assert "stderr message" in capture.stderr
# ============== Tests for Logging Isolation ==============
def test_stdout_logging_isolation(stdout_capture):
"""Test that logging to stdout is isolated from captured output."""
# Set up a logger that writes to stdout
logger = logging.getLogger("test_stdout_logger")
logger.setLevel(logging.INFO)
# Create handler that writes to stdout
stdout_handler = logging.StreamHandler(sys.stdout)
stdout_handler.setFormatter(logging.Formatter("%(levelname)s - %(message)s"))
logger.addHandler(stdout_handler)
try:
with stdout_capture as capture:
# Regular print should be captured
print("Regular print to stdout")
# Logging should NOT be captured (isolated)
logger.info("This is a log message to stdout")
logger.warning("This is a warning to stdout")
# Another regular print
print("Another regular print")
output = capture.getvalue()
# Regular prints should be in output
assert "Regular print to stdout" in output
assert "Another regular print" in output
# Log messages should NOT be in output
assert "This is a log message to stdout" not in output
assert "This is a warning to stdout" not in output
assert "INFO" not in output
assert "WARNING" not in output
finally:
# Clean up
logger.removeHandler(stdout_handler)
def test_stderr_logging_isolation(stderr_capture):
    """Log records routed to stderr must not appear in the captured stderr."""
    # A dedicated logger whose handler writes straight to sys.stderr.
    err_logger = logging.getLogger("test_stderr_logger")
    err_logger.setLevel(logging.INFO)
    handler = logging.StreamHandler(sys.stderr)
    handler.setFormatter(logging.Formatter("%(levelname)s - %(message)s"))
    err_logger.addHandler(handler)
    try:
        with stderr_capture as cap:
            print("Regular print to stderr", file=sys.stderr)
            # These records go through logging and must be kept out of the capture.
            err_logger.error("This is an error log to stderr")
            err_logger.critical("This is a critical log to stderr")
            print("Another stderr print", file=sys.stderr)
        text = cap.getvalue()
        # Plain prints are captured ...
        assert "Regular print to stderr" in text
        assert "Another stderr print" in text
        # ... log records and their formatting are not.
        assert "This is an error log to stderr" not in text
        assert "This is a critical log to stderr" not in text
        assert "ERROR" not in text
        assert "CRITICAL" not in text
    finally:
        err_logger.removeHandler(handler)
def test_combined_logging_isolation(combined_capture):
    """Logging stays isolated when stdout and stderr are captured together."""
    # One logger per stream, each with its own tagged formatter.
    out_logger = logging.getLogger("test_combined_stdout")
    err_logger = logging.getLogger("test_combined_stderr")
    out_handler = logging.StreamHandler(sys.stdout)
    err_handler = logging.StreamHandler(sys.stderr)
    out_handler.setFormatter(logging.Formatter("[STDOUT LOG] %(message)s"))
    err_handler.setFormatter(logging.Formatter("[STDERR LOG] %(message)s"))
    for lg, hd in ((out_logger, out_handler), (err_logger, err_handler)):
        lg.setLevel(logging.INFO)
        lg.addHandler(hd)
    try:
        with combined_capture as cap:
            print("Regular stdout")
            print("Regular stderr", file=sys.stderr)
            # Log records on either stream must stay out of the capture.
            out_logger.info("Log to stdout")
            err_logger.error("Log to stderr")
        # stdout side: prints in, log records out.
        assert "Regular stdout" in cap.stdout
        assert "Log to stdout" not in cap.stdout
        assert "[STDOUT LOG]" not in cap.stdout
        # stderr side: same contract.
        assert "Regular stderr" in cap.stderr
        assert "Log to stderr" not in cap.stderr
        assert "[STDERR LOG]" not in cap.stderr
        # Combined view carries only the plain prints.
        combined_text = cap.get_combined()
        assert "Regular stdout" in combined_text
        assert "Regular stderr" in combined_text
        assert "Log to stdout" not in combined_text
        assert "Log to stderr" not in combined_text
    finally:
        out_logger.removeHandler(out_handler)
        err_logger.removeHandler(err_handler)
def test_root_logger_isolation(stdout_capture):
    """Root-logger records must not leak into captured stdout."""
    root = logging.getLogger()
    saved_level = root.level  # restore afterwards so other tests are unaffected
    root.setLevel(logging.DEBUG)
    # Give the root logger a stdout handler so there is something to isolate.
    root_handler = logging.StreamHandler(sys.stdout)
    root_handler.setFormatter(logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
    root.addHandler(root_handler)
    try:
        with stdout_capture as cap:
            print("Before logging")
            # Exercise every level explicitly on the root logger.
            root.debug("Debug message")
            root.info("Info message")
            root.warning("Warning message")
            root.error("Error message")
            print("After logging")
        text = cap.getvalue()
        # Only the plain prints are captured ...
        assert "Before logging" in text
        assert "After logging" in text
        # ... none of the records, level names, or formatter tokens leak through.
        for leaked in (
            "Debug message",
            "Info message",
            "Warning message",
            "Error message",
            "DEBUG",
            "INFO",
            "WARNING",
            "ERROR",
            "%(asctime)s",
        ):
            assert leaked not in text
    finally:
        root.removeHandler(root_handler)
        root.setLevel(saved_level)
def test_mixed_output_ordering(stdout_capture):
    """Order of regular prints is preserved when logging is interleaved."""
    ordering_logger = logging.getLogger("test_ordering")
    ordering_logger.setLevel(logging.INFO)
    log_handler = logging.StreamHandler(sys.stdout)
    log_handler.setFormatter(logging.Formatter("LOG: %(message)s"))
    ordering_logger.addHandler(log_handler)
    try:
        with stdout_capture as cap:
            # Alternate prints and log calls to stress ordering.
            print("1. First print")
            ordering_logger.info("Should not appear 1")
            print("2. Second print")
            ordering_logger.info("Should not appear 2")
            print("3. Third print")
        text = cap.getvalue()
        printed_lines = text.strip().split("\n")
    finally:
        ordering_logger.removeHandler(log_handler)
    # Exactly the three prints, in the order they were issued.
    assert len(printed_lines) == 3
    assert printed_lines == ["1. First print", "2. Second print", "3. Third print"]
    # Nothing from the logger made it in.
    assert "Should not appear" not in text
    assert "LOG:" not in text
def test_handler_restoration(stdout_capture):
    """Root-logger stdout handlers are removed during capture and restored after."""
    root_logger = logging.getLogger()
    # Probe handler on the root logger — the thing we expect to be swapped out.
    probe = logging.StreamHandler(sys.stdout)
    probe.setFormatter(logging.Formatter("ROOT: %(message)s"))
    root_logger.addHandler(probe)
    # A named (non-root) logger keeps its own stdout handler throughout.
    named_logger = logging.getLogger("test_restoration")
    named_logger.setLevel(logging.INFO)
    named_handler = logging.StreamHandler(sys.stdout)
    named_handler.setFormatter(logging.Formatter("TEST: %(message)s"))
    named_logger.addHandler(named_handler)
    try:
        handlers_before = list(root_logger.handlers)
        assert probe in handlers_before
        with stdout_capture:
            print("Inside capture")
            named_logger.info("Log inside capture")
            # While capturing, the probe must have been detached from root.
            handlers_during = list(root_logger.handlers)
            assert probe not in handlers_during, (
                "Test handler should be removed during capture"
            )
            # The non-root logger is untouched.
            assert named_handler in named_logger.handlers
        # After the context exits, the root handler set is back to its old self.
        handlers_after = list(root_logger.handlers)
        assert probe in handlers_after, "Test handler should be restored after capture"
        assert len(handlers_after) == len(handlers_before), (
            f"Handler count mismatch. Initial: {len(handlers_before)}, Final: {len(handlers_after)}"
        )
    finally:
        root_logger.removeHandler(probe)
        named_logger.removeHandler(named_handler)
def test_multiple_loggers_isolation(stream_capture):
    """Isolation holds with several loggers writing to different streams."""
    # Three loggers: two target stdout, one targets stderr.
    logger_app = logging.getLogger("app")
    logger_db = logging.getLogger("database")
    logger_api = logging.getLogger("api")
    handler_app = logging.StreamHandler(sys.stdout)
    handler_db = logging.StreamHandler(sys.stderr)
    handler_api = logging.StreamHandler(sys.stdout)
    handler_app.setFormatter(logging.Formatter("[APP] %(message)s"))
    handler_db.setFormatter(logging.Formatter("[DB] %(message)s"))
    handler_api.setFormatter(logging.Formatter("[API] %(message)s"))
    pairs = ((logger_app, handler_app), (logger_db, handler_db), (logger_api, handler_api))
    for lg, hd in pairs:
        lg.addHandler(hd)
        lg.setLevel(logging.INFO)
    try:
        with stream_capture(stdout=True, stderr=True) as cap:
            print("Starting application")
            print("Database connection error", file=sys.stderr)
            # Log traffic from all three loggers must be kept out of the capture.
            logger_app.info("App initialized")
            logger_db.error("Connection failed")
            logger_api.info("API server started")
            print("Application ready")
            print("Check error log", file=sys.stderr)
        # stdout: plain prints only, no log tags or messages.
        assert "Starting application" in cap.stdout
        assert "Application ready" in cap.stdout
        assert "[APP]" not in cap.stdout
        assert "[API]" not in cap.stdout
        assert "App initialized" not in cap.stdout
        assert "API server started" not in cap.stdout
        # stderr: same contract.
        assert "Database connection error" in cap.stderr
        assert "Check error log" in cap.stderr
        assert "[DB]" not in cap.stderr
        assert "Connection failed" not in cap.stderr
    finally:
        for lg, hd in pairs:
            lg.removeHandler(hd)
def test_exception_during_capture_preserves_isolation(stdout_capture):
    """Logging isolation must hold even when the capture body raises."""
    exc_logger = logging.getLogger("test_exception")
    exc_logger.setLevel(logging.INFO)
    exc_handler = logging.StreamHandler(sys.stdout)
    exc_handler.setFormatter(logging.Formatter("LOG: %(message)s"))
    exc_logger.addHandler(exc_handler)
    snapshot = None  # capture contents taken just before the exception fires
    try:
        with stdout_capture as cap:
            print("Before exception")
            exc_logger.info("Log before exception")
            # Verify isolation before blowing up.
            snapshot = cap.getvalue()
            assert "Before exception" in snapshot
            assert "Log before exception" not in snapshot
            raise ValueError("Test exception")
    except ValueError:
        pass  # the raise above is expected
    # The pre-exception snapshot must still reflect correct isolation.
    assert snapshot is not None
    assert "Before exception" in snapshot
    assert "Log before exception" not in snapshot
    # Root-logger handlers are still accessible after the failed context.
    restored = list(logging.getLogger().handlers)
    assert isinstance(restored, list)
    exc_logger.removeHandler(exc_handler)
def test_exception_during_capture_with_pytest_raises(stdout_capture):
    """Capture contexts remain usable before, during, and after an exception."""
    logger = logging.getLogger("test_exception_pytest")
    logger.setLevel(logging.INFO)
    log_handler = logging.StreamHandler(sys.stdout)
    log_handler.setFormatter(logging.Formatter("LOG: %(message)s"))
    logger.addHandler(log_handler)
    try:
        # Pass 1: normal capture with isolation intact.
        with stdout_capture as cap:
            print("Before exception")
            logger.info("Log before exception")
        first = cap.getvalue()
        assert "Before exception" in first
        assert "Log before exception" not in first
        # Pass 2: an exception raised inside the context propagates cleanly.
        with pytest.raises(ValueError, match="Test exception"):
            with stdout_capture:
                raise ValueError("Test exception")
        # Pass 3: the fixture is still usable after the exception.
        with stdout_capture as cap:
            print("After exception test")
        assert "After exception test" in cap.getvalue()
    finally:
        logger.removeHandler(log_handler)
| {
"repo_id": "apache/airflow",
"file_path": "devel-common/tests/unit/tests_common/test_utils/test_stream_capture_manager.py",
"license": "Apache License 2.0",
"lines": 372,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/google/src/airflow/providers/google/common/utils/get_secret.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from google.cloud.exceptions import NotFound
from airflow.providers.google.cloud.hooks.secret_manager import (
GoogleCloudSecretManagerHook,
)
def get_secret(secret_id: str) -> str:
    """Fetch the payload of a Google Cloud Secret Manager secret.

    :param secret_id: ID of the secret to read.
    :return: The secret payload decoded as a string.
    :raises NotFound: If no secret with ``secret_id`` exists.
    """
    hook = GoogleCloudSecretManagerHook()
    if hook.secret_exists(secret_id=secret_id):
        return hook.access_secret(secret_id=secret_id).payload.data.decode()
    # Bug fix: the message previously used logging-style lazy args
    # ("The secret '%s' not found", secret_id) which exceptions never
    # interpolate — the id was silently passed as the NotFound `errors`
    # argument. Format the message eagerly instead.
    raise NotFound(f"The secret '{secret_id}' not found")
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/src/airflow/providers/google/common/utils/get_secret.py",
"license": "Apache License 2.0",
"lines": 26,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/google/tests/unit/google/common/utils/test_get_secret.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
import pytest
from google.cloud.exceptions import NotFound
from airflow.providers.google.common.utils.get_secret import get_secret
class TestGetSecret:
    """Unit tests for the ``get_secret`` helper."""

    @mock.patch("airflow.providers.google.common.utils.get_secret.GoogleCloudSecretManagerHook")
    def test_get_secret_success(self, mock_hook_class):
        """An existing secret is accessed, decoded, and returned."""
        hook = mock.Mock()
        mock_hook_class.return_value = hook
        hook.secret_exists.return_value = True
        # Build the payload chain: response.payload.data.decode() -> value.
        payload = mock.Mock()
        payload.data.decode.return_value = "test_secret_value"
        response = mock.Mock()
        response.payload = payload
        hook.access_secret.return_value = response

        result = get_secret(secret_id="test-secret-id")

        mock_hook_class.assert_called_once()
        hook.secret_exists.assert_called_once_with(secret_id="test-secret-id")
        hook.access_secret.assert_called_once_with(secret_id="test-secret-id")
        assert result == "test_secret_value"

    @mock.patch("airflow.providers.google.common.utils.get_secret.GoogleCloudSecretManagerHook")
    def test_get_secret_not_found(self, mock_hook_class):
        """A missing secret raises NotFound and never touches access_secret."""
        hook = mock.Mock()
        mock_hook_class.return_value = hook
        hook.secret_exists.return_value = False

        with pytest.raises(NotFound):
            get_secret(secret_id="non-existent-secret")

        mock_hook_class.assert_called_once()
        hook.secret_exists.assert_called_once_with(secret_id="non-existent-secret")
        hook.access_secret.assert_not_called()
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/tests/unit/google/common/utils/test_get_secret.py",
"license": "Apache License 2.0",
"lines": 50,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:task-sdk/src/airflow/sdk/execution_time/timeout.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
import structlog
from airflow.sdk.exceptions import AirflowTaskTimeout
class TimeoutPosix:
    """POSIX Timeout version: To be used in a ``with`` block and timeout its content."""

    def __init__(self, seconds=1, error_message="Timeout"):
        super().__init__()
        self.seconds = seconds
        # Include the PID so the timed-out process can be identified from the message.
        self.error_message = f"{error_message}, PID: {os.getpid()}"
        self.log = structlog.get_logger(logger_name="task")

    def handle_timeout(self, signum, frame):
        """Log information and raises AirflowTaskTimeout."""
        self.log.error("Process timed out", pid=os.getpid())
        raise AirflowTaskTimeout(self.error_message)

    def __enter__(self):
        import signal

        try:
            # Arm an ITIMER_REAL timer: SIGALRM fires after `seconds` of wall time.
            signal.signal(signal.SIGALRM, self.handle_timeout)
            signal.setitimer(signal.ITIMER_REAL, self.seconds)
        except ValueError:
            # signal handlers can only be installed in the main thread of the
            # main interpreter; degrade to a no-op timeout elsewhere.
            self.log.warning("timeout can't be used in the current context", exc_info=True)
        return self

    def __exit__(self, type_, value, traceback):
        import signal

        try:
            # Disarm the timer so the alarm cannot fire after the block exits.
            signal.setitimer(signal.ITIMER_REAL, 0)
        except ValueError:
            self.log.warning("timeout can't be used in the current context", exc_info=True)


# Public alias kept for callers importing `timeout` from this module.
timeout = TimeoutPosix
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/execution_time/timeout.py",
"license": "Apache License 2.0",
"lines": 46,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/redis/src/airflow/providers/redis/queues/redis.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.providers.common.messaging.providers.base_provider import BaseMessageQueueProvider
from airflow.providers.redis.triggers.redis_await_message import AwaitMessageTrigger
if TYPE_CHECKING:
from airflow.triggers.base import BaseEventTrigger
# [START queue_regexp]
# Matches queue URIs of the form "redis+pubsub://...".
QUEUE_REGEXP = r"^redis\+pubsub://"
# [END queue_regexp]


class RedisPubSubMessageQueueProvider(BaseMessageQueueProvider):
    """
    Configuration for Redis integration with common-messaging.

    [START redis_message_queue_provider_description]
    * It uses ``redis+pubsub`` as scheme for identifying Redis queues.
    * For parameter definitions take a look at :class:`~airflow.providers.redis.triggers.redis_await_message.AwaitMessageTrigger`.

    .. code-block:: python

        from airflow.providers.common.messaging.triggers.msg_queue import MessageQueueTrigger
        from airflow.sdk import Asset, AssetWatcher

        trigger = MessageQueueTrigger(
            scheme="redis+pubsub",
            # Additional Redis AwaitMessageTrigger parameters as needed
            channels=["my_channel"],
            redis_conn_id="redis_default",
        )

        asset = Asset("redis_queue_asset", watchers=[AssetWatcher(name="redis_watcher", trigger=trigger)])

    [END redis_message_queue_provider_description]
    """

    # Scheme string that identifies queues handled by this provider.
    scheme = "redis+pubsub"

    def trigger_class(self) -> type[BaseEventTrigger]:
        # Return the Redis AwaitMessageTrigger as the event trigger for this provider.
        return AwaitMessageTrigger  # type: ignore[return-value]
| {
"repo_id": "apache/airflow",
"file_path": "providers/redis/src/airflow/providers/redis/queues/redis.py",
"license": "Apache License 2.0",
"lines": 46,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/redis/tests/integration/redis/queues/test_redis_pubsub_message_queue.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import time
import pytest
from airflow.providers.redis.hooks.redis import RedisHook
from airflow.providers.redis.queues.redis import RedisPubSubMessageQueueProvider
@pytest.mark.integration("redis")
class TestRedisPubSubMessageQueueProviderIntegration:
    """Pub/sub round-trip checks against a live Redis instance."""

    def setup_method(self):
        # Fresh hook, connection, and provider for every test method.
        self.redis_hook = RedisHook(redis_conn_id="redis_default")
        self.redis = self.redis_hook.get_conn()
        self.provider = RedisPubSubMessageQueueProvider()
        self.channel = "test_pubsub_channel"

    def test_pubsub_send_and_receive(self):
        """A published message is received on the subscribed channel."""
        subscription = self.redis.pubsub()
        subscription.subscribe(self.channel)
        payload = "airflow-pubsub-integration-message"
        self.redis.publish(self.channel, payload)
        received = None
        # Poll for up to ~1 second (10 attempts x 0.1s) for the message to arrive.
        for _attempt in range(10):
            msg = subscription.get_message()
            if msg and msg["type"] == "message":
                received = msg["data"]
                break
            time.sleep(0.1)
        assert received == payload.encode(), f"Expected {payload!r}, got {received!r}"
        subscription.unsubscribe(self.channel)

    def test_queue_matches(self):
        """The provider recognises its own scheme."""
        assert self.provider.scheme_matches("redis+pubsub")
| {
"repo_id": "apache/airflow",
"file_path": "providers/redis/tests/integration/redis/queues/test_redis_pubsub_message_queue.py",
"license": "Apache License 2.0",
"lines": 44,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/redis/tests/system/redis/example_dag_message_queue_trigger.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
# [START howto_trigger_message_queue]
from airflow.providers.common.messaging.triggers.msg_queue import MessageQueueTrigger
from airflow.providers.standard.operators.empty import EmptyOperator
from airflow.sdk import DAG, Asset, AssetWatcher

# Define a trigger that listens to an external message queue (Redis in this case)
# NOTE: the "redis+pubsub" scheme routes this trigger to the Redis pub/sub provider.
trigger = MessageQueueTrigger(scheme="redis+pubsub", channels=["test"])

# Define an asset that watches for messages on the queue
asset = Asset("redis_queue_asset_1", watchers=[AssetWatcher(name="redis_watcher_1", trigger=trigger)])

# The DAG is scheduled on the asset: a run starts when the watcher sees a message.
with DAG(dag_id="example_redis_watcher_1", schedule=[asset]) as dag:
    EmptyOperator(task_id="task_1")
# [END howto_trigger_message_queue]

from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/redis/tests/system/redis/example_dag_message_queue_trigger.py",
"license": "Apache License 2.0",
"lines": 31,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/redis/tests/unit/redis/queues/test_redis.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow.providers.redis.triggers.redis_await_message import AwaitMessageTrigger
from tests_common.test_utils.common_msg_queue import mark_common_msg_queue_test
pytest.importorskip("airflow.providers.common.messaging.providers.base_provider")
class TestRedisPubSubMessageQueueProvider:
    """Tests for RedisPubSubMessageQueueProvider."""

    def setup_method(self):
        """Create a fresh provider instance for every test."""
        from airflow.providers.redis.queues.redis import RedisPubSubMessageQueueProvider

        self.provider = RedisPubSubMessageQueueProvider()

    def test_queue_create(self):
        """The provider is an instance of the common base provider."""
        from airflow.providers.common.messaging.providers.base_provider import BaseMessageQueueProvider

        assert isinstance(self.provider, BaseMessageQueueProvider)

    @pytest.mark.parametrize(
        ("scheme", "expected_result"),
        [
            pytest.param("redis+pubsub", True, id="redis_pubsub_scheme"),
            pytest.param("kafka", False, id="kafka_scheme"),
            pytest.param("sqs", False, id="sqs_scheme"),
            pytest.param("unknown", False, id="unknown_scheme"),
        ],
    )
    def test_scheme_matches(self, scheme, expected_result):
        """Only the redis+pubsub scheme is accepted; all others are rejected."""
        assert self.provider.scheme_matches(scheme) == expected_result

    def test_trigger_class(self):
        """The provider exposes AwaitMessageTrigger as its trigger class."""
        assert self.provider.trigger_class() == AwaitMessageTrigger
@mark_common_msg_queue_test
class TestMessageQueueTrigger:
    """Integration between the generic MessageQueueTrigger and the Redis provider."""

    @pytest.mark.usefixtures("cleanup_providers_manager")
    def test_provider_integrations_with_scheme_param(self):
        """The redis+pubsub scheme must resolve to the Redis AwaitMessageTrigger."""
        from airflow.providers.common.messaging.triggers.msg_queue import MessageQueueTrigger
        from airflow.providers.redis.triggers.redis_await_message import AwaitMessageTrigger

        queue_trigger = MessageQueueTrigger(scheme="redis+pubsub", channels="test_channel")
        assert isinstance(queue_trigger.trigger, AwaitMessageTrigger)
| {
"repo_id": "apache/airflow",
"file_path": "providers/redis/tests/unit/redis/queues/test_redis.py",
"license": "Apache License 2.0",
"lines": 54,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.