sample_id stringlengths 21 196 | text stringlengths 105 936k | metadata dict | category stringclasses 6
values |
|---|---|---|---|
apache/airflow:airflow-core/src/airflow/api_fastapi/auth/managers/simple/utils.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from fastapi import HTTPException, Request, status
from fastapi.exceptions import RequestValidationError
from pydantic import ValidationError
from airflow.api_fastapi.auth.managers.simple.datamodels.login import LoginBody
from airflow.api_fastapi.common.headers import HeaderContentTypeJsonOrForm
from airflow.api_fastapi.common.types import Mimetype
async def parse_login_body(
request: Request,
content_type: HeaderContentTypeJsonOrForm,
) -> LoginBody:
try:
if content_type == Mimetype.JSON:
body = await request.json()
elif content_type == Mimetype.FORM:
form = await request.form()
body = {
"username": form.get("username"),
"password": form.get("password"),
}
else:
raise HTTPException(
status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE,
detail="Unsupported Media Type",
)
return LoginBody(**body)
except ValidationError as e:
raise RequestValidationError(repr(e))
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/auth/managers/simple/utils.py",
"license": "Apache License 2.0",
"lines": 44,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/tests/unit/security/test_permissions_deprecation_warning.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import importlib
import re
import pytest
from airflow.exceptions import RemovedInAirflow4Warning
from airflow.security import permissions
def test_permissions_import_warns() -> None:
"""Ensures that imports of `airflow.security.permissions` trigger a `RemovedInAirflow4Warning`."""
with pytest.warns(
RemovedInAirflow4Warning, match=re.escape("The airflow.security.permissions module is deprecated")
):
importlib.reload(permissions)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/security/test_permissions_deprecation_warning.py",
"license": "Apache License 2.0",
"lines": 29,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/openlineage/src/airflow/providers/openlineage/operators/empty.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.providers.common.compat.sdk import BaseOperator
from airflow.providers.openlineage.extractors.base import OperatorLineage
if TYPE_CHECKING:
from airflow.providers.common.compat.sdk import Context
class EmptyOperator(BaseOperator):
"""
Operator that does literally nothing.
It can be used to group tasks in a DAG.
The task is evaluated by the scheduler but never processed by the executor.
"""
ui_color = "#e8f7e4"
def execute(self, context: Context):
pass
def get_openlineage_facets_on_start(self) -> OperatorLineage:
return OperatorLineage()
def get_openlineage_facets_on_complete(self, task_instance) -> OperatorLineage:
return OperatorLineage()
def get_openlineage_facets_on_failure(self, task_instance) -> OperatorLineage:
return OperatorLineage()
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/src/airflow/providers/openlineage/operators/empty.py",
"license": "Apache License 2.0",
"lines": 37,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/openlineage/tests/unit/openlineage/operators/test_empty.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import Mock
import pytest
from airflow.providers.openlineage.extractors.base import OperatorLineage
from airflow.providers.openlineage.operators.empty import EmptyOperator
def test_execute_returns_none_and_does_not_raise():
op = EmptyOperator(task_id="empty")
# Calling execute should do nothing and return None
result = op.execute(Mock())
assert result is None
@pytest.mark.parametrize(
("method_name", "use_task_instance"),
[
("get_openlineage_facets_on_start", False),
("get_openlineage_facets_on_complete", True),
("get_openlineage_facets_on_failure", True),
],
)
def test_openlineage_facets_methods_return_operator_lineage(method_name, use_task_instance):
op = EmptyOperator(task_id="empty")
method = getattr(op, method_name)
# Invoke with or without a mock for task_instance
if use_task_instance:
facets = method(Mock())
else:
facets = method()
# Should return an OperatorLineage instance
assert isinstance(facets, OperatorLineage)
# Each call returns a fresh instance
second_call = method(Mock()) if use_task_instance else method()
assert facets is not second_call
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/tests/unit/openlineage/operators/test_empty.py",
"license": "Apache License 2.0",
"lines": 47,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/amazon/src/airflow/providers/amazon/aws/operators/ssm.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from collections.abc import Sequence
from typing import TYPE_CHECKING, Any
from botocore.exceptions import WaiterError
from airflow.providers.amazon.aws.hooks.ssm import SsmHook
from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
from airflow.providers.amazon.aws.triggers.ssm import SsmRunCommandTrigger
from airflow.providers.amazon.aws.utils import validate_execute_complete_event
from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
from airflow.providers.common.compat.sdk import conf
if TYPE_CHECKING:
from airflow.sdk import Context
class SsmRunCommandOperator(AwsBaseOperator[SsmHook]):
"""
Executes the SSM Run Command to perform actions on managed instances.
.. seealso::
For more information on how to use this operator, take a look at the
guide:
:ref:`howto/operator:SsmRunCommandOperator`
:param document_name: The name of the Amazon Web Services Systems Manager
document (SSM document) to run.
:param run_command_kwargs: Optional parameters to pass to the send_command
API.
:param wait_for_completion: Whether to wait for cluster to stop.
(default: True)
:param waiter_delay: Time in seconds to wait between status checks.
(default: 120)
:param waiter_max_attempts: Maximum number of attempts to check for job
completion. (default: 75)
:param fail_on_nonzero_exit: If True (default), the operator will fail when
the command returns a non-zero exit code. If False, the operator will
complete successfully regardless of the command exit code, allowing
downstream tasks to handle exit codes for workflow routing. Note that
AWS-level failures (Cancelled, TimedOut) will still raise exceptions
even when this is False. (default: True)
:param deferrable: If True, the operator will wait asynchronously for the
cluster to stop. This implies waiting for completion. This mode
requires aiobotocore module to be installed. (default: False)
:param aws_conn_id: The Airflow connection used for AWS credentials.
If this is ``None`` or empty then the default boto3 behaviour is used.
If running Airflow in a distributed manner and aws_conn_id is None or
empty, then default boto3 configuration would be used (and must be
maintained on each worker node).
:param region_name: AWS region_name. If not specified then the default
boto3 behaviour is used.
:param verify: Whether or not to verify SSL certificates. See:
https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
:param botocore_config: Configuration dictionary (key-values) for botocore
client. See:
https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
"""
aws_hook_class = SsmHook
template_fields: Sequence[str] = aws_template_fields(
"document_name",
"run_command_kwargs",
)
def __init__(
self,
*,
document_name: str,
run_command_kwargs: dict[str, Any] | None = None,
wait_for_completion: bool = True,
waiter_delay: int = 120,
waiter_max_attempts: int = 75,
fail_on_nonzero_exit: bool = True,
deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
**kwargs,
):
super().__init__(**kwargs)
self.wait_for_completion = wait_for_completion
self.waiter_delay = waiter_delay
self.waiter_max_attempts = waiter_max_attempts
self.fail_on_nonzero_exit = fail_on_nonzero_exit
self.deferrable = deferrable
self.document_name = document_name
self.run_command_kwargs = run_command_kwargs or {}
def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str:
event = validate_execute_complete_event(event)
if event["status"] == "failed":
# Command failed - raise an exception with detailed information
command_status = event.get("command_status", "Unknown")
exit_code = event.get("exit_code", -1)
instance_id = event.get("instance_id", "Unknown")
message = event.get("message", "Command failed")
error_msg = (
f"SSM run command {event['command_id']} failed on instance {instance_id}. "
f"Status: {command_status}, Exit code: {exit_code}. {message}"
)
raise RuntimeError(error_msg)
if event["status"] != "success":
raise RuntimeError(f"Error while running run command: {event}")
self.log.info("SSM run command `%s` completed.", event["command_id"])
return event["command_id"]
def execute(self, context: Context):
response = self.hook.conn.send_command(
DocumentName=self.document_name,
**self.run_command_kwargs,
)
command_id = response["Command"]["CommandId"]
task_description = f"SSM run command {command_id} to complete."
if self.deferrable:
self.log.info("Deferring for %s", task_description)
self.defer(
trigger=SsmRunCommandTrigger(
command_id=command_id,
waiter_delay=self.waiter_delay,
waiter_max_attempts=self.waiter_max_attempts,
fail_on_nonzero_exit=self.fail_on_nonzero_exit,
aws_conn_id=self.aws_conn_id,
region_name=self.region_name,
verify=self.verify,
botocore_config=self.botocore_config,
),
method_name="execute_complete",
)
elif self.wait_for_completion:
self.log.info("Waiting for %s", task_description)
waiter = self.hook.get_waiter("command_executed")
instance_ids = response["Command"]["InstanceIds"]
for instance_id in instance_ids:
try:
waiter.wait(
CommandId=command_id,
InstanceId=instance_id,
WaiterConfig={
"Delay": self.waiter_delay,
"MaxAttempts": self.waiter_max_attempts,
},
)
except WaiterError:
if not self.fail_on_nonzero_exit:
# Enhanced mode: distinguish between AWS-level and command-level failures
invocation = self.hook.get_command_invocation(command_id, instance_id)
status = invocation.get("Status", "")
# AWS-level failures should always raise
if SsmHook.is_aws_level_failure(status):
raise
# Command-level failure - tolerate it in enhanced mode
self.log.info(
"Command completed with status %s (exit code: %s). "
"Continuing due to fail_on_nonzero_exit=False",
status,
invocation.get("ResponseCode", "unknown"),
)
else:
# Traditional mode: all failures raise
raise
return command_id
class SsmGetCommandInvocationOperator(AwsBaseOperator[SsmHook]):
"""
Retrieves the output and execution details of an SSM command invocation.
.. seealso::
For more information on how to use this operator, take a look at the
guide:
:ref:`howto/operator:SsmGetCommandInvocationOperator`
:param command_id: The ID of the SSM command to retrieve output for.
:param instance_id: The ID of the specific instance to retrieve output
for. If not provided, retrieves output from all instances that
executed the command.
:param aws_conn_id: The Airflow connection used for AWS credentials.
If this is ``None`` or empty then the default boto3 behaviour is used.
If running Airflow in a distributed manner and aws_conn_id is None or
empty, then default boto3 configuration would be used (and must be
maintained on each worker node).
:param region_name: AWS region_name. If not specified then the default
boto3 behaviour is used.
:param verify: Whether or not to verify SSL certificates. See:
https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
:param botocore_config: Configuration dictionary (key-values) for botocore
client. See:
https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
"""
aws_hook_class = SsmHook
template_fields: Sequence[str] = aws_template_fields(
"command_id",
"instance_id",
)
def __init__(
self,
*,
command_id: str,
instance_id: str | None = None,
**kwargs,
):
super().__init__(**kwargs)
self.command_id = command_id
self.instance_id = instance_id
def execute(self, context: Context) -> dict[str, Any]:
"""Execute the operator to retrieve command invocation output."""
if self.instance_id:
self.log.info(
"Retrieving output for command %s on instance %s",
self.command_id,
self.instance_id,
)
invocations = [{"InstanceId": self.instance_id}]
else:
self.log.info("Retrieving output for command %s from all instances", self.command_id)
response = self.hook.list_command_invocations(self.command_id)
invocations = response.get("CommandInvocations", [])
output_data: dict[str, Any] = {"command_id": self.command_id, "invocations": []}
for invocation in invocations:
instance_id = invocation["InstanceId"]
try:
invocation_details = self.hook.get_command_invocation(self.command_id, instance_id)
output_data["invocations"].append(
{
"instance_id": instance_id,
"status": invocation_details.get("Status", ""),
"response_code": invocation_details.get("ResponseCode", ""),
"standard_output": invocation_details.get("StandardOutputContent", ""),
"standard_error": invocation_details.get("StandardErrorContent", ""),
"execution_start_time": invocation_details.get("ExecutionStartDateTime", ""),
"execution_end_time": invocation_details.get("ExecutionEndDateTime", ""),
"document_name": invocation_details.get("DocumentName", ""),
"comment": invocation_details.get("Comment", ""),
}
)
except Exception as e:
self.log.warning("Failed to get output for instance %s: %s", instance_id, e)
output_data["invocations"].append({"instance_id": instance_id, "error": str(e)})
return output_data
| {
"repo_id": "apache/airflow",
"file_path": "providers/amazon/src/airflow/providers/amazon/aws/operators/ssm.py",
"license": "Apache License 2.0",
"lines": 238,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/amazon/src/airflow/providers/amazon/aws/sensors/ssm.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from collections.abc import Sequence
from typing import TYPE_CHECKING, Any
from airflow.providers.amazon.aws.hooks.ssm import SsmHook
from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
from airflow.providers.amazon.aws.triggers.ssm import SsmRunCommandTrigger
from airflow.providers.amazon.aws.utils import validate_execute_complete_event
from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
from airflow.providers.common.compat.sdk import conf
if TYPE_CHECKING:
from airflow.sdk import Context
class SsmRunCommandCompletedSensor(AwsBaseSensor[SsmHook]):
"""
Poll the state of an AWS SSM Run Command until completion.
Waits until all instance jobs reach a terminal state. Fails if any
instance job ends in a failed state.
.. seealso::
For more information on how to use this sensor, take a look at the
guide:
:ref:`howto/sensor:SsmRunCommandCompletedSensor`
:param command_id: The ID of the AWS SSM Run Command.
:param fail_on_nonzero_exit: If True (default), the sensor will fail when the command
returns a non-zero exit code. If False, the sensor will complete successfully
for both Success and Failed command statuses, allowing downstream tasks to handle
exit codes. AWS-level failures (Cancelled, TimedOut) will still raise exceptions.
(default: True)
:param deferrable: If True, the sensor will operate in deferrable mode.
This mode requires aiobotocore module to be installed.
(default: False, but can be overridden in config file by setting
default_deferrable to True)
:param poke_interval: Polling period in seconds to check for the status
of the job. (default: 120)
:param max_retries: Number of times before returning the current state.
(default: 75)
:param aws_conn_id: The Airflow connection used for AWS credentials.
If this is ``None`` or empty then the default boto3 behaviour is used.
If running Airflow in a distributed manner and aws_conn_id is None or
empty, then default boto3 configuration would be used (and must be
maintained on each worker node).
:param region_name: AWS region_name. If not specified then the default
boto3 behaviour is used.
:param verify: Whether or not to verify SSL certificates. See:
https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
:param botocore_config: Configuration dictionary (key-values) for botocore
client. See:
https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
"""
INTERMEDIATE_STATES: tuple[str, ...] = (
"Pending",
"Delayed",
"InProgress",
"Cancelling",
)
FAILURE_STATES: tuple[str, ...] = ("Cancelled", "TimedOut", "Failed")
SUCCESS_STATES: tuple[str, ...] = ("Success",)
FAILURE_MESSAGE = "SSM run command sensor failed."
aws_hook_class = SsmHook
template_fields: Sequence[str] = aws_template_fields(
"command_id",
)
def __init__(
self,
*,
command_id,
fail_on_nonzero_exit: bool = True,
deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
poke_interval: int = 120,
max_retries: int = 75,
**kwargs,
):
super().__init__(**kwargs)
self.command_id = command_id
self.fail_on_nonzero_exit = fail_on_nonzero_exit
self.deferrable = deferrable
self.poke_interval = poke_interval
self.max_retries = max_retries
def poke(self, context: Context):
response = self.hook.conn.list_command_invocations(CommandId=self.command_id)
command_invocations = response.get("CommandInvocations", [])
if not command_invocations:
self.log.info(
"No command invocations found",
"command_id=%s yet, waiting...",
self.command_id,
)
return False
for invocation in command_invocations:
state = invocation["Status"]
if state in self.FAILURE_STATES:
# Check if we should tolerate this failure
if self.fail_on_nonzero_exit:
raise RuntimeError(self.FAILURE_MESSAGE) # Traditional behavior
# Only fail on AWS-level issues, tolerate command failures
if SsmHook.is_aws_level_failure(state):
raise RuntimeError(f"SSM command {self.command_id} {state}")
# Command failed but we're tolerating it
self.log.info(
"Command invocation has status %s. Continuing due to fail_on_nonzero_exit=False",
state,
)
if state in self.INTERMEDIATE_STATES:
return False
return True
def execute(self, context: Context):
if self.deferrable:
self.defer(
trigger=SsmRunCommandTrigger(
command_id=self.command_id,
waiter_delay=int(self.poke_interval),
waiter_max_attempts=self.max_retries,
aws_conn_id=self.aws_conn_id,
fail_on_nonzero_exit=self.fail_on_nonzero_exit,
),
method_name="execute_complete",
)
else:
super().execute(context=context)
def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
event = validate_execute_complete_event(event)
if event["status"] != "success":
raise RuntimeError(f"Error while running run command: {event}")
self.log.info("SSM run command `%s` completed.", event["command_id"])
| {
"repo_id": "apache/airflow",
"file_path": "providers/amazon/src/airflow/providers/amazon/aws/sensors/ssm.py",
"license": "Apache License 2.0",
"lines": 138,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/amazon/src/airflow/providers/amazon/aws/triggers/ssm.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from collections.abc import AsyncIterator
from typing import TYPE_CHECKING
from airflow.providers.amazon.aws.hooks.ssm import SsmHook
from airflow.providers.amazon.aws.triggers.base import AwsBaseWaiterTrigger
from airflow.providers.amazon.aws.utils.waiter_with_logging import async_wait
from airflow.triggers.base import TriggerEvent
if TYPE_CHECKING:
from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
class SsmRunCommandTrigger(AwsBaseWaiterTrigger):
"""
Trigger when a SSM run command is complete.
:param command_id: The ID of the AWS SSM Run Command.
:param waiter_delay: The amount of time in seconds to wait between attempts. (default: 120)
:param waiter_max_attempts: The maximum number of attempts to be made. (default: 75)
:param fail_on_nonzero_exit: If True (default), the trigger will fail when the command returns
a non-zero exit code. If False, the trigger will complete successfully regardless of the
command exit code. (default: True)
:param aws_conn_id: The Airflow connection used for AWS credentials.
:param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
:param verify: Whether or not to verify SSL certificates. See:
https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
:param botocore_config: Configuration dictionary (key-values) for botocore client. See:
https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
"""
def __init__(
self,
*,
command_id: str,
waiter_delay: int = 120,
waiter_max_attempts: int = 75,
fail_on_nonzero_exit: bool = True,
aws_conn_id: str | None = None,
region_name: str | None = None,
verify: bool | str | None = None,
botocore_config: dict | None = None,
) -> None:
super().__init__(
serialized_fields={"command_id": command_id, "fail_on_nonzero_exit": fail_on_nonzero_exit},
waiter_name="command_executed",
waiter_args={"CommandId": command_id},
failure_message="SSM run command failed.",
status_message="Status of SSM run command is",
status_queries=["Status"],
return_key="command_id",
return_value=command_id,
waiter_delay=waiter_delay,
waiter_max_attempts=waiter_max_attempts,
aws_conn_id=aws_conn_id,
region_name=region_name,
verify=verify,
botocore_config=botocore_config,
)
self.command_id = command_id
self.fail_on_nonzero_exit = fail_on_nonzero_exit
def hook(self) -> AwsGenericHook:
return SsmHook(
aws_conn_id=self.aws_conn_id,
region_name=self.region_name,
verify=self.verify,
config=self.botocore_config,
)
async def run(self) -> AsyncIterator[TriggerEvent]:
hook = self.hook()
async with await hook.get_async_conn() as client:
response = await client.list_command_invocations(CommandId=self.command_id)
instance_ids = [invocation["InstanceId"] for invocation in response.get("CommandInvocations", [])]
waiter = hook.get_waiter(self.waiter_name, deferrable=True, client=client)
for instance_id in instance_ids:
self.waiter_args["InstanceId"] = instance_id
try:
await async_wait(
waiter,
self.waiter_delay,
self.attempts,
self.waiter_args,
self.failure_message,
self.status_message,
self.status_queries,
)
except Exception:
# Get detailed invocation information to determine failure type
invocation = await client.get_command_invocation(
CommandId=self.command_id, InstanceId=instance_id
)
status = invocation.get("Status", "")
response_code = invocation.get("ResponseCode", -1)
# AWS-level failures should always raise
if SsmHook.is_aws_level_failure(status):
self.log.error(
"AWS-level failure for command %s on instance %s: status=%s",
self.command_id,
instance_id,
status,
)
raise
# Command-level failure (non-zero exit code)
if not self.fail_on_nonzero_exit:
# Enhanced mode: tolerate command-level failures
self.log.info(
"Command %s completed with status %s (exit code: %s) for instance %s. "
"Continuing due to fail_on_nonzero_exit=False",
self.command_id,
status,
response_code,
instance_id,
)
continue
else:
# Traditional mode: yield failure event instead of raising
# This allows the operator to handle the failure gracefully
self.log.warning(
"Command %s failed with status %s (exit code: %s) for instance %s",
self.command_id,
status,
response_code,
instance_id,
)
yield TriggerEvent(
{
"status": "failed",
"message": f"Command failed with status {status} (exit code: {response_code})",
"command_status": status,
"exit_code": response_code,
"instance_id": instance_id,
self.return_key: self.return_value,
}
)
return
yield TriggerEvent({"status": "success", self.return_key: self.return_value})
| {
"repo_id": "apache/airflow",
"file_path": "providers/amazon/src/airflow/providers/amazon/aws/triggers/ssm.py",
"license": "Apache License 2.0",
"lines": 146,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/amazon/tests/system/amazon/aws/example_ssm.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
import logging
import textwrap
import time
import boto3
from airflow.providers.amazon.aws.operators.ec2 import EC2CreateInstanceOperator, EC2TerminateInstanceOperator
from airflow.providers.amazon.aws.operators.ssm import SsmGetCommandInvocationOperator, SsmRunCommandOperator
from airflow.providers.amazon.aws.sensors.ssm import SsmRunCommandCompletedSensor
from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
if AIRFLOW_V_3_0_PLUS:
from airflow.sdk import DAG, chain, task
else:
# Airflow 2 path
from airflow.decorators import task # type: ignore[attr-defined,no-redef]
from airflow.models.baseoperator import chain # type: ignore[attr-defined,no-redef]
from airflow.models.dag import DAG # type: ignore[attr-defined,no-redef,assignment]
try:
from airflow.sdk import TriggerRule
except ImportError:
# Compatibility for Airflow < 3.1
from airflow.utils.trigger_rule import TriggerRule # type: ignore[no-redef,attr-defined]
from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, get_role_name
from system.amazon.aws.utils.ec2 import get_latest_ami_id
DAG_ID = "example_ssm"
ROLE_ARN_KEY = "ROLE_ARN"
sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build()
USER_DATA = textwrap.dedent("""\
#!/bin/bash
set -e
# Update the system
if command -v yum &> /dev/null; then
PACKAGE_MANAGER="yum"
elif command -v dnf &> /dev/null; then
PACKAGE_MANAGER="dnf"
else
echo "No suitable package manager found"
exit 1
fi
# Install SSM agent if it's not installed
if ! command -v amazon-ssm-agent &> /dev/null; then
echo "Installing SSM agent..."
$PACKAGE_MANAGER install -y amazon-ssm-agent
else
echo "SSM agent already installed"
fi
echo "Enabling and starting SSM agent..."
systemctl enable amazon-ssm-agent
systemctl start amazon-ssm-agent
shutdown -h +15
echo "=== Finished user-data script ==="
""")
log = logging.getLogger(__name__)
@task
def create_instance_profile(role_name: str, instance_profile_name: str):
client = boto3.client("iam")
client.create_instance_profile(InstanceProfileName=instance_profile_name)
client.add_role_to_instance_profile(InstanceProfileName=instance_profile_name, RoleName=role_name)
@task
def await_instance_profile_exists(instance_profile_name):
client = boto3.client("iam")
client.get_waiter("instance_profile_exists").wait(InstanceProfileName=instance_profile_name)
@task
def delete_instance_profile(instance_profile_name, role_name):
client = boto3.client("iam")
try:
client.remove_role_from_instance_profile(
InstanceProfileName=instance_profile_name, RoleName=role_name
)
except client.exceptions.NoSuchEntityException:
log.info("Role %s not attached to %s or already removed.", role_name, instance_profile_name)
try:
client.delete_instance_profile(InstanceProfileName=instance_profile_name)
except client.exceptions.NoSuchEntityException:
log.info("Instance profile %s already deleted.", instance_profile_name)
@task
def extract_instance_id(instance_ids: list) -> str:
return instance_ids[0]
@task
def build_run_command_kwargs(instance_id: str):
return {
"InstanceIds": [instance_id],
"Parameters": {"commands": ["touch /tmp/ssm_test_passed"]},
}
@task
def wait_until_ssm_ready(instance_id: str, max_attempts: int = 10, delay_seconds: int = 15):
"""
Waits for an EC2 instance to register with AWS Systems Manager (SSM).
This may take over a minute even after the instance is running.
Raises an exception if the instance is not ready after max_attempts.
"""
ssm = boto3.client("ssm")
for _ in range(max_attempts):
response = ssm.describe_instance_information(
Filters=[{"Key": "InstanceIds", "Values": [instance_id]}]
)
if (
response.get("InstanceInformationList")
and response["InstanceInformationList"][0]["PingStatus"] == "Online"
):
return
time.sleep(delay_seconds)
raise Exception(f"Instance {instance_id} not ready in SSM after {max_attempts} attempts.")
with DAG(
dag_id=DAG_ID,
schedule="@once",
start_date=datetime.datetime(2021, 1, 1),
catchup=False,
) as dag:
# Create EC2 instance with SSM agent
test_context = sys_test_context_task()
env_id = test_context[ENV_ID_KEY]
instance_name = f"{env_id}-instance"
image_id = get_latest_ami_id()
role_name = get_role_name(test_context[ROLE_ARN_KEY])
instance_profile_name = f"{env_id}-ssm-instance-profile"
config = {
"InstanceType": "t4g.micro",
"IamInstanceProfile": {"Name": instance_profile_name},
# Optional: Tags for identifying test resources in the AWS console
"TagSpecifications": [
{"ResourceType": "instance", "Tags": [{"Key": "Name", "Value": instance_name}]}
],
"UserData": USER_DATA,
# Use IMDSv2 for greater security, see the following doc for more details:
# https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/configuring-instance-metadata-service.html
"MetadataOptions": {"HttpEndpoint": "enabled", "HttpTokens": "required"},
"BlockDeviceMappings": [
{"DeviceName": "/dev/xvda", "Ebs": {"Encrypted": True, "DeleteOnTermination": True}}
],
"InstanceInitiatedShutdownBehavior": "terminate",
}
create_instance = EC2CreateInstanceOperator(
task_id="create_instance",
image_id=image_id,
max_count=1,
min_count=1,
config=config,
wait_for_completion=True,
retries=5,
retry_delay=datetime.timedelta(seconds=15),
)
instance_id = extract_instance_id(create_instance.output)
run_command_kwargs = build_run_command_kwargs(instance_id)
# [START howto_operator_run_command]
run_command = SsmRunCommandOperator(
task_id="run_command",
document_name="AWS-RunShellScript",
run_command_kwargs=run_command_kwargs,
wait_for_completion=False,
)
# [END howto_operator_run_command]
# [START howto_sensor_run_command]
await_run_command = SsmRunCommandCompletedSensor(
task_id="await_run_command", command_id="{{ ti.xcom_pull(task_ids='run_command') }}"
)
# [END howto_sensor_run_command]
# [START howto_operator_get_command_invocation]
get_command_output = SsmGetCommandInvocationOperator(
task_id="get_command_output",
command_id="{{ ti.xcom_pull(task_ids='run_command') }}",
instance_id=instance_id,
)
# [END howto_operator_get_command_invocation]
# [START howto_operator_ssm_enhanced_async]
run_command_async = SsmRunCommandOperator(
task_id="run_command_async",
document_name="AWS-RunShellScript",
run_command_kwargs={
"InstanceIds": [instance_id],
"Parameters": {"commands": ["echo 'Testing async pattern'", "exit 1"]},
},
wait_for_completion=False,
fail_on_nonzero_exit=False,
)
wait_command_async = SsmRunCommandCompletedSensor(
task_id="wait_command_async",
command_id="{{ ti.xcom_pull(task_ids='run_command_async') }}",
fail_on_nonzero_exit=False,
)
# [END howto_operator_ssm_enhanced_async]
# [START howto_operator_ssm_enhanced_sync]
run_command_sync = SsmRunCommandOperator(
task_id="run_command_sync",
document_name="AWS-RunShellScript",
run_command_kwargs={
"InstanceIds": [instance_id],
"Parameters": {"commands": ["echo 'Testing sync pattern'", "exit 2"]},
},
wait_for_completion=True,
fail_on_nonzero_exit=False,
)
# [END howto_operator_ssm_enhanced_sync]
# [START howto_operator_ssm_exit_code_routing]
get_exit_code_output = SsmGetCommandInvocationOperator(
task_id="get_exit_code_output",
command_id="{{ ti.xcom_pull(task_ids='run_command_async') }}",
instance_id=instance_id,
)
@task
def route_based_on_exit_code(**context):
output = context["ti"].xcom_pull(task_ids="get_exit_code_output")
exit_code = output.get("response_code") if output else None
log.info("Command exit code: %s", exit_code)
return "handle_exit_code"
route_task = route_based_on_exit_code()
@task(task_id="handle_exit_code")
def handle_exit_code():
log.info("Handling exit code routing")
return "exit_code_handled"
handle_task = handle_exit_code()
# [END howto_operator_ssm_exit_code_routing]
# [START howto_operator_ssm_traditional]
run_command_traditional = SsmRunCommandOperator(
task_id="run_command_traditional",
document_name="AWS-RunShellScript",
run_command_kwargs={
"InstanceIds": [instance_id],
"Parameters": {"commands": ["echo 'Testing traditional pattern'", "exit 0"]},
},
wait_for_completion=False,
)
wait_command_traditional = SsmRunCommandCompletedSensor(
task_id="wait_command_traditional",
command_id="{{ ti.xcom_pull(task_ids='run_command_traditional') }}",
)
# [END howto_operator_ssm_traditional]
delete_instance = EC2TerminateInstanceOperator(
task_id="terminate_instance",
trigger_rule=TriggerRule.ALL_DONE,
instance_ids=instance_id,
)
chain(
# TEST SETUP
test_context,
image_id,
role_name,
create_instance_profile(role_name, instance_profile_name),
await_instance_profile_exists(instance_profile_name),
create_instance,
instance_id,
run_command_kwargs,
wait_until_ssm_ready(instance_id),
# TEST BODY
run_command,
await_run_command,
get_command_output,
)
# Exit code handling examples (run in parallel)
wait_until_ssm_ready(instance_id) >> run_command_async >> wait_command_async >> get_exit_code_output
get_exit_code_output >> route_task >> handle_task
wait_until_ssm_ready(instance_id) >> run_command_sync
wait_until_ssm_ready(instance_id) >> run_command_traditional >> wait_command_traditional
# TEST TEARDOWN
[get_command_output, handle_task, run_command_sync, wait_command_traditional] >> delete_instance
delete_instance >> delete_instance_profile(instance_profile_name, role_name)
from tests_common.test_utils.watcher import watcher
# This test needs watcher in order to properly mark success/failure
# when "tearDown" task with trigger rule is part of the DAG
list(dag.tasks) >> watcher()
from tests_common.test_utils.system_tests import get_test_run # noqa: E402
# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/amazon/tests/system/amazon/aws/example_ssm.py",
"license": "Apache License 2.0",
"lines": 279,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/amazon/tests/unit/amazon/aws/operators/test_ssm.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from collections.abc import Generator
from unittest import mock
import pytest
from botocore.exceptions import WaiterError
from airflow.providers.amazon.aws.hooks.ssm import SsmHook
from airflow.providers.amazon.aws.operators.ssm import SsmGetCommandInvocationOperator, SsmRunCommandOperator
from unit.amazon.aws.utils.test_template_fields import validate_template_fields
COMMAND_ID = "test_command_id"
DOCUMENT_NAME = "test_ssm_custom_document"
INSTANCE_IDS = ["test_instance_id_1", "test_instance_id_2"]
class TestSsmRunCommandOperator:
@pytest.fixture
def mock_conn(self) -> Generator[SsmHook, None, None]:
with mock.patch.object(SsmHook, "conn") as _conn:
_conn.send_command.return_value = {
"Command": {
"CommandId": COMMAND_ID,
"InstanceIds": INSTANCE_IDS,
}
}
yield _conn
def setup_method(self):
self.operator = SsmRunCommandOperator(
task_id="test_run_command_operator",
document_name=DOCUMENT_NAME,
run_command_kwargs={"InstanceIds": INSTANCE_IDS},
)
self.operator.defer = mock.MagicMock()
@pytest.mark.parametrize(
("wait_for_completion", "deferrable"),
[
pytest.param(False, False, id="no_wait"),
pytest.param(True, False, id="wait"),
pytest.param(False, True, id="defer"),
],
)
@mock.patch.object(SsmHook, "get_waiter")
def test_run_command_wait_combinations(self, mock_get_waiter, wait_for_completion, deferrable, mock_conn):
self.operator.wait_for_completion = wait_for_completion
self.operator.deferrable = deferrable
command_id = self.operator.execute({})
assert command_id == COMMAND_ID
mock_conn.send_command.assert_called_once_with(DocumentName=DOCUMENT_NAME, InstanceIds=INSTANCE_IDS)
assert mock_get_waiter.call_count == wait_for_completion
assert self.operator.defer.call_count == deferrable
def test_template_fields(self):
validate_template_fields(self.operator)
def test_deferrable_with_region(self, mock_conn):
"""Test that deferrable mode properly passes region and other AWS parameters to trigger."""
self.operator.deferrable = True
self.operator.region_name = "us-west-2"
self.operator.verify = False
self.operator.botocore_config = {"retries": {"max_attempts": 5}}
command_id = self.operator.execute({})
assert command_id == COMMAND_ID
mock_conn.send_command.assert_called_once_with(DocumentName=DOCUMENT_NAME, InstanceIds=INSTANCE_IDS)
# Verify defer was called with correct trigger parameters
self.operator.defer.assert_called_once()
call_args = self.operator.defer.call_args
trigger = call_args[1]["trigger"] # Get the trigger from kwargs
# Verify the trigger has the correct parameters
assert trigger.command_id == COMMAND_ID
assert trigger.region_name == "us-west-2"
assert trigger.verify is False
assert trigger.botocore_config == {"retries": {"max_attempts": 5}}
assert trigger.aws_conn_id == self.operator.aws_conn_id
def test_operator_default_fails_on_nonzero_exit(self, mock_conn):
"""
Test traditional mode where fail_on_nonzero_exit=True (default).
Verifies that when fail_on_nonzero_exit is True (the default), the operator
raises an exception when the waiter encounters a command failure.
"""
self.operator.wait_for_completion = True
# Mock waiter to raise WaiterError (simulating command failure)
mock_waiter = mock.MagicMock()
mock_waiter.wait.side_effect = WaiterError(
name="command_executed",
reason="Waiter encountered a terminal failure state",
last_response={"Status": "Failed"},
)
with mock.patch.object(SsmHook, "get_waiter", return_value=mock_waiter):
# Should raise WaiterError in traditional mode
with pytest.raises(WaiterError):
self.operator.execute({})
def test_operator_enhanced_mode_tolerates_failed_status(self, mock_conn):
"""
Test enhanced mode where fail_on_nonzero_exit=False tolerates Failed status.
Verifies that when fail_on_nonzero_exit is False, the operator completes
successfully even when the command returns a Failed status with non-zero exit code.
"""
self.operator.wait_for_completion = True
self.operator.fail_on_nonzero_exit = False
# Mock waiter to raise WaiterError
mock_waiter = mock.MagicMock()
mock_waiter.wait.side_effect = WaiterError(
name="command_executed",
reason="Waiter encountered a terminal failure state",
last_response={"Status": "Failed"},
)
# Mock get_command_invocation to return Failed status with exit code
with (
mock.patch.object(SsmHook, "get_waiter", return_value=mock_waiter),
mock.patch.object(
SsmHook, "get_command_invocation", return_value={"Status": "Failed", "ResponseCode": 1}
),
):
# Should NOT raise in enhanced mode for Failed status
command_id = self.operator.execute({})
assert command_id == COMMAND_ID
def test_operator_enhanced_mode_fails_on_timeout(self, mock_conn):
"""
Test enhanced mode still fails on TimedOut status.
Verifies that even when fail_on_nonzero_exit is False, the operator
still raises an exception for AWS-level failures like TimedOut.
"""
self.operator.wait_for_completion = True
self.operator.fail_on_nonzero_exit = False
# Mock waiter to raise WaiterError
mock_waiter = mock.MagicMock()
mock_waiter.wait.side_effect = WaiterError(
name="command_executed",
reason="Waiter encountered a terminal failure state",
last_response={"Status": "TimedOut"},
)
# Mock get_command_invocation to return TimedOut status
with (
mock.patch.object(SsmHook, "get_waiter", return_value=mock_waiter),
mock.patch.object(
SsmHook, "get_command_invocation", return_value={"Status": "TimedOut", "ResponseCode": -1}
),
):
# Should raise even in enhanced mode for TimedOut
with pytest.raises(WaiterError):
self.operator.execute({})
def test_operator_enhanced_mode_fails_on_cancelled(self, mock_conn):
"""
Test enhanced mode still fails on Cancelled status.
Verifies that even when fail_on_nonzero_exit is False, the operator
still raises an exception for AWS-level failures like Cancelled.
"""
self.operator.wait_for_completion = True
self.operator.fail_on_nonzero_exit = False
# Mock waiter to raise WaiterError
mock_waiter = mock.MagicMock()
mock_waiter.wait.side_effect = WaiterError(
name="command_executed",
reason="Waiter encountered a terminal failure state",
last_response={"Status": "Cancelled"},
)
# Mock get_command_invocation to return Cancelled status
with (
mock.patch.object(SsmHook, "get_waiter", return_value=mock_waiter),
mock.patch.object(
SsmHook, "get_command_invocation", return_value={"Status": "Cancelled", "ResponseCode": -1}
),
):
# Should raise even in enhanced mode for Cancelled
with pytest.raises(WaiterError):
self.operator.execute({})
@mock.patch("airflow.providers.amazon.aws.operators.ssm.SsmRunCommandTrigger")
def test_operator_passes_parameter_to_trigger(self, mock_trigger_class, mock_conn):
"""
Test that fail_on_nonzero_exit parameter is passed to trigger in deferrable mode.
Verifies that when using deferrable mode, the fail_on_nonzero_exit parameter
is correctly passed to the SsmRunCommandTrigger.
"""
self.operator.deferrable = True
self.operator.fail_on_nonzero_exit = False
with mock.patch.object(self.operator, "defer") as mock_defer:
command_id = self.operator.execute({})
assert command_id == COMMAND_ID
mock_conn.send_command.assert_called_once_with(
DocumentName=DOCUMENT_NAME, InstanceIds=INSTANCE_IDS
)
# Verify defer was called
mock_defer.assert_called_once()
# Verify the trigger was instantiated with correct parameters
mock_trigger_class.assert_called_once()
call_kwargs = mock_trigger_class.call_args[1]
assert call_kwargs["command_id"] == COMMAND_ID
assert call_kwargs["fail_on_nonzero_exit"] is False
def test_execute_complete_success(self):
"""Test execute_complete with successful event."""
event = {"status": "success", "command_id": COMMAND_ID}
result = self.operator.execute_complete({}, event)
assert result == COMMAND_ID
def test_execute_complete_failure_event(self):
"""Test execute_complete with failure event from trigger."""
event = {
"status": "failed",
"command_id": COMMAND_ID,
"command_status": "Failed",
"exit_code": 1,
"instance_id": "i-123456",
"message": "Command failed with status Failed (exit code: 1)",
}
with pytest.raises(RuntimeError) as exc_info:
self.operator.execute_complete({}, event)
error_msg = str(exc_info.value)
assert COMMAND_ID in error_msg
assert "Failed" in error_msg
assert "exit code: 1" in error_msg
assert "i-123456" in error_msg
def test_execute_complete_failure_event_with_different_exit_codes(self):
"""Test execute_complete properly reports different exit codes in error messages."""
event = {
"status": "failed",
"command_id": COMMAND_ID,
"command_status": "Failed",
"exit_code": 42,
"instance_id": "i-789012",
"message": "Command failed with status Failed (exit code: 42)",
}
with pytest.raises(RuntimeError) as exc_info:
self.operator.execute_complete({}, event)
error_msg = str(exc_info.value)
assert "exit code: 42" in error_msg
assert "i-789012" in error_msg
def test_execute_complete_unknown_status(self):
"""Test execute_complete with unknown status."""
event = {"status": "unknown", "command_id": COMMAND_ID}
with pytest.raises(RuntimeError) as exc_info:
self.operator.execute_complete({}, event)
assert "Error while running run command" in str(exc_info.value)
class TestSsmGetCommandInvocationOperator:
@pytest.fixture
def mock_hook(self) -> Generator[mock.MagicMock, None, None]:
with mock.patch.object(SsmGetCommandInvocationOperator, "hook") as _hook:
yield _hook
def setup_method(self):
self.command_id = "test-command-id-123"
self.instance_id = "i-1234567890abcdef0"
self.operator = SsmGetCommandInvocationOperator(
task_id="test_get_command_invocation",
command_id=self.command_id,
instance_id=self.instance_id,
)
def test_execute_with_specific_instance(self, mock_hook):
# Mock response for specific instance
mock_invocation_details = {
"Status": "Success",
"ResponseCode": 0,
"StandardOutputContent": "Hello World",
"StandardErrorContent": "",
"ExecutionStartDateTime": "2023-01-01T12:00:00Z",
"ExecutionEndDateTime": "2023-01-01T12:00:05Z",
"DocumentName": "AWS-RunShellScript",
"Comment": "Test command",
}
mock_hook.get_command_invocation.return_value = mock_invocation_details
result = self.operator.execute({})
# Verify hook was called correctly
mock_hook.get_command_invocation.assert_called_once_with(self.command_id, self.instance_id)
# Verify returned data structure - should use standardized format with invocations array
expected_result = {
"command_id": self.command_id,
"invocations": [
{
"instance_id": self.instance_id,
"status": "Success",
"response_code": 0,
"standard_output": "Hello World",
"standard_error": "",
"execution_start_time": "2023-01-01T12:00:00Z",
"execution_end_time": "2023-01-01T12:00:05Z",
"document_name": "AWS-RunShellScript",
"comment": "Test command",
}
],
}
assert result == expected_result
def test_execute_all_instances(self, mock_hook):
# Setup operator without instance_id to get all instances
operator = SsmGetCommandInvocationOperator(
task_id="test_get_all_invocations",
command_id=self.command_id,
)
# Mock list_command_invocations response
mock_invocations = [
{"InstanceId": "i-111"},
{"InstanceId": "i-222"},
]
mock_hook.list_command_invocations.return_value = {"CommandInvocations": mock_invocations}
# Mock get_command_invocation responses
mock_invocation_details_1 = {
"Status": "Success",
"ResponseCode": 0,
"StandardOutputContent": "Output 1",
"StandardErrorContent": "",
"ExecutionStartDateTime": "2023-01-01T12:00:00Z",
"ExecutionEndDateTime": "2023-01-01T12:00:05Z",
"DocumentName": "AWS-RunShellScript",
"Comment": "",
}
mock_invocation_details_2 = {
"Status": "Failed",
"ResponseCode": 1,
"StandardOutputContent": "",
"StandardErrorContent": "Error occurred",
"ExecutionStartDateTime": "2023-01-01T12:00:00Z",
"ExecutionEndDateTime": "2023-01-01T12:00:10Z",
"DocumentName": "AWS-RunShellScript",
"Comment": "",
}
mock_hook.get_command_invocation.side_effect = [
mock_invocation_details_1,
mock_invocation_details_2,
]
result = operator.execute({})
# Verify hook calls
mock_hook.list_command_invocations.assert_called_once_with(self.command_id)
assert mock_hook.get_command_invocation.call_count == 2
mock_hook.get_command_invocation.assert_any_call(self.command_id, "i-111")
mock_hook.get_command_invocation.assert_any_call(self.command_id, "i-222")
# Verify returned data structure
expected_result = {
"command_id": self.command_id,
"invocations": [
{
"instance_id": "i-111",
"status": "Success",
"response_code": 0,
"standard_output": "Output 1",
"standard_error": "",
"execution_start_time": "2023-01-01T12:00:00Z",
"execution_end_time": "2023-01-01T12:00:05Z",
"document_name": "AWS-RunShellScript",
"comment": "",
},
{
"instance_id": "i-222",
"status": "Failed",
"response_code": 1,
"standard_output": "",
"standard_error": "Error occurred",
"execution_start_time": "2023-01-01T12:00:00Z",
"execution_end_time": "2023-01-01T12:00:10Z",
"document_name": "AWS-RunShellScript",
"comment": "",
},
],
}
assert result == expected_result
def test_execute_all_instances_with_error(self, mock_hook):
# Setup operator without instance_id
operator = SsmGetCommandInvocationOperator(
task_id="test_get_all_with_error",
command_id=self.command_id,
)
# Mock list_command_invocations response
mock_invocations = [{"InstanceId": "i-111"}]
mock_hook.list_command_invocations.return_value = {"CommandInvocations": mock_invocations}
# Mock get_command_invocation to raise an exception
mock_hook.get_command_invocation.side_effect = Exception("API Error")
result = operator.execute({})
# Verify error handling
expected_result = {
"command_id": self.command_id,
"invocations": [{"instance_id": "i-111", "error": "API Error"}],
}
assert result == expected_result
def test_template_fields(self):
validate_template_fields(self.operator)
def test_exit_code_routing_use_case(self, mock_hook):
"""
Test that demonstrates the exit code routing use case.
This test verifies that SsmGetCommandInvocationOperator correctly retrieves
exit codes and status information that can be used for workflow routing,
particularly when used with SsmRunCommandOperator in enhanced mode
(fail_on_nonzero_exit=False).
"""
# Mock response with various exit codes that might be used for routing
mock_invocation_details = {
"Status": "Failed", # Command failed but we want to route based on exit code
"ResponseCode": 42, # Custom exit code for specific routing logic
"StandardOutputContent": "Partial success - some items processed",
"StandardErrorContent": "Warning: 3 items skipped",
"ExecutionStartDateTime": "2023-01-01T12:00:00Z",
"ExecutionEndDateTime": "2023-01-01T12:00:05Z",
"DocumentName": "AWS-RunShellScript",
"Comment": "Data processing script",
}
mock_hook.get_command_invocation.return_value = mock_invocation_details
result = self.operator.execute({})
# Verify that response_code is available for routing decisions
assert result["invocations"][0]["response_code"] == 42
assert result["invocations"][0]["status"] == "Failed"
# Verify that output is available for additional context
assert "Partial success" in result["invocations"][0]["standard_output"]
assert "Warning" in result["invocations"][0]["standard_error"]
# This demonstrates that the operator provides all necessary information
# for downstream tasks to make routing decisions based on exit codes,
# which is the key use case for the enhanced mode feature.
def test_multiple_exit_codes_for_routing(self, mock_hook):
"""
Test retrieving multiple instances with different exit codes for routing.
This demonstrates a common pattern where a command runs on multiple instances
and downstream tasks need to route based on the exit codes from each instance.
"""
operator = SsmGetCommandInvocationOperator(
task_id="test_multi_instance_routing",
command_id=self.command_id,
)
# Mock list_command_invocations response
mock_invocations = [
{"InstanceId": "i-success"},
{"InstanceId": "i-partial"},
{"InstanceId": "i-failed"},
]
mock_hook.list_command_invocations.return_value = {"CommandInvocations": mock_invocations}
# Mock different exit codes for routing scenarios
mock_hook.get_command_invocation.side_effect = [
{
"Status": "Success",
"ResponseCode": 0, # Complete success
"StandardOutputContent": "All items processed",
"StandardErrorContent": "",
"ExecutionStartDateTime": "2023-01-01T12:00:00Z",
"ExecutionEndDateTime": "2023-01-01T12:00:05Z",
"DocumentName": "AWS-RunShellScript",
"Comment": "",
},
{
"Status": "Failed",
"ResponseCode": 2, # Partial success - custom exit code
"StandardOutputContent": "Some items processed",
"StandardErrorContent": "Warning: partial completion",
"ExecutionStartDateTime": "2023-01-01T12:00:00Z",
"ExecutionEndDateTime": "2023-01-01T12:00:10Z",
"DocumentName": "AWS-RunShellScript",
"Comment": "",
},
{
"Status": "Failed",
"ResponseCode": 1, # Complete failure
"StandardOutputContent": "",
"StandardErrorContent": "Error: operation failed",
"ExecutionStartDateTime": "2023-01-01T12:00:00Z",
"ExecutionEndDateTime": "2023-01-01T12:00:08Z",
"DocumentName": "AWS-RunShellScript",
"Comment": "",
},
]
result = operator.execute({})
# Verify all exit codes are captured for routing logic
assert len(result["invocations"]) == 3
assert result["invocations"][0]["response_code"] == 0
assert result["invocations"][1]["response_code"] == 2
assert result["invocations"][2]["response_code"] == 1
# This demonstrates that the operator can retrieve exit codes from multiple
# instances, enabling complex routing logic based on the results from each instance.
| {
"repo_id": "apache/airflow",
"file_path": "providers/amazon/tests/unit/amazon/aws/operators/test_ssm.py",
"license": "Apache License 2.0",
"lines": 468,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/amazon/tests/unit/amazon/aws/sensors/test_ssm.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
import pytest
from airflow.providers.amazon.aws.hooks.ssm import SsmHook
from airflow.providers.amazon.aws.sensors.ssm import SsmRunCommandCompletedSensor
COMMAND_ID = "123e4567-e89b-12d3-a456-426614174000"
@pytest.fixture
def mock_ssm_list_invocations():
def _setup(mock_conn: mock.MagicMock, state: str):
mock_conn.list_command_invocations.return_value = {
"CommandInvocations": [
{"CommandId": COMMAND_ID, "InstanceId": "i-1234567890abcdef0", "Status": state}
]
}
return _setup
class TestSsmRunCommandCompletedSensor:
SENSOR = SsmRunCommandCompletedSensor
def setup_method(self):
self.default_op_kwarg = dict(
task_id="test_ssm_run_command_sensor",
command_id=COMMAND_ID,
poke_interval=5,
max_retries=1,
)
self.sensor = self.SENSOR(**self.default_op_kwarg)
def test_base_aws_op_attributes(self):
op = self.SENSOR(**self.default_op_kwarg)
assert op.hook.aws_conn_id == "aws_default"
assert op.hook._region_name is None
assert op.hook._verify is None
assert op.hook._config is None
op = self.SENSOR(
**self.default_op_kwarg,
aws_conn_id="aws-test-custom-conn",
region_name="eu-west-1",
verify=False,
botocore_config={"read_timeout": 42},
)
assert op.hook.aws_conn_id == "aws-test-custom-conn"
assert op.hook._region_name == "eu-west-1"
assert op.hook._verify is False
assert op.hook._config is not None
assert op.hook._config.read_timeout == 42
@pytest.mark.parametrize("state", SENSOR.SUCCESS_STATES)
@mock.patch.object(SsmHook, "conn")
def test_poke_success_states(self, mock_conn, state, mock_ssm_list_invocations):
mock_ssm_list_invocations(mock_conn, state)
self.sensor.hook.conn = mock_conn
assert self.sensor.poke({}) is True
@pytest.mark.parametrize("state", SENSOR.INTERMEDIATE_STATES)
@mock.patch.object(SsmHook, "conn")
def test_poke_intermediate_states(self, mock_conn, state, mock_ssm_list_invocations):
mock_ssm_list_invocations(mock_conn, state)
self.sensor.hook.conn = mock_conn
assert self.sensor.poke({}) is False
@pytest.mark.parametrize("state", SENSOR.FAILURE_STATES)
@mock.patch.object(SsmHook, "conn")
def test_poke_failure_states(self, mock_conn, state, mock_ssm_list_invocations):
mock_ssm_list_invocations(mock_conn, state)
with pytest.raises(RuntimeError, match=self.SENSOR.FAILURE_MESSAGE):
self.sensor.poke({})
@mock.patch.object(SsmHook, "conn")
def test_sensor_default_fails_on_failed_status(self, mock_conn, mock_ssm_list_invocations):
"""Test that sensor fails on Failed status in traditional mode (fail_on_nonzero_exit=True)."""
mock_ssm_list_invocations(mock_conn, "Failed")
self.sensor.hook.conn = mock_conn
with pytest.raises(RuntimeError, match=self.SENSOR.FAILURE_MESSAGE):
self.sensor.poke({})
@mock.patch.object(SsmHook, "conn")
def test_sensor_enhanced_mode_tolerates_failed_status(self, mock_conn, mock_ssm_list_invocations):
"""Test that sensor tolerates Failed status in enhanced mode (fail_on_nonzero_exit=False)."""
sensor = self.SENSOR(**self.default_op_kwarg, fail_on_nonzero_exit=False)
mock_ssm_list_invocations(mock_conn, "Failed")
sensor.hook.conn = mock_conn
assert sensor.poke({}) is True
@mock.patch.object(SsmHook, "conn")
def test_sensor_enhanced_mode_fails_on_timeout(self, mock_conn, mock_ssm_list_invocations):
"""Test that sensor still fails on TimedOut status in enhanced mode."""
sensor = self.SENSOR(**self.default_op_kwarg, fail_on_nonzero_exit=False)
mock_ssm_list_invocations(mock_conn, "TimedOut")
sensor.hook.conn = mock_conn
with pytest.raises(RuntimeError, match=f"SSM command {COMMAND_ID} TimedOut"):
sensor.poke({})
@mock.patch.object(SsmHook, "conn")
def test_sensor_enhanced_mode_fails_on_cancelled(self, mock_conn, mock_ssm_list_invocations):
"""Test that sensor still fails on Cancelled status in enhanced mode."""
sensor = self.SENSOR(**self.default_op_kwarg, fail_on_nonzero_exit=False)
mock_ssm_list_invocations(mock_conn, "Cancelled")
sensor.hook.conn = mock_conn
with pytest.raises(RuntimeError, match=f"SSM command {COMMAND_ID} Cancelled"):
sensor.poke({})
@mock.patch("airflow.providers.amazon.aws.sensors.ssm.SsmRunCommandTrigger")
def test_sensor_passes_parameter_to_trigger(self, mock_trigger_class):
"""Test that fail_on_nonzero_exit parameter is passed correctly to trigger in deferrable mode."""
sensor = self.SENSOR(**self.default_op_kwarg, fail_on_nonzero_exit=False, deferrable=True)
with mock.patch.object(sensor, "defer") as mock_defer:
sensor.execute({})
# Verify defer was called
assert mock_defer.called
# Verify the trigger was instantiated with correct parameters
mock_trigger_class.assert_called_once()
call_kwargs = mock_trigger_class.call_args[1]
assert call_kwargs["command_id"] == COMMAND_ID
assert call_kwargs["fail_on_nonzero_exit"] is False
| {
"repo_id": "apache/airflow",
"file_path": "providers/amazon/tests/unit/amazon/aws/sensors/test_ssm.py",
"license": "Apache License 2.0",
"lines": 120,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/amazon/tests/unit/amazon/aws/triggers/test_ssm.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
import pytest
from botocore.exceptions import WaiterError
from airflow.providers.amazon.aws.hooks.ssm import SsmHook
from airflow.providers.amazon.aws.triggers.ssm import SsmRunCommandTrigger
from airflow.providers.common.compat.sdk import AirflowException
from airflow.triggers.base import TriggerEvent
from unit.amazon.aws.utils.test_waiter import assert_expected_waiter_type
BASE_TRIGGER_CLASSPATH = "airflow.providers.amazon.aws.triggers.ssm."
EXPECTED_WAITER_NAME = "command_executed"
COMMAND_ID = "123e4567-e89b-12d3-a456-426614174000"
INSTANCE_ID_1 = "i-1234567890abcdef0"
INSTANCE_ID_2 = "i-1234567890abcdef1"
@pytest.fixture
def mock_ssm_list_invocations():
def _setup(mock_get_async_conn):
mock_client = mock.MagicMock()
mock_get_async_conn.return_value.__aenter__.return_value = mock_client
mock_client.list_command_invocations = mock.AsyncMock(
return_value={
"CommandInvocations": [
{"CommandId": COMMAND_ID, "InstanceId": INSTANCE_ID_1},
{"CommandId": COMMAND_ID, "InstanceId": INSTANCE_ID_2},
]
}
)
return mock_client
return _setup
class TestSsmRunCommandTrigger:
def test_serialization(self):
trigger = SsmRunCommandTrigger(command_id=COMMAND_ID)
classpath, kwargs = trigger.serialize()
assert classpath == BASE_TRIGGER_CLASSPATH + "SsmRunCommandTrigger"
assert kwargs.get("command_id") == COMMAND_ID
def test_serialization_with_region(self):
"""Test that region_name and other AWS parameters are properly serialized."""
trigger = SsmRunCommandTrigger(
command_id=COMMAND_ID,
region_name="us-east-1",
aws_conn_id="test_conn",
verify=True,
botocore_config={"retries": {"max_attempts": 3}},
)
classpath, kwargs = trigger.serialize()
assert classpath == BASE_TRIGGER_CLASSPATH + "SsmRunCommandTrigger"
assert kwargs.get("command_id") == COMMAND_ID
assert kwargs.get("region_name") == "us-east-1"
assert kwargs.get("aws_conn_id") == "test_conn"
assert kwargs.get("verify") is True
assert kwargs.get("botocore_config") == {"retries": {"max_attempts": 3}}
@pytest.mark.asyncio
@mock.patch.object(SsmHook, "get_async_conn")
@mock.patch.object(SsmHook, "get_waiter")
async def test_run_success(self, mock_get_waiter, mock_get_async_conn, mock_ssm_list_invocations):
mock_client = mock_ssm_list_invocations(mock_get_async_conn)
mock_get_waiter().wait = mock.AsyncMock(name="wait")
trigger = SsmRunCommandTrigger(command_id=COMMAND_ID)
generator = trigger.run()
response = await generator.asend(None)
assert response == TriggerEvent({"status": "success", "command_id": COMMAND_ID})
assert_expected_waiter_type(mock_get_waiter, EXPECTED_WAITER_NAME)
assert mock_get_waiter().wait.call_count == 2
mock_get_waiter().wait.assert_any_call(
CommandId=COMMAND_ID, InstanceId=INSTANCE_ID_1, WaiterConfig={"MaxAttempts": 1}
)
mock_get_waiter().wait.assert_any_call(
CommandId=COMMAND_ID, InstanceId=INSTANCE_ID_2, WaiterConfig={"MaxAttempts": 1}
)
mock_client.list_command_invocations.assert_called_once_with(CommandId=COMMAND_ID)
@pytest.mark.asyncio
@mock.patch.object(SsmHook, "get_async_conn")
@mock.patch.object(SsmHook, "get_waiter")
async def test_run_fails(self, mock_get_waiter, mock_get_async_conn, mock_ssm_list_invocations):
mock_client = mock_ssm_list_invocations(mock_get_async_conn)
mock_get_waiter().wait.side_effect = WaiterError(
"name", "terminal failure", {"CommandInvocations": [{"CommandId": COMMAND_ID}]}
)
# Mock get_command_invocation to return AWS-level failure
mock_client.get_command_invocation = mock.AsyncMock(
return_value={"Status": "TimedOut", "ResponseCode": -1}
)
trigger = SsmRunCommandTrigger(command_id=COMMAND_ID)
generator = trigger.run()
with pytest.raises(AirflowException):
await generator.asend(None)
@pytest.mark.asyncio
@mock.patch("airflow.providers.amazon.aws.triggers.ssm.async_wait")
@mock.patch.object(SsmHook, "get_async_conn")
@mock.patch.object(SsmHook, "get_waiter")
async def test_trigger_default_fails_on_waiter_error(
self, mock_get_waiter, mock_get_async_conn, mock_async_wait, mock_ssm_list_invocations
):
"""Test traditional mode (fail_on_nonzero_exit=True) raises exception on waiter error."""
mock_client = mock_ssm_list_invocations(mock_get_async_conn)
mock_async_wait.side_effect = AirflowException("SSM run command failed.")
# Mock get_command_invocation to return AWS-level failure
mock_client.get_command_invocation = mock.AsyncMock(
return_value={"Status": "Cancelled", "ResponseCode": -1}
)
trigger = SsmRunCommandTrigger(command_id=COMMAND_ID, fail_on_nonzero_exit=True)
generator = trigger.run()
with pytest.raises(AirflowException):
await generator.asend(None)
@pytest.mark.asyncio
@mock.patch("airflow.providers.amazon.aws.triggers.ssm.async_wait")
@mock.patch.object(SsmHook, "get_async_conn")
@mock.patch.object(SsmHook, "get_waiter")
async def test_trigger_enhanced_mode_tolerates_failed_status(
self, mock_get_waiter, mock_get_async_conn, mock_async_wait, mock_ssm_list_invocations
):
"""Test enhanced mode (fail_on_nonzero_exit=False) tolerates Failed status."""
mock_client = mock_ssm_list_invocations(mock_get_async_conn)
# Mock async_wait to raise exception (simulating waiter failure)
mock_async_wait.side_effect = AirflowException("SSM run command failed.")
# Mock get_command_invocation to return Failed status
mock_client.get_command_invocation = mock.AsyncMock(
return_value={"Status": "Failed", "ResponseCode": 1}
)
trigger = SsmRunCommandTrigger(command_id=COMMAND_ID, fail_on_nonzero_exit=False)
generator = trigger.run()
response = await generator.asend(None)
assert response == TriggerEvent({"status": "success", "command_id": COMMAND_ID})
# Verify get_command_invocation was called for both instances
assert mock_client.get_command_invocation.call_count == 2
@pytest.mark.asyncio
@mock.patch("airflow.providers.amazon.aws.triggers.ssm.async_wait")
@mock.patch.object(SsmHook, "get_async_conn")
@mock.patch.object(SsmHook, "get_waiter")
async def test_trigger_enhanced_mode_fails_on_aws_errors(
self, mock_get_waiter, mock_get_async_conn, mock_async_wait, mock_ssm_list_invocations
):
"""Test enhanced mode (fail_on_nonzero_exit=False) still fails on AWS-level errors."""
mock_client = mock_ssm_list_invocations(mock_get_async_conn)
# Mock async_wait to raise exception (simulating waiter failure)
mock_async_wait.side_effect = AirflowException("SSM run command failed.")
# Mock get_command_invocation to return TimedOut status (AWS-level failure)
mock_client.get_command_invocation = mock.AsyncMock(
return_value={"Status": "TimedOut", "ResponseCode": -1}
)
trigger = SsmRunCommandTrigger(command_id=COMMAND_ID, fail_on_nonzero_exit=False)
generator = trigger.run()
with pytest.raises(AirflowException):
await generator.asend(None)
# Test with Cancelled status as well
mock_client.get_command_invocation = mock.AsyncMock(
return_value={"Status": "Cancelled", "ResponseCode": -1}
)
trigger = SsmRunCommandTrigger(command_id=COMMAND_ID, fail_on_nonzero_exit=False)
generator = trigger.run()
with pytest.raises(AirflowException):
await generator.asend(None)
def test_trigger_serialization_includes_parameter(self):
"""Test that fail_on_nonzero_exit parameter is properly serialized."""
trigger = SsmRunCommandTrigger(command_id=COMMAND_ID, fail_on_nonzero_exit=False)
classpath, kwargs = trigger.serialize()
assert classpath == BASE_TRIGGER_CLASSPATH + "SsmRunCommandTrigger"
assert kwargs.get("command_id") == COMMAND_ID
assert kwargs.get("fail_on_nonzero_exit") is False
# Test with default value (True)
trigger_default = SsmRunCommandTrigger(command_id=COMMAND_ID)
classpath, kwargs = trigger_default.serialize()
assert kwargs.get("fail_on_nonzero_exit") is True
@pytest.mark.asyncio
@mock.patch("airflow.providers.amazon.aws.triggers.ssm.async_wait")
@mock.patch.object(SsmHook, "get_async_conn")
@mock.patch.object(SsmHook, "get_waiter")
async def test_trigger_yields_failure_event_instead_of_raising(
self, mock_get_waiter, mock_get_async_conn, mock_async_wait, mock_ssm_list_invocations
):
"""Test that trigger yields failure event instead of raising exception for command failures."""
mock_client = mock_ssm_list_invocations(mock_get_async_conn)
# Mock async_wait to raise exception (simulating waiter failure)
mock_async_wait.side_effect = AirflowException("SSM run command failed.")
# Mock get_command_invocation to return Failed status with exit code 1
mock_client.get_command_invocation = mock.AsyncMock(
return_value={"Status": "Failed", "ResponseCode": 1}
)
trigger = SsmRunCommandTrigger(command_id=COMMAND_ID, fail_on_nonzero_exit=True)
generator = trigger.run()
response = await generator.asend(None)
# Should yield a failure event, not raise an exception
assert response.payload["status"] == "failed"
assert response.payload["command_id"] == COMMAND_ID
assert response.payload["exit_code"] == 1
assert response.payload["command_status"] == "Failed"
assert response.payload["instance_id"] == INSTANCE_ID_1
assert "Command failed with status Failed (exit code: 1)" in response.payload["message"]
@pytest.mark.asyncio
@mock.patch("airflow.providers.amazon.aws.triggers.ssm.async_wait")
@mock.patch.object(SsmHook, "get_async_conn")
@mock.patch.object(SsmHook, "get_waiter")
async def test_trigger_yields_failure_event_for_different_exit_codes(
self, mock_get_waiter, mock_get_async_conn, mock_async_wait, mock_ssm_list_invocations
):
"""Test that trigger properly captures different exit codes in failure events."""
mock_client = mock_ssm_list_invocations(mock_get_async_conn)
mock_async_wait.side_effect = AirflowException("SSM run command failed.")
# Test with exit code 2
mock_client.get_command_invocation = mock.AsyncMock(
return_value={"Status": "Failed", "ResponseCode": 2}
)
trigger = SsmRunCommandTrigger(command_id=COMMAND_ID, fail_on_nonzero_exit=True)
generator = trigger.run()
response = await generator.asend(None)
assert response.payload["status"] == "failed"
assert response.payload["exit_code"] == 2
assert response.payload["command_status"] == "Failed"
@pytest.mark.asyncio
@mock.patch("airflow.providers.amazon.aws.triggers.ssm.async_wait")
@mock.patch.object(SsmHook, "get_async_conn")
@mock.patch.object(SsmHook, "get_waiter")
async def test_trigger_continues_on_second_instance_after_first_fails(
self, mock_get_waiter, mock_get_async_conn, mock_async_wait, mock_ssm_list_invocations
):
"""Test that trigger stops after first failure and yields failure event."""
mock_client = mock_ssm_list_invocations(mock_get_async_conn)
# First instance fails
mock_async_wait.side_effect = AirflowException("SSM run command failed.")
mock_client.get_command_invocation = mock.AsyncMock(
return_value={"Status": "Failed", "ResponseCode": 1}
)
trigger = SsmRunCommandTrigger(command_id=COMMAND_ID, fail_on_nonzero_exit=True)
generator = trigger.run()
response = await generator.asend(None)
# Should yield failure event for first instance
assert response.payload["status"] == "failed"
assert response.payload["instance_id"] == INSTANCE_ID_1
# Should only call get_command_invocation once (for first instance)
assert mock_client.get_command_invocation.call_count == 1
| {
"repo_id": "apache/airflow",
"file_path": "providers/amazon/tests/unit/amazon/aws/triggers/test_ssm.py",
"license": "Apache License 2.0",
"lines": 246,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/tests/unit/api_fastapi/common/test_parameters.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import re
from typing import Annotated
import pytest
from fastapi import Depends, FastAPI, HTTPException
from sqlalchemy import select
from airflow.api_fastapi.common.parameters import FilterParam, SortParam, _SearchParam, filter_param_factory
from airflow.models import DagModel, DagRun, Log
class TestFilterParam:
def test_filter_param_factory_description(self):
app = FastAPI() # Create a FastAPI app to test OpenAPI generation
expected_descriptions = {
"dag_id": "Filter by Dag ID Description",
"task_id": "Filter by Task ID Description",
"map_index": None, # No description for map_index
"run_id": "Filter by Run ID Description",
}
@app.get("/test")
def test_route(
dag_id: Annotated[
FilterParam[str | None],
Depends(
filter_param_factory(Log.dag_id, str | None, description="Filter by Dag ID Description")
),
],
task_id: Annotated[
FilterParam[str | None],
Depends(
filter_param_factory(Log.task_id, str | None, description="Filter by Task ID Description")
),
],
map_index: Annotated[
FilterParam[int | None],
Depends(filter_param_factory(Log.map_index, int | None)),
],
run_id: Annotated[
FilterParam[str | None],
Depends(
filter_param_factory(
DagRun.run_id, str | None, description="Filter by Run ID Description"
)
),
],
):
return {"message": "test"}
# Get the OpenAPI spec
openapi_spec = app.openapi()
# Check if the description is in the parameters
parameters = openapi_spec["paths"]["/test"]["get"]["parameters"]
for param_name, expected_description in expected_descriptions.items():
param = next((p for p in parameters if p.get("name") == param_name), None)
assert param is not None, f"{param_name} parameter not found in OpenAPI"
if expected_description is None:
assert "description" not in param, (
f"Description should not be present in {param_name} parameter"
)
else:
assert "description" in param, f"Description not found in {param_name} parameter"
assert param["description"] == expected_description, (
f"Expected description '{expected_description}', got '{param['description']}'"
)
class TestSortParam:
def test_sort_param_max_number_of_filers(self):
param = SortParam([], None, None)
n_filters = param.MAX_SORT_PARAMS + 1
param.value = [f"filter_{i}" for i in range(n_filters)]
with pytest.raises(
HTTPException,
match=re.escape(
f"400: Ordering with more than {param.MAX_SORT_PARAMS} parameters is not allowed. Provided: {param.value}"
),
):
param.to_orm(None)
class TestSearchParam:
def test_to_orm_single_value(self):
"""Test search with a single term."""
param = _SearchParam(DagModel.dag_id).set_value("example_bash")
statement = select(DagModel)
statement = param.to_orm(statement)
sql = str(statement.compile(compile_kwargs={"literal_binds": True})).lower()
assert "dag_id" in sql
assert "like" in sql
def test_to_orm_multiple_values_or(self):
"""Test search with multiple terms using the pipe | operator."""
param = _SearchParam(DagModel.dag_id).set_value("example_bash | example_python")
statement = select(DagModel)
statement = param.to_orm(statement)
sql = str(statement.compile(compile_kwargs={"literal_binds": True}))
assert "OR" in sql
assert "example_bash" in sql
assert "example_python" in sql
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/api_fastapi/common/test_parameters.py",
"license": "Apache License 2.0",
"lines": 107,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/neo4j/tests/system/neo4j/example_neo4j_query.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example use of Neo4j related operators with parameters.
"""
from __future__ import annotations
import os
from datetime import datetime
from airflow import DAG
from airflow.providers.neo4j.operators.neo4j import Neo4jOperator
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
DAG_ID = "example_neo4j_query"
with DAG(
DAG_ID,
start_date=datetime(2025, 1, 1),
schedule=None,
tags=["example"],
catchup=False,
) as dag:
# [START run_query_neo4j_operator]
neo4j_task = Neo4jOperator(
task_id="run_neo4j_query_with_parameters",
neo4j_conn_id="neo4j_conn_id",
parameters={"name": "Tom Hanks"},
sql='MATCH (actor {name: $name, date: "{{ds}}"}) RETURN actor',
dag=dag,
)
# [END run_query_neo4j_operator]
from tests_common.test_utils.system_tests import get_test_run # noqa: E402
# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/neo4j/tests/system/neo4j/example_neo4j_query.py",
"license": "Apache License 2.0",
"lines": 46,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:task-sdk/src/airflow/sdk/timezone.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
# We don't want to `import *` here to avoid the risk of making adding too much to Public python API
from airflow.sdk._shared.timezones.timezone import (
coerce_datetime,
convert_to_utc,
datetime,
initialize,
make_naive,
parse,
utc,
utcnow,
)
try:
from airflow.sdk.configuration import conf
tz_str = conf.get_mandatory_value("core", "default_timezone")
initialize(tz_str)
except Exception:
initialize("UTC")
__all__ = [
"coerce_datetime",
"convert_to_utc",
"datetime",
"make_naive",
"parse",
"utc",
"utcnow",
]
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/timezone.py",
"license": "Apache License 2.0",
"lines": 43,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/dag_runs.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime
from pydantic import computed_field
from airflow.api_fastapi.core_api.base import BaseModel
from airflow.utils.state import DagRunState
class DAGRunLightResponse(BaseModel):
"""DAG Run serializer for responses."""
id: int
dag_id: str
run_id: str
logical_date: datetime | None
run_after: datetime
start_date: datetime | None
end_date: datetime | None
state: DagRunState
@computed_field
def duration(self) -> float | None:
if self.end_date and self.start_date:
return (self.end_date - self.start_date).total_seconds()
return None
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/dag_runs.py",
"license": "Apache License 2.0",
"lines": 36,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:dev/react-plugin-tools/bootstrap.py | #!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Bootstrap React Plugin CLI Tool.
This script provides a command-line interface to create new React UI plugin
directories based on the airflow-core/ui project structure. It sets up all the
necessary configuration files, dependencies, and basic structure for development
with the same tooling as used in Airflow's core UI.
"""
from __future__ import annotations
import argparse
import re
import shutil
import sys
from pathlib import Path
def get_template_dir() -> Path:
"""Get the template directory path."""
script_dir = Path(__file__).parent
template_dir = script_dir / "react_plugin_template"
if not template_dir.exists():
print(f"Error: Template directory not found at {template_dir}")
sys.exit(1)
return template_dir
def replace_template_variables(content: str, project_name: str) -> str:
"""Replace template variables in file content."""
return content.replace("{{PROJECT_NAME}}", project_name)
def remove_apache_license_header(content: str, file_extension: str) -> str:
"""Remove Apache license header from file content based on file type."""
if file_extension in [".ts", ".tsx", ".js", ".jsx"]:
license_pattern = r"/\*!\s*\*\s*Licensed to the Apache Software Foundation.*?\*/\s*"
content = re.sub(license_pattern, "", content, flags=re.DOTALL)
elif file_extension in [".md"]:
license_pattern = r"<!--\s*Licensed to the Apache Software Foundation.*?-->\s*"
content = re.sub(license_pattern, "", content, flags=re.DOTALL)
elif file_extension in [".html"]:
license_pattern = r"<!--\s*Licensed to the Apache Software Foundation.*?-->\s*"
content = re.sub(license_pattern, "", content, flags=re.DOTALL)
return content
def copy_template_files(template_dir: Path, project_path: Path, project_name: str) -> None:
for item in template_dir.rglob("*"):
if item.is_file():
# Calculate relative path from template root
rel_path = item.relative_to(template_dir)
target_path = project_path / rel_path
target_path.parent.mkdir(parents=True, exist_ok=True)
with open(item, encoding="utf-8") as f:
content = f.read()
content = replace_template_variables(content, project_name)
file_extension = item.suffix.lower()
content = remove_apache_license_header(content, file_extension)
with open(target_path, "w", encoding="utf-8") as f:
f.write(content)
print(f" Created: {rel_path}")
def bootstrap_react_plugin(args) -> None:
"""Bootstrap a new React plugin project."""
project_name = args.name
target_dir = args.dir if args.dir else project_name
project_path = Path(target_dir).resolve()
template_dir = get_template_dir()
if project_path.exists():
print(f"Error: Directory '{project_path}' already exists!")
sys.exit(1)
if not project_name.replace("-", "").replace("_", "").isalnum():
print("Error: Project name should only contain letters, numbers, hyphens, and underscores")
sys.exit(1)
print(f"Creating React plugin project: {project_name}")
print(f"Target directory: {project_path}")
print(f"Template directory: {template_dir}")
project_path.mkdir(parents=True, exist_ok=True)
try:
# Copy template files
print("Copying template files...")
copy_template_files(template_dir, project_path, project_name)
print(f"\n✅ Successfully created {project_name}!")
print("\nNext steps:")
print(f" cd {target_dir}")
print(" pnpm install")
print(" pnpm dev")
print("\nHappy coding! 🚀")
except Exception as e:
print(f"Error creating project: {e}")
if project_path.exists():
shutil.rmtree(project_path)
sys.exit(1)
def main():
"""Main CLI entry point."""
parser = argparse.ArgumentParser(
description="Bootstrap a new React UI plugin project",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog="""
Examples:
python bootstrap.py my-plugin
python bootstrap.py my-plugin --dir /path/to/projects/my-plugin
This will create a new React project with all the necessary configuration
files, dependencies, and structure needed for Airflow plugin development.
""",
)
parser.add_argument(
"name",
help="Name of the React plugin project (letters, numbers, hyphens, and underscores only)",
)
parser.add_argument(
"--dir",
"-d",
help="Target directory for the project (defaults to project name)",
)
parser.add_argument(
"--verbose",
"-v",
action="store_true",
help="Enable verbose output",
)
args = parser.parse_args()
try:
bootstrap_react_plugin(args)
except KeyboardInterrupt:
print("\n\nOperation cancelled by user.")
sys.exit(1)
except Exception as e:
print(f"Error: {e}")
sys.exit(1)
if __name__ == "__main__":
main()
| {
"repo_id": "apache/airflow",
"file_path": "dev/react-plugin-tools/bootstrap.py",
"license": "Apache License 2.0",
"lines": 138,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/standard/src/airflow/providers/standard/example_dags/example_hitl_operator.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
from typing import TYPE_CHECKING
import pendulum
from airflow.providers.standard.operators.hitl import (
ApprovalOperator,
HITLBranchOperator,
HITLEntryOperator,
HITLOperator,
)
from airflow.sdk import DAG, Param, task
from airflow.sdk.bases.notifier import BaseNotifier
if TYPE_CHECKING:
from airflow.providers.common.compat.sdk import Context
# [START hitl_tutorial]
# [START hitl_notifier]
class LocalLogNotifier(BaseNotifier):
"""Simple notifier to demonstrate HITL notification without setup any connection."""
template_fields = ("message",)
def __init__(self, message: str) -> None:
self.message = message
def notify(self, context: Context) -> None:
url = HITLOperator.generate_link_to_ui_from_context(
context=context,
base_url="http://localhost:28080",
)
self.log.info(self.message)
self.log.info("Url to respond %s", url)
hitl_request_callback = LocalLogNotifier(
message="""
[HITL]
Subject: {{ task.subject }}
Body: {{ task.body }}
Options: {{ task.options }}
Is Multiple Option: {{ task.multiple }}
Default Options: {{ task.defaults }}
Params: {{ task.params }}
"""
)
hitl_success_callback = LocalLogNotifier(
message="{% set task_id = task.task_id -%}{{ ti.xcom_pull(task_ids=task_id) }}"
)
hitl_failure_callback = LocalLogNotifier(message="Request to response to '{{ task.subject }}' failed")
# [END hitl_notifier]
with DAG(
dag_id="example_hitl_operator",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example", "HITL"],
):
# [START howto_hitl_entry_operator]
wait_for_input = HITLEntryOperator(
task_id="wait_for_input",
subject="Please provide required information: ",
params={"information": Param("", type="string")},
notifiers=[hitl_request_callback],
on_success_callback=hitl_success_callback,
on_failure_callback=hitl_failure_callback,
)
# [END howto_hitl_entry_operator]
# [START howto_hitl_operator]
wait_for_option = HITLOperator(
task_id="wait_for_option",
subject="Please choose one option to proceed: ",
options=["option 1", "option 2", "option 3"],
notifiers=[hitl_request_callback],
on_success_callback=hitl_success_callback,
on_failure_callback=hitl_failure_callback,
)
# [END howto_hitl_operator]
# [START howto_hitl_operator_multiple]
wait_for_multiple_options = HITLOperator(
task_id="wait_for_multiple_options",
subject="Please choose option to proceed: ",
options=["option 4", "option 5", "option 6"],
multiple=True,
notifiers=[hitl_request_callback],
on_success_callback=hitl_success_callback,
on_failure_callback=hitl_failure_callback,
)
# [END howto_hitl_operator_multiple]
# [START howto_hitl_operator_timeout]
wait_for_default_option = HITLOperator(
task_id="wait_for_default_option",
subject="Please choose option to proceed: ",
options=["option 7", "option 8", "option 9"],
defaults=["option 7"],
execution_timeout=datetime.timedelta(seconds=1),
notifiers=[hitl_request_callback],
on_success_callback=hitl_success_callback,
on_failure_callback=hitl_failure_callback,
)
# [END howto_hitl_operator_timeout]
# [START howto_hitl_approval_operator]
valid_input_and_options = ApprovalOperator(
task_id="valid_input_and_options",
subject="Are the following input and options valid?",
body="""
Input: {{ ti.xcom_pull(task_ids='wait_for_input')["params_input"]["information"] }}
Option: {{ ti.xcom_pull(task_ids='wait_for_option')["chosen_options"] }}
Multiple Options: {{ ti.xcom_pull(task_ids='wait_for_multiple_options')["chosen_options"] }}
Timeout Option: {{ ti.xcom_pull(task_ids='wait_for_default_option')["chosen_options"] }}
""",
defaults="Reject",
execution_timeout=datetime.timedelta(minutes=5),
notifiers=[hitl_request_callback],
on_success_callback=hitl_success_callback,
on_failure_callback=hitl_failure_callback,
assigned_users=[{"id": "1", "name": "airflow"}, {"id": "admin", "name": "admin"}],
)
# [END howto_hitl_approval_operator]
# [START howto_hitl_branch_operator]
choose_a_branch_to_run = HITLBranchOperator(
task_id="choose_a_branch_to_run",
subject="You're now allowed to proceeded. Please choose one task to run: ",
options=["task_1", "task_2", "task_3"],
notifiers=[hitl_request_callback],
on_success_callback=hitl_success_callback,
on_failure_callback=hitl_failure_callback,
)
# [END howto_hitl_branch_operator]
# [START howto_hitl_workflow]
@task
def task_1(): ...
@task
def task_2(): ...
@task
def task_3(): ...
(
[wait_for_input, wait_for_option, wait_for_default_option, wait_for_multiple_options]
>> valid_input_and_options
>> choose_a_branch_to_run
>> [task_1(), task_2(), task_3()]
)
# [END howto_hitl_workflow]
# [END hitl_tutorial]
| {
"repo_id": "apache/airflow",
"file_path": "providers/standard/src/airflow/providers/standard/example_dags/example_hitl_operator.py",
"license": "Apache License 2.0",
"lines": 152,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/tests/unit/dags/subdir1/test_negate_ignore.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime
from airflow.models.dag import DAG
from airflow.providers.standard.operators.bash import BashOperator
DEFAULT_DATE = datetime(2019, 12, 1)
dag = DAG(dag_id="test_dag_under_subdir1", start_date=DEFAULT_DATE, schedule=None)
task = BashOperator(task_id="task1", bash_command='echo "test dag under sub directory subdir2"', dag=dag)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/dags/subdir1/test_negate_ignore.py",
"license": "Apache License 2.0",
"lines": 24,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:helm-tests/tests/helm_tests/airflow_aux/test_job_launcher_role.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import jmespath
import pytest
from chart_utils.helm_template_generator import render_chart
class TestJobLauncher:
"""Tests job launcher RBAC."""
@pytest.mark.parametrize(
("executor", "rbac", "allow", "expected_accounts"),
[
("CeleryKubernetesExecutor", True, True, ["scheduler", "worker"]),
("KubernetesExecutor", True, True, ["scheduler", "worker"]),
("CeleryExecutor", True, True, ["worker"]),
("LocalExecutor", True, True, ["scheduler"]),
("LocalExecutor", False, False, []),
("CeleryExecutor,KubernetesExecutor", True, True, ["scheduler", "worker"]),
],
)
def test_job_launcher_rolebinding(self, executor, rbac, allow, expected_accounts):
docs = render_chart(
values={
"rbac": {"create": rbac},
"allowJobLaunching": allow,
"executor": executor,
},
show_only=["templates/rbac/job-launcher-rolebinding.yaml"],
)
if expected_accounts:
for idx, suffix in enumerate(expected_accounts):
assert f"release-name-airflow-{suffix}" == jmespath.search(f"subjects[{idx}].name", docs[0])
else:
assert docs == []
@pytest.mark.parametrize(
("multiNamespaceMode", "namespace", "expectedRole", "expectedRoleBinding"),
[
(
True,
"namespace",
"namespace-release-name-job-launcher-role",
"namespace-release-name-job-launcher-rolebinding",
),
(
True,
"other-ns",
"other-ns-release-name-job-launcher-role",
"other-ns-release-name-job-launcher-rolebinding",
),
(False, "namespace", "release-name-job-launcher-role", "release-name-job-launcher-rolebinding"),
],
)
def test_job_launcher_rolebinding_multi_namespace(
self, multiNamespaceMode, namespace, expectedRole, expectedRoleBinding
):
docs = render_chart(
namespace=namespace,
values={"allowJobLaunching": True, "multiNamespaceMode": multiNamespaceMode},
show_only=["templates/rbac/job-launcher-rolebinding.yaml"],
)
actualRoleBinding = jmespath.search("metadata.name", docs[0])
assert actualRoleBinding == expectedRoleBinding
actualRoleRef = jmespath.search("roleRef.name", docs[0])
assert actualRoleRef == expectedRole
actualKind = jmespath.search("kind", docs[0])
actualRoleRefKind = jmespath.search("roleRef.kind", docs[0])
if multiNamespaceMode:
assert actualKind == "ClusterRoleBinding"
assert actualRoleRefKind == "ClusterRole"
else:
assert actualKind == "RoleBinding"
assert actualRoleRefKind == "Role"
@pytest.mark.parametrize(
("multiNamespaceMode", "namespace", "expectedRole"),
[
(True, "namespace", "namespace-release-name-job-launcher-role"),
(True, "other-ns", "other-ns-release-name-job-launcher-role"),
(False, "namespace", "release-name-job-launcher-role"),
],
)
def test_job_launcher_role_multi_namespace(self, multiNamespaceMode, namespace, expectedRole):
docs = render_chart(
namespace=namespace,
values={"allowJobLaunching": True, "multiNamespaceMode": multiNamespaceMode},
show_only=["templates/rbac/job-launcher-role.yaml"],
)
actualRole = jmespath.search("metadata.name", docs[0])
assert actualRole == expectedRole
actualKind = jmespath.search("kind", docs[0])
if multiNamespaceMode:
assert actualKind == "ClusterRole"
else:
assert actualKind == "Role"
| {
"repo_id": "apache/airflow",
"file_path": "helm-tests/tests/helm_tests/airflow_aux/test_job_launcher_role.py",
"license": "Apache License 2.0",
"lines": 106,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/google/tests/unit/google/common/links/test_storage.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.providers.google.common.links.storage import FileDetailsLink, StorageLink
class TestStorageLink:
def test_storage_link(self):
assert StorageLink.name == "GCS Storage"
assert StorageLink.key == "storage_conf"
assert (
StorageLink.format_str
== "https://console.cloud.google.com/storage/browser/{uri};tab=objects?project={project_id}"
)
def test_storage_link_format(self):
link = StorageLink()
url = link._format_link(uri="test-bucket/test-folder", project_id="test-id")
expected_url = "https://console.cloud.google.com/storage/browser/test-bucket/test-folder;tab=objects?project=test-id"
assert url == expected_url
class TestFileDetailsLink:
def test_file_details_link_name_and_key(self):
assert FileDetailsLink.name == "GCS File Details"
assert FileDetailsLink.key == "file_details"
assert (
FileDetailsLink.format_str
== "https://console.cloud.google.com/storage/browser/_details/{uri};tab=live_object?project={project_id}"
)
def test_file_details_link_format(self):
link = FileDetailsLink()
url = link._format_link(uri="test-bucket/test-folder", project_id="test-id")
expected_url = "https://console.cloud.google.com/storage/browser/_details/test-bucket/test-folder;tab=live_object?project=test-id"
assert url == expected_url
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/tests/unit/google/common/links/test_storage.py",
"license": "Apache License 2.0",
"lines": 44,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/google/tests/unit/google/common/test_consts.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from google.api_core.gapic_v1.client_info import ClientInfo
from airflow import version
from airflow.providers.google.common.consts import (
CLIENT_INFO,
GOOGLE_DEFAULT_DEFERRABLE_METHOD_NAME,
)
def test_google_default_deferrable_method_name():
assert GOOGLE_DEFAULT_DEFERRABLE_METHOD_NAME == "execute_complete"
def test_client_info_instance():
assert isinstance(CLIENT_INFO, ClientInfo)
assert CLIENT_INFO.client_library_version == f"airflow_v{version.version}"
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/tests/unit/google/common/test_consts.py",
"license": "Apache License 2.0",
"lines": 28,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/redis/src/airflow/providers/redis/triggers/redis_await_message.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import asyncio
from typing import Any
from asgiref.sync import sync_to_async
from airflow.providers.redis.hooks.redis import RedisHook
from airflow.triggers.base import BaseTrigger, TriggerEvent
class AwaitMessageTrigger(BaseTrigger):
"""
A trigger that waits for a message matching specific criteria to arrive in Redis.
The behavior of this trigger is as follows:
- poll the Redis pubsub for a message, if no message returned, sleep
:param channels: The channels that should be searched for messages
:param redis_conn_id: The connection object to use, defaults to "redis_default"
:param poll_interval: How long the trigger should sleep after reaching the end of the Redis log
(seconds), defaults to 60
"""
def __init__(
self,
channels: list[str] | str,
redis_conn_id: str = "redis_default",
poll_interval: float = 60,
) -> None:
self.channels = channels
self.redis_conn_id = redis_conn_id
self.poll_interval = poll_interval
def serialize(self) -> tuple[str, dict[str, Any]]:
return (
"airflow.providers.redis.triggers.redis_await_message.AwaitMessageTrigger",
{
"channels": self.channels,
"redis_conn_id": self.redis_conn_id,
"poll_interval": self.poll_interval,
},
)
async def run(self):
hook = RedisHook(redis_conn_id=self.redis_conn_id).get_conn().pubsub()
hook.subscribe(self.channels)
async_get_message = sync_to_async(hook.get_message)
while True:
message = await async_get_message()
if message and message["type"] == "message":
if "channel" in message and isinstance(message["channel"], bytes):
message["channel"] = message["channel"].decode("utf-8")
if "data" in message and isinstance(message["data"], bytes):
message["data"] = message["data"].decode("utf-8")
yield TriggerEvent(message)
break
await asyncio.sleep(self.poll_interval)
| {
"repo_id": "apache/airflow",
"file_path": "providers/redis/src/airflow/providers/redis/triggers/redis_await_message.py",
"license": "Apache License 2.0",
"lines": 64,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/redis/tests/unit/redis/triggers/test_redis_await_message.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import asyncio
from unittest.mock import patch
import pytest
from airflow.providers.redis.triggers.redis_await_message import AwaitMessageTrigger
class TestAwaitMessageTrigger:
def test_trigger_serialization(self):
trigger = AwaitMessageTrigger(
channels=["test_channel"],
redis_conn_id="redis_default",
poll_interval=30,
)
assert isinstance(trigger, AwaitMessageTrigger)
classpath, kwargs = trigger.serialize()
assert classpath == "airflow.providers.redis.triggers.redis_await_message.AwaitMessageTrigger"
assert kwargs == dict(
channels=["test_channel"],
redis_conn_id="redis_default",
poll_interval=30,
)
@patch("airflow.providers.redis.hooks.redis.RedisHook.get_conn")
@pytest.mark.asyncio
async def test_trigger_run_succeed(self, mock_redis_conn):
trigger = AwaitMessageTrigger(
channels="test",
redis_conn_id="redis_default",
poll_interval=0.0001,
)
mock_redis_conn().pubsub().get_message.return_value = {
"type": "message",
"channel": "test",
"data": "d1",
}
trigger_gen = trigger.run()
task = asyncio.create_task(trigger_gen.__anext__())
event = await task
assert task.done() is True
assert event.payload["data"] == "d1"
assert event.payload["channel"] == "test"
asyncio.get_event_loop().stop()
@patch("airflow.providers.redis.hooks.redis.RedisHook.get_conn")
@pytest.mark.asyncio
async def test_trigger_run_succeed_with_bytes(self, mock_redis_conn):
trigger = AwaitMessageTrigger(
channels="test",
redis_conn_id="redis_default",
poll_interval=0.0001,
)
mock_redis_conn().pubsub().get_message.return_value = {
"type": "message",
"channel": b"test",
"data": b"d1",
}
trigger_gen = trigger.run()
task = asyncio.create_task(trigger_gen.__anext__())
event = await task
assert task.done() is True
assert event.payload["data"] == "d1"
assert event.payload["channel"] == "test"
asyncio.get_event_loop().stop()
@patch("airflow.providers.redis.hooks.redis.RedisHook.get_conn")
@pytest.mark.asyncio
async def test_trigger_run_fail(self, mock_redis_conn):
trigger = AwaitMessageTrigger(
channels="test",
redis_conn_id="redis_default",
poll_interval=0.01,
)
mock_redis_conn().pubsub().get_message.return_value = {
"type": "subscribe",
"channel": "test",
"data": "d1",
}
trigger_gen = trigger.run()
task = asyncio.create_task(trigger_gen.__anext__())
await asyncio.sleep(1.0)
assert task.done() is False
task.cancel()
asyncio.get_event_loop().stop()
| {
"repo_id": "apache/airflow",
"file_path": "providers/redis/tests/unit/redis/triggers/test_redis_await_message.py",
"license": "Apache License 2.0",
"lines": 96,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/fab/tests/unit/fab/db_manager/test_fab_db_manager.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
import pytest
from sqlalchemy import Table
from airflow.providers.common.compat.sdk import AirflowException
from airflow.utils.db import initdb
from airflow.utils.db_manager import RunDBManager
from tests_common.test_utils.config import conf_vars
pytestmark = [pytest.mark.db_test]
class TestRunDBManagerWithFab:
@conf_vars(
{("database", "external_db_managers"): "airflow.providers.fab.auth_manager.models.db.FABDBManager"}
)
def test_db_manager_uses_config(self):
from airflow.providers.fab.auth_manager.models.db import FABDBManager
run_db_manager = RunDBManager()
assert FABDBManager in run_db_manager._managers
@conf_vars(
{("database", "external_db_managers"): "airflow.providers.fab.auth_manager.models.db.FABDBManager"}
)
def test_defining_table_same_name_as_airflow_table_name_raises(self):
from sqlalchemy import Column, Integer, String
run_db_manager = RunDBManager()
metadata = run_db_manager._managers[0].metadata
# Add dag_run table to metadata
mytable = Table(
"dag_run", metadata, Column("id", Integer, primary_key=True), Column("name", String(50))
)
metadata._add_table("dag_run", None, mytable)
with pytest.raises(AirflowException, match="Table 'dag_run' already exists in the Airflow metadata"):
run_db_manager.validate()
metadata._remove_table("dag_run", None)
@mock.patch.object(RunDBManager, "upgradedb")
@mock.patch.object(RunDBManager, "initdb")
def test_init_db_calls_rundbmanager(self, mock_initdb, mock_upgrade_db, session):
initdb(session=session)
mock_initdb.assert_called()
mock_initdb.assert_called_once_with(session)
@conf_vars(
{("database", "external_db_managers"): "airflow.providers.fab.auth_manager.models.db.FABDBManager"}
)
@mock.patch("airflow.providers.fab.auth_manager.models.db.FABDBManager")
def test_rundbmanager_calls_dbmanager_methods(self, mock_fabdb_manager, session):
mock_fabdb_manager.supports_table_dropping = True
fabdb_manager = mock_fabdb_manager.return_value
ext_db = RunDBManager()
# initdb
ext_db.initdb(session=session)
fabdb_manager.initdb.assert_called_once()
# upgradedb
ext_db.upgradedb(session=session)
fabdb_manager.upgradedb.assert_called_once()
# drop_tables
connection = mock.MagicMock()
ext_db.drop_tables(session, connection)
mock_fabdb_manager.return_value.drop_tables.assert_called_once_with(connection)
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/tests/unit/fab/db_manager/test_fab_db_manager.py",
"license": "Apache License 2.0",
"lines": 72,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/fab/tests/unit/fab/plugins/test_plugin.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from fastapi import FastAPI
from flask import Blueprint
from flask_appbuilder import BaseView as AppBuilderBaseView, expose
from starlette.middleware.base import BaseHTTPMiddleware
# This is the class you derive to create a plugin
from airflow.providers.common.compat.sdk import AirflowPlugin
from airflow.task.priority_strategy import PriorityWeightStrategy
from airflow.timetables.interval import CronDataIntervalTimetable
from tests_common.test_utils.mock_operators import (
AirflowLink,
AirflowLink2,
CustomBaseIndexOpLink,
CustomOpLink,
GithubLink,
GoogleLink,
)
from unit.listeners import empty_listener
from unit.listeners.class_listener import ClassBasedListener
pytestmark = pytest.mark.db_test
# Will show up under airflow.macros.test_plugin.plugin_macro
def plugin_macro():
pass
# Creating a flask appbuilder BaseView
class PluginTestAppBuilderBaseView(AppBuilderBaseView):
default_view = "test"
@expose("/")
def test(self):
return self.render_template("test_plugin/test.html", content="Hello galaxy!")
v_appbuilder_view = PluginTestAppBuilderBaseView()
v_appbuilder_package = {
"name": "Test View",
"category": "Test Plugin",
"view": v_appbuilder_view,
"label": "Test Label",
}
v_nomenu_appbuilder_package = {"view": v_appbuilder_view}
# Creating flask appbuilder Menu Items
appbuilder_mitem = {
"name": "Google",
"href": "https://www.google.com",
"category": "Search",
}
appbuilder_mitem_toplevel = {
"name": "apache",
"href": "https://www.apache.org/",
"label": "The Apache Software Foundation",
}
# Creating a flask blueprint to integrate the templates and static folder
bp = Blueprint(
"test_plugin",
__name__,
template_folder="templates", # registers airflow/plugins/templates as a Jinja template folder
static_folder="static",
static_url_path="/static/test_plugin",
)
app = FastAPI()
app_with_metadata = {"app": app, "url_prefix": "/some_prefix", "name": "Name of the App"}
class DummyMiddleware(BaseHTTPMiddleware):
async def dispatch(self, request, call_next):
return await call_next(request)
middleware_with_metadata = {
"middleware": DummyMiddleware,
"args": [],
"kwargs": {},
"name": "Name of the Middleware",
}
external_view_with_metadata = {
"name": "Test IFrame Airflow Docs",
"href": "https://airflow.apache.org/",
"icon": "https://raw.githubusercontent.com/lucide-icons/lucide/refs/heads/main/icons/plug.svg",
"url_route": "test_iframe_plugin",
"destination": "nav",
"category": "browse",
}
react_app_with_metadata = {
"name": "Test React App",
"bundle_url": "https://example.com/test-plugin-bundle.js",
"icon": "https://raw.githubusercontent.com/lucide-icons/lucide/refs/heads/main/icons/plug.svg",
"url_route": "test_react_app",
"destination": "nav",
"category": "browse",
}
# Extend an existing class to avoid the need to implement the full interface
class CustomCronDataIntervalTimetable(CronDataIntervalTimetable):
pass
class CustomPriorityWeightStrategy(PriorityWeightStrategy):
def get_weight(self, ti):
return 1
# Defining the plugin class
class AirflowTestPlugin(AirflowPlugin):
name = "test_plugin"
macros = [plugin_macro]
flask_blueprints = [bp]
fastapi_apps = [app_with_metadata]
fastapi_root_middlewares = [middleware_with_metadata]
external_views = [external_view_with_metadata]
react_apps = [react_app_with_metadata]
appbuilder_views = [v_appbuilder_package]
appbuilder_menu_items = [appbuilder_mitem, appbuilder_mitem_toplevel]
global_operator_extra_links = [
AirflowLink(),
GithubLink(),
]
operator_extra_links = [GoogleLink(), AirflowLink2(), CustomOpLink(), CustomBaseIndexOpLink(1)]
timetables = [CustomCronDataIntervalTimetable]
listeners = [empty_listener, ClassBasedListener()]
priority_weight_strategies = [CustomPriorityWeightStrategy]
class MockPluginA(AirflowPlugin):
name = "plugin-a"
class MockPluginB(AirflowPlugin):
name = "plugin-b"
class MockPluginC(AirflowPlugin):
name = "plugin-c"
class AirflowTestOnLoadPlugin(AirflowPlugin):
name = "preload"
def on_load(self, *args, **kwargs):
self.name = "postload"
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/tests/unit/fab/plugins/test_plugin.py",
"license": "Apache License 2.0",
"lines": 136,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-ctl/src/airflowctl/ctl/console_formatting.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import sys
from collections.abc import Callable, Sequence
from typing import TYPE_CHECKING, Any
from rich.box import ASCII_DOUBLE_HEAD
from rich.console import Console
from rich.syntax import Syntax
from rich.table import Table
from tabulate import tabulate
from airflowctl.ctl.utils import yaml
if TYPE_CHECKING:
from typing import TypeGuard
# TODO (bugraoz93): Use Vendor Approach and unify with airflow.platform for core-ctl
def is_tty():
"""Check if stdout is connected (is associated with a terminal device) to a tty(-like) device."""
if not hasattr(sys.stdout, "isatty"):
return False
return sys.stdout.isatty()
def is_data_sequence(data: Sequence[dict | Any]) -> TypeGuard[Sequence[dict]]:
return all(isinstance(d, dict) for d in data)
class AirflowConsole(Console):
"""Airflow rich console."""
def __init__(self, show_header: bool = True, *args, **kwargs):
super().__init__(*args, **kwargs)
# Set the width to constant to pipe whole output from console
self._width = 200 if not is_tty() else self._width
# If show header in tables
self.show_header = show_header
def print_as_json(self, data: dict):
"""Render dict as json text representation."""
json_content = json.dumps(data)
self.print(Syntax(json_content, "json", theme="ansi_dark"), soft_wrap=True)
def print_as_yaml(self, data: dict):
"""Render dict as yaml text representation."""
yaml_content = yaml.dump(data)
self.print(Syntax(yaml_content, "yaml", theme="ansi_dark"), soft_wrap=True)
def print_as_table(self, data: list[dict]):
"""Render list of dictionaries as table."""
if not data:
self.print("No data found")
return
table = SimpleTable(show_header=self.show_header)
for col in data[0]:
table.add_column(col)
for row in data:
table.add_row(*(str(d) for d in row.values()))
self.print(table)
def print_as_plain_table(self, data: list[dict]):
"""Render list of dictionaries as a simple table than can be easily piped."""
if not data:
self.print("No data found")
return
rows = [d.values() for d in data]
output = tabulate(rows, tablefmt="plain", headers=list(data[0]))
self.print(output)
def _normalize_data(self, value: Any, output: str) -> list | str | dict | None:
if isinstance(value, (tuple, list)):
if output == "table":
return ",".join(str(self._normalize_data(x, output)) for x in value)
return [self._normalize_data(x, output) for x in value]
if isinstance(value, dict) and output != "table":
return {k: self._normalize_data(v, output) for k, v in value.items()}
if value is None:
return None
return str(value)
def print_as(
self,
data: Sequence[dict | Any],
output: str,
mapper: Callable[[Any], dict] | None = None,
) -> None:
"""Print provided using format specified by output argument."""
output_to_renderer: dict[str, Callable[[Any], None]] = {
"json": self.print_as_json,
"yaml": self.print_as_yaml,
"table": self.print_as_table,
"plain": self.print_as_plain_table,
}
renderer = output_to_renderer.get(output)
if not renderer:
raise ValueError(f"Unknown formatter: {output}. Allowed options: {list(output_to_renderer)}")
if mapper:
dict_data: Sequence[dict] = [mapper(d) for d in data]
elif is_data_sequence(data):
dict_data = data
else:
raise ValueError("To tabulate non-dictionary data you need to provide `mapper` function")
dict_data = [{k: self._normalize_data(v, output) for k, v in d.items()} for d in dict_data]
renderer(dict_data)
class SimpleTable(Table):
"""A rich Table with some default hardcoded for consistency."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.show_edge = kwargs.get("show_edge", False)
self.pad_edge = kwargs.get("pad_edge", False)
self.box = kwargs.get("box", ASCII_DOUBLE_HEAD)
self.show_header = kwargs.get("show_header", False)
self.title_style = kwargs.get("title_style", "bold green")
self.title_justify = kwargs.get("title_justify", "left")
self.caption = kwargs.get("caption", " ")
def add_column(self, *args, **kwargs) -> None:
"""Add a column to the table. We use different default."""
kwargs["overflow"] = kwargs.get("overflow") # to avoid truncating
super().add_column(*args, **kwargs)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-ctl/src/airflowctl/ctl/console_formatting.py",
"license": "Apache License 2.0",
"lines": 121,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-ctl/src/airflowctl/ctl/utils/yaml.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# TODO (bugraoz93): Use Vendor Approach and unify with airflow.utils.yaml for core-ctl
"""
Use libyaml for YAML dump/load operations where possible.
If libyaml is available we will use it -- it is significantly faster.
This module delegates all other properties to the yaml module, so it can be used as:
.. code-block:: python
import airflow.utils.yaml as yaml
And then be used directly in place of the normal python module.
"""
from __future__ import annotations
from typing import TYPE_CHECKING, Any, BinaryIO, TextIO, cast
if TYPE_CHECKING:
from yaml.error import MarkedYAMLError, YAMLError # noqa: F401
def safe_load(stream: bytes | str | BinaryIO | TextIO) -> Any:
"""Like yaml.safe_load, but use the C libyaml for speed where we can."""
# delay import until use.
from yaml import load as orig
try:
from yaml import CSafeLoader as SafeLoader
except ImportError:
from yaml import SafeLoader # type: ignore[assignment]
return orig(stream, SafeLoader)
def dump(data: Any, **kwargs) -> str:
"""Like yaml.safe_dump, but use the C libyaml for speed where we can."""
# delay import until use.
from yaml import dump as orig
try:
from yaml import CSafeDumper as SafeDumper
except ImportError:
from yaml import SafeDumper # type: ignore[assignment]
return cast("str", orig(data, Dumper=SafeDumper, **kwargs))
def __getattr__(name):
# Delegate anything else to the yaml module
import yaml
if name == "FullLoader":
# Try to use CFullLoader by default
getattr(yaml, "CFullLoader", yaml.FullLoader)
return getattr(yaml, name)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-ctl/src/airflowctl/ctl/utils/yaml.py",
"license": "Apache License 2.0",
"lines": 54,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/src/airflow/sdk/io/fs.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import inspect
import logging
from collections.abc import Callable, Mapping
from functools import cache
from typing import TYPE_CHECKING
from fsspec.implementations.local import LocalFileSystem
from airflow.sdk._shared.module_loading import import_string
from airflow.sdk._shared.observability.metrics.stats import Stats
from airflow.sdk.providers_manager_runtime import ProvidersManagerTaskRuntime
if TYPE_CHECKING:
from fsspec import AbstractFileSystem
from airflow.sdk.io.typedef import Properties
log = logging.getLogger(__name__)
def _file(_: str | None, storage_options: Properties) -> LocalFileSystem:
return LocalFileSystem(**storage_options)
# builtin supported filesystems
_BUILTIN_SCHEME_TO_FS: dict[str, Callable[[str | None, Properties], AbstractFileSystem]] = {
"file": _file,
"local": _file,
}
@cache
def _register_filesystems() -> Mapping[
str,
Callable[[str | None, Properties], AbstractFileSystem] | Callable[[str | None], AbstractFileSystem],
]:
scheme_to_fs = _BUILTIN_SCHEME_TO_FS.copy()
with Stats.timer("airflow.io.load_filesystems") as timer:
manager = ProvidersManagerTaskRuntime()
for fs_module_name in manager.filesystem_module_names:
fs_module = import_string(fs_module_name)
for scheme in getattr(fs_module, "schemes", []):
if scheme in scheme_to_fs:
log.warning("Overriding scheme %s for %s", scheme, fs_module_name)
method = getattr(fs_module, "get_fs", None)
if method is None:
raise ImportError(f"Filesystem {fs_module_name} does not have a get_fs method")
scheme_to_fs[scheme] = method
log.debug("loading filesystems from providers took %.3f ms", timer.duration)
return scheme_to_fs
def get_fs(
scheme: str, conn_id: str | None = None, storage_options: Properties | None = None
) -> AbstractFileSystem:
"""
Get a filesystem by scheme.
:param scheme: the scheme to get the filesystem for
:return: the filesystem method
:param conn_id: the airflow connection id to use
:param storage_options: the storage options to pass to the filesystem
"""
filesystems = _register_filesystems()
try:
fs = filesystems[scheme]
except KeyError:
raise ValueError(f"No filesystem registered for scheme {scheme}") from None
options = storage_options or {}
# MyPy does not recognize dynamic parameters inspection when we call the method, and we have to do
# it for compatibility reasons with already released providers, that's why we need to ignore
# mypy errors here
parameters = inspect.signature(fs).parameters
if len(parameters) == 1:
if options:
raise AttributeError(
f"Filesystem {scheme} does not support storage options, but options were passed."
f"This most likely means that you are using an old version of the provider that does not "
f"support storage options. Please upgrade the provider if possible."
)
return fs(conn_id) # type: ignore[call-arg]
return fs(conn_id, options) # type: ignore[call-arg]
def has_fs(scheme: str) -> bool:
"""
Check if a filesystem is available for a scheme.
:param scheme: the scheme to check
:return: True if a filesystem is available for the scheme
"""
return scheme in _register_filesystems()
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/io/fs.py",
"license": "Apache License 2.0",
"lines": 92,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/tests/unit/utils/test_deprecation_tools.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import contextlib
import re
import sys
import uuid
import warnings
from contextlib import contextmanager
from types import ModuleType
from unittest import mock
import pytest
from airflow.utils.deprecation_tools import (
DeprecatedImportWarning,
add_deprecated_classes,
getattr_with_deprecation,
)
@contextmanager
def temporary_module(module_name):
"""Context manager to safely add and remove modules from sys.modules."""
original_module = sys.modules.get(module_name)
try:
yield
finally:
if original_module is not None:
sys.modules[module_name] = original_module
elif module_name in sys.modules:
del sys.modules[module_name]
def get_unique_module_name(base_name="test_module"):
"""Generate a unique module name to avoid conflicts."""
return f"{base_name}_{uuid.uuid4().hex[:8]}"
class TestGetAttrWithDeprecation:
"""Tests for the getattr_with_deprecation function."""
def test_getattr_with_deprecation_specific_class(self):
"""Test deprecated import for a specific class."""
imports = {"OldClass": "new.module.NewClass"}
# Mock the new module and class
mock_module = mock.MagicMock()
mock_new_class = mock.MagicMock()
mock_module.NewClass = mock_new_class
with mock.patch("airflow.utils.deprecation_tools.importlib.import_module", return_value=mock_module):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
result = getattr_with_deprecation(
imports=imports,
module="old.module",
override_deprecated_classes={},
extra_message="",
name="OldClass",
)
assert result == mock_new_class
assert len(w) == 1
assert issubclass(w[0].category, DeprecatedImportWarning)
assert "old.module.OldClass" in str(w[0].message)
assert "new.module.NewClass" in str(w[0].message)
def test_getattr_with_deprecation_wildcard(self):
"""Test deprecated import using wildcard pattern."""
imports = {"*": "new.module"}
# Mock the new module and attribute
mock_module = mock.MagicMock()
mock_attribute = mock.MagicMock()
mock_module.SomeAttribute = mock_attribute
with mock.patch("airflow.utils.deprecation_tools.importlib.import_module", return_value=mock_module):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
result = getattr_with_deprecation(
imports=imports,
module="old.module",
override_deprecated_classes={},
extra_message="",
name="SomeAttribute",
)
assert result == mock_attribute
assert len(w) == 1
assert issubclass(w[0].category, DeprecatedImportWarning)
assert "old.module.SomeAttribute" in str(w[0].message)
assert "new.module.SomeAttribute" in str(w[0].message)
def test_getattr_with_deprecation_wildcard_with_override(self):
"""Test wildcard pattern with override deprecated classes."""
imports = {"*": "new.module"}
override_deprecated_classes = {"SomeAttribute": "override.module.OverrideClass"}
# Mock the new module and attribute
mock_module = mock.MagicMock()
mock_attribute = mock.MagicMock()
mock_module.SomeAttribute = mock_attribute
with mock.patch("airflow.utils.deprecation_tools.importlib.import_module", return_value=mock_module):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
result = getattr_with_deprecation(
imports=imports,
module="old.module",
override_deprecated_classes=override_deprecated_classes,
extra_message="",
name="SomeAttribute",
)
assert result == mock_attribute
assert len(w) == 1
assert issubclass(w[0].category, DeprecatedImportWarning)
assert "old.module.SomeAttribute" in str(w[0].message)
assert "override.module.OverrideClass" in str(w[0].message)
def test_getattr_with_deprecation_specific_class_priority(self):
"""Test that specific class mapping takes priority over wildcard."""
imports = {"SpecificClass": "specific.module.SpecificClass", "*": "wildcard.module"}
# Mock the specific module and class
mock_module = mock.MagicMock()
mock_specific_class = mock.MagicMock()
mock_module.SpecificClass = mock_specific_class
with mock.patch("airflow.utils.deprecation_tools.importlib.import_module", return_value=mock_module):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
result = getattr_with_deprecation(
imports=imports,
module="old.module",
override_deprecated_classes={},
extra_message="",
name="SpecificClass",
)
assert result == mock_specific_class
assert len(w) == 1
assert issubclass(w[0].category, DeprecatedImportWarning)
assert "old.module.SpecificClass" in str(w[0].message)
assert "specific.module.SpecificClass" in str(w[0].message)
def test_getattr_with_deprecation_attribute_not_found(self):
"""Test AttributeError when attribute not found."""
imports = {"ExistingClass": "new.module.ExistingClass"}
with pytest.raises(AttributeError, match=r"has no attribute.*NonExistentClass"):
getattr_with_deprecation(
imports=imports,
module="old.module",
override_deprecated_classes={},
extra_message="",
name="NonExistentClass",
)
def test_getattr_with_deprecation_import_error(self):
"""Test ImportError when target module cannot be imported."""
imports = {"*": "nonexistent.module"}
with mock.patch(
"airflow.utils.deprecation_tools.importlib.import_module",
side_effect=ImportError("Module not found"),
):
with pytest.raises(ImportError, match="Could not import"):
getattr_with_deprecation(
imports=imports,
module="old.module",
override_deprecated_classes={},
extra_message="",
name="SomeAttribute",
)
def test_getattr_with_deprecation_with_extra_message(self):
"""Test that extra message is included in warning."""
imports = {"*": "new.module"}
extra_message = "This is an extra message"
# Mock the new module and attribute
mock_module = mock.MagicMock()
mock_attribute = mock.MagicMock()
mock_module.SomeAttribute = mock_attribute
with mock.patch("airflow.utils.deprecation_tools.importlib.import_module", return_value=mock_module):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
getattr_with_deprecation(
imports=imports,
module="old.module",
override_deprecated_classes={},
extra_message=extra_message,
name="SomeAttribute",
)
assert len(w) == 1
assert extra_message in str(w[0].message)
@pytest.mark.parametrize("dunder_attribute", ["__path__", "__file__"])
def test_getattr_with_deprecation_wildcard_skips_dunder_attributes(self, dunder_attribute):
"""Test that wildcard pattern skips Python special attributes."""
imports = {"*": "new.module"}
# Special attributes should raise AttributeError, not be redirected
with pytest.raises(AttributeError, match=rf"has no attribute.*{re.escape(dunder_attribute)}"):
getattr_with_deprecation(
imports=imports,
module="old.module",
override_deprecated_classes={},
extra_message="",
name=dunder_attribute,
)
@pytest.mark.parametrize("non_dunder_attr", ["__version", "__author", "_private", "public"])
def test_getattr_with_deprecation_wildcard_allows_non_dunder_attributes(self, non_dunder_attr):
"""Test that wildcard pattern allows non-dunder attributes (including single underscore prefixed)."""
imports = {"*": "unittest.mock"}
# These should be redirected through wildcard pattern
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
with contextlib.suppress(ImportError, AttributeError):
# Expected - the target module might not have the attribute
# The important thing is that it tried to redirect (didn't raise AttributeError immediately)
getattr_with_deprecation(
imports=imports,
module="old.module",
override_deprecated_classes={},
extra_message="",
name=non_dunder_attr,
)
# Should have generated a deprecation warning
assert len(w) == 1
assert "deprecated" in str(w[0].message).lower()
class TestAddDeprecatedClasses:
"""Tests for the add_deprecated_classes function."""
@pytest.mark.parametrize(
("test_case", "module_imports", "override_classes", "expected_behavior"),
[
(
"basic_class_mapping",
{"old_module": {"OldClass": "new.module.NewClass"}},
None,
"creates_virtual_module",
),
(
"wildcard_pattern",
{"timezone": {"*": "airflow.sdk.timezone"}},
None,
"creates_virtual_module",
),
(
"with_override",
{"old_module": {"OldClass": "new.module.NewClass"}},
{"old_module": {"OldClass": "override.module.OverrideClass"}},
"creates_virtual_module",
),
],
ids=["basic_class_mapping", "wildcard_pattern", "with_override"],
)
def test_virtual_module_creation(self, test_case, module_imports, override_classes, expected_behavior):
"""Test add_deprecated_classes creates virtual modules correctly."""
# Use unique package and module names to avoid conflicts
package_name = get_unique_module_name("test_package")
module_name = f"{package_name}.{next(iter(module_imports.keys()))}"
with temporary_module(module_name):
add_deprecated_classes(module_imports, package_name, override_classes)
# Check that the module was added to sys.modules
assert module_name in sys.modules
assert isinstance(sys.modules[module_name], ModuleType)
assert hasattr(sys.modules[module_name], "__getattr__")
def test_add_deprecated_classes_doesnt_override_existing(self):
"""Test that add_deprecated_classes doesn't override existing modules."""
module_name = get_unique_module_name("existing_module")
full_module_name = f"airflow.test.{module_name}"
# Create an existing module
existing_module = ModuleType(full_module_name)
existing_module.existing_attr = "existing_value"
sys.modules[full_module_name] = existing_module
with temporary_module(full_module_name):
# This should not override the existing module
add_deprecated_classes(
{module_name: {"NewClass": "new.module.NewClass"}},
package="airflow.test",
)
# The existing module should still be there
assert sys.modules[full_module_name] == existing_module
assert sys.modules[full_module_name].existing_attr == "existing_value"
@pytest.mark.parametrize(
(
"test_case",
"module_imports",
"attr_name",
"target_attr",
"expected_target_msg",
"override_classes",
),
[
(
"direct_imports",
{
"get_something": "target.module.get_something",
"another_attr": "target.module.another_attr",
},
"get_something",
"get_something",
"target.module.get_something",
None,
),
(
"with_wildcard",
{"specific_attr": "target.module.specific_attr", "*": "target.module"},
"any_attribute",
"any_attribute",
"target.module.any_attribute",
None,
),
(
"with_override",
{"get_something": "target.module.get_something"},
"get_something",
"get_something",
"override.module.OverrideClass",
{"get_something": "override.module.OverrideClass"},
),
],
ids=["direct_imports", "with_wildcard", "with_override"],
)
def test_current_module_deprecation(
self, test_case, module_imports, attr_name, target_attr, expected_target_msg, override_classes
):
"""Test add_deprecated_classes with current module (__name__ key) functionality."""
module_name = get_unique_module_name(f"{test_case}_module")
full_module_name = f"airflow.test.{module_name}"
# Create a module to modify
test_module = ModuleType(full_module_name)
sys.modules[full_module_name] = test_module
with temporary_module(full_module_name):
# Mock the target module and attribute
mock_target_module = mock.MagicMock()
mock_attribute = mock.MagicMock()
setattr(mock_target_module, target_attr, mock_attribute)
with mock.patch(
"airflow.utils.deprecation_tools.importlib.import_module", return_value=mock_target_module
):
# Prepare override parameter
override_param = {full_module_name: override_classes} if override_classes else None
add_deprecated_classes(
{full_module_name: module_imports},
package=full_module_name,
override_deprecated_classes=override_param,
)
# The module should now have a __getattr__ method
assert hasattr(test_module, "__getattr__")
# Test that accessing the deprecated attribute works
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
result = getattr(test_module, attr_name)
assert result == mock_attribute
assert len(w) == 1
assert issubclass(w[0].category, DeprecatedImportWarning)
assert f"{full_module_name}.{attr_name}" in str(w[0].message)
assert expected_target_msg in str(w[0].message)
def test_add_deprecated_classes_mixed_current_and_virtual_modules(self):
"""Test add_deprecated_classes with mixed current module and virtual module imports."""
base_module_name = get_unique_module_name("mixed_module")
full_module_name = f"airflow.test.{base_module_name}"
virtual_module_name = f"{base_module_name}_virtual"
full_virtual_module_name = f"{full_module_name}.{virtual_module_name}"
# Create a module to modify
test_module = ModuleType(full_module_name)
sys.modules[full_module_name] = test_module
with temporary_module(full_module_name), temporary_module(full_virtual_module_name):
# Mock the target modules and attributes
mock_current_module = mock.MagicMock()
mock_current_attr = mock.MagicMock()
mock_current_module.current_attr = mock_current_attr
mock_virtual_module = mock.MagicMock()
mock_virtual_attr = mock.MagicMock()
mock_virtual_module.VirtualClass = mock_virtual_attr
def mock_import_module(module_name):
if "current.module" in module_name:
return mock_current_module
if "virtual.module" in module_name:
return mock_virtual_module
raise ImportError(f"Module {module_name} not found")
with mock.patch(
"airflow.utils.deprecation_tools.importlib.import_module", side_effect=mock_import_module
):
add_deprecated_classes(
{
full_module_name: {
"current_attr": "current.module.current_attr",
},
virtual_module_name: {
"VirtualClass": "virtual.module.VirtualClass",
},
},
package=full_module_name,
)
# Test current module access
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
result = test_module.current_attr
assert result == mock_current_attr
assert len(w) == 1
assert issubclass(w[0].category, DeprecatedImportWarning)
assert f"{full_module_name}.current_attr" in str(w[0].message)
# Test virtual module access
virtual_module = sys.modules[full_virtual_module_name]
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
result = virtual_module.VirtualClass
assert result == mock_virtual_attr
assert len(w) == 1
assert issubclass(w[0].category, DeprecatedImportWarning)
assert f"{full_virtual_module_name}.VirtualClass" in str(w[0].message)
def test_add_deprecated_classes_current_module_not_in_sys_modules(self):
"""Test add_deprecated_classes raises error when current module not in sys.modules."""
nonexistent_module = "nonexistent.module.name"
with pytest.raises(ValueError, match=f"Module {nonexistent_module} not found in sys.modules"):
add_deprecated_classes(
{nonexistent_module: {"attr": "target.module.attr"}},
package=nonexistent_module,
)
def test_add_deprecated_classes_with_custom_message(self):
"""Test add_deprecated_classes with custom message parameter."""
module_name = get_unique_module_name("custom_msg_module")
full_module_name = f"airflow.test.{module_name}"
# Create a module to modify
test_module = ModuleType(full_module_name)
sys.modules[full_module_name] = test_module
with temporary_module(full_module_name):
# Mock the target module and attribute
mock_target_module = mock.MagicMock()
mock_attribute = mock.MagicMock()
mock_target_module.deprecated_attr = mock_attribute
with mock.patch(
"airflow.utils.deprecation_tools.importlib.import_module", return_value=mock_target_module
):
custom_message = "We are just going to remove {module}.{name}. Prepare yourselves!"
add_deprecated_classes(
{full_module_name: {"deprecated_attr": "target.module.deprecated_attr"}},
package=full_module_name,
message=custom_message,
)
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
result = getattr(test_module, "deprecated_attr")
assert result == mock_attribute
assert len(w) == 1
assert issubclass(w[0].category, DeprecatedImportWarning)
expected = (
f"We are just going to remove {full_module_name}.deprecated_attr. Prepare yourselves!"
)
assert str(w[0].message) == expected
assert "Please use" not in str(w[0].message)
def test_add_deprecated_classes_preserves_existing_module_attributes(self):
"""Test that add_deprecated_classes preserves existing module attributes."""
module_name = get_unique_module_name("preserve_module")
full_module_name = f"airflow.test.{module_name}"
# Create a module with existing attributes
test_module = ModuleType(full_module_name)
test_module.existing_attr = "existing_value"
test_module.existing_function = lambda: "existing_function_result"
sys.modules[full_module_name] = test_module
with temporary_module(full_module_name):
add_deprecated_classes(
{
full_module_name: {
"deprecated_attr": "target.module.deprecated_attr",
}
},
package=full_module_name,
)
# Existing attributes should still be accessible
assert test_module.existing_attr == "existing_value"
assert test_module.existing_function() == "existing_function_result"
# The module should have __getattr__ for deprecated attributes
assert hasattr(test_module, "__getattr__")
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/utils/test_deprecation_tools.py",
"license": "Apache License 2.0",
"lines": 461,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from collections.abc import Iterable, Mapping
from datetime import datetime
from typing import Any
from pydantic import Field, field_validator
from airflow.api_fastapi.core_api.base import BaseModel
from airflow.api_fastapi.core_api.datamodels.task_instance_history import TaskInstanceHistoryResponse
from airflow.api_fastapi.core_api.datamodels.task_instances import TaskInstanceResponse
class UpdateHITLDetailPayload(BaseModel):
"""Schema for updating the content of a Human-in-the-loop detail."""
chosen_options: list[str] = Field(min_length=1)
params_input: Mapping = Field(default_factory=dict)
class HITLDetailResponse(BaseModel):
"""Response of updating a Human-in-the-loop detail."""
responded_by: HITLUser
responded_at: datetime
chosen_options: list[str] = Field(min_length=1)
params_input: Mapping = Field(default_factory=dict)
class HITLUser(BaseModel):
"""Schema for a Human-in-the-loop users."""
id: str
name: str
class BaseHITLDetail(BaseModel):
"""The common part within HITLDetail and HITLDetailHistory."""
# User Request Detail
options: list[str] = Field(min_length=1)
subject: str
body: str | None = None
defaults: list[str] | None = None
multiple: bool = False
params: Mapping = Field(default_factory=dict)
assigned_users: list[HITLUser] = Field(default_factory=list)
created_at: datetime
# Response Content Detail
responded_by_user: HITLUser | None = None
responded_at: datetime | None = None
chosen_options: list[str] | None = None
params_input: dict[str, Any] = Field(default_factory=dict)
response_received: bool = False
@field_validator("params", mode="before")
@classmethod
def get_params(cls, params: dict[str, Any]) -> dict[str, Any]:
"""Convert params attribute to dict representation."""
return {
key: value
if BaseHITLDetail._is_param(value)
else {
"value": value,
"description": None,
"schema": {},
}
for key, value in params.items()
}
@staticmethod
def _is_param(value: Any) -> bool:
return isinstance(value, dict) and all(key in value for key in ("description", "schema", "value"))
class HITLDetail(BaseHITLDetail):
"""Schema for Human-in-the-loop detail."""
task_instance: TaskInstanceResponse
class HITLDetailCollection(BaseModel):
"""Schema for a collection of Human-in-the-loop details."""
hitl_details: Iterable[HITLDetail]
total_entries: int
class HITLDetailHistory(BaseHITLDetail):
"""Schema for Human-in-the-loop detail history."""
task_instance: TaskInstanceHistoryResponse
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py",
"license": "Apache License 2.0",
"lines": 82,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Annotated
import structlog
from fastapi import Depends, HTTPException, status
from sqlalchemy import select
from sqlalchemy.orm import joinedload
from airflow._shared.timezones import timezone
from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity
from airflow.api_fastapi.common.db.common import SessionDep, paginated_select
from airflow.api_fastapi.common.parameters import (
QueryHITLDetailBodySearch,
QueryHITLDetailDagIdPatternSearch,
QueryHITLDetailMapIndexFilter,
QueryHITLDetailRespondedUserIdFilter,
QueryHITLDetailRespondedUserNameFilter,
QueryHITLDetailResponseReceivedFilter,
QueryHITLDetailSubjectSearch,
QueryHITLDetailTaskIdFilter,
QueryHITLDetailTaskIdPatternSearch,
QueryLimit,
QueryOffset,
QueryTIStateFilter,
RangeFilter,
SortParam,
datetime_range_filter_factory,
)
from airflow.api_fastapi.common.router import AirflowRouter
from airflow.api_fastapi.core_api.datamodels.hitl import (
HITLDetail,
HITLDetailCollection,
HITLDetailHistory,
HITLDetailResponse,
UpdateHITLDetailPayload,
)
from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc
from airflow.api_fastapi.core_api.security import (
GetUserDep,
ReadableTIFilterDep,
get_auth_manager,
requires_access_dag,
)
from airflow.api_fastapi.logging.decorators import action_logging
from airflow.models.base import Base
from airflow.models.dag_version import DagVersion
from airflow.models.dagrun import DagRun
from airflow.models.hitl import HITLDetail as HITLDetailModel, HITLUser
from airflow.models.taskinstance import TaskInstance as TI
from airflow.models.taskinstancehistory import TaskInstanceHistory as TIH
task_instances_hitl_router = AirflowRouter(
tags=["Task Instance"],
prefix="/dags/{dag_id}/dagRuns/{dag_run_id}",
)
task_instance_hitl_path = "/taskInstances/{task_id}/{map_index}/hitlDetails"
log = structlog.get_logger(__name__)
def _get_task_instance_with_hitl_detail(
dag_id: str,
dag_run_id: str,
task_id: str,
session: SessionDep,
map_index: int,
try_number: int | None = None,
) -> TI | TIH:
def _query(orm_object: Base) -> TI | TIH | None:
query = (
select(orm_object)
.where(
orm_object.dag_id == dag_id,
orm_object.run_id == dag_run_id,
orm_object.task_id == task_id,
orm_object.map_index == map_index,
)
.options(joinedload(orm_object.hitl_detail))
)
if try_number is not None:
query = query.where(orm_object.try_number == try_number)
ti_or_tih = session.scalar(query)
return ti_or_tih
if try_number is None:
ti_or_tih = _query(TI)
else:
ti_or_tih = _query(TIH) or _query(TI)
if ti_or_tih is None:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=(
f"The Task Instance with dag_id: `{dag_id}`, run_id: `{dag_run_id}`, "
f"task_id: `{task_id}` and map_index: `{map_index}` was not found"
),
)
if not ti_or_tih.hitl_detail:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Human-in-the-loop detail does not exist for Task Instance with id {ti_or_tih.id}",
)
return ti_or_tih
@task_instances_hitl_router.patch(
task_instance_hitl_path,
responses=create_openapi_http_exception_doc(
[
status.HTTP_403_FORBIDDEN,
status.HTTP_404_NOT_FOUND,
status.HTTP_409_CONFLICT,
]
),
dependencies=[
Depends(requires_access_dag(method="PUT", access_entity=DagAccessEntity.HITL_DETAIL)),
Depends(action_logging()),
],
)
def update_hitl_detail(
dag_id: str,
dag_run_id: str,
task_id: str,
update_hitl_detail_payload: UpdateHITLDetailPayload,
user: GetUserDep,
session: SessionDep,
map_index: int = -1,
) -> HITLDetailResponse:
"""Update a Human-in-the-loop detail."""
task_instance = _get_task_instance_with_hitl_detail(
dag_id=dag_id,
dag_run_id=dag_run_id,
task_id=task_id,
session=session,
map_index=map_index,
)
hitl_detail_model = task_instance.hitl_detail
if hitl_detail_model.response_received:
raise HTTPException(
status_code=status.HTTP_409_CONFLICT,
detail=(
f"Human-in-the-loop detail has already been updated for Task Instance with id {task_instance.id} "
"and is not allowed to write again."
),
)
user_id = user.get_id()
user_name = user.get_name()
if isinstance(user_id, int):
# FabAuthManager (ab_user) store user id as integer, but common interface is string type
user_id = str(user_id)
hitl_user = HITLUser(id=user_id, name=user_name)
if hitl_detail_model.assigned_users:
# Convert assigned_users list to set of user IDs for authorization check
assigned_user_ids = {assigned_user["id"] for assigned_user in hitl_detail_model.assigned_users}
if not get_auth_manager().is_authorized_hitl_task(assigned_users=assigned_user_ids, user=user):
log.error("User=%s (id=%s) is not a respondent for the task", user_name, user_id)
raise HTTPException(
status.HTTP_403_FORBIDDEN,
f"User={user_name} (id={user_id}) is not a respondent for the task.",
)
hitl_detail_model.responded_by = hitl_user
hitl_detail_model.responded_at = timezone.utcnow()
hitl_detail_model.chosen_options = update_hitl_detail_payload.chosen_options
hitl_detail_model.params_input = update_hitl_detail_payload.params_input
session.add(hitl_detail_model)
session.commit()
return HITLDetailResponse.model_validate(hitl_detail_model)
@task_instances_hitl_router.get(
task_instance_hitl_path,
status_code=status.HTTP_200_OK,
responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]),
dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.HITL_DETAIL))],
)
def get_hitl_detail(
dag_id: str,
dag_run_id: str,
task_id: str,
session: SessionDep,
map_index: int = -1,
) -> HITLDetail:
"""Get a Human-in-the-loop detail of a specific task instance."""
task_instance = _get_task_instance_with_hitl_detail(
dag_id=dag_id,
dag_run_id=dag_run_id,
task_id=task_id,
session=session,
map_index=map_index,
try_number=None,
)
return task_instance.hitl_detail
@task_instances_hitl_router.get(
task_instance_hitl_path + "/tries/{try_number}",
status_code=status.HTTP_200_OK,
responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]),
dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.HITL_DETAIL))],
)
def get_hitl_detail_try_detail(
dag_id: str,
dag_run_id: str,
task_id: str,
session: SessionDep,
map_index: int = -1,
try_number: int | None = None,
) -> HITLDetailHistory:
"""Get a Human-in-the-loop detail of a specific task instance."""
task_instance_history = _get_task_instance_with_hitl_detail(
dag_id=dag_id,
dag_run_id=dag_run_id,
task_id=task_id,
session=session,
map_index=map_index,
try_number=try_number,
)
return task_instance_history.hitl_detail
@task_instances_hitl_router.get(
"/hitlDetails",
status_code=status.HTTP_200_OK,
dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.HITL_DETAIL))],
)
def get_hitl_details(
dag_id: str,
dag_run_id: str,
limit: QueryLimit,
offset: QueryOffset,
order_by: Annotated[
SortParam,
Depends(
SortParam(
allowed_attrs=[
"ti_id",
"subject",
"responded_at",
"created_at",
"responded_by_user_id",
"responded_by_user_name",
],
model=HITLDetailModel,
to_replace={
"dag_id": TI.dag_id,
"run_id": TI.run_id,
"task_display_name": TI.task_display_name,
"run_after": DagRun.run_after,
"rendered_map_index": TI.rendered_map_index,
"task_instance_operator": TI.operator,
"task_instance_state": TI.state,
},
).dynamic_depends(),
),
],
session: SessionDep,
# permission filter
readable_ti_filter: ReadableTIFilterDep,
# ti related filter
dag_id_pattern: QueryHITLDetailDagIdPatternSearch,
task_id: QueryHITLDetailTaskIdFilter,
task_id_pattern: QueryHITLDetailTaskIdPatternSearch,
map_index: QueryHITLDetailMapIndexFilter,
ti_state: QueryTIStateFilter,
# hitl detail related filter
response_received: QueryHITLDetailResponseReceivedFilter,
responded_by_user_id: QueryHITLDetailRespondedUserIdFilter,
responded_by_user_name: QueryHITLDetailRespondedUserNameFilter,
subject_patten: QueryHITLDetailSubjectSearch,
body_patten: QueryHITLDetailBodySearch,
created_at: Annotated[RangeFilter, Depends(datetime_range_filter_factory("created_at", HITLDetailModel))],
) -> HITLDetailCollection:
"""Get Human-in-the-loop details."""
query = (
select(HITLDetailModel)
.join(TI, HITLDetailModel.ti_id == TI.id)
.join(TI.dag_run)
.options(
joinedload(HITLDetailModel.task_instance).options(
joinedload(TI.dag_run).joinedload(DagRun.dag_model),
joinedload(TI.task_instance_note),
joinedload(TI.dag_version).joinedload(DagVersion.bundle),
),
)
)
if dag_id != "~":
query = query.where(TI.dag_id == dag_id)
if dag_run_id != "~":
query = query.where(TI.run_id == dag_run_id)
hitl_detail_select, total_entries = paginated_select(
statement=query,
filters=[
# permission filter
readable_ti_filter,
# ti related filter
dag_id_pattern,
task_id,
task_id_pattern,
map_index,
ti_state,
# hitl detail related filter
response_received,
responded_by_user_id,
responded_by_user_name,
subject_patten,
body_patten,
created_at,
],
offset=offset,
limit=limit,
order_by=order_by,
session=session,
)
hitl_details = session.scalars(hitl_detail_select)
return HITLDetailCollection(
hitl_details=hitl_details,
total_entries=total_entries,
)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py",
"license": "Apache License 2.0",
"lines": 318,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from collections.abc import Iterable
from typing import Any
from uuid import UUID
from pydantic import Field
from airflow.api_fastapi.common.types import UtcDateTime
from airflow.api_fastapi.core_api.base import BaseModel
from airflow.models.hitl import HITLDetail
class HITLUser(BaseModel):
"""Schema for a Human-in-the-loop users."""
id: str
name: str
class HITLDetailRequest(BaseModel):
"""Schema for the request part of a Human-in-the-loop detail for a specific task instance."""
ti_id: UUID
options: list[str] = Field(min_length=1)
subject: str
body: str | None = None
defaults: list[str] | None = None
multiple: bool = False
params: dict[str, Any] = Field(default_factory=dict)
assigned_users: list[HITLUser] = Field(default_factory=list)
class UpdateHITLDetailPayload(BaseModel):
"""Schema for writing the response part of a Human-in-the-loop detail for a specific task instance."""
ti_id: UUID
chosen_options: list[str] = Field(min_length=1)
params_input: dict[str, Any] = Field(default_factory=dict)
class HITLDetailResponse(BaseModel):
"""Schema for the response part of a Human-in-the-loop detail for a specific task instance."""
response_received: bool
responded_by_user: HITLUser | None = None
responded_at: UtcDateTime | None
# It's empty if the user has not yet responded.
chosen_options: Iterable[str] | None
params_input: dict[str, Any] = Field(default_factory=dict)
@classmethod
def from_hitl_detail_orm(cls, hitl_detail: HITLDetail) -> HITLDetailResponse:
hitl_user = (
HITLUser(
id=hitl_detail.responded_by_user_id,
name=hitl_detail.responded_by_user_name,
)
if hitl_detail.responded_by_user
else None
)
return HITLDetailResponse(
response_received=hitl_detail.response_received,
responded_at=hitl_detail.responded_at,
responded_by_user=hitl_user,
chosen_options=hitl_detail.chosen_options or (),
params_input=hitl_detail.params_input or {},
)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py",
"license": "Apache License 2.0",
"lines": 68,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from uuid import UUID
import structlog
from fastapi import APIRouter, HTTPException, status
from sqlalchemy import select
from airflow._shared.timezones import timezone
from airflow.api_fastapi.common.db.common import SessionDep
from airflow.api_fastapi.execution_api.datamodels.hitl import (
HITLDetailRequest,
HITLDetailResponse,
UpdateHITLDetailPayload,
)
from airflow.models.hitl import HITLDetail
router = APIRouter()
log = structlog.get_logger(__name__)
@router.post(
"/{task_instance_id}",
status_code=status.HTTP_201_CREATED,
)
def upsert_hitl_detail(
task_instance_id: UUID,
payload: HITLDetailRequest,
session: SessionDep,
) -> HITLDetailRequest:
"""
Create a Human-in-the-loop detail for a specific Task Instance.
There're 3 cases handled here.
1. If a HITLOperator task instance does not have a HITLDetail,
a new HITLDetail is created without a response section.
2. If a HITLOperator task instance has a HITLDetail but lacks a response,
the existing HITLDetail is returned.
This situation occurs when a task instance is cleared before a response is received.
3. If a HITLOperator task instance has both a HITLDetail and a response section,
the existing response is removed, and the HITLDetail is returned.
This happens when a task instance is cleared after a response has been received.
This design ensures that each task instance has only one HITLDetail.
"""
hitl_detail_model = session.scalar(select(HITLDetail).where(HITLDetail.ti_id == task_instance_id))
if not hitl_detail_model:
hitl_detail_model = HITLDetail(
ti_id=task_instance_id,
options=payload.options,
subject=payload.subject,
body=payload.body,
defaults=payload.defaults,
multiple=payload.multiple,
params=payload.params,
assignees=[user.model_dump() for user in payload.assigned_users],
)
session.add(hitl_detail_model)
elif hitl_detail_model.response_received:
# Cleanup the response part of HITLDetail as we only store one response for one task instance.
# It normally happens after retry, we keep only the latest response.
hitl_detail_model.responded_by = None
hitl_detail_model.responded_at = None
hitl_detail_model.chosen_options = None
hitl_detail_model.params_input = {}
session.add(hitl_detail_model)
return HITLDetailRequest.model_validate(hitl_detail_model)
def _check_hitl_detail_exists(hitl_detail_model: HITLDetail | None) -> HITLDetail:
if not hitl_detail_model:
raise HTTPException(
status.HTTP_404_NOT_FOUND,
detail={
"reason": "not_found",
"message": (
"HITLDetail not found. "
"This happens most likely due to clearing task instance before receiving response."
),
},
)
return hitl_detail_model
@router.patch("/{task_instance_id}")
def update_hitl_detail(
task_instance_id: UUID,
payload: UpdateHITLDetailPayload,
session: SessionDep,
) -> HITLDetailResponse:
"""Update the response part of a Human-in-the-loop detail for a specific Task Instance."""
hitl_detail_model_result = session.execute(
select(HITLDetail).where(HITLDetail.ti_id == task_instance_id)
).scalar()
hitl_detail_model = _check_hitl_detail_exists(hitl_detail_model_result)
if hitl_detail_model.response_received:
raise HTTPException(
status.HTTP_409_CONFLICT,
f"Human-in-the-loop detail for Task Instance with id {task_instance_id} already exists.",
)
hitl_detail_model.responded_by = None
hitl_detail_model.responded_at = timezone.utcnow()
hitl_detail_model.chosen_options = payload.chosen_options
hitl_detail_model.params_input = payload.params_input
session.add(hitl_detail_model)
session.commit()
return HITLDetailResponse.from_hitl_detail_orm(hitl_detail_model)
@router.get(
"/{task_instance_id}",
status_code=status.HTTP_200_OK,
)
def get_hitl_detail(
task_instance_id: UUID,
session: SessionDep,
) -> HITLDetailResponse:
"""Get Human-in-the-loop detail for a specific Task Instance."""
hitl_detail_model_result = session.execute(
select(HITLDetail).where(HITLDetail.ti_id == task_instance_id),
).scalar()
hitl_detail_model = _check_hitl_detail_exists(hitl_detail_model_result)
return HITLDetailResponse.from_hitl_detail_orm(hitl_detail_model)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py",
"license": "Apache License 2.0",
"lines": 125,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/src/airflow/models/hitl.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime
from typing import TYPE_CHECKING, Any, TypedDict
from uuid import UUID
import sqlalchemy as sa
from sqlalchemy import Boolean, ForeignKeyConstraint, String, Text, Uuid, func, literal
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.sql.functions import FunctionElement
from airflow._shared.timezones import timezone
from airflow.models.base import Base
from airflow.utils.sqlalchemy import UtcDateTime
if TYPE_CHECKING:
from sqlalchemy.sql import ColumnElement
from sqlalchemy.sql.compiler import SQLCompiler
class JSONExtract(FunctionElement):
"""
Cross-dialect JSON key extractor.
:meta: private
"""
type = String()
inherit_cache = True
def __init__(self, column: ColumnElement[Any], key: str, **kwargs: dict[str, Any]) -> None:
super().__init__(column, literal(key), **kwargs)
@compiles(JSONExtract, "postgresql")
def compile_postgres(element: JSONExtract, compiler: SQLCompiler, **kwargs: dict[str, Any]) -> str:
"""
Compile JSONExtract for PostgreSQL.
:meta: private
"""
column, key = element.clauses
return compiler.process(func.json_extract_path_text(column, key), **kwargs)
@compiles(JSONExtract, "sqlite")
@compiles(JSONExtract, "mysql")
def compile_sqlite_mysql(element: JSONExtract, compiler: SQLCompiler, **kwargs: dict[str, Any]) -> str:
"""
Compile JSONExtract for SQLite/MySQL.
:meta: private
"""
column, key = element.clauses
return compiler.process(func.json_extract(column, f"$.{key.value}"), **kwargs)
class HITLUser(TypedDict):
"""Typed dict for saving a Human-in-the-loop user information."""
id: str
name: str
class HITLDetailPropertyMixin:
"""The property part of HITLDetail and HITLDetailHistory."""
responded_at: datetime | None
responded_by: dict[str, Any] | None
assignees: list[dict[str, str]] | None
@hybrid_property
def response_received(self) -> bool:
return self.responded_at is not None
@response_received.expression # type: ignore[no-redef]
def response_received(cls):
return cls.responded_at.is_not(None)
@hybrid_property
def responded_by_user_id(self) -> str | None:
return self.responded_by["id"] if self.responded_by else None
@responded_by_user_id.expression # type: ignore[no-redef]
def responded_by_user_id(cls):
return JSONExtract(cls.responded_by, "id")
@hybrid_property
def responded_by_user_name(self) -> str | None:
return self.responded_by["name"] if self.responded_by else None
@responded_by_user_name.expression # type: ignore[no-redef]
def responded_by_user_name(cls):
return JSONExtract(cls.responded_by, "name")
@hybrid_property
def assigned_users(self) -> list[HITLUser]:
if not self.assignees:
return []
return [
HITLUser(
id=assignee["id"],
name=assignee["name"],
)
for assignee in self.assignees
]
@hybrid_property
def responded_by_user(self) -> HITLUser | None:
if self.responded_by is None:
return None
return HITLUser(
id=self.responded_by["id"],
name=self.responded_by["name"],
)
class HITLDetail(Base, HITLDetailPropertyMixin):
"""Human-in-the-loop request and corresponding response."""
__tablename__ = "hitl_detail"
ti_id: Mapped[UUID] = mapped_column(
Uuid(),
primary_key=True,
nullable=False,
)
# User Request Detail
options: Mapped[dict] = mapped_column(sa.JSON(), nullable=False)
subject: Mapped[str] = mapped_column(Text, nullable=False)
body: Mapped[str | None] = mapped_column(Text, nullable=True)
defaults: Mapped[dict | None] = mapped_column(sa.JSON(), nullable=True)
multiple: Mapped[bool | None] = mapped_column(Boolean, unique=False, default=False, nullable=True)
params: Mapped[dict] = mapped_column(sa.JSON(), nullable=False, default={})
assignees: Mapped[list[dict[str, str]] | None] = mapped_column(sa.JSON(), nullable=True)
created_at: Mapped[datetime] = mapped_column(UtcDateTime, default=timezone.utcnow, nullable=False)
# Response Content Detail
responded_at: Mapped[datetime | None] = mapped_column(UtcDateTime, nullable=True)
responded_by: Mapped[dict | None] = mapped_column(sa.JSON(), nullable=True)
chosen_options: Mapped[list[str] | None] = mapped_column(
sa.JSON(),
nullable=True,
default=None,
)
params_input: Mapped[dict] = mapped_column(sa.JSON(), nullable=False, default={})
task_instance = relationship(
"TaskInstance",
lazy="joined",
back_populates="hitl_detail",
)
__table_args__ = (
ForeignKeyConstraint(
(ti_id,),
["task_instance.id"],
name="hitl_detail_ti_fkey",
ondelete="CASCADE",
onupdate="CASCADE",
),
)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/models/hitl.py",
"license": "Apache License 2.0",
"lines": 144,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
from collections.abc import Callable
from datetime import datetime, timedelta
from operator import itemgetter
from typing import TYPE_CHECKING, Any
from unittest import mock
import pytest
import time_machine
from sqlalchemy import delete, select
from sqlalchemy.orm import Session
from airflow._shared.timezones.timezone import utc, utcnow
from airflow.models.hitl import HITLDetail
from airflow.models.log import Log
from airflow.sdk.execution_time.hitl import HITLUser
from airflow.utils.state import TaskInstanceState
from tests_common.test_utils.asserts import assert_queries_count
from tests_common.test_utils.format_datetime import from_datetime_to_zulu_without_ms
if TYPE_CHECKING:
from fastapi.testclient import TestClient
from airflow.models.taskinstance import TaskInstance
from tests_common.pytest_plugin import CreateTaskInstance
pytestmark = pytest.mark.db_test
DAG_ID = "test_hitl_dag"
ANOTHER_DAG_ID = "another_hitl_dag"
TASK_ID = "sample_task_hitl"
DEFAULT_CREATED_AT = datetime(2025, 9, 15, 13, 0, 0, tzinfo=utc)
ANOTHER_CREATED_AT = datetime(2025, 9, 16, 12, 0, 0, tzinfo=utc)
@pytest.fixture
def sample_ti(
create_task_instance: CreateTaskInstance,
session: Session,
) -> TaskInstance:
ti = create_task_instance(
dag_id=DAG_ID,
task_id=TASK_ID,
session=session,
)
session.commit()
return ti
@pytest.fixture
def sample_ti_url_identifier() -> str:
return f"/dags/{DAG_ID}/dagRuns/test/taskInstances/{TASK_ID}/-1"
@pytest.fixture
def sample_hitl_detail(sample_ti: TaskInstance, session: Session) -> HITLDetail:
hitl_detail_model = HITLDetail(
ti_id=sample_ti.id,
options=["Approve", "Reject"],
subject="This is subject",
body="this is body",
defaults=["Approve"],
multiple=False,
params={"input_1": 1},
assignees=None,
)
session.add(hitl_detail_model)
session.commit()
return hitl_detail_model
@pytest.fixture
def sample_hitl_detail_non_respondent(sample_ti: TaskInstance, session: Session) -> HITLDetail:
hitl_detail_model = HITLDetail(
ti_id=sample_ti.id,
options=["Approve", "Reject"],
subject="This is subject",
body="this is body",
defaults=["Approve"],
multiple=False,
params={"input_1": 1},
assignees=[HITLUser(id="non_test", name="non_test")],
)
session.add(hitl_detail_model)
session.commit()
return hitl_detail_model
@pytest.fixture
def sample_hitl_detail_respondent(sample_ti: TaskInstance, session: Session) -> HITLDetail:
hitl_detail_model = HITLDetail(
ti_id=sample_ti.id,
options=["Approve", "Reject"],
subject="This is subject",
body="this is body",
defaults=["Approve"],
multiple=False,
params={"input_1": 1},
assignees=[HITLUser(id="test", name="test")],
)
session.add(hitl_detail_model)
session.commit()
return hitl_detail_model
@pytest.fixture
def sample_tis(create_task_instance: CreateTaskInstance) -> list[TaskInstance]:
tis = [
create_task_instance(
dag_id=f"hitl_dag_{i}",
run_id=f"hitl_run_{i}",
task_id=f"hitl_task_{i}",
state=TaskInstanceState.DEFERRED,
)
for i in range(5)
]
tis.extend(
[
create_task_instance(
dag_id=f"other_Dag_{i}",
run_id=f"another_hitl_run_{i}",
task_id=f"another_hitl_task_{i}",
state=TaskInstanceState.SUCCESS,
)
for i in range(3)
]
)
return tis
@pytest.fixture
def sample_hitl_details(sample_tis: list[TaskInstance], session: Session) -> list[HITLDetail]:
hitl_detail_models = [
HITLDetail(
ti_id=ti.id,
options=["Approve", "Reject"],
subject=f"This is subject {i}",
body=f"this is body {i}",
defaults=["Approve"],
multiple=False,
params={"input_1": 1},
created_at=DEFAULT_CREATED_AT,
)
for i, ti in enumerate(sample_tis[:5])
]
hitl_detail_models.extend(
[
HITLDetail(
ti_id=ti.id,
options=["1", "2", "3"],
subject=f"Subject {i} this is",
body=f"Body {i} this is",
defaults=["1"],
multiple=False,
params={"input": 1},
responded_at=utcnow(),
chosen_options=[str(i)],
params_input={"input": i},
responded_by={"id": "test", "name": "test"},
created_at=ANOTHER_CREATED_AT,
)
for i, ti in enumerate(sample_tis[5:])
]
)
session.add_all(hitl_detail_models)
session.commit()
return hitl_detail_models
expected_ti_not_found_error_msg = (
f"The Task Instance with dag_id: `{DAG_ID}`,"
f" run_id: `test`, task_id: `{TASK_ID}` and map_index: `-1` was not found"
)
@pytest.fixture
def expected_hitl_detail_not_found_error_msg(sample_ti: TaskInstance) -> str:
if TYPE_CHECKING:
assert sample_ti.task
return f"Human-in-the-loop detail does not exist for Task Instance with id {sample_ti.id}"
@pytest.fixture
def expected_sample_hitl_detail_dict(sample_ti: TaskInstance) -> dict[str, Any]:
return {
"body": "this is body",
"defaults": ["Approve"],
"multiple": False,
"options": ["Approve", "Reject"],
"params": {"input_1": {"value": 1, "schema": {}, "description": None}},
"assigned_users": [],
"created_at": mock.ANY,
"params_input": {},
"responded_at": None,
"chosen_options": None,
"response_received": False,
"subject": "This is subject",
"responded_by_user": None,
"task_instance": {
"dag_display_name": DAG_ID,
"dag_id": DAG_ID,
"dag_run_id": "test",
"dag_version": {
"bundle_name": "dag_maker",
"bundle_url": None,
"bundle_version": None,
"created_at": mock.ANY,
"dag_display_name": DAG_ID,
"dag_id": DAG_ID,
"id": mock.ANY,
"version_number": 1,
},
"duration": None,
"end_date": None,
"executor": None,
"executor_config": "{}",
"hostname": "",
"id": str(sample_ti.id),
"logical_date": mock.ANY,
"map_index": -1,
"max_tries": 0,
"note": None,
"operator": "EmptyOperator",
"operator_name": "EmptyOperator",
"pid": None,
"pool": "default_pool",
"pool_slots": 1,
"priority_weight": 1,
"queue": "default",
"queued_when": None,
"rendered_fields": {},
"rendered_map_index": None,
"run_after": mock.ANY,
"scheduled_when": None,
"start_date": None,
"state": None,
"task_display_name": "sample_task_hitl",
"task_id": TASK_ID,
"trigger": None,
"triggerer_job": None,
"try_number": 0,
"unixname": "root",
},
}
@pytest.fixture(autouse=True)
def cleanup_audit_log(session: Session) -> None:
session.execute(delete(Log))
session.commit()
def _assert_sample_audit_log(audit_log: Log) -> None:
assert audit_log.dag_id == DAG_ID
assert audit_log.task_id == TASK_ID
assert audit_log.run_id == "test"
assert audit_log.try_number is None
assert audit_log.owner == "test"
assert audit_log.owner_display_name == "test"
assert audit_log.event == "update_hitl_detail"
if TYPE_CHECKING:
assert isinstance(audit_log.extra, str)
expected_extra = {
"chosen_options": ["Approve"],
"params_input": {"input_1": 2},
"method": "PATCH",
"map_index": "-1",
}
assert json.loads(audit_log.extra) == expected_extra
@pytest.fixture
def sample_update_payload() -> dict[str, Any]:
return {"chosen_options": ["Approve"], "params_input": {"input_1": 2}}
class TestUpdateHITLDetailEndpoint:
@time_machine.travel(datetime(2025, 7, 3, 0, 0, 0), tick=False)
@pytest.mark.usefixtures("sample_hitl_detail")
def test_should_respond_200_with_existing_response(
self,
test_client: TestClient,
sample_ti_url_identifier: str,
sample_update_payload: dict[str, Any],
session: Session,
) -> None:
response = test_client.patch(
f"{sample_ti_url_identifier}/hitlDetails",
json=sample_update_payload,
)
assert response.status_code == 200
assert response.json() == {
"params_input": {"input_1": 2},
"chosen_options": ["Approve"],
"responded_by": {"id": "test", "name": "test"},
"responded_at": "2025-07-03T00:00:00Z",
}
audit_log = session.scalar(select(Log))
assert audit_log is not None
_assert_sample_audit_log(audit_log)
@time_machine.travel(datetime(2025, 7, 3, 0, 0, 0), tick=False)
@pytest.mark.usefixtures("sample_hitl_detail_respondent")
def test_should_respond_200_to_assigned_users(
self,
test_client: TestClient,
sample_ti_url_identifier: str,
sample_update_payload: dict[str, Any],
session: Session,
):
"""Test with an authorized user and the user is a respondent to the task."""
response = test_client.patch(
f"{sample_ti_url_identifier}/hitlDetails",
json=sample_update_payload,
)
assert response.status_code == 200
assert response.json() == {
"params_input": {"input_1": 2},
"chosen_options": ["Approve"],
"responded_by": {"id": "test", "name": "test"},
"responded_at": "2025-07-03T00:00:00Z",
}
audit_log = session.scalar(select(Log))
assert audit_log is not None
_assert_sample_audit_log(audit_log)
def test_should_respond_401(
self,
unauthenticated_test_client: TestClient,
sample_ti_url_identifier: str,
sample_update_payload: dict[str, Any],
) -> None:
response = unauthenticated_test_client.patch(
f"{sample_ti_url_identifier}/hitlDetails",
json=sample_update_payload,
)
assert response.status_code == 401
def test_should_respond_403(
self,
unauthorized_test_client: TestClient,
sample_ti_url_identifier: str,
sample_update_payload: dict[str, Any],
) -> None:
response = unauthorized_test_client.patch(
f"{sample_ti_url_identifier}/hitlDetails",
json=sample_update_payload,
)
assert response.status_code == 403
@time_machine.travel(datetime(2025, 7, 3, 0, 0, 0), tick=False)
@pytest.mark.usefixtures("sample_hitl_detail_non_respondent")
def test_should_respond_403_to_non_respondent_user(
self,
test_client: TestClient,
sample_ti_url_identifier: str,
sample_update_payload: dict[str, Any],
):
"""Test with an authorized user but the user is not a respondent to the task."""
response = test_client.patch(
f"{sample_ti_url_identifier}/hitlDetails",
json=sample_update_payload,
)
assert response.status_code == 403
def test_should_respond_404_without_ti(
self,
test_client: TestClient,
sample_ti_url_identifier: str,
) -> None:
response = test_client.patch(
f"{sample_ti_url_identifier}/hitlDetails",
json={"chosen_options": ["Approve"], "params_input": {"input_1": 2}},
)
assert response.status_code == 404
assert response.json() == {"detail": expected_ti_not_found_error_msg}
def test_should_respond_404_without_hitl_detail(
self,
test_client: TestClient,
sample_ti_url_identifier: str,
sample_update_payload: dict[str, Any],
expected_hitl_detail_not_found_error_msg: str,
) -> None:
response = test_client.patch(
f"{sample_ti_url_identifier}/hitlDetails",
json=sample_update_payload,
)
assert response.status_code == 404
assert response.json() == {"detail": expected_hitl_detail_not_found_error_msg}
@time_machine.travel(datetime(2025, 7, 3, 0, 0, 0), tick=False)
@pytest.mark.usefixtures("sample_hitl_detail")
def test_should_respond_409(
self,
test_client: TestClient,
sample_ti_url_identifier: str,
sample_ti: TaskInstance,
) -> None:
response = test_client.patch(
f"{sample_ti_url_identifier}/hitlDetails",
json={"chosen_options": ["Approve"], "params_input": {"input_1": 2}},
)
expected_response = {
"params_input": {"input_1": 2},
"chosen_options": ["Approve"],
"responded_by": {"id": "test", "name": "test"},
"responded_at": "2025-07-03T00:00:00Z",
}
assert response.status_code == 200
assert response.json() == expected_response
response = test_client.patch(
f"{sample_ti_url_identifier}/hitlDetails",
json={"chosen_options": ["Approve"], "params_input": {"input_1": 3}},
)
assert response.status_code == 409
assert response.json() == {
"detail": (
"Human-in-the-loop detail has already been updated for Task Instance "
f"with id {sample_ti.id} "
"and is not allowed to write again."
)
}
@pytest.mark.usefixtures("sample_hitl_detail")
def test_should_respond_422_with_empty_option(
self,
test_client: TestClient,
sample_ti_url_identifier: str,
) -> None:
response = test_client.patch(
f"{sample_ti_url_identifier}/hitlDetails",
json={"chosen_options": [], "params_input": {"input_1": 2}},
)
assert response.status_code == 422
class TestGetHITLDetailEndpoint:
@pytest.mark.usefixtures("sample_hitl_detail")
def test_should_respond_200_with_existing_response(
self,
test_client: TestClient,
sample_ti_url_identifier: str,
expected_sample_hitl_detail_dict: dict[str, Any],
) -> None:
response = test_client.get(f"{sample_ti_url_identifier}/hitlDetails")
assert response.status_code == 200
assert response.json() == expected_sample_hitl_detail_dict
def test_should_respond_401(
self,
unauthenticated_test_client: TestClient,
sample_ti_url_identifier: str,
) -> None:
response = unauthenticated_test_client.get(f"{sample_ti_url_identifier}/hitlDetails")
assert response.status_code == 401
def test_should_respond_403(
self,
unauthorized_test_client: TestClient,
sample_ti_url_identifier: str,
) -> None:
response = unauthorized_test_client.get(f"{sample_ti_url_identifier}/hitlDetails")
assert response.status_code == 403
def test_should_respond_404_without_ti(
self,
test_client: TestClient,
sample_ti_url_identifier: str,
) -> None:
response = test_client.get(f"{sample_ti_url_identifier}/hitlDetails")
assert response.status_code == 404
assert response.json() == {"detail": expected_ti_not_found_error_msg}
def test_should_respond_404_without_hitl_detail(
self,
test_client: TestClient,
sample_ti_url_identifier: str,
expected_hitl_detail_not_found_error_msg: str,
) -> None:
response = test_client.get(f"{sample_ti_url_identifier}/hitlDetails")
assert response.status_code == 404
assert response.json() == {"detail": expected_hitl_detail_not_found_error_msg}
class TestGetHITLDetailsEndpoint:
@pytest.mark.usefixtures("sample_hitl_detail")
def test_should_respond_200_with_existing_response(
self,
test_client: TestClient,
expected_sample_hitl_detail_dict: dict[str, Any],
) -> None:
with assert_queries_count(3):
response = test_client.get("/dags/~/dagRuns/~/hitlDetails")
assert response.status_code == 200
assert response.json() == {
"hitl_details": [expected_sample_hitl_detail_dict],
"total_entries": 1,
}
@pytest.mark.usefixtures("sample_hitl_details")
@pytest.mark.parametrize(
("params", "expected_ti_count"),
[
# ti related filter
({"dag_id_pattern": "hitl_dag"}, 5),
({"dag_id_pattern": "other_Dag_"}, 3),
({"task_id": "hitl_task_0"}, 1),
({"task_id_pattern": "another_hitl"}, 3),
({"map_index": -1}, 8),
({"map_index": 1}, 0),
({"state": "deferred"}, 5),
({"state": "success"}, 3),
# hitl detail related filter
({"subject_search": "This is subject"}, 5),
({"body_search": "this is"}, 8),
({"response_received": False}, 5),
({"response_received": True}, 3),
({"responded_by_user_id": ["test"]}, 3),
({"responded_by_user_name": ["test"]}, 3),
(
{"created_at_gte": from_datetime_to_zulu_without_ms(DEFAULT_CREATED_AT + timedelta(days=1))},
0,
),
(
{"created_at_lte": from_datetime_to_zulu_without_ms(DEFAULT_CREATED_AT - timedelta(days=1))},
0,
),
(
{
"created_at_gte": from_datetime_to_zulu_without_ms(DEFAULT_CREATED_AT),
"created_at_lte": from_datetime_to_zulu_without_ms(DEFAULT_CREATED_AT),
},
5,
),
],
ids=[
"dag_id_pattern_hitl_dag",
"dag_id_pattern_other_dag",
"task_id",
"task_id_pattern",
"map_index_none",
"map_index_1",
"ti_state_deferred",
"ti_state_success",
"subject",
"body",
"response_not_received",
"response_received",
"responded_by_user_id",
"responded_by_user_name",
"created_at_gte",
"created_at_lte",
"created_at",
],
)
def test_should_respond_200_with_existing_response_and_query(
self,
test_client: TestClient,
params: dict[str, Any],
expected_ti_count: int,
) -> None:
with assert_queries_count(3):
response = test_client.get("/dags/~/dagRuns/~/hitlDetails", params=params)
assert response.status_code == 200
assert response.json()["total_entries"] == expected_ti_count
assert len(response.json()["hitl_details"]) == expected_ti_count
@pytest.mark.usefixtures("sample_hitl_details")
def test_should_respond_200_with_existing_response_and_concrete_query(
self,
test_client: TestClient,
) -> None:
response = test_client.get("/dags/hitl_dag_0/dagRuns/hitl_run_0/hitlDetails")
assert response.status_code == 200
assert response.json() == {
"hitl_details": [
{
"task_instance": mock.ANY,
"options": ["Approve", "Reject"],
"subject": "This is subject 0",
"body": "this is body 0",
"defaults": ["Approve"],
"multiple": False,
"params": {"input_1": {"value": 1, "schema": {}, "description": None}},
"assigned_users": [],
"created_at": DEFAULT_CREATED_AT.isoformat().replace("+00:00", "Z"),
"responded_by_user": None,
"responded_at": None,
"chosen_options": None,
"params_input": {},
"response_received": False,
}
],
"total_entries": 1,
}
@pytest.mark.usefixtures("sample_hitl_details")
@pytest.mark.parametrize("asc_desc_mark", ["", "-"], ids=["asc", "desc"])
@pytest.mark.parametrize(
("key", "get_key_lambda"),
[
# ti key
("ti_id", lambda x: x["task_instance"]["id"]),
("dag_id", lambda x: x["task_instance"]["dag_id"]),
("run_id", lambda x: x["task_instance"]["dag_run_id"]),
("run_after", lambda x: x["task_instance"]["run_after"]),
("rendered_map_index", lambda x: x["task_instance"]["rendered_map_index"]),
("task_instance_operator", lambda x: x["task_instance"]["operator_name"]),
("task_instance_state", lambda x: x["task_instance"]["state"]),
# hitl key
("subject", itemgetter("subject")),
("responded_at", itemgetter("responded_at")),
("created_at", itemgetter("created_at")),
],
ids=[
# ti key
"ti_id",
"dag_id",
"run_id",
"run_after",
"rendered_map_index",
"task_instance_operator",
"task_instance_state",
# hitl key
"subject",
"responded_at",
"created_at",
],
)
def test_should_respond_200_with_existing_response_and_order_by(
self,
test_client: TestClient,
asc_desc_mark: str,
key: str,
get_key_lambda: Callable,
) -> None:
reverse = asc_desc_mark == "-"
response = test_client.get(
"/dags/~/dagRuns/~/hitlDetails", params={"order_by": f"{asc_desc_mark}{key}"}
)
data = response.json()
hitl_details = data["hitl_details"]
assert response.status_code == 200
assert data["total_entries"] == 8
assert len(hitl_details) == 8
sorted_hitl_details = sorted(
hitl_details,
key=lambda x: (
# pull none to the last no matter it's asc or desc
(get_key_lambda(x) is not None) if reverse else (get_key_lambda(x) is None),
get_key_lambda(x),
x["task_instance"]["id"],
),
reverse=reverse,
)
# Remove entries with None, because None orders depends on the DB implementation
hitl_details = [d for d in hitl_details if get_key_lambda(d) is not None]
sorted_hitl_details = [d for d in sorted_hitl_details if get_key_lambda(d) is not None]
assert hitl_details == sorted_hitl_details
def test_should_respond_200_without_response(self, test_client: TestClient) -> None:
response = test_client.get("/dags/~/dagRuns/~/hitlDetails")
assert response.status_code == 200
assert response.json() == {
"hitl_details": [],
"total_entries": 0,
}
def test_should_respond_401(self, unauthenticated_test_client: TestClient) -> None:
response = unauthenticated_test_client.get("/dags/~/dagRuns/~/hitlDetails")
assert response.status_code == 401
def test_should_respond_403(self, unauthorized_test_client: TestClient) -> None:
response = unauthorized_test_client.get("/dags/~/dagRuns/~/hitlDetails")
assert response.status_code == 403
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_hitl.py",
"license": "Apache License 2.0",
"lines": 637,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_hitl.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime
from typing import TYPE_CHECKING, Any
import pytest
import time_machine
from httpx import Client
from uuid6 import uuid7
from airflow._shared.timezones.timezone import convert_to_utc
from airflow.models.hitl import HITLDetail
if TYPE_CHECKING:
from fastapi.testclient import TestClient
from sqlalchemy.orm import Session
from airflow.models.taskinstance import TaskInstance
from tests_common.pytest_plugin import CreateTaskInstance
pytestmark = pytest.mark.db_test
default_hitl_detail_request_kwargs: dict[str, Any] = {
# ti_id decided at a later stage
"subject": "This is subject",
"body": "this is body",
"options": ["Approve", "Reject"],
"defaults": ["Approve"],
"multiple": False,
"params": {"input_1": 1},
"assignees": None,
}
expected_empty_hitl_detail_response_part: dict[str, Any] = {
"responded_at": None,
"chosen_options": [],
"responded_by_user": None,
"params_input": {},
"response_received": False,
}
@pytest.fixture
def sample_ti(create_task_instance: CreateTaskInstance) -> TaskInstance:
return create_task_instance()
@pytest.fixture
def sample_hitl_detail(session: Session, sample_ti: TaskInstance) -> HITLDetail:
hitl_detail_model = HITLDetail(
ti_id=sample_ti.id,
**default_hitl_detail_request_kwargs,
)
session.add(hitl_detail_model)
session.commit()
return hitl_detail_model
@pytest.fixture
def expected_sample_hitl_detail_dict(sample_ti: TaskInstance) -> dict[str, Any]:
return {
"ti_id": sample_ti.id,
**default_hitl_detail_request_kwargs,
**expected_empty_hitl_detail_response_part,
}
@pytest.mark.parametrize(
"existing_hitl_detail_args",
[
None,
default_hitl_detail_request_kwargs,
{
**default_hitl_detail_request_kwargs,
**{
"params_input": {"input_1": 2},
"responded_at": convert_to_utc(datetime(2025, 7, 3, 0, 0, 0)),
"chosen_options": ["Reject"],
"responded_by": None,
},
},
],
ids=[
"no existing hitl detail",
"existing hitl detail without response",
"existing hitl detail with response",
],
)
def test_upsert_hitl_detail(
client: TestClient,
create_task_instance: CreateTaskInstance,
session: Session,
existing_hitl_detail_args: dict[str, Any],
) -> None:
ti = create_task_instance()
session.commit()
if existing_hitl_detail_args:
session.add(HITLDetail(ti_id=ti.id, **existing_hitl_detail_args))
session.commit()
response = client.post(
f"/execution/hitlDetails/{ti.id}",
json={
"ti_id": str(ti.id),
**default_hitl_detail_request_kwargs,
},
)
expected_json = {
"ti_id": str(ti.id),
**default_hitl_detail_request_kwargs,
}
expected_json["assigned_users"] = expected_json.pop("assignees") or []
assert response.status_code == 201
assert response.json() == expected_json
def test_upsert_hitl_detail_with_empty_option(
client: TestClient,
create_task_instance: CreateTaskInstance,
session: Session,
) -> None:
ti = create_task_instance()
session.commit()
response = client.post(
f"/execution/hitlDetails/{ti.id}",
json={
"ti_id": str(ti.id),
"subject": "This is subject",
"body": "this is body",
"options": [],
"defaults": ["Approve"],
"multiple": False,
"params": {"input_1": 1},
},
)
assert response.status_code == 422
@time_machine.travel(datetime(2025, 7, 3, 0, 0, 0), tick=False)
@pytest.mark.usefixtures("sample_hitl_detail")
def test_update_hitl_detail(client: Client, sample_ti: TaskInstance) -> None:
response = client.patch(
f"/execution/hitlDetails/{sample_ti.id}",
json={
"ti_id": str(sample_ti.id),
"chosen_options": ["Reject"],
"params_input": {"input_1": 2},
},
)
assert response.status_code == 200
assert response.json() == {
"params_input": {"input_1": 2},
"responded_at": "2025-07-03T00:00:00Z",
"chosen_options": ["Reject"],
"response_received": True,
"responded_by_user": None,
}
def test_update_hitl_detail_without_option(client: Client, sample_ti: TaskInstance) -> None:
response = client.patch(
f"/execution/hitlDetails/{sample_ti.id}",
json={
"ti_id": str(sample_ti.id),
"chosen_options": [],
"params_input": {"input_1": 2},
},
)
assert response.status_code == 422
def test_update_hitl_detail_without_ti(client: Client) -> None:
ti_id = str(uuid7())
response = client.patch(
f"/execution/hitlDetails/{ti_id}",
json={
"ti_id": ti_id,
"chosen_options": ["Reject"],
"params_input": {"input_1": 2},
},
)
assert response.status_code == 404
assert response.json() == {
"detail": {
"message": "HITLDetail not found. This happens most likely due to clearing task instance before receiving response.",
"reason": "not_found",
},
}
@pytest.mark.usefixtures("sample_hitl_detail")
def test_get_hitl_detail(client: Client, sample_ti: TaskInstance) -> None:
response = client.get(f"/execution/hitlDetails/{sample_ti.id}")
assert response.status_code == 200
assert response.json() == expected_empty_hitl_detail_response_part
def test_get_hitl_detail_without_ti(client: Client) -> None:
response = client.get(f"/execution/hitlDetails/{uuid7()}")
assert response.status_code == 404
assert response.json() == {
"detail": {
"message": "HITLDetail not found. This happens most likely due to clearing task instance before receiving response.",
"reason": "not_found",
},
}
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/api_fastapi/execution_api/versions/head/test_hitl.py",
"license": "Apache License 2.0",
"lines": 193,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/standard/src/airflow/providers/standard/operators/hitl.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_3_PLUS, AIRFLOW_V_3_1_PLUS
if not AIRFLOW_V_3_1_PLUS:
raise AirflowOptionalProviderFeatureException("Human in the loop functionality needs Airflow 3.1+.")
from collections.abc import Collection, Mapping, Sequence
from typing import TYPE_CHECKING, Any
from urllib.parse import ParseResult, urlencode, urlparse, urlunparse
from airflow.providers.common.compat.sdk import conf
from airflow.providers.standard.exceptions import HITLRejectException, HITLTimeoutError, HITLTriggerEventError
from airflow.providers.standard.operators.branch import BranchMixIn
from airflow.providers.standard.triggers.hitl import HITLTrigger, HITLTriggerEventSuccessPayload
from airflow.providers.standard.utils.skipmixin import SkipMixin
from airflow.providers.standard.version_compat import BaseOperator
from airflow.sdk.bases.notifier import BaseNotifier
from airflow.sdk.definitions.param import ParamsDict
from airflow.sdk.execution_time.hitl import upsert_hitl_detail
from airflow.sdk.timezone import utcnow
if TYPE_CHECKING:
from airflow.providers.common.compat.sdk import Context
from airflow.sdk.execution_time.hitl import HITLUser
from airflow.sdk.types import RuntimeTaskInstanceProtocol
class HITLOperator(BaseOperator):
"""
Base class for all Human-in-the-loop Operators to inherit from.
:param subject: Headline/subject presented to the user for the interaction task.
:param options: List of options that the an user can select from to complete the task.
:param body: Descriptive text (with Markdown support) that gives the details that are needed to decide.
:param defaults: The default options and the options that are taken if timeout is passed.
:param multiple: Whether the user can select one or multiple options.
:param params: dictionary of parameter definitions that are in the format of Dag params such that
a Form Field can be rendered. Entered data is validated (schema, required fields) like for a Dag run
and added to XCom of the task result.
"""
template_fields: Collection[str] = ("subject", "body")
def __init__(
self,
*,
subject: str,
options: list[str],
body: str | None = None,
defaults: str | list[str] | None = None,
multiple: bool = False,
params: ParamsDict | dict[str, Any] | None = None,
notifiers: Sequence[BaseNotifier] | BaseNotifier | None = None,
assigned_users: HITLUser | list[HITLUser] | None = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.subject = subject
self.body = body
self.options = options
# allow defaults to store more than one options when multiple=True
self.defaults = [defaults] if isinstance(defaults, str) else defaults
self.multiple = multiple
self.params: ParamsDict = params if isinstance(params, ParamsDict) else ParamsDict(params or {})
if hasattr(ParamsDict, "filter_params_by_source"):
# Params that exist only in Dag level does not make sense to appear in HITLOperator
self.params = ParamsDict.filter_params_by_source(self.params, source="task")
elif self.params:
self.log.debug(
"ParamsDict.filter_params_by_source not available; HITLOperator will also include Dag level params."
)
self.notifiers: Sequence[BaseNotifier] = (
[notifiers] if isinstance(notifiers, BaseNotifier) else notifiers or []
)
self.assigned_users = [assigned_users] if isinstance(assigned_users, dict) else assigned_users
self.validate_options()
self.validate_params()
self.validate_defaults()
# HITL summary for the use of listeners; subclasses can extend it.
self.hitl_summary: dict[str, Any] = {
"subject": self.subject,
"body": self.body,
"options": self.options,
"defaults": self.defaults,
"multiple": self.multiple,
"assigned_users": self.assigned_users,
"serialized_params": self.serialized_params or None,
}
def validate_options(self) -> None:
"""
Validate the `options` attribute of the instance.
Raises:
ValueError: If `options` is empty.
"""
if not self.options:
raise ValueError('"options" cannot be empty.')
def validate_params(self) -> None:
"""
Validate the `params` attribute of the instance.
Raises:
ValueError: If `"_options"` key is present in `params`, which is not allowed.
"""
self.params.validate()
if "_options" in self.params:
raise ValueError('"_options" is not allowed in params')
def validate_defaults(self) -> None:
"""
Validate whether the given defaults pass the following criteria.
1. Default options should be the subset of options.
2. When multiple is False, there should only be one option.
"""
if self.defaults is not None:
if not set(self.defaults).issubset(self.options):
raise ValueError(f'defaults "{self.defaults}" should be a subset of options "{self.options}"')
if self.multiple is False and len(self.defaults) > 1:
raise ValueError('More than one defaults given when "multiple" is set to False.')
def execute(self, context: Context):
"""Add a Human-in-the-loop Response and then defer to HITLTrigger and wait for user input."""
ti_id = context["task_instance"].id
# Write Human-in-the-loop input request to DB
upsert_hitl_detail(
ti_id=ti_id,
options=self.options,
subject=self.subject,
body=self.body,
defaults=self.defaults,
multiple=self.multiple,
params=self.serialized_params,
assigned_users=self.assigned_users,
)
if self.execution_timeout:
timeout_datetime = utcnow() + self.execution_timeout
else:
timeout_datetime = None
# Enrich summary with runtime info
self.hitl_summary["timeout_datetime"] = timeout_datetime.isoformat() if timeout_datetime else None
self.log.info("Waiting for response")
for notifier in self.notifiers:
notifier(context)
# Defer the Human-in-the-loop response checking process to HITLTrigger
self.defer(
trigger=HITLTrigger(
ti_id=ti_id,
options=self.options,
defaults=self.defaults,
params=self.serialized_params,
multiple=self.multiple,
timeout_datetime=timeout_datetime,
),
method_name="execute_complete",
)
@property
def serialized_params(self) -> dict[str, dict[str, Any]]:
if not AIRFLOW_V_3_1_3_PLUS:
return self.params.dump() if isinstance(self.params, ParamsDict) else self.params
return {k: self.params.get_param(k).serialize() for k in self.params}
def execute_complete(self, context: Context, event: dict[str, Any]) -> Any:
if "error" in event:
self.hitl_summary["error_type"] = event["error_type"]
self.process_trigger_event_error(event)
chosen_options = event["chosen_options"]
params_input = event["params_input"] or {}
self.validate_chosen_options(chosen_options)
self.validate_params_input(params_input)
self.hitl_summary.update(
{
"chosen_options": chosen_options,
"params_input": params_input,
"responded_at": event["responded_at"].isoformat(),
"responded_by_user": event["responded_by_user"],
}
)
return HITLTriggerEventSuccessPayload(
chosen_options=chosen_options,
params_input=params_input,
responded_at=event["responded_at"],
responded_by_user=event["responded_by_user"],
)
def process_trigger_event_error(self, event: dict[str, Any]) -> None:
if event["error_type"] == "timeout":
raise HITLTimeoutError(event)
raise HITLTriggerEventError(event)
def validate_chosen_options(self, chosen_options: list[str]) -> None:
"""Check whether user provide valid response."""
if diff := set(chosen_options) - set(self.options):
raise ValueError(f"Responses {diff} not in {self.options}")
def validate_params_input(self, params_input: Mapping) -> None:
"""Check whether user provide valid params input."""
if self.params and params_input and set(self.serialized_params.keys()) ^ set(params_input):
raise ValueError(f"params_input {params_input} does not match params {self.params}")
for key, value in params_input.items():
self.params[key] = value
def generate_link_to_ui(
self,
*,
task_instance: RuntimeTaskInstanceProtocol,
base_url: str | None = None,
options: str | list[str] | None = None,
params_input: dict[str, Any] | None = None,
) -> str:
"""
Generate a URL link to the "required actions" page for a specific task instance.
This URL includes query parameters based on allowed options and parameters.
Args:
task_instance: The task instance to generate the link for.
base_url: Optional base URL to use. Defaults to ``api.base_url`` from config.
options: Optional subset of allowed options to include in the URL.
params_input: Optional subset of allowed params to include in the URL.
Raises:
ValueError: If any provided option or parameter is invalid.
ValueError: If no base_url can be determined.
Returns:
The full URL pointing to the required actions page with query parameters.
"""
query_param: dict[str, Any] = {}
options = [options] if isinstance(options, str) else options
if options:
if diff := set(options) - set(self.options):
raise ValueError(f"options {diff} are not valid options")
query_param["_options"] = options
if params_input:
if diff := set(params_input.keys()) - set(self.params.keys()):
raise ValueError(f"params {diff} are not valid params")
query_param.update(params_input)
if not (base_url := base_url or conf.get("api", "base_url", fallback=None)):
raise ValueError("Not able to retrieve base_url")
query_param["map_index"] = task_instance.map_index
parsed_base_url: ParseResult = urlparse(base_url)
return urlunparse(
(
parsed_base_url.scheme,
parsed_base_url.netloc,
f"/dags/{task_instance.dag_id}/runs/{task_instance.run_id}/tasks/{task_instance.task_id}/required_actions",
"",
urlencode(query_param) if query_param else "",
"",
)
)
@staticmethod
def generate_link_to_ui_from_context(
*,
context: Context,
base_url: str | None = None,
options: list[str] | None = None,
params_input: dict[str, Any] | None = None,
) -> str:
"""
Generate a "required actions" page URL from a task context.
Delegates to ``generate_link_to_ui`` using the task and task_instance extracted from
the provided context.
Args:
context: The Airflow task context containing 'task' and 'task_instance'.
base_url: Optional base URL to use.
options: Optional list of allowed options to include.
params_input: Optional dictionary of allowed parameters to include.
Returns:
The full URL pointing to the required actions page with query parameters.
"""
hitl_op = context["task"]
if not isinstance(hitl_op, HITLOperator):
raise ValueError("This method only supports HITLOperator")
return hitl_op.generate_link_to_ui(
task_instance=context["task_instance"],
base_url=base_url,
options=options,
params_input=params_input,
)
class ApprovalOperator(HITLOperator, SkipMixin):
"""Human-in-the-loop Operator that has only 'Approval' and 'Reject' options."""
inherits_from_skipmixin = True
FIXED_ARGS = ["options", "multiple"]
APPROVE = "Approve"
REJECT = "Reject"
def __init__(
self,
*,
ignore_downstream_trigger_rules: bool = False,
fail_on_reject: bool = False,
**kwargs,
) -> None:
"""
Human-in-the-loop Operator for simple approval workflows.
This operator presents the user with two fixed options: "Approve" and "Reject".
Behavior:
- "Approve": Downstream tasks execute as normal.
- "Reject":
- Downstream tasks are skipped according to the `ignore_downstream_trigger_rules` setting.
- If `fail_on_reject=True`, the task fails instead of only skipping downstream tasks.
Warning:
Using `fail_on_reject=True` is generally discouraged. A HITLOperator's role is to collect
human input, and receiving any response—including "Reject"—indicates the task succeeded.
Treating "Reject" as a task failure mixes human decision outcomes with Airflow task
success/failure states.
Only use this option if you explicitly intend for a "Reject" response to fail the task.
Args:
ignore_downstream_trigger_rules: If True, skips all downstream tasks regardless of trigger rules.
fail_on_reject: If True, the task fails when "Reject" is selected. Generally discouraged.
Read the warning carefully before using.
"""
for arg in self.FIXED_ARGS:
if arg in kwargs:
raise ValueError(f"Passing {arg} to ApprovalOperator is not allowed.")
self.ignore_downstream_trigger_rules = ignore_downstream_trigger_rules
self.fail_on_reject = fail_on_reject
super().__init__(
options=[self.APPROVE, self.REJECT],
multiple=False,
**kwargs,
)
self.hitl_summary["ignore_downstream_trigger_rules"] = self.ignore_downstream_trigger_rules
self.hitl_summary["fail_on_reject"] = self.fail_on_reject
def execute_complete(self, context: Context, event: dict[str, Any]) -> Any:
ret = super().execute_complete(context=context, event=event)
chosen_option = ret["chosen_options"][0]
self.hitl_summary["approved"] = chosen_option == self.APPROVE
if chosen_option == self.APPROVE:
self.log.info("Approved. Proceeding with downstream tasks...")
return ret
if self.fail_on_reject and chosen_option == self.REJECT:
raise HITLRejectException('Receive "Reject"')
if not self.downstream_task_ids:
self.log.info("No downstream tasks; nothing to do.")
return ret
def get_tasks_to_skip():
if self.ignore_downstream_trigger_rules is True:
tasks = context["task"].get_flat_relatives(upstream=False)
else:
tasks = context["task"].get_direct_relatives(upstream=False)
yield from (t for t in tasks if not t.is_teardown)
tasks_to_skip = get_tasks_to_skip()
# this lets us avoid an intermediate list unless debug logging
if self.log.getEffectiveLevel() <= logging.DEBUG:
self.log.debug("Downstream task IDs %s", tasks_to_skip := list(get_tasks_to_skip()))
self.log.info("Skipping downstream tasks")
self.skip(ti=context["ti"], tasks=tasks_to_skip)
return ret
class HITLBranchOperator(HITLOperator, BranchMixIn):
"""BranchOperator based on Human-in-the-loop Response."""
inherits_from_skipmixin = True
def __init__(self, *, options_mapping: dict[str, str] | None = None, **kwargs) -> None:
"""
Initialize HITLBranchOperator.
Args:
options_mapping:
A dictionary mapping option labels (must match entries in `self.options`)
to string values (e.g., task IDs). Defaults to an empty dict if not provided.
Raises:
ValueError:
- If `options_mapping` contains keys not present in `self.options`.
- If any value in `options_mapping` is not a string.
"""
super().__init__(**kwargs)
self.options_mapping = options_mapping or {}
self.validate_options_mapping()
self.hitl_summary["options_mapping"] = self.options_mapping
def validate_options_mapping(self) -> None:
"""
Validate that `options_mapping` keys match `self.options` and all values are strings.
Raises:
ValueError: If any key is not in `self.options` or any value is not a string.
"""
if not self.options_mapping:
return
# Validate that the choice options are keys in the mapping are the same
invalid_keys = set(self.options_mapping.keys()) - set(self.options)
if invalid_keys:
raise ValueError(
f"`options_mapping` contains keys that are not in `options`: {sorted(invalid_keys)}"
)
# validate that all values are strings
invalid_entries = {
k: (v, type(v).__name__) for k, v in self.options_mapping.items() if not isinstance(v, str)
}
if invalid_entries:
raise ValueError(
f"`options_mapping` values must be strings (task_ids).\nInvalid entries: {invalid_entries}"
)
def execute_complete(self, context: Context, event: dict[str, Any]) -> Any:
"""Execute the operator and branch based on chosen options."""
ret = super().execute_complete(context=context, event=event)
chosen_options = ret["chosen_options"]
# Map options to task IDs using the mapping, fallback to original option
chosen_options = [self.options_mapping.get(option, option) for option in chosen_options]
self.hitl_summary["branches_to_execute"] = chosen_options
return self.do_branch(context=context, branches_to_execute=chosen_options)
class HITLEntryOperator(HITLOperator):
"""Human-in-the-loop Operator that is used to accept user input through TriggerForm."""
OK = "OK"
def __init__(self, **kwargs) -> None:
if "options" not in kwargs:
kwargs["options"] = [self.OK]
if "defaults" not in kwargs:
kwargs["defaults"] = [self.OK]
super().__init__(**kwargs)
| {
"repo_id": "apache/airflow",
"file_path": "providers/standard/src/airflow/providers/standard/operators/hitl.py",
"license": "Apache License 2.0",
"lines": 403,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/standard/src/airflow/providers/standard/triggers/hitl.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_PLUS
if not AIRFLOW_V_3_1_PLUS:
raise AirflowOptionalProviderFeatureException("Human in the loop functionality needs Airflow 3.1+.")
import asyncio
from collections.abc import AsyncIterator
from datetime import datetime
from typing import TYPE_CHECKING, Any, Literal, TypedDict
from uuid import UUID
from asgiref.sync import sync_to_async
from airflow.providers.common.compat.sdk import ParamValidationError
from airflow.sdk import Param
from airflow.sdk.definitions.param import ParamsDict
from airflow.sdk.execution_time.hitl import (
HITLUser,
get_hitl_detail_content_detail,
update_hitl_detail_response,
)
from airflow.sdk.timezone import utcnow
from airflow.triggers.base import BaseTrigger, TriggerEvent
class HITLTriggerEventSuccessPayload(TypedDict, total=False):
"""Minimum required keys for a success Human-in-the-loop TriggerEvent."""
chosen_options: list[str]
params_input: dict[str, dict[str, Any]]
responded_by_user: HITLUser | None
responded_at: datetime
timedout: bool
class HITLTriggerEventFailurePayload(TypedDict):
"""Minimum required keys for a failed Human-in-the-loop TriggerEvent."""
error: str
error_type: Literal["timeout", "unknown", "validation"]
class HITLTrigger(BaseTrigger):
"""A trigger that checks whether Human-in-the-loop responses are received."""
def __init__(
self,
*,
ti_id: UUID,
options: list[str],
params: dict[str, dict[str, Any]],
defaults: list[str] | None = None,
multiple: bool = False,
timeout_datetime: datetime | None,
poke_interval: float = 5.0,
**kwargs,
):
super().__init__(**kwargs)
self.ti_id = ti_id
self.poke_interval = poke_interval
self.options = options
self.multiple = multiple
self.defaults = defaults
self.timeout_datetime = timeout_datetime
self.params = ParamsDict(
{
k: Param(
v.pop("value"),
**v,
)
if HITLTrigger._is_param(v)
else Param(v)
for k, v in params.items()
},
)
@staticmethod
def _is_param(value: Any) -> bool:
return isinstance(value, dict) and all(key in value for key in ("description", "schema", "value"))
def serialize(self) -> tuple[str, dict[str, Any]]:
"""Serialize HITLTrigger arguments and classpath."""
return (
"airflow.providers.standard.triggers.hitl.HITLTrigger",
{
"ti_id": self.ti_id,
"options": self.options,
"defaults": self.defaults,
"params": {k: self.params.get_param(k).serialize() for k in self.params},
"multiple": self.multiple,
"timeout_datetime": self.timeout_datetime,
"poke_interval": self.poke_interval,
},
)
async def _handle_timeout(self) -> TriggerEvent:
"""Handle HITL timeout logic and yield appropriate event."""
resp = await sync_to_async(get_hitl_detail_content_detail)(ti_id=self.ti_id)
# Case 1: Response arrived just before timeout
if resp.response_received and resp.chosen_options:
if TYPE_CHECKING:
assert resp.responded_by_user is not None
assert resp.responded_at is not None
chosen_options_list = list(resp.chosen_options or [])
self.log.info(
"[HITL] responded_by=%s (id=%s) options=%s at %s (timeout fallback skipped)",
resp.responded_by_user.name,
resp.responded_by_user.id,
chosen_options_list,
resp.responded_at,
)
return TriggerEvent(
HITLTriggerEventSuccessPayload(
chosen_options=chosen_options_list,
params_input=resp.params_input or {},
responded_at=resp.responded_at,
responded_by_user=HITLUser(
id=resp.responded_by_user.id,
name=resp.responded_by_user.name,
),
timedout=False,
)
)
# Case 2: No defaults defined → failure
if self.defaults is None:
return TriggerEvent(
HITLTriggerEventFailurePayload(
error="The timeout has passed, and the response has not yet been received.",
error_type="timeout",
)
)
# Case 3: Timeout fallback to default
resp = await sync_to_async(update_hitl_detail_response)(
ti_id=self.ti_id,
chosen_options=self.defaults,
params_input=self.params.dump(),
)
if TYPE_CHECKING:
assert resp.responded_at is not None
self.log.info(
"[HITL] timeout reached before receiving response, fallback to default %s",
self.defaults,
)
return TriggerEvent(
HITLTriggerEventSuccessPayload(
chosen_options=self.defaults,
params_input=self.params.dump(),
responded_by_user=None,
responded_at=resp.responded_at,
timedout=True,
)
)
async def _handle_response(self):
"""Check if HITL response is ready and yield success if so."""
resp = await sync_to_async(get_hitl_detail_content_detail)(ti_id=self.ti_id)
if TYPE_CHECKING:
assert resp.responded_by_user is not None
assert resp.responded_at is not None
if not (resp.response_received and resp.chosen_options):
return None
# validate input
if params_input := resp.params_input:
try:
for key, value in params_input.items():
self.params[key] = value
except ParamValidationError as err:
return TriggerEvent(
HITLTriggerEventFailurePayload(
error=str(err),
error_type="validation",
)
)
chosen_options_list = list(resp.chosen_options or [])
self.log.info(
"[HITL] responded_by=%s (id=%s) options=%s at %s",
resp.responded_by_user.name,
resp.responded_by_user.id,
chosen_options_list,
resp.responded_at,
)
return TriggerEvent(
HITLTriggerEventSuccessPayload(
chosen_options=chosen_options_list,
params_input=params_input or {},
responded_at=resp.responded_at,
responded_by_user=HITLUser(
id=resp.responded_by_user.id,
name=resp.responded_by_user.name,
),
timedout=False,
)
)
async def run(self) -> AsyncIterator[TriggerEvent]:
"""Loop until the Human-in-the-loop response received or timeout reached."""
while True:
if self.timeout_datetime and self.timeout_datetime < utcnow():
event = await self._handle_timeout()
yield event
return
event = await self._handle_response()
if event:
yield event
return
await asyncio.sleep(self.poke_interval)
| {
"repo_id": "apache/airflow",
"file_path": "providers/standard/src/airflow/providers/standard/triggers/hitl.py",
"license": "Apache License 2.0",
"lines": 206,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/standard/tests/unit/standard/operators/test_hitl.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from uuid import UUID, uuid4
import pytest
from tests_common.test_utils.version_compat import AIRFLOW_V_3_1_PLUS, AIRFLOW_V_3_2_PLUS
if not AIRFLOW_V_3_1_PLUS:
pytest.skip("Human in the loop is only compatible with Airflow >= 3.1.0", allow_module_level=True)
import datetime
from typing import TYPE_CHECKING, Any
from unittest.mock import MagicMock, patch
from urllib.parse import parse_qs, urlparse
import pytest
from sqlalchemy import select
from airflow.models import TaskInstance, Trigger
from airflow.models.hitl import HITLDetail
from airflow.providers.common.compat.sdk import AirflowException, DownstreamTasksSkipped, ParamValidationError
from airflow.providers.standard.exceptions import HITLRejectException, HITLTimeoutError, HITLTriggerEventError
from airflow.providers.standard.operators.empty import EmptyOperator
from airflow.providers.standard.operators.hitl import (
ApprovalOperator,
HITLBranchOperator,
HITLEntryOperator,
HITLOperator,
)
from airflow.sdk import Param, timezone
from airflow.sdk.definitions.param import ParamsDict
from airflow.sdk.execution_time.hitl import HITLUser
from tests_common.test_utils.config import conf_vars
from tests_common.test_utils.version_compat import AIRFLOW_V_3_1_3_PLUS
if TYPE_CHECKING:
from sqlalchemy.orm import Session
from airflow.sdk import Context
from airflow.sdk.types import Operator
from tests_common.pytest_plugin import DagMaker
pytestmark = pytest.mark.db_test
DEFAULT_DATE = timezone.datetime(2016, 1, 1)
INTERVAL = datetime.timedelta(hours=12)
@pytest.fixture
def hitl_task_and_ti_for_generating_link(dag_maker: DagMaker) -> tuple[HITLOperator, TaskInstance]:
with dag_maker("test_dag"):
task = HITLOperator(
task_id="hitl_test",
subject="This is subject",
options=["1", "2", "3", "4", "5"],
body="This is body",
defaults=["1"],
assigned_users=HITLUser(id="test", name="test"),
multiple=True,
params=ParamsDict({"input_1": 1, "input_2": 2, "input_3": 3}),
)
dr = dag_maker.create_dagrun()
return task, dag_maker.run_ti(task.task_id, dr)
@pytest.fixture
def get_context_from_model_ti(mock_supervisor_comms: Any) -> Any:
def _get_context(ti: TaskInstance, task: Operator) -> Context:
from airflow.api_fastapi.execution_api.datamodels.taskinstance import (
DagRun as DRDataModel,
TaskInstance as TIDataModel,
TIRunContext,
)
from airflow.sdk.execution_time.task_runner import RuntimeTaskInstance
# make mypy happy
assert ti is not None
dag_run = ti.dag_run
ti_model = TIDataModel.model_validate(ti, from_attributes=True)
runtime_ti = RuntimeTaskInstance.model_construct(
**ti_model.model_dump(exclude_unset=True),
task=task,
_ti_context_from_server=TIRunContext(
dag_run=DRDataModel.model_validate(dag_run, from_attributes=True),
max_tries=ti.max_tries,
variables=[],
connections=[],
xcom_keys_to_clear=[],
),
)
return runtime_ti.get_template_context()
return _get_context
class TestHITLOperator:
def test_validate_options(self) -> None:
hitl_op = HITLOperator(
task_id="hitl_test",
subject="This is subject",
options=["1", "2", "3", "4", "5"],
body="This is body",
defaults=["1"],
multiple=False,
params=ParamsDict({"input_1": 1}),
)
hitl_op.validate_options()
def test_validate_options_with_empty_options(self) -> None:
# validate_options is called during initialization
with pytest.raises(ValueError, match='"options" cannot be empty.'):
HITLOperator(
task_id="hitl_test",
subject="This is subject",
options=[],
body="This is body",
defaults=["1"],
multiple=False,
params=ParamsDict({"input_1": 1}),
)
@pytest.mark.parametrize(
("params", "exc", "error_msg"),
(
(ParamsDict({"_options": 1}), ValueError, '"_options" is not allowed in params'),
(
ParamsDict({"param": Param("", type="integer")}),
ParamValidationError,
(
"Invalid input for param param: '' is not of type 'integer'\n\n"
"Failed validating 'type' in schema:\n"
" {'type': 'integer'}\n\n"
"On instance:\n ''"
),
),
),
)
def test_validate_params(
self, params: ParamsDict, exc: type[ValueError | ParamValidationError], error_msg: str
) -> None:
# validate_params is called during initialization
with pytest.raises(exc, match=error_msg):
HITLOperator(
task_id="hitl_test",
subject="This is subject",
options=["1", "2"],
body="This is body",
defaults=["1"],
multiple=False,
params=params,
)
def test_validate_defaults(self) -> None:
hitl_op = HITLOperator(
task_id="hitl_test",
subject="This is subject",
options=["1", "2", "3", "4", "5"],
body="This is body",
defaults=["1"],
multiple=False,
params=ParamsDict({"input_1": 1}),
)
hitl_op.validate_defaults()
@pytest.mark.parametrize(
("extra_kwargs", "expected_error_msg"),
[
({"defaults": ["0"]}, r'defaults ".*" should be a subset of options ".*"'),
(
{"multiple": False, "defaults": ["1", "2"]},
'More than one defaults given when "multiple" is set to False.',
),
],
ids=[
"defaults not in option",
"multiple defaults when multiple is False",
],
)
def test_validate_defaults_with_invalid_defaults(
self,
extra_kwargs: dict[str, Any],
expected_error_msg: str,
) -> None:
# validate_default is called during initialization
with pytest.raises(ValueError, match=expected_error_msg):
HITLOperator(
task_id="hitl_test",
subject="This is subject",
body="This is body",
options=["1", "2", "3", "4", "5"],
params=ParamsDict({"input_1": 1}),
**extra_kwargs,
)
def test_execute(self, dag_maker: DagMaker, session: Session) -> None:
notifier = MagicMock()
with dag_maker("test_dag"):
task = HITLOperator(
task_id="hitl_test",
subject="This is subject",
options=["1", "2", "3", "4", "5"],
body="This is body",
defaults=["1"],
assigned_users=HITLUser(id="test", name="test"),
multiple=False,
params=ParamsDict({"input_1": 1}),
notifiers=[notifier],
)
dr = dag_maker.create_dagrun()
ti = dag_maker.run_ti(task.task_id, dr)
hitl_detail_model = session.scalar(select(HITLDetail).where(HITLDetail.ti_id == ti.id))
assert hitl_detail_model is not None
assert hitl_detail_model.ti_id == ti.id
assert hitl_detail_model.subject == "This is subject"
assert hitl_detail_model.options == ["1", "2", "3", "4", "5"]
assert hitl_detail_model.body == "This is body"
assert hitl_detail_model.defaults == ["1"]
assert hitl_detail_model.multiple is False
assert hitl_detail_model.assignees == [{"id": "test", "name": "test"}]
assert hitl_detail_model.responded_at is None
assert hitl_detail_model.responded_by is None
assert hitl_detail_model.chosen_options is None
assert hitl_detail_model.params_input == {}
expected_params: dict[str, Any]
if AIRFLOW_V_3_2_PLUS:
expected_params = {"input_1": {"value": 1, "description": None, "schema": {}, "source": "task"}}
elif AIRFLOW_V_3_1_3_PLUS:
expected_params = {"input_1": {"value": 1, "description": None, "schema": {}}}
else:
expected_params = {"input_1": 1}
assert hitl_detail_model.params == expected_params
assert notifier.called is True
expected_params_in_trigger_kwargs: dict[str, dict[str, Any]]
# trigger_kwargs are encoded via BaseSerialization in versions < 3.2
expected_ti_id: str | UUID = ti.id
if AIRFLOW_V_3_2_PLUS:
expected_params_in_trigger_kwargs = expected_params
# trigger_kwargs are encoded via serde from task sdk in versions >= 3.2
expected_ti_id = ti.id
else:
expected_params_in_trigger_kwargs = {"input_1": {"value": 1, "description": None, "schema": {}}}
registered_trigger = session.scalar(
select(Trigger).where(Trigger.classpath == "airflow.providers.standard.triggers.hitl.HITLTrigger")
)
assert registered_trigger is not None
assert registered_trigger.kwargs == {
"ti_id": expected_ti_id,
"options": ["1", "2", "3", "4", "5"],
"defaults": ["1"],
"params": expected_params_in_trigger_kwargs,
"multiple": False,
"timeout_datetime": None,
"poke_interval": 5.0,
}
@pytest.mark.skipif(not AIRFLOW_V_3_1_3_PLUS, reason="This only works in airflow-core >= 3.1.3")
@pytest.mark.parametrize(
("input_params", "expected_params"),
[
(
ParamsDict({"input": 1}),
{
"input": {
"description": None,
"schema": {},
"value": 1,
},
},
),
(
{"input": Param(5, type="integer", minimum=3, description="test")},
{
"input": {
"value": 5,
"schema": {
"minimum": 3,
"type": "integer",
},
"description": "test",
}
},
),
(
{"input": 1},
{
"input": {
"value": 1,
"schema": {},
"description": None,
}
},
),
(None, {}),
],
)
def test_serialzed_params(
self, input_params: ParamsDict | dict[str, Any] | None, expected_params: dict[str, Any]
) -> None:
hitl_op = HITLOperator(
task_id="hitl_test",
subject="This is subject",
body="This is body",
options=["1", "2", "3", "4", "5"],
params=input_params,
)
if AIRFLOW_V_3_2_PLUS:
for key in expected_params:
expected_params[key]["source"] = "task"
assert hitl_op.serialized_params == expected_params
@pytest.mark.skipif(
AIRFLOW_V_3_1_3_PLUS,
reason="Preserve the old behavior if airflow-core < 3.1.3. Otherwise the UI will break.",
)
def test_serialzed_params_legacy(self) -> None:
hitl_op = HITLOperator(
task_id="hitl_test",
subject="This is subject",
body="This is body",
options=["1", "2", "3", "4", "5"],
params={"input": Param(1)},
)
assert hitl_op.serialized_params == {"input": 1}
def test_execute_complete(self) -> None:
hitl_op = HITLOperator(
task_id="hitl_test",
subject="This is subject",
body="This is body",
options=["1", "2", "3", "4", "5"],
params={"input": 1},
)
responded_at_dt = timezone.utcnow()
ret = hitl_op.execute_complete(
context={},
event={
"chosen_options": ["1"],
"params_input": {"input": 2},
"responded_at": responded_at_dt,
"responded_by_user": {"id": "test", "name": "test"},
},
)
assert ret == {
"chosen_options": ["1"],
"params_input": {"input": 2},
"responded_at": responded_at_dt,
"responded_by_user": {"id": "test", "name": "test"},
}
@pytest.mark.parametrize(
("event", "expected_exception"),
[
({"error": "unknown", "error_type": "unknown"}, HITLTriggerEventError),
({"error": "this is timeotu", "error_type": "timeout"}, HITLTimeoutError),
],
)
def test_process_trigger_event_error(
self,
event: dict[str, Any],
expected_exception: type[Exception],
) -> None:
hitl_op = HITLOperator(
task_id="hitl_test",
subject="This is subject",
body="This is body",
options=["1", "2", "3", "4", "5"],
params={"input": 1},
)
with pytest.raises(expected_exception):
hitl_op.process_trigger_event_error(event)
def test_validate_chosen_options_with_invalid_content(self) -> None:
hitl_op = HITLOperator(
task_id="hitl_test",
subject="This is subject",
body="This is body",
options=["1", "2", "3", "4", "5"],
params={"input": 1},
)
with pytest.raises(ValueError, match="not exists"):
hitl_op.execute_complete(
context={},
event={
"chosen_options": ["not exists"],
"params_input": {"input": 2},
"responded_by_user": {"id": "test", "name": "test"},
},
)
@pytest.mark.parametrize(
("params", "params_input", "exc", "error_msg"),
(
(
ParamsDict({"input": 1}),
{"no such key": 2, "input": 333},
ValueError,
"params_input {'no such key': 2, 'input': 333} does not match params {'input': 1}",
),
(
ParamsDict({"input": Param(3, type="number", minimum=3)}),
{"input": 0},
ParamValidationError,
(
"Invalid input for param input: 0 is less than the minimum of 3\n\n"
"Failed validating 'minimum' in schema:\n.*"
),
),
),
)
def test_validate_params_input_with_invalid_input(
self,
params: ParamsDict,
params_input: dict[str, Any],
exc: type[ValueError | ParamValidationError],
error_msg: str,
) -> None:
hitl_op = HITLOperator(
task_id="hitl_test",
subject="This is subject",
body="This is body",
options=["1", "2", "3", "4", "5"],
params=params,
)
with pytest.raises(exc, match=error_msg):
hitl_op.execute_complete(
context={},
event={
"chosen_options": ["1"],
"params_input": params_input,
"responded_by_user": {"id": "test", "name": "test"},
},
)
@pytest.mark.parametrize(
("options", "params_input", "expected_parsed_query"),
[
(None, None, {"map_index": ["-1"]}),
("1", None, {"_options": ["['1']"], "map_index": ["-1"]}),
(["1", "2"], None, {"_options": ["['1', '2']"], "map_index": ["-1"]}),
(None, {"input_1": "123"}, {"input_1": ["123"], "map_index": ["-1"]}),
(
["3", "4", "5"],
{"input_1": "123123", "input_2": "345345"},
{
"_options": ["['3', '4', '5']"],
"input_1": ["123123"],
"input_2": ["345345"],
"map_index": ["-1"],
},
),
],
ids=[
"empty",
"single-option",
"multiple-options",
"single-param-input",
"multiple-options-and-param-inputs",
],
)
@pytest.mark.parametrize(
"conf_base_url",
[None, "http://localhost:8080/"],
ids=["no_conf_url", "with_conf_url"],
)
@pytest.mark.parametrize(
"base_url",
["http://test", "http://test_2:8080"],
ids=["url_1", "url_2"],
)
def test_generate_link_to_ui(
self,
base_url: str,
conf_base_url: str,
options: list[str] | None,
params_input: dict[str, Any] | None,
expected_parsed_query: dict[str, list[str]],
hitl_task_and_ti_for_generating_link: tuple[HITLOperator, TaskInstance],
) -> None:
with conf_vars({("api", "base_url"): conf_base_url}):
if conf_base_url:
base_url = conf_base_url
task, ti = hitl_task_and_ti_for_generating_link
url = task.generate_link_to_ui(
task_instance=ti,
base_url=base_url,
options=options,
params_input=params_input,
)
base_url_parsed_result = urlparse(base_url)
parse_result = urlparse(url)
assert parse_result.scheme == base_url_parsed_result.scheme
assert parse_result.netloc == base_url_parsed_result.netloc
assert parse_result.path == "/dags/test_dag/runs/test/tasks/hitl_test/required_actions"
assert parse_result.params == ""
assert parse_qs(parse_result.query) == expected_parsed_query
@pytest.mark.parametrize(
("options", "params_input", "expected_err_msg"),
[
([100, "2", 30000], None, "options {.*} are not valid options"),
(
None,
{"input_not_exist": 123, "no_such_key": 123},
"params {.*} are not valid params",
),
],
)
def test_generate_link_to_ui_with_invalid_input(
self,
options: list[Any] | None,
params_input: dict[str, Any] | None,
expected_err_msg: str,
hitl_task_and_ti_for_generating_link: tuple[HITLOperator, TaskInstance],
) -> None:
task, ti = hitl_task_and_ti_for_generating_link
with pytest.raises(ValueError, match=expected_err_msg):
task.generate_link_to_ui(task_instance=ti, options=options, params_input=params_input)
def test_generate_link_to_ui_without_base_url(
self,
hitl_task_and_ti_for_generating_link: tuple[HITLOperator, TaskInstance],
) -> None:
task, ti = hitl_task_and_ti_for_generating_link
with pytest.raises(ValueError, match="Not able to retrieve base_url"):
task.generate_link_to_ui(task_instance=ti)
class TestApprovalOperator:
def test_init_with_options(self) -> None:
with pytest.raises(ValueError, match="Passing options to ApprovalOperator is not allowed."):
ApprovalOperator(
task_id="hitl_test",
subject="This is subject",
body="This is body",
options=["1", "2", "3", "4", "5"],
params={"input": 1},
)
def test_init_with_multiple_set_to_true(self) -> None:
with pytest.raises(ValueError, match="Passing multiple to ApprovalOperator is not allowed."):
ApprovalOperator(
task_id="hitl_test",
subject="This is subject",
params={"input": 1},
multiple=True,
)
def test_execute_complete(self) -> None:
hitl_op = ApprovalOperator(
task_id="hitl_test",
subject="This is subject",
)
responded_at_dt = timezone.utcnow()
ret = hitl_op.execute_complete(
context={},
event={
"chosen_options": ["Approve"],
"params_input": {},
"responded_at": responded_at_dt,
"responded_by_user": {"id": "test", "name": "test"},
},
)
assert ret == {
"chosen_options": ["Approve"],
"params_input": {},
"responded_at": responded_at_dt,
"responded_by_user": {"id": "test", "name": "test"},
}
def test_execute_complete_with_downstream_tasks(
self, dag_maker: DagMaker, get_context_from_model_ti: Any
) -> None:
with dag_maker("hitl_test_dag", serialized=True):
hitl_op = ApprovalOperator(
task_id="hitl_test",
subject="This is subject",
)
hitl_op >> EmptyOperator(task_id="op1")
dr = dag_maker.create_dagrun()
ti = dr.get_task_instance("hitl_test")
with pytest.raises(DownstreamTasksSkipped) as exc_info:
hitl_op.execute_complete(
context=get_context_from_model_ti(ti, hitl_op),
event={
"chosen_options": ["Reject"],
"params_input": {},
"responded_at": timezone.utcnow(),
"responded_by_user": {"id": "test", "name": "test"},
},
)
assert set(exc_info.value.tasks) == {"op1"}
def test_execute_complete_with_fail_on_reject_set_to_true(
self, dag_maker: DagMaker, get_context_from_model_ti: Any
) -> None:
with dag_maker("hitl_test_dag", serialized=True):
hitl_op = ApprovalOperator(task_id="hitl_test", subject="This is subject", fail_on_reject=True)
hitl_op >> EmptyOperator(task_id="op1")
dr = dag_maker.create_dagrun()
ti = dr.get_task_instance("hitl_test")
with pytest.raises(HITLRejectException):
hitl_op.execute_complete(
context=get_context_from_model_ti(ti, hitl_op),
event={
"chosen_options": ["Reject"],
"params_input": {},
"responded_at": timezone.utcnow(),
"responded_by_user": {"id": "test", "name": "test"},
},
)
class TestHITLEntryOperator:
def test_init_without_options_and_default(self) -> None:
op = HITLEntryOperator(
task_id="hitl_test",
subject="This is subject",
body="This is body",
params={"input": 1},
)
assert op.options == ["OK"]
assert op.defaults == ["OK"]
def test_init_without_options(self) -> None:
op = HITLEntryOperator(
task_id="hitl_test",
subject="This is subject",
body="This is body",
params={"input": 1},
defaults=None,
)
assert op.options == ["OK"]
assert op.defaults is None
def test_init_without_default(self) -> None:
op = HITLEntryOperator(
task_id="hitl_test",
subject="This is subject",
body="This is body",
params={"input": 1},
options=["OK", "NOT OK"],
)
assert op.options == ["OK", "NOT OK"]
assert op.defaults is None
class TestHITLBranchOperator:
def test_execute_complete(self, dag_maker: DagMaker, get_context_from_model_ti: Any) -> None:
with dag_maker("hitl_test_dag", serialized=True):
branch_op = HITLBranchOperator(
task_id="make_choice",
subject="This is subject",
options=[f"branch_{i}" for i in range(1, 6)],
)
branch_op >> [EmptyOperator(task_id=f"branch_{i}") for i in range(1, 6)]
dr = dag_maker.create_dagrun()
ti = dr.get_task_instance("make_choice")
with pytest.raises(DownstreamTasksSkipped) as exc_info:
branch_op.execute_complete(
context=get_context_from_model_ti(ti, branch_op),
event={
"chosen_options": ["branch_1"],
"params_input": {},
"responded_at": timezone.utcnow(),
"responded_by_user": {"id": "test", "name": "test"},
},
)
assert set(exc_info.value.tasks) == set((f"branch_{i}", -1) for i in range(2, 6))
def test_execute_complete_with_multiple_branches(
self, dag_maker: DagMaker, get_context_from_model_ti: Any
) -> None:
with dag_maker("hitl_test_dag", serialized=True):
branch_op = HITLBranchOperator(
task_id="make_choice",
subject="This is subject",
multiple=True,
options=[f"branch_{i}" for i in range(1, 6)],
)
branch_op >> [EmptyOperator(task_id=f"branch_{i}") for i in range(1, 6)]
responded_at_dt = timezone.utcnow()
dr = dag_maker.create_dagrun()
ti = dr.get_task_instance("make_choice")
with pytest.raises(DownstreamTasksSkipped) as exc_info:
branch_op.execute_complete(
context=get_context_from_model_ti(ti, branch_op),
event={
"chosen_options": [f"branch_{i}" for i in range(1, 4)],
"params_input": {},
"responded_at": responded_at_dt,
"responded_by_user": {"id": "test", "name": "test"},
},
)
assert set(exc_info.value.tasks) == set((f"branch_{i}", -1) for i in range(4, 6))
def test_mapping_applies_for_single_choice(
self, dag_maker: DagMaker, get_context_from_model_ti: Any
) -> None:
# ["Approve"]; map -> "publish"
with dag_maker("hitl_map_dag", serialized=True):
op = HITLBranchOperator(
task_id="choose",
subject="S",
options=["Approve", "Reject"],
options_mapping={"Approve": "publish"},
)
op >> [EmptyOperator(task_id="publish"), EmptyOperator(task_id="archive")]
dr = dag_maker.create_dagrun()
ti = dr.get_task_instance("choose")
with pytest.raises(DownstreamTasksSkipped) as exc:
op.execute_complete(
context=get_context_from_model_ti(ti, op),
event={
"chosen_options": ["Approve"],
"params_input": {},
"responded_at": timezone.utcnow(),
"responded_by_user": {"id": "test", "name": "test"},
},
)
# checks to see that the "archive" task was skipped
assert set(exc.value.tasks) == {("archive", -1)}
def test_mapping_with_multiple_choices(self, dag_maker: DagMaker, get_context_from_model_ti: Any) -> None:
# multiple=True; mapping applied per option; no dedup implied
with dag_maker("hitl_map_dag", serialized=True):
op = HITLBranchOperator(
task_id="choose",
subject="S",
multiple=True,
options=["Approve", "KeepAsIs"],
options_mapping={"Approve": "publish", "KeepAsIs": "keep"},
)
op >> [
EmptyOperator(task_id="publish"),
EmptyOperator(task_id="keep"),
EmptyOperator(task_id="other"),
]
dr = dag_maker.create_dagrun()
ti = dr.get_task_instance("choose")
with pytest.raises(DownstreamTasksSkipped) as exc:
op.execute_complete(
context=get_context_from_model_ti(ti, op),
event={
"chosen_options": ["Approve", "KeepAsIs"],
"params_input": {},
"responded_at": timezone.utcnow(),
"responded_by_user": {"id": "test", "name": "test"},
},
)
# publish + keep chosen → only "other" skipped
assert set(exc.value.tasks) == {("other", -1)}
def test_fallback_to_option_when_not_mapped(
self, dag_maker: DagMaker, get_context_from_model_ti: Any
) -> None:
# No mapping: option must match downstream task_id
with dag_maker("hitl_map_dag", serialized=True):
op = HITLBranchOperator(
task_id="choose",
subject="S",
options=["branch_1", "branch_2"], # no mapping for branch_2
)
op >> [EmptyOperator(task_id="branch_1"), EmptyOperator(task_id="branch_2")]
dr = dag_maker.create_dagrun()
ti = dr.get_task_instance("choose")
with pytest.raises(DownstreamTasksSkipped) as exc:
op.execute_complete(
context=get_context_from_model_ti(ti, op),
event={
"chosen_options": ["branch_2"],
"params_input": {},
"responded_at": timezone.utcnow(),
"responded_by_user": {"id": "test", "name": "test"},
},
)
assert set(exc.value.tasks) == {("branch_1", -1)}
def test_error_if_mapped_branch_not_direct_downstream(
self, dag_maker: DagMaker, get_context_from_model_ti: Any
) -> None:
# Don't add the mapped task downstream → expect a clean error
with dag_maker("hitl_map_dag", serialized=True):
op = HITLBranchOperator(
task_id="choose",
subject="S",
options=["Approve"],
options_mapping={"Approve": "not_a_downstream"},
)
# Intentionally no downstream "not_a_downstream"
dr = dag_maker.create_dagrun()
ti = dr.get_task_instance("choose")
with pytest.raises(AirflowException, match="downstream|not found"):
op.execute_complete(
context=get_context_from_model_ti(ti, op),
event={
"chosen_options": ["Approve"],
"params_input": {},
"responded_at": timezone.utcnow(),
"responded_by_user": {"id": "test", "name": "test"},
},
)
@pytest.mark.parametrize("bad", [123, ["publish"], {"x": "y"}, b"publish"])
def test_options_mapping_non_string_value_raises(self, bad: Any) -> None:
with pytest.raises(ValueError, match=r"values must be strings \(task_ids\)"):
HITLBranchOperator(
task_id="choose",
subject="S",
options=["Approve"],
options_mapping={"Approve": bad},
)
def test_options_mapping_key_not_in_options_raises(self) -> None:
with pytest.raises(ValueError, match="contains keys that are not in `options`"):
HITLBranchOperator(
task_id="choose",
subject="S",
options=["Approve", "Reject"],
options_mapping={"NotAnOption": "publish"},
)
class TestHITLSummaryForListeners:
"""Verify hitl_summary dict at all lifecycle stages: __init__, execute, execute_complete."""
def test_hitl_operator_init_all_fields(self) -> None:
"""hitl_summary is exactly the expected dict after __init__ with all fields set."""
op = HITLOperator(
task_id="test",
subject="Please review",
body="Details here",
options=["Yes", "No"],
defaults=["Yes"],
multiple=False,
assigned_users=HITLUser(id="u1", name="Alice"),
params={"env": Param("prod", type="string", description="Target env")},
)
assert op.hitl_summary == {
"subject": "Please review",
"body": "Details here",
"options": ["Yes", "No"],
"defaults": ["Yes"],
"multiple": False,
"assigned_users": [{"id": "u1", "name": "Alice"}],
"serialized_params": op.serialized_params,
}
def test_hitl_operator_init_minimal(self) -> None:
"""hitl_summary with only required fields; optional ones default to None."""
op = HITLOperator(
task_id="test",
subject="Review",
options=["A", "B"],
)
assert op.hitl_summary == {
"subject": "Review",
"body": None,
"options": ["A", "B"],
"defaults": None,
"multiple": False,
"assigned_users": None,
"serialized_params": None,
}
def test_approval_operator_init_summary(self) -> None:
"""ApprovalOperator hitl_summary includes base + approval-specific fields."""
op = ApprovalOperator(
task_id="test",
subject="Deploy?",
ignore_downstream_trigger_rules=True,
fail_on_reject=True,
)
assert op.hitl_summary == {
"subject": "Deploy?",
"body": None,
"options": ["Approve", "Reject"],
"defaults": None,
"multiple": False,
"assigned_users": None,
"serialized_params": None,
"ignore_downstream_trigger_rules": True,
"fail_on_reject": True,
}
def test_approval_operator_init_defaults(self) -> None:
"""ApprovalOperator with default settings."""
op = ApprovalOperator(task_id="test", subject="Deploy?")
assert op.hitl_summary == {
"subject": "Deploy?",
"body": None,
"options": ["Approve", "Reject"],
"defaults": None,
"multiple": False,
"assigned_users": None,
"serialized_params": None,
"ignore_downstream_trigger_rules": False,
"fail_on_reject": False,
}
def test_hitl_branch_operator_init_with_mapping(self) -> None:
"""HITLBranchOperator hitl_summary includes base + options_mapping."""
op = HITLBranchOperator(
task_id="test",
subject="Choose",
options=["A", "B"],
options_mapping={"A": "task_a", "B": "task_b"},
)
assert op.hitl_summary == {
"subject": "Choose",
"body": None,
"options": ["A", "B"],
"defaults": None,
"multiple": False,
"assigned_users": None,
"serialized_params": None,
"options_mapping": {"A": "task_a", "B": "task_b"},
}
def test_hitl_branch_operator_init_without_mapping(self) -> None:
"""HITLBranchOperator stores empty dict for options_mapping when not provided."""
op = HITLBranchOperator(
task_id="test",
subject="Choose",
options=["A", "B"],
)
assert op.hitl_summary == {
"subject": "Choose",
"body": None,
"options": ["A", "B"],
"defaults": None,
"multiple": False,
"assigned_users": None,
"serialized_params": None,
"options_mapping": {},
}
def test_hitl_entry_operator_init_summary(self) -> None:
"""HITLEntryOperator hitl_summary includes base fields with OK defaults."""
op = HITLEntryOperator(
task_id="test",
subject="Enter data",
params={"name": Param("", type="string")},
)
assert op.hitl_summary == {
"subject": "Enter data",
"body": None,
"options": ["OK"],
"defaults": ["OK"],
"multiple": False,
"assigned_users": None,
"serialized_params": op.serialized_params,
}
def test_hitl_entry_operator_init_custom_options(self) -> None:
"""HITLEntryOperator with explicit options and no defaults."""
op = HITLEntryOperator(
task_id="test",
subject="Confirm",
options=["OK", "Cancel"],
)
assert op.hitl_summary == {
"subject": "Confirm",
"body": None,
"options": ["OK", "Cancel"],
"defaults": None,
"multiple": False,
"assigned_users": None,
"serialized_params": None,
}
def test_execute_enriches_summary_with_timeout(self) -> None:
"""execute() adds timeout_datetime; all other init keys remain."""
op = HITLOperator(
task_id="test",
subject="Review",
options=["OK"],
execution_timeout=datetime.timedelta(minutes=10),
)
with (
patch("airflow.providers.standard.operators.hitl.upsert_hitl_detail"),
patch.object(op, "defer"),
):
op.execute({"task_instance": MagicMock(id=uuid4())}) # type: ignore[arg-type]
s = op.hitl_summary
# Validate the timeout value is a parseable ISO string
timeout_dt = datetime.datetime.fromisoformat(s["timeout_datetime"])
assert timeout_dt is not None
assert s == {
"subject": "Review",
"body": None,
"options": ["OK"],
"defaults": None,
"multiple": False,
"assigned_users": None,
"serialized_params": None,
"timeout_datetime": s["timeout_datetime"],
}
def test_execute_without_timeout(self) -> None:
"""execute() sets timeout_datetime to None when no execution_timeout."""
op = HITLOperator(
task_id="test",
subject="Review",
options=["OK"],
)
with (
patch("airflow.providers.standard.operators.hitl.upsert_hitl_detail"),
patch.object(op, "defer"),
):
op.execute({"task_instance": MagicMock(id=uuid4())}) # type: ignore[arg-type]
assert op.hitl_summary == {
"subject": "Review",
"body": None,
"options": ["OK"],
"defaults": None,
"multiple": False,
"assigned_users": None,
"serialized_params": None,
"timeout_datetime": None,
}
def test_hitl_operator_execute_complete_enriches_summary(self) -> None:
"""execute_complete() adds response fields directly into hitl_summary."""
op = HITLOperator(
task_id="test",
subject="Review",
options=["1", "2"],
params={"input": 1},
)
responded_at = timezone.utcnow()
op.execute_complete(
context={},
event={
"chosen_options": ["1"],
"params_input": {"input": 1},
"responded_at": responded_at,
"responded_by_user": {"id": "u1", "name": "Alice"},
},
)
assert op.hitl_summary == {
"subject": "Review",
"body": None,
"options": ["1", "2"],
"defaults": None,
"multiple": False,
"assigned_users": None,
"serialized_params": op.serialized_params,
"chosen_options": ["1"],
"params_input": {"input": 1},
"responded_at": responded_at.isoformat(),
"responded_by_user": {"id": "u1", "name": "Alice"},
}
def test_hitl_operator_execute_complete_error_stores_error_type(self) -> None:
"""execute_complete() stores error_type in hitl_summary on error events."""
op = HITLOperator(
task_id="test",
subject="Review",
options=["OK"],
)
with pytest.raises(HITLTimeoutError):
op.execute_complete(
context={},
event={"error": "timed out", "error_type": "timeout"},
)
assert op.hitl_summary == {
"subject": "Review",
"body": None,
"options": ["OK"],
"defaults": None,
"multiple": False,
"assigned_users": None,
"serialized_params": None,
"error_type": "timeout",
}
def test_approval_operator_execute_complete_approved(self) -> None:
"""Approving sets approved=True in hitl_summary."""
op = ApprovalOperator(task_id="test", subject="Deploy?")
responded_at = timezone.utcnow()
op.execute_complete(
context={},
event={
"chosen_options": ["Approve"],
"params_input": {},
"responded_at": responded_at,
"responded_by_user": {"id": "u1", "name": "Alice"},
},
)
assert op.hitl_summary == {
"subject": "Deploy?",
"body": None,
"options": ["Approve", "Reject"],
"defaults": None,
"multiple": False,
"assigned_users": None,
"serialized_params": None,
"ignore_downstream_trigger_rules": False,
"fail_on_reject": False,
"chosen_options": ["Approve"],
"params_input": {},
"responded_at": responded_at.isoformat(),
"responded_by_user": {"id": "u1", "name": "Alice"},
"approved": True,
}
def test_approval_operator_execute_complete_rejected(self) -> None:
"""Rejecting sets approved=False in hitl_summary."""
op = ApprovalOperator(task_id="test", subject="Deploy?")
responded_at = timezone.utcnow()
op.execute_complete(
context={},
event={
"chosen_options": ["Reject"],
"params_input": {},
"responded_at": responded_at,
"responded_by_user": {"id": "u1", "name": "Alice"},
},
)
assert op.hitl_summary == {
"subject": "Deploy?",
"body": None,
"options": ["Approve", "Reject"],
"defaults": None,
"multiple": False,
"assigned_users": None,
"serialized_params": None,
"ignore_downstream_trigger_rules": False,
"fail_on_reject": False,
"chosen_options": ["Reject"],
"params_input": {},
"responded_at": responded_at.isoformat(),
"responded_by_user": {"id": "u1", "name": "Alice"},
"approved": False,
}
def test_hitl_branch_operator_execute_complete_records_branches(
self, dag_maker: DagMaker, get_context_from_model_ti: Any
) -> None:
"""HITLBranchOperator stores branches_to_execute in hitl_summary."""
with dag_maker("hitl_summary_dag", serialized=True):
op = HITLBranchOperator(
task_id="choose",
subject="Choose",
options=["A", "B"],
options_mapping={"A": "task_a", "B": "task_b"},
)
op >> [EmptyOperator(task_id="task_a"), EmptyOperator(task_id="task_b")]
dr = dag_maker.create_dagrun()
ti = dr.get_task_instance("choose")
responded_at = timezone.utcnow()
with pytest.raises(DownstreamTasksSkipped):
op.execute_complete(
context=get_context_from_model_ti(ti, op),
event={
"chosen_options": ["A"],
"params_input": {},
"responded_at": responded_at,
"responded_by_user": {"id": "u1", "name": "Alice"},
},
)
assert op.hitl_summary == {
"subject": "Choose",
"body": None,
"options": ["A", "B"],
"defaults": None,
"multiple": False,
"assigned_users": None,
"serialized_params": None,
"options_mapping": {"A": "task_a", "B": "task_b"},
"chosen_options": ["A"],
"params_input": {},
"responded_at": responded_at.isoformat(),
"responded_by_user": {"id": "u1", "name": "Alice"},
"branches_to_execute": ["task_a"],
}
def test_full_lifecycle_approval(self) -> None:
"""Verify exact hitl_summary at each stage: __init__ -> execute -> execute_complete."""
op = ApprovalOperator(
task_id="test",
subject="Release v2.0?",
body="Please approve the production deployment.",
execution_timeout=datetime.timedelta(minutes=30),
)
# -- After __init__: only base + approval keys --
assert op.hitl_summary == {
"subject": "Release v2.0?",
"body": "Please approve the production deployment.",
"options": ["Approve", "Reject"],
"defaults": None,
"multiple": False,
"assigned_users": None,
"serialized_params": None,
"ignore_downstream_trigger_rules": False,
"fail_on_reject": False,
}
# -- After execute (mocked defer): timeout_datetime added --
with (
patch("airflow.providers.standard.operators.hitl.upsert_hitl_detail"),
patch.object(op, "defer"),
):
op.execute({"task_instance": MagicMock(id=uuid4())}) # type: ignore[arg-type]
s = op.hitl_summary
timeout_dt_str = s["timeout_datetime"]
assert timeout_dt_str is not None
datetime.datetime.fromisoformat(timeout_dt_str)
assert s == {
"subject": "Release v2.0?",
"body": "Please approve the production deployment.",
"options": ["Approve", "Reject"],
"defaults": None,
"multiple": False,
"assigned_users": None,
"serialized_params": None,
"ignore_downstream_trigger_rules": False,
"fail_on_reject": False,
"timeout_datetime": timeout_dt_str,
}
# -- After execute_complete: response + approved fields added --
responded_at = timezone.utcnow()
op.execute_complete(
context={},
event={
"chosen_options": ["Approve"],
"params_input": {},
"responded_at": responded_at,
"responded_by_user": {"id": "admin", "name": "Admin"},
},
)
assert s == {
"subject": "Release v2.0?",
"body": "Please approve the production deployment.",
"options": ["Approve", "Reject"],
"defaults": None,
"multiple": False,
"assigned_users": None,
"serialized_params": None,
"ignore_downstream_trigger_rules": False,
"fail_on_reject": False,
"timeout_datetime": timeout_dt_str,
"chosen_options": ["Approve"],
"params_input": {},
"responded_at": responded_at.isoformat(),
"responded_by_user": {"id": "admin", "name": "Admin"},
"approved": True,
}
| {
"repo_id": "apache/airflow",
"file_path": "providers/standard/tests/unit/standard/operators/test_hitl.py",
"license": "Apache License 2.0",
"lines": 1194,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/standard/tests/unit/standard/triggers/test_hitl.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any
import pytest
from tests_common.test_utils.version_compat import AIRFLOW_V_3_1_PLUS, AIRFLOW_V_3_2_PLUS
if not AIRFLOW_V_3_1_PLUS:
pytest.skip("Human in the loop public API compatible with Airflow >= 3.1.0", allow_module_level=True)
import asyncio
from datetime import datetime, timedelta
from unittest import mock
from uuid6 import uuid7
from airflow._shared.timezones.timezone import utc, utcnow
from airflow.api_fastapi.execution_api.datamodels.hitl import HITLDetailResponse, HITLUser
from airflow.providers.standard.triggers.hitl import (
HITLTrigger,
HITLTriggerEventFailurePayload,
HITLTriggerEventSuccessPayload,
)
from airflow.triggers.base import TriggerEvent
TI_ID = uuid7()
@pytest.fixture
def default_trigger_args() -> dict[str, Any]:
return {
"ti_id": TI_ID,
"options": ["1", "2", "3", "4", "5"],
"params": {
"input": {
"value": 1,
"schema": {},
"description": None,
"source": "task",
},
},
"multiple": False,
}
class TestHITLTrigger:
def test_serialization(self, default_trigger_args):
trigger = HITLTrigger(
defaults=["1"],
timeout_datetime=None,
poke_interval=50.0,
**default_trigger_args,
)
classpath, kwargs = trigger.serialize()
expected_params_in_trigger_kwargs: dict[str, dict[str, Any]]
if AIRFLOW_V_3_2_PLUS:
expected_params_in_trigger_kwargs = {
"input": {"value": 1, "description": None, "schema": {}, "source": "task"}
}
else:
expected_params_in_trigger_kwargs = {"input": {"value": 1, "description": None, "schema": {}}}
assert classpath == "airflow.providers.standard.triggers.hitl.HITLTrigger"
assert kwargs == {
"ti_id": TI_ID,
"options": ["1", "2", "3", "4", "5"],
"params": expected_params_in_trigger_kwargs,
"defaults": ["1"],
"multiple": False,
"timeout_datetime": None,
"poke_interval": 50.0,
}
@pytest.mark.db_test
@pytest.mark.asyncio
@mock.patch("airflow.sdk.execution_time.hitl.update_hitl_detail_response")
async def test_run_failed_due_to_timeout(self, mock_update, mock_supervisor_comms, default_trigger_args):
trigger = HITLTrigger(
timeout_datetime=utcnow() + timedelta(seconds=0.1),
poke_interval=5,
**default_trigger_args,
)
mock_supervisor_comms.send.return_value = HITLDetailResponse(
response_received=False,
responded_by_user=None,
responded_at=None,
chosen_options=None,
params_input={},
)
gen = trigger.run()
await asyncio.sleep(0.3)
trigger_task = asyncio.create_task(gen.__anext__())
event = await trigger_task
assert event == TriggerEvent(
HITLTriggerEventFailurePayload(
error="The timeout has passed, and the response has not yet been received.",
error_type="timeout",
)
)
@pytest.mark.db_test
@pytest.mark.asyncio
@mock.patch.object(HITLTrigger, "log")
@mock.patch("airflow.sdk.execution_time.hitl.update_hitl_detail_response")
async def test_run_fallback_to_default_due_to_timeout(
self, mock_update, mock_log, mock_supervisor_comms, default_trigger_args
):
trigger = HITLTrigger(
defaults=["1"],
timeout_datetime=utcnow() + timedelta(seconds=0.1),
poke_interval=5,
**default_trigger_args,
)
mock_supervisor_comms.send.return_value = HITLDetailResponse(
response_received=False,
responded_by_user=None,
responded_at=None,
chosen_options=None,
params_input={},
)
gen = trigger.run()
await asyncio.sleep(0.3)
trigger_task = asyncio.create_task(gen.__anext__())
event = await trigger_task
assert event == TriggerEvent(
HITLTriggerEventSuccessPayload(
chosen_options=["1"],
params_input={"input": 1},
responded_by_user=None,
responded_at=mock.ANY,
timedout=True,
)
)
assert mock_log.info.call_args == mock.call(
"[HITL] timeout reached before receiving response, fallback to default %s", ["1"]
)
@pytest.mark.db_test
@pytest.mark.asyncio
@mock.patch.object(HITLTrigger, "log")
@mock.patch("airflow.sdk.execution_time.hitl.update_hitl_detail_response")
async def test_run_should_check_response_in_timeout_handler(
self, mock_update, mock_log, mock_supervisor_comms, default_trigger_args
):
# action time only slightly before timeout
action_datetime = utcnow() + timedelta(seconds=0.1)
timeout_datetime = utcnow() + timedelta(seconds=0.1)
trigger = HITLTrigger(
defaults=["1"],
timeout_datetime=timeout_datetime,
poke_interval=5,
**default_trigger_args,
)
mock_supervisor_comms.send.return_value = HITLDetailResponse(
response_received=True,
responded_by_user=HITLUser(id="1", name="test"),
responded_at=action_datetime,
chosen_options=["2"],
params_input={},
)
gen = trigger.run()
await asyncio.sleep(0.3)
trigger_task = asyncio.create_task(gen.__anext__())
event = await trigger_task
assert event == TriggerEvent(
HITLTriggerEventSuccessPayload(
chosen_options=["2"],
params_input={},
responded_at=mock.ANY,
responded_by_user={"id": "1", "name": "test"},
timedout=False,
)
)
assert mock_log.info.call_args == mock.call(
"[HITL] responded_by=%s (id=%s) options=%s at %s (timeout fallback skipped)",
"test",
"1",
["2"],
action_datetime,
)
@pytest.mark.db_test
@pytest.mark.asyncio
@mock.patch.object(HITLTrigger, "log")
@mock.patch("airflow.sdk.execution_time.hitl.update_hitl_detail_response")
async def test_run(
self, mock_update, mock_log, mock_supervisor_comms, time_machine, default_trigger_args
):
time_machine.move_to(datetime(2025, 7, 29, 2, 0, 0))
trigger = HITLTrigger(
defaults=["1"],
timeout_datetime=None,
poke_interval=5,
**default_trigger_args,
)
mock_supervisor_comms.send.return_value = HITLDetailResponse(
response_received=True,
responded_by_user=HITLUser(id="test", name="test"),
responded_at=utcnow(),
chosen_options=["3"],
params_input={"input": 50},
)
gen = trigger.run()
await asyncio.sleep(0.3)
trigger_task = asyncio.create_task(gen.__anext__())
event = await trigger_task
assert event == TriggerEvent(
HITLTriggerEventSuccessPayload(
chosen_options=["3"],
params_input={"input": 50},
responded_at=mock.ANY,
responded_by_user={"id": "test", "name": "test"},
timedout=False,
)
)
assert mock_log.info.call_args == mock.call(
"[HITL] responded_by=%s (id=%s) options=%s at %s",
"test",
"test",
["3"],
datetime(2025, 7, 29, 2, 0, 0, tzinfo=utc),
)
| {
"repo_id": "apache/airflow",
"file_path": "providers/standard/tests/unit/standard/triggers/test_hitl.py",
"license": "Apache License 2.0",
"lines": 222,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:task-sdk/src/airflow/sdk/execution_time/hitl.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Any, TypedDict
from uuid import UUID
from airflow.sdk.api.datamodels._generated import HITLUser as APIHITLUser
from airflow.sdk.execution_time.comms import (
CreateHITLDetailPayload,
GetHITLDetailResponse,
UpdateHITLDetail,
)
if TYPE_CHECKING:
from airflow.sdk.api.datamodels._generated import HITLDetailResponse
class HITLUser(TypedDict):
id: str
name: str
def upsert_hitl_detail(
ti_id: UUID,
options: list[str],
subject: str,
body: str | None = None,
defaults: list[str] | None = None,
multiple: bool = False,
params: dict[str, Any] | None = None,
assigned_users: list[HITLUser] | None = None,
) -> None:
from airflow.sdk.execution_time.task_runner import SUPERVISOR_COMMS
SUPERVISOR_COMMS.send(
msg=CreateHITLDetailPayload(
ti_id=ti_id,
options=options,
subject=subject,
body=body,
defaults=defaults,
params=params,
multiple=multiple,
assigned_users=(
[APIHITLUser(id=user["id"], name=user["name"]) for user in assigned_users]
if assigned_users
else []
),
)
)
def update_hitl_detail_response(
ti_id: UUID,
chosen_options: list[str],
params_input: dict[str, Any],
) -> HITLDetailResponse:
from airflow.sdk.execution_time.task_runner import SUPERVISOR_COMMS
response = SUPERVISOR_COMMS.send(
msg=UpdateHITLDetail(
ti_id=ti_id,
chosen_options=chosen_options,
params_input=params_input,
),
)
if TYPE_CHECKING:
assert isinstance(response, HITLDetailResponse)
return response
def get_hitl_detail_content_detail(ti_id: UUID) -> HITLDetailResponse:
from airflow.sdk.execution_time.task_runner import SUPERVISOR_COMMS
response = SUPERVISOR_COMMS.send(msg=GetHITLDetailResponse(ti_id=ti_id))
if TYPE_CHECKING:
assert isinstance(response, HITLDetailResponse)
return response
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/execution_time/hitl.py",
"license": "Apache License 2.0",
"lines": 79,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:task-sdk/tests/task_sdk/execution_time/test_hitl.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from uuid6 import uuid7
from airflow.sdk import timezone
from airflow.sdk.api.datamodels._generated import HITLDetailResponse, HITLUser as APIHITLUser
from airflow.sdk.execution_time.comms import CreateHITLDetailPayload
from airflow.sdk.execution_time.hitl import (
HITLUser,
get_hitl_detail_content_detail,
update_hitl_detail_response,
upsert_hitl_detail,
)
TI_ID = uuid7()
def test_upsert_hitl_detail(mock_supervisor_comms) -> None:
upsert_hitl_detail(
ti_id=TI_ID,
options=["Approve", "Reject"],
subject="Subject",
body="Optional body",
defaults=["Approve", "Reject"],
params={"input_1": {"value": 1, "description": None, "schema": {}}},
assigned_users=[HITLUser(id="test", name="test")],
multiple=False,
)
mock_supervisor_comms.send.assert_called_with(
msg=CreateHITLDetailPayload(
ti_id=TI_ID,
options=["Approve", "Reject"],
subject="Subject",
body="Optional body",
defaults=["Approve", "Reject"],
params={"input_1": {"value": 1, "description": None, "schema": {}}},
assigned_users=[APIHITLUser(id="test", name="test")],
multiple=False,
)
)
def test_update_hitl_detail_response(mock_supervisor_comms) -> None:
timestamp = timezone.utcnow()
mock_supervisor_comms.send.return_value = HITLDetailResponse(
response_received=True,
chosen_options=["Approve"],
responded_at=timestamp,
responded_by_user=APIHITLUser(id="admin", name="admin"),
params_input={"input_1": 1},
)
resp = update_hitl_detail_response(
ti_id=TI_ID,
chosen_options=["Approve"],
params_input={"input_1": 1},
)
assert resp == HITLDetailResponse(
response_received=True,
chosen_options=["Approve"],
responded_at=timestamp,
responded_by_user=APIHITLUser(id="admin", name="admin"),
params_input={"input_1": 1},
)
def test_get_hitl_detail_content_detail(mock_supervisor_comms) -> None:
mock_supervisor_comms.send.return_value = HITLDetailResponse(
response_received=False,
chosen_options=None,
responded_at=None,
responded_by_user=None,
params_input={},
)
resp = get_hitl_detail_content_detail(TI_ID)
assert resp == HITLDetailResponse(
response_received=False,
chosen_options=None,
responded_at=None,
responded_by_user=None,
params_input={},
)
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/tests/task_sdk/execution_time/test_hitl.py",
"license": "Apache License 2.0",
"lines": 89,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/utils/log/log_stream_accumulator.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
import tempfile
from itertools import islice
from typing import IO, TYPE_CHECKING
if TYPE_CHECKING:
from airflow.typing_compat import Self
from airflow.utils.log.file_task_handler import (
LogHandlerOutputStream,
StructuredLogMessage,
StructuredLogStream,
)
class LogStreamAccumulator:
"""
Memory-efficient log stream accumulator that tracks the total number of lines while preserving the original stream.
This class captures logs from a stream and stores them in a buffer, flushing them to disk when the buffer
exceeds a specified threshold. This approach optimizes memory usage while handling large log streams.
Usage:
.. code-block:: python
with LogStreamAccumulator(stream, threshold) as log_accumulator:
# Get total number of lines captured
total_lines = log_accumulator.get_total_lines()
# Retrieve the original stream of logs
for log in log_accumulator.get_stream():
print(log)
"""
def __init__(
self,
stream: LogHandlerOutputStream,
threshold: int,
) -> None:
"""
Initialize the LogStreamAccumulator.
Args:
stream: The input log stream to capture and count.
threshold: Maximum number of lines to keep in memory before flushing to disk.
"""
self._stream = stream
self._threshold = threshold
self._buffer: list[StructuredLogMessage] = []
self._disk_lines: int = 0
self._tmpfile: IO[str] | None = None
def _flush_buffer_to_disk(self) -> None:
"""Flush the buffer contents to a temporary file on disk."""
if self._tmpfile is None:
self._tmpfile = tempfile.NamedTemporaryFile(delete=False, mode="w+", encoding="utf-8")
self._disk_lines += len(self._buffer)
self._tmpfile.writelines(f"{log.model_dump_json()}\n" for log in self._buffer)
self._tmpfile.flush()
self._buffer.clear()
def _capture(self) -> None:
"""Capture logs from the stream into the buffer, flushing to disk when threshold is reached."""
while True:
# `islice` will try to get up to `self._threshold` lines from the stream.
self._buffer.extend(islice(self._stream, self._threshold))
# If there are no more lines to capture, exit the loop.
if len(self._buffer) < self._threshold:
break
self._flush_buffer_to_disk()
def _cleanup(self) -> None:
"""Clean up the temporary file if it exists."""
self._buffer.clear()
if self._tmpfile:
self._tmpfile.close()
os.remove(self._tmpfile.name)
self._tmpfile = None
@property
def total_lines(self) -> int:
"""
Return the total number of lines captured from the stream.
Returns:
The sum of lines stored in the buffer and lines written to disk.
"""
return self._disk_lines + len(self._buffer)
@property
def stream(self) -> StructuredLogStream:
"""
Return the original stream of logs and clean up resources.
Important: This method automatically cleans up resources after all logs have been yielded.
Make sure to fully consume the returned generator to ensure proper cleanup.
Returns:
A stream of the captured log messages.
"""
try:
if not self._tmpfile:
# if no temporary file was created, return from the buffer
yield from self._buffer
else:
# avoid circular import
from airflow.utils.log.file_task_handler import StructuredLogMessage
with open(self._tmpfile.name, encoding="utf-8") as f:
yield from (StructuredLogMessage.model_validate_json(line.strip()) for line in f)
# yield the remaining buffer
yield from self._buffer
finally:
# Ensure cleanup after yielding
self._cleanup()
def __enter__(self) -> Self:
"""
Context manager entry point that initiates log capture.
Returns:
Self instance for use in context manager.
"""
self._capture()
return self
def __exit__(self, exc_type, exc_val, exc_tb) -> None:
"""
Context manager exit that doesn't perform resource cleanup.
Note: Resources are not cleaned up here. Cleanup is deferred until
get_stream() is called and fully consumed, ensuring all logs are properly
yielded before cleanup occurs.
"""
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/utils/log/log_stream_accumulator.py",
"license": "Apache License 2.0",
"lines": 129,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/tests/unit/utils/log/test_stream_accumulator.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from typing import TYPE_CHECKING
from unittest import mock
import pendulum
import pytest
from airflow.utils.log.file_task_handler import StructuredLogMessage
from airflow.utils.log.log_stream_accumulator import LogStreamAccumulator
if TYPE_CHECKING:
from airflow.utils.log.file_task_handler import LogHandlerOutputStream
LOG_START_DATETIME = pendulum.datetime(2023, 10, 1, 0, 0, 0)
LOG_COUNT = 20
class TestLogStreamAccumulator:
"""Test cases for the LogStreamAccumulator class."""
@pytest.fixture
def structured_logs(self):
"""Create a stream of mock structured log messages."""
def generate_logs():
yield from (
StructuredLogMessage(
event=f"test_event_{i + 1}",
timestamp=LOG_START_DATETIME.add(seconds=i),
level="INFO",
message=f"Test log message {i + 1}",
)
for i in range(LOG_COUNT)
)
return generate_logs()
def validate_log_stream(self, log_stream: LogHandlerOutputStream):
"""Validate the log stream by checking the number of lines."""
count = 0
for i, log in enumerate(log_stream):
assert log.event == f"test_event_{i + 1}"
assert log.timestamp == LOG_START_DATETIME.add(seconds=i)
count += 1
assert count == 20
def test__capture(self, structured_logs):
"""Test that temporary file is properly cleaned up during get_stream, not when exiting context."""
accumulator = LogStreamAccumulator(structured_logs, 5)
with (
mock.patch.object(accumulator, "_capture") as mock_setup,
):
with accumulator:
mock_setup.assert_called_once()
def test__flush_buffer_to_disk(self, structured_logs):
"""Test flush-to-disk behavior with a small threshold."""
threshold = 6
# Mock the temporary file to verify it's being written to
with (
mock.patch("tempfile.NamedTemporaryFile") as mock_tmpfile,
):
mock_file = mock.MagicMock()
mock_tmpfile.return_value = mock_file
with LogStreamAccumulator(structured_logs, threshold) as accumulator:
mock_tmpfile.assert_called_once_with(
delete=False,
mode="w+",
encoding="utf-8",
)
# Verify _flush_buffer_to_disk was called multiple times
# (20 logs / 6 threshold = 3 flushes + 2 remaining logs in buffer)
assert accumulator._disk_lines == 18
assert mock_file.writelines.call_count == 3
assert len(accumulator._buffer) == 2
@pytest.mark.parametrize(
"threshold",
[
pytest.param(30, id="buffer_only"),
pytest.param(5, id="flush_to_disk"),
],
)
def test_get_stream(self, structured_logs, threshold):
"""Test that stream property returns all logs regardless of whether they were flushed to disk."""
tmpfile_name = None
with LogStreamAccumulator(structured_logs, threshold) as accumulator:
out_stream = accumulator.stream
# Check if the temporary file was created
if threshold < LOG_COUNT:
tmpfile_name = accumulator._tmpfile.name
assert os.path.exists(tmpfile_name)
else:
assert accumulator._tmpfile is None
# Validate the log stream
self.validate_log_stream(out_stream)
# Verify temp file was created and cleaned up
if threshold < LOG_COUNT:
assert accumulator._tmpfile is None
assert not os.path.exists(tmpfile_name) if tmpfile_name else True
@pytest.mark.parametrize(
("threshold", "expected_buffer_size", "expected_disk_lines"),
[
pytest.param(30, 20, 0, id="no_flush_needed"),
pytest.param(10, 0, 20, id="single_flush_needed"),
pytest.param(3, 2, 18, id="multiple_flushes_needed"),
],
)
def test_total_lines(self, structured_logs, threshold, expected_buffer_size, expected_disk_lines):
"""Test that LogStreamAccumulator correctly counts lines across buffer and disk."""
with LogStreamAccumulator(structured_logs, threshold) as accumulator:
# Check buffer and disk line counts
assert len(accumulator._buffer) == expected_buffer_size
assert accumulator._disk_lines == expected_disk_lines
# Validate the log stream and line counts
self.validate_log_stream(accumulator.stream)
def test__cleanup(self, structured_logs):
"""Test that cleanup happens when stream property is fully consumed, not on context exit."""
accumulator = LogStreamAccumulator(structured_logs, 5)
with mock.patch.object(accumulator, "_cleanup") as mock_cleanup:
with accumulator:
# _cleanup should not be called yet
mock_cleanup.assert_not_called()
# Get the stream but don't iterate through it yet
stream = accumulator.stream
mock_cleanup.assert_not_called()
# Now iterate through the stream
for _ in stream:
pass
# After fully consuming the stream, cleanup should be called
mock_cleanup.assert_called_once()
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/utils/log/test_stream_accumulator.py",
"license": "Apache License 2.0",
"lines": 135,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:devel-common/src/tests_common/test_utils/file_task_handler.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import itertools
from collections.abc import Generator, Iterable
from datetime import datetime
from typing import TYPE_CHECKING
import pendulum
from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
if TYPE_CHECKING:
from airflow.utils.log.file_task_handler import ParsedLog, StructuredLogMessage
def extract_events(logs: Iterable[StructuredLogMessage], skip_source_info=True) -> list[str]:
"""Helper function to return just the event (a.k.a message) from a list of StructuredLogMessage"""
logs = iter(logs)
if skip_source_info:
def is_source_group(log: StructuredLogMessage) -> bool:
return not hasattr(log, "timestamp") or log.event == "::endgroup::" or hasattr(log, "sources")
logs = itertools.dropwhile(is_source_group, logs)
return [s.event for s in logs]
def convert_list_to_stream(input_list: list[str]) -> Generator[str, None, None]:
"""
Convert a list of strings to a stream-like object.
This function yields each string in the list one by one.
"""
yield from input_list
def mock_parsed_logs_factory(
event_prefix: str,
start_datetime: datetime,
count: int,
) -> list[ParsedLog]:
"""
Create a list of ParsedLog objects with the specified start datetime and count.
Each ParsedLog object contains a timestamp and a list of StructuredLogMessage objects.
"""
if AIRFLOW_V_3_0_PLUS:
from airflow.utils.log.file_task_handler import StructuredLogMessage
return [
(
pendulum.instance(start_datetime + pendulum.duration(seconds=i)),
i,
StructuredLogMessage(
timestamp=pendulum.instance(start_datetime + pendulum.duration(seconds=i)),
event=f"{event_prefix} Event {i}",
),
)
for i in range(count)
]
| {
"repo_id": "apache/airflow",
"file_path": "devel-common/src/tests_common/test_utils/file_task_handler.py",
"license": "Apache License 2.0",
"lines": 62,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/src/airflow/utils/serve_logs/core.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Serve logs process."""
from __future__ import annotations
import socket
import sys
import structlog
import uvicorn
from airflow.configuration import conf
logger = structlog.get_logger(__name__)
def serve_logs(port=None):
"""Serve logs generated by Worker."""
# setproctitle causes issue on Mac OS: https://github.com/benoitc/gunicorn/issues/3021
os_type = sys.platform
if os_type == "darwin":
logger.debug("Mac OS detected, skipping setproctitle")
else:
from setproctitle import setproctitle
setproctitle("airflow serve-logs")
port = port or conf.getint("logging", "WORKER_LOG_SERVER_PORT")
if socket.has_dualstack_ipv6():
serve_log_uri = f"http://[::]:{port}"
else:
serve_log_uri = f"http://0.0.0.0:{port}"
logger.info("Starting log server on %s", serve_log_uri)
# Get uvicorn logging configuration from Airflow settings
uvicorn_log_level = conf.get("logging", "uvicorn_logging_level", fallback="info").lower()
# Use uvicorn directly for ASGI applications
uvicorn.run(
"airflow.utils.serve_logs.log_server:get_app", host="", port=port, log_level=uvicorn_log_level
)
# Log serving is I/O bound and has low concurrency, so single process is sufficient
if __name__ == "__main__":
serve_logs()
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/utils/serve_logs/core.py",
"license": "Apache License 2.0",
"lines": 48,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/src/airflow/api_fastapi/core_api/services/public/dag_run.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import asyncio
import itertools
import json
import operator
from typing import TYPE_CHECKING, Any
import attrs
from sqlalchemy import select
from airflow.api_fastapi.common.db.common import SessionDep
from airflow.models.dagrun import DagRun
from airflow.models.xcom import XCOM_RETURN_KEY, XComModel
from airflow.utils.session import create_session_async
from airflow.utils.state import State
if TYPE_CHECKING:
from collections.abc import AsyncGenerator, Iterator
from sqlalchemy import ScalarResult
@attrs.define
class DagRunWaiter:
"""Wait for the specified dag run to finish, and collect info from it."""
dag_id: str
run_id: str
interval: float
result_task_ids: list[str] | None
session: SessionDep
async def _get_dag_run(self) -> DagRun:
async with create_session_async() as session:
return await session.scalar(select(DagRun).filter_by(dag_id=self.dag_id, run_id=self.run_id))
def _serialize_xcoms(self) -> dict[str, Any]:
xcom_query = XComModel.get_many(
run_id=self.run_id,
key=XCOM_RETURN_KEY,
task_ids=self.result_task_ids,
dag_ids=self.dag_id,
)
xcom_results: ScalarResult[tuple[XComModel]] = self.session.scalars(
xcom_query.order_by(XComModel.task_id, XComModel.map_index)
)
def _group_xcoms(g: Iterator[XComModel | tuple[XComModel]]) -> Any:
entries = [row[0] if isinstance(row, tuple) else row for row in g]
if len(entries) == 1 and entries[0].map_index < 0: # Unpack non-mapped task xcom.
return entries[0].value
return [entry.value for entry in entries] # Task is mapped; return all xcoms in a list.
return {
task_id: _group_xcoms(g)
for task_id, g in itertools.groupby(xcom_results, key=operator.attrgetter("task_id"))
}
def _serialize_response(self, dag_run: DagRun) -> str:
resp = {"state": dag_run.state}
if dag_run.state not in State.finished_dr_states:
return json.dumps(resp)
if self.result_task_ids:
resp["results"] = self._serialize_xcoms()
return json.dumps(resp)
async def wait(self) -> AsyncGenerator[str, None]:
yield self._serialize_response(dag_run := await self._get_dag_run())
yield "\n"
while dag_run.state not in State.finished_dr_states:
await asyncio.sleep(self.interval)
yield self._serialize_response(dag_run := await self._get_dag_run())
yield "\n"
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/core_api/services/public/dag_run.py",
"license": "Apache License 2.0",
"lines": 76,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_08_10.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from cadwyn import ResponseInfo, VersionChange, convert_response_to_previous_version_for, endpoint, schema
from airflow.api_fastapi.execution_api.datamodels.taskinstance import DagRun, TIRunContext
from airflow.api_fastapi.execution_api.routes.xcoms import GetXcomFilterParams, GetXComSliceFilterParams
class AddDagRunStateFieldAndPreviousEndpoint(VersionChange):
"""Add the `state` field to DagRun model and `/dag-runs/{dag_id}/previous` endpoint."""
description = __doc__
instructions_to_migrate_to_previous_version = (
schema(DagRun).field("state").didnt_exist,
endpoint("/dag-runs/{dag_id}/previous", ["GET"]).didnt_exist,
)
@convert_response_to_previous_version_for(TIRunContext) # type: ignore[arg-type]
def remove_state_from_dag_run(response: ResponseInfo) -> None: # type: ignore[misc]
"""Remove the `state` field from the dag_run object when converting to the previous version."""
if "dag_run" in response.body and isinstance(response.body["dag_run"], dict):
response.body["dag_run"].pop("state", None)
class AddIncludePriorDatesToGetXComSlice(VersionChange):
"""Add the `include_prior_dates` field to GetXComSliceFilterParams and GetXcomFilterParams."""
description = __doc__
instructions_to_migrate_to_previous_version = (
schema(GetXComSliceFilterParams).field("include_prior_dates").didnt_exist,
schema(GetXcomFilterParams).field("include_prior_dates").didnt_exist,
)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_08_10.py",
"license": "Apache License 2.0",
"lines": 39,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/src/airflow/models/dag_favorite.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from sqlalchemy import ForeignKey
from sqlalchemy.orm import Mapped, mapped_column
from airflow.models.base import Base, StringID
class DagFavorite(Base):
"""Association table model linking users to their favorite DAGs."""
__tablename__ = "dag_favorite"
user_id: Mapped[str] = mapped_column(StringID(), primary_key=True)
dag_id: Mapped[str] = mapped_column(
StringID(), ForeignKey("dag.dag_id", ondelete="CASCADE"), primary_key=True
)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/models/dag_favorite.py",
"license": "Apache License 2.0",
"lines": 28,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/google/src/airflow/providers/google/cloud/hooks/vertex_ai/experiment_service.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from google.cloud import aiplatform
from google.cloud.aiplatform.compat.types import execution_v1 as gca_execution
from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook
class ExperimentHook(GoogleBaseHook):
"""Use the Vertex AI SDK for Python to manage your experiments."""
@GoogleBaseHook.fallback_to_default_project_id
def create_experiment(
self,
experiment_name: str,
location: str,
experiment_description: str = "",
project_id: str = PROVIDE_PROJECT_ID,
experiment_tensorboard: str | None = None,
):
"""
Create an experiment and, optionally, associate a Vertex AI TensorBoard instance using the Vertex AI SDK for Python.
:param project_id: Required. The ID of the Google Cloud project that the service belongs to.
:param location: Required. The ID of the Google Cloud location that the service belongs to.
:param experiment_name: Required. The name of the evaluation experiment.
:param experiment_description: Optional. Description of the evaluation experiment.
:param experiment_tensorboard: Optional. The Vertex TensorBoard instance to use as a backing
TensorBoard for the provided experiment. If no TensorBoard is provided, a default Tensorboard
instance is created and used by this experiment.
"""
aiplatform.init(
experiment=experiment_name,
experiment_description=experiment_description,
experiment_tensorboard=experiment_tensorboard if experiment_tensorboard else False,
project=project_id,
location=location,
)
self.log.info("Created experiment with name: %s", experiment_name)
@GoogleBaseHook.fallback_to_default_project_id
def delete_experiment(
self,
experiment_name: str,
location: str,
project_id: str = PROVIDE_PROJECT_ID,
delete_backing_tensorboard_runs: bool = False,
) -> None:
"""
Delete an experiment.
Deleting an experiment deletes that experiment and all experiment runs associated with the experiment.
The Vertex AI TensorBoard experiment associated with the experiment is not deleted.
:param project_id: Required. The ID of the Google Cloud project that the service belongs to.
:param location: Required. The ID of the Google Cloud location that the service belongs to.
:param experiment_name: Required. The name of the evaluation experiment.
:param delete_backing_tensorboard_runs: Optional. If True will also delete the Vertex AI TensorBoard
runs associated with the experiment runs under this experiment that we used to store time series
metrics.
"""
experiment = aiplatform.Experiment(
experiment_name=experiment_name, project=project_id, location=location
)
experiment.delete(delete_backing_tensorboard_runs=delete_backing_tensorboard_runs)
class ExperimentRunHook(GoogleBaseHook):
"""Use the Vertex AI SDK for Python to create and manage your experiment runs."""
@GoogleBaseHook.fallback_to_default_project_id
def create_experiment_run(
self,
experiment_run_name: str,
experiment_name: str,
location: str,
project_id: str = PROVIDE_PROJECT_ID,
experiment_run_tensorboard: str | None = None,
run_after_creation: bool = False,
) -> None:
"""
Create experiment run for the experiment.
:param project_id: Required. The ID of the Google Cloud project that the service belongs to.
:param location: Required. The ID of the Google Cloud location that the service belongs to.
:param experiment_name: Required. The name of the evaluation experiment.
:param experiment_run_name: Required. The specific run name or ID for this experiment.
:param experiment_run_tensorboard: Optional. A backing TensorBoard resource to enable and store time
series metrics logged to this experiment run.
:param run_after_creation: Optional. Responsible for state after creation of experiment run.
If true experiment run will be created with state RUNNING.
"""
experiment_run_state = (
gca_execution.Execution.State.NEW
if not run_after_creation
else gca_execution.Execution.State.RUNNING
)
experiment_run = aiplatform.ExperimentRun.create(
run_name=experiment_run_name,
experiment=experiment_name,
project=project_id,
location=location,
state=experiment_run_state,
tensorboard=experiment_run_tensorboard,
)
self.log.info(
"Created experiment run with name: %s and status: %s",
experiment_run.name,
experiment_run.state,
)
@GoogleBaseHook.fallback_to_default_project_id
def list_experiment_runs(
self,
experiment_name: str,
location: str,
project_id: str = PROVIDE_PROJECT_ID,
) -> list[aiplatform.ExperimentRun]:
"""
List experiment run for the experiment.
:param project_id: Required. The ID of the Google Cloud project that the service belongs to.
:param location: Required. The ID of the Google Cloud location that the service belongs to.
:param experiment_name: Required. The name of the evaluation experiment.
"""
experiment_runs = aiplatform.ExperimentRun.list(
experiment=experiment_name,
project=project_id,
location=location,
)
return experiment_runs
@GoogleBaseHook.fallback_to_default_project_id
def update_experiment_run_state(
self,
experiment_run_name: str,
experiment_name: str,
location: str,
new_state: gca_execution.Execution.State,
project_id: str = PROVIDE_PROJECT_ID,
) -> None:
"""
Update state of the experiment run.
:param project_id: Required. The ID of the Google Cloud project that the service belongs to.
:param location: Required. The ID of the Google Cloud location that the service belongs to.
:param experiment_name: Required. The name of the evaluation experiment.
:param experiment_run_name: Required. The specific run name or ID for this experiment.
:param new_state: Required. New state of the experiment run.
"""
experiment_run = aiplatform.ExperimentRun(
run_name=experiment_run_name,
experiment=experiment_name,
project=project_id,
location=location,
)
self.log.info("State of the %s before update is: %s", experiment_run.name, experiment_run.state)
experiment_run.update_state(new_state)
@GoogleBaseHook.fallback_to_default_project_id
def delete_experiment_run(
self,
experiment_run_name: str,
experiment_name: str,
location: str,
project_id: str = PROVIDE_PROJECT_ID,
delete_backing_tensorboard_run: bool = False,
) -> None:
"""
Delete experiment run from the experiment.
:param project_id: Required. The ID of the Google Cloud project that the service belongs to.
:param location: Required. The ID of the Google Cloud location that the service belongs to.
:param experiment_name: Required. The name of the evaluation experiment.
:param experiment_run_name: Required. The specific run name or ID for this experiment.
:param delete_backing_tensorboard_run: Whether to delete the backing Vertex AI TensorBoard run
that stores time series metrics for this run.
"""
self.log.info("Next experiment run will be deleted: %s", experiment_run_name)
experiment_run = aiplatform.ExperimentRun(
run_name=experiment_run_name, experiment=experiment_name, project=project_id, location=location
)
experiment_run.delete(delete_backing_tensorboard_run=delete_backing_tensorboard_run)
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/src/airflow/providers/google/cloud/hooks/vertex_ai/experiment_service.py",
"license": "Apache License 2.0",
"lines": 180,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/google/src/airflow/providers/google/cloud/operators/vertex_ai/experiment_service.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from collections.abc import Sequence
from typing import TYPE_CHECKING
from google.api_core import exceptions
from airflow.providers.common.compat.sdk import AirflowException
from airflow.providers.google.cloud.hooks.vertex_ai.experiment_service import (
ExperimentHook,
ExperimentRunHook,
)
from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
if TYPE_CHECKING:
from airflow.providers.common.compat.sdk import Context
class CreateExperimentOperator(GoogleCloudBaseOperator):
"""
Use the Vertex AI SDK to create experiment.
:param project_id: Required. The ID of the Google Cloud project that the service belongs to.
:param location: Required. The ID of the Google Cloud location that the service belongs to.
:param experiment_name: Required. The name of the evaluation experiment.
:param experiment_description: Optional. Description of the evaluation experiment.
:param experiment_tensorboard: Optional. The Vertex TensorBoard instance to use as a backing
TensorBoard for the provided experiment. If no TensorBoard is provided, a default TensorBoard
instance is created and used by this experiment.
:param gcp_conn_id: The connection ID to use connecting to Google Cloud.
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account (templated).
"""
template_fields = (
"location",
"project_id",
"impersonation_chain",
"experiment_name",
)
def __init__(
self,
*,
project_id: str,
location: str,
experiment_name: str,
experiment_description: str = "",
gcp_conn_id: str = "google_cloud_default",
impersonation_chain: str | Sequence[str] | None = None,
experiment_tensorboard: str | None = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.project_id = project_id
self.location = location
self.experiment_name = experiment_name
self.experiment_description = experiment_description
self.experiment_tensorboard = experiment_tensorboard
self.gcp_conn_id = gcp_conn_id
self.impersonation_chain = impersonation_chain
def execute(self, context: Context) -> None:
self.hook = ExperimentHook(
gcp_conn_id=self.gcp_conn_id,
impersonation_chain=self.impersonation_chain,
)
try:
self.hook.create_experiment(
project_id=self.project_id,
location=self.location,
experiment_name=self.experiment_name,
experiment_description=self.experiment_description,
experiment_tensorboard=self.experiment_tensorboard,
)
except exceptions.AlreadyExists:
raise AirflowException(f"Experiment with name {self.experiment_name} already exist")
self.log.info("Created experiment: %s", self.experiment_name)
class DeleteExperimentOperator(GoogleCloudBaseOperator):
"""
Use the Vertex AI SDK to delete experiment.
:param project_id: Required. The ID of the Google Cloud project that the service belongs to.
:param location: Required. The ID of the Google Cloud location that the service belongs to.
:param experiment_name: Required. The name of the evaluation experiment.
:param delete_backing_tensorboard_runs: Optional. If True will also delete the Vertex AI TensorBoard
runs associated with the experiment runs under this experiment that we used to store time series
metrics.
:param gcp_conn_id: The connection ID to use connecting to Google Cloud.
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account (templated).
"""
template_fields = (
"location",
"project_id",
"impersonation_chain",
"experiment_name",
)
def __init__(
self,
*,
project_id: str,
location: str,
experiment_name: str,
delete_backing_tensorboard_runs: bool = False,
gcp_conn_id: str = "google_cloud_default",
impersonation_chain: str | Sequence[str] | None = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.project_id = project_id
self.location = location
self.experiment_name = experiment_name
self.delete_backing_tensorboard_runs = delete_backing_tensorboard_runs
self.gcp_conn_id = gcp_conn_id
self.impersonation_chain = impersonation_chain
def execute(self, context: Context) -> None:
self.hook = ExperimentHook(
gcp_conn_id=self.gcp_conn_id,
impersonation_chain=self.impersonation_chain,
)
try:
self.hook.delete_experiment(
project_id=self.project_id,
location=self.location,
experiment_name=self.experiment_name,
delete_backing_tensorboard_runs=self.delete_backing_tensorboard_runs,
)
except exceptions.NotFound:
raise AirflowException(f"Experiment with name {self.experiment_name} not found")
self.log.info("Deleted experiment: %s", self.experiment_name)
class CreateExperimentRunOperator(GoogleCloudBaseOperator):
"""
Use the Vertex AI SDK to create experiment run.
:param project_id: Required. The ID of the Google Cloud project that the service belongs to.
:param location: Required. The ID of the Google Cloud location that the service belongs to.
:param experiment_name: Required. The name of the evaluation experiment.
:param experiment_run_name: Required. The specific run name or ID for this experiment.
:param experiment_run_tensorboard: Optional. A backing TensorBoard resource to enable and store time series
metrics logged to this experiment run using log_time_series_metrics.
:param run_after_creation: Optional. If True experiment run will be created with state running.
:param gcp_conn_id: The connection ID to use connecting to Google Cloud.
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account (templated).
"""
template_fields = (
"location",
"project_id",
"impersonation_chain",
"experiment_name",
"experiment_run_name",
)
def __init__(
self,
*,
project_id: str,
location: str,
experiment_name: str,
experiment_run_name: str,
experiment_run_tensorboard: str | None = None,
run_after_creation: bool = False,
gcp_conn_id: str = "google_cloud_default",
impersonation_chain: str | Sequence[str] | None = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.project_id = project_id
self.location = location
self.experiment_name = experiment_name
self.experiment_run_name = experiment_run_name
self.experiment_run_tensorboard = experiment_run_tensorboard
self.run_after_creation = run_after_creation
self.gcp_conn_id = gcp_conn_id
self.impersonation_chain = impersonation_chain
def execute(self, context: Context) -> None:
self.hook = ExperimentRunHook(
gcp_conn_id=self.gcp_conn_id,
impersonation_chain=self.impersonation_chain,
)
try:
self.hook.create_experiment_run(
project_id=self.project_id,
location=self.location,
experiment_name=self.experiment_name,
experiment_run_name=self.experiment_run_name,
experiment_run_tensorboard=self.experiment_run_tensorboard,
run_after_creation=self.run_after_creation,
)
except exceptions.AlreadyExists:
raise AirflowException(f"Experiment Run with name {self.experiment_run_name} already exist")
self.log.info("Created experiment run: %s", self.experiment_run_name)
class ListExperimentRunsOperator(GoogleCloudBaseOperator):
"""
Use the Vertex AI SDK to list experiment runs in experiment.
:param project_id: Required. The ID of the Google Cloud project that the service belongs to.
:param location: Required. The ID of the Google Cloud location that the service belongs to.
:param experiment_name: Required. The name of the evaluation experiment.
:param gcp_conn_id: The connection ID to use connecting to Google Cloud.
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account (templated).
"""
template_fields = (
"location",
"project_id",
"impersonation_chain",
"experiment_name",
)
def __init__(
self,
*,
project_id: str,
location: str,
experiment_name: str,
gcp_conn_id: str = "google_cloud_default",
impersonation_chain: str | Sequence[str] | None = None,
**kwargs,
):
super().__init__(**kwargs)
self.project_id = project_id
self.location = location
self.experiment_name = experiment_name
self.gcp_conn_id = gcp_conn_id
self.impersonation_chain = impersonation_chain
def execute(self, context: Context) -> list[str]:
self.hook = ExperimentRunHook(
gcp_conn_id=self.gcp_conn_id,
impersonation_chain=self.impersonation_chain,
)
try:
experiment_runs = self.hook.list_experiment_runs(
project_id=self.project_id, experiment_name=self.experiment_name, location=self.location
)
except exceptions.NotFound:
raise AirflowException("Experiment %s not found", self.experiment_name)
return [er.name for er in experiment_runs]
class UpdateExperimentRunStateOperator(GoogleCloudBaseOperator):
"""
Use the Vertex AI SDK to update state of the experiment run.
:param project_id: Required. The ID of the Google Cloud project that the service belongs to.
:param location: Required. The ID of the Google Cloud location that the service belongs to.
:param experiment_name: Required. The name of the evaluation experiment.
:param experiment_run_name: Required. The specific run name or ID for this experiment.
:param new_state: Required. The specific state of experiment run.
:param gcp_conn_id: The connection ID to use connecting to Google Cloud.
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account (templated).
"""
template_fields = (
"location",
"project_id",
"impersonation_chain",
"experiment_name",
"experiment_run_name",
"new_state",
)
def __init__(
self,
*,
project_id: str,
location: str,
experiment_name: str,
experiment_run_name: str,
new_state: int,
gcp_conn_id: str = "google_cloud_default",
impersonation_chain: str | Sequence[str] | None = None,
**kwargs,
):
super().__init__(**kwargs)
self.project_id = project_id
self.location = location
self.experiment_name = experiment_name
self.experiment_run_name = experiment_run_name
self.gcp_conn_id = gcp_conn_id
self.impersonation_chain = impersonation_chain
self.new_state = new_state
def execute(self, context: Context) -> None:
self.hook = ExperimentRunHook(
gcp_conn_id=self.gcp_conn_id,
impersonation_chain=self.impersonation_chain,
)
try:
self.hook.update_experiment_run_state(
project_id=self.project_id,
experiment_name=self.experiment_name,
experiment_run_name=self.experiment_run_name,
new_state=self.new_state,
location=self.location,
)
self.log.info("New state of the %s is: %s", self.experiment_run_name, self.new_state)
except exceptions.NotFound:
raise AirflowException("Experiment or experiment run not found")
class DeleteExperimentRunOperator(GoogleCloudBaseOperator):
"""
Use the Vertex AI SDK to delete experiment run.
:param project_id: Required. The ID of the Google Cloud project that the service belongs to.
:param location: Required. The ID of the Google Cloud location that the service belongs to.
:param experiment_name: Required. The name of the evaluation experiment.
:param experiment_run_name: Required. The specific run name or ID for this experiment.
:param gcp_conn_id: The connection ID to use connecting to Google Cloud.
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account (templated).
"""
template_fields = (
"location",
"project_id",
"impersonation_chain",
"experiment_name",
"experiment_run_name",
)
def __init__(
self,
*,
project_id: str,
location: str,
experiment_name: str,
experiment_run_name: str,
gcp_conn_id: str = "google_cloud_default",
impersonation_chain: str | Sequence[str] | None = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.project_id = project_id
self.location = location
self.experiment_name = experiment_name
self.experiment_run_name = experiment_run_name
self.gcp_conn_id = gcp_conn_id
self.impersonation_chain = impersonation_chain
def execute(self, context: Context) -> None:
self.hook = ExperimentRunHook(
gcp_conn_id=self.gcp_conn_id,
impersonation_chain=self.impersonation_chain,
)
try:
self.hook.delete_experiment_run(
project_id=self.project_id,
location=self.location,
experiment_name=self.experiment_name,
experiment_run_name=self.experiment_run_name,
)
except exceptions.NotFound:
raise AirflowException(f"Experiment Run with name {self.experiment_run_name} not found")
self.log.info("Deleted experiment run: %s", self.experiment_run_name)
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/src/airflow/providers/google/cloud/operators/vertex_ai/experiment_service.py",
"license": "Apache License 2.0",
"lines": 383,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_experiment_service.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from datetime import datetime
from google.cloud.aiplatform.compat.types import execution_v1 as gca_execution
from airflow.models.dag import DAG
from airflow.providers.google.cloud.operators.vertex_ai.experiment_service import (
CreateExperimentOperator,
CreateExperimentRunOperator,
DeleteExperimentOperator,
DeleteExperimentRunOperator,
ListExperimentRunsOperator,
UpdateExperimentRunStateOperator,
)
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
DAG_ID = "vertex_ai_experiment_service_dag"
REGION = "us-central1"
EXPERIMENT_NAME = f"test-experiment-airflow-operator-{ENV_ID}"
EXPERIMENT_RUN_NAME_1 = f"test-experiment-run-airflow-operator-1-{ENV_ID}"
EXPERIMENT_RUN_NAME_2 = f"test-experiment-run-airflow-operator-2-{ENV_ID}"
with DAG(
dag_id=DAG_ID,
description="Sample DAG with using experiment service.",
schedule="@once",
start_date=datetime(2025, 6, 1),
catchup=False,
tags=["example", "vertex_ai", "experiment_service"],
) as dag:
# [START how_to_cloud_vertex_ai_create_experiment_operator]
create_experiment_task = CreateExperimentOperator(
task_id="create_experiment_task",
project_id=PROJECT_ID,
location=REGION,
experiment_name=EXPERIMENT_NAME,
)
# [END how_to_cloud_vertex_ai_create_experiment_operator]
# [START how_to_cloud_vertex_ai_create_experiment_run_operator]
create_experiment_run_1_task = CreateExperimentRunOperator(
task_id="create_experiment_run_1_task",
project_id=PROJECT_ID,
location=REGION,
experiment_name=EXPERIMENT_NAME,
experiment_run_name=EXPERIMENT_RUN_NAME_1,
)
# [END how_to_cloud_vertex_ai_create_experiment_run_operator]
create_experiment_run_2_task = CreateExperimentRunOperator(
task_id="create_experiment_run_2_task",
project_id=PROJECT_ID,
location=REGION,
experiment_name=EXPERIMENT_NAME,
experiment_run_name=EXPERIMENT_RUN_NAME_2,
)
# [START how_to_cloud_vertex_ai_list_experiment_run_operator]
list_experiment_runs_task = ListExperimentRunsOperator(
task_id="list_experiment_runs_task",
project_id=PROJECT_ID,
location=REGION,
experiment_name=EXPERIMENT_NAME,
)
# [END how_to_cloud_vertex_ai_list_experiment_run_operator]
# [START how_to_cloud_vertex_ai_update_experiment_run_state_operator]
update_experiment_run_state_task = UpdateExperimentRunStateOperator(
task_id="update_experiment_run_state_task",
project_id=PROJECT_ID,
location=REGION,
experiment_name=EXPERIMENT_NAME,
experiment_run_name=EXPERIMENT_RUN_NAME_2,
new_state=gca_execution.Execution.State.COMPLETE,
)
# [END how_to_cloud_vertex_ai_update_experiment_run_state_operator]
# [START how_to_cloud_vertex_ai_delete_experiment_run_operator]
delete_experiment_run_1_task = DeleteExperimentRunOperator(
task_id="delete_experiment_run_1_task",
project_id=PROJECT_ID,
location=REGION,
experiment_name=EXPERIMENT_NAME,
experiment_run_name=EXPERIMENT_RUN_NAME_1,
)
# [END how_to_cloud_vertex_ai_delete_experiment_run_operator]
delete_experiment_run_2_task = DeleteExperimentRunOperator(
task_id="delete_experiment_run_2_task",
project_id=PROJECT_ID,
location=REGION,
experiment_name=EXPERIMENT_NAME,
experiment_run_name=EXPERIMENT_RUN_NAME_2,
)
# [START how_to_cloud_vertex_ai_delete_experiment_operator]
delete_experiment_task = DeleteExperimentOperator(
task_id="delete_experiment_task",
project_id=PROJECT_ID,
location=REGION,
experiment_name=EXPERIMENT_NAME,
)
# [END how_to_cloud_vertex_ai_delete_experiment_operator]
(
create_experiment_task
>> [create_experiment_run_1_task, create_experiment_run_2_task]
>> list_experiment_runs_task
>> update_experiment_run_state_task
>> [delete_experiment_run_1_task, delete_experiment_run_2_task]
>> delete_experiment_task
)
from tests_common.test_utils.watcher import watcher
# This test needs watcher in order to properly mark success/failure
# when "tearDown" task with trigger rule is part of the DAG
list(dag.tasks) >> watcher()
from tests_common.test_utils.system_tests import get_test_run # noqa: E402
# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/tests/system/google/cloud/vertex_ai/example_vertex_ai_experiment_service.py",
"license": "Apache License 2.0",
"lines": 125,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/google/tests/unit/google/cloud/hooks/vertex_ai/test_experiment_service.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
from google.cloud import aiplatform
from airflow.providers.google.cloud.hooks.vertex_ai.experiment_service import (
ExperimentHook,
ExperimentRunHook,
)
from unit.google.cloud.utils.base_gcp_mock import (
mock_base_gcp_hook_default_project_id,
mock_base_gcp_hook_no_default_project_id,
)
TEST_GCP_CONN_ID: str = "test-gcp-conn-id"
TEST_REGION: str = "test-region"
TEST_PROJECT_ID: str = "test-project-id"
TEST_EXPERIMENT_NAME = "test_experiment_name"
TEST_EXPERIMENT_RUN_NAME = "test_experiment_run_name"
TEST_EXPERIMENT_DESCRIPTION = "test_description"
TEST_TARGET_STATE = aiplatform.gapic.Execution.State.COMPLETE
TEST_TENSORBOARD = False
TEST_DELETE_BACKING_TENSORBOARD_RUNS = False
BASE_STRING = "airflow.providers.google.common.hooks.base_google.{}"
EXPERIMENT_SERVICE_STRING = "airflow.providers.google.cloud.hooks.vertex_ai.experiment_service.{}"
class TestExperimentWithDefaultProjectIdHook:
def setup_method(self):
with mock.patch(
BASE_STRING.format("GoogleBaseHook.__init__"), new=mock_base_gcp_hook_default_project_id
):
self.hook = ExperimentHook(gcp_conn_id=TEST_GCP_CONN_ID)
@mock.patch(EXPERIMENT_SERVICE_STRING.format("aiplatform.init"))
def test_create_experiment(self, mock_init) -> None:
self.hook.create_experiment(
project_id=TEST_PROJECT_ID,
location=TEST_REGION,
experiment_name=TEST_EXPERIMENT_NAME,
experiment_description=TEST_EXPERIMENT_DESCRIPTION,
experiment_tensorboard=TEST_TENSORBOARD,
)
mock_init.assert_called_with(
project=TEST_PROJECT_ID,
location=TEST_REGION,
experiment=TEST_EXPERIMENT_NAME,
experiment_description=TEST_EXPERIMENT_DESCRIPTION,
experiment_tensorboard=TEST_TENSORBOARD,
)
@mock.patch(EXPERIMENT_SERVICE_STRING.format("aiplatform.Experiment"))
def test_delete_experiment(self, mock_experiment) -> None:
self.hook.delete_experiment(
project_id=TEST_PROJECT_ID,
location=TEST_REGION,
experiment_name=TEST_EXPERIMENT_NAME,
delete_backing_tensorboard_runs=TEST_DELETE_BACKING_TENSORBOARD_RUNS,
)
mock_experiment.assert_called_with(
project=TEST_PROJECT_ID,
location=TEST_REGION,
experiment_name=TEST_EXPERIMENT_NAME,
)
mock_experiment.return_value.delete.assert_called_with(
delete_backing_tensorboard_runs=TEST_DELETE_BACKING_TENSORBOARD_RUNS
)
class TestExperimentWithoutDefaultProjectIdHook:
def setup_method(self):
with mock.patch(
BASE_STRING.format("GoogleBaseHook.__init__"), new=mock_base_gcp_hook_no_default_project_id
):
self.hook = ExperimentHook(gcp_conn_id=TEST_GCP_CONN_ID)
@mock.patch(EXPERIMENT_SERVICE_STRING.format("aiplatform.init"))
def test_create_experiment(self, mock_init) -> None:
self.hook.create_experiment(
project_id=TEST_PROJECT_ID,
location=TEST_REGION,
experiment_name=TEST_EXPERIMENT_NAME,
experiment_description=TEST_EXPERIMENT_DESCRIPTION,
experiment_tensorboard=TEST_TENSORBOARD,
)
mock_init.assert_called_with(
project=TEST_PROJECT_ID,
location=TEST_REGION,
experiment=TEST_EXPERIMENT_NAME,
experiment_description=TEST_EXPERIMENT_DESCRIPTION,
experiment_tensorboard=TEST_TENSORBOARD,
)
@mock.patch(EXPERIMENT_SERVICE_STRING.format("aiplatform.Experiment"))
def test_delete_experiment(self, mock_experiment) -> None:
self.hook.delete_experiment(
project_id=TEST_PROJECT_ID,
location=TEST_REGION,
experiment_name=TEST_EXPERIMENT_NAME,
delete_backing_tensorboard_runs=TEST_DELETE_BACKING_TENSORBOARD_RUNS,
)
mock_experiment.assert_called_with(
project=TEST_PROJECT_ID,
location=TEST_REGION,
experiment_name=TEST_EXPERIMENT_NAME,
)
mock_experiment.return_value.delete.assert_called_with(
delete_backing_tensorboard_runs=TEST_DELETE_BACKING_TENSORBOARD_RUNS
)
class TestExperimentRunWithDefaultProjectIdHook:
def setup_method(self):
with mock.patch(
BASE_STRING.format("GoogleBaseHook.__init__"), new=mock_base_gcp_hook_default_project_id
):
self.hook = ExperimentRunHook(gcp_conn_id=TEST_GCP_CONN_ID)
@mock.patch(EXPERIMENT_SERVICE_STRING.format("aiplatform.ExperimentRun"))
def test_create_experiment_run(self, mock_experiment_run) -> None:
self.hook.create_experiment_run(
project_id=TEST_PROJECT_ID,
location=TEST_REGION,
experiment_name=TEST_EXPERIMENT_NAME,
experiment_run_name=TEST_EXPERIMENT_RUN_NAME,
experiment_run_tensorboard=TEST_TENSORBOARD,
)
mock_experiment_run.create.assert_called_with(
project=TEST_PROJECT_ID,
location=TEST_REGION,
experiment=TEST_EXPERIMENT_NAME,
run_name=TEST_EXPERIMENT_RUN_NAME,
state=aiplatform.gapic.Execution.State.NEW,
tensorboard=TEST_TENSORBOARD,
)
@mock.patch(EXPERIMENT_SERVICE_STRING.format("aiplatform.ExperimentRun"))
def test_delete_experiment_run(self, mock_experiment_run) -> None:
self.hook.delete_experiment_run(
project_id=TEST_PROJECT_ID,
location=TEST_REGION,
experiment_name=TEST_EXPERIMENT_NAME,
experiment_run_name=TEST_EXPERIMENT_RUN_NAME,
)
mock_experiment_run.assert_called_with(
project=TEST_PROJECT_ID,
location=TEST_REGION,
experiment=TEST_EXPERIMENT_NAME,
run_name=TEST_EXPERIMENT_RUN_NAME,
)
mock_experiment_run.return_value.delete.assert_called_with(
delete_backing_tensorboard_run=TEST_DELETE_BACKING_TENSORBOARD_RUNS
)
class TestExperimentRunWithoutDefaultProjectIdHook:
def setup_method(self):
with mock.patch(
BASE_STRING.format("GoogleBaseHook.__init__"), new=mock_base_gcp_hook_no_default_project_id
):
self.hook = ExperimentRunHook(gcp_conn_id=TEST_GCP_CONN_ID)
@mock.patch(EXPERIMENT_SERVICE_STRING.format("aiplatform.ExperimentRun"))
def test_create_experiment_run(self, mock_experiment_run) -> None:
self.hook.create_experiment_run(
project_id=TEST_PROJECT_ID,
location=TEST_REGION,
experiment_name=TEST_EXPERIMENT_NAME,
experiment_run_name=TEST_EXPERIMENT_RUN_NAME,
experiment_run_tensorboard=TEST_TENSORBOARD,
)
mock_experiment_run.create.assert_called_with(
project=TEST_PROJECT_ID,
location=TEST_REGION,
experiment=TEST_EXPERIMENT_NAME,
run_name=TEST_EXPERIMENT_RUN_NAME,
state=aiplatform.gapic.Execution.State.NEW,
tensorboard=TEST_TENSORBOARD,
)
@mock.patch(EXPERIMENT_SERVICE_STRING.format("aiplatform.ExperimentRun"))
def test_delete_experiment_run(self, mock_experiment_run) -> None:
self.hook.delete_experiment_run(
project_id=TEST_PROJECT_ID,
location=TEST_REGION,
experiment_name=TEST_EXPERIMENT_NAME,
experiment_run_name=TEST_EXPERIMENT_RUN_NAME,
)
mock_experiment_run.assert_called_with(
project=TEST_PROJECT_ID,
location=TEST_REGION,
experiment=TEST_EXPERIMENT_NAME,
run_name=TEST_EXPERIMENT_RUN_NAME,
)
mock_experiment_run.return_value.delete.assert_called_with(
delete_backing_tensorboard_run=TEST_DELETE_BACKING_TENSORBOARD_RUNS
)
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/tests/unit/google/cloud/hooks/vertex_ai/test_experiment_service.py",
"license": "Apache License 2.0",
"lines": 195,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/google/tests/unit/google/cloud/operators/vertex_ai/test_experiment_service.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
from google.cloud import aiplatform
from airflow.providers.google.cloud.operators.vertex_ai.experiment_service import (
CreateExperimentOperator,
CreateExperimentRunOperator,
DeleteExperimentOperator,
DeleteExperimentRunOperator,
ListExperimentRunsOperator,
UpdateExperimentRunStateOperator,
)
VERTEX_AI_PATH = "airflow.providers.google.cloud.operators.vertex_ai.experiment_service.{}"
TASK_ID = "test_task_id"
GCP_PROJECT = "test-project"
GCP_LOCATION = "test-location"
GCP_CONN_ID = "test-conn"
IMPERSONATION_CHAIN = ["ACCOUNT_1", "ACCOUNT_2", "ACCOUNT_3"]
TEST_EXPERIMENT_NAME = "test_experiment_name"
TEST_EXPERIMENT_RUN_NAME = "test_experiment_run_name"
TEST_EXPERIMENT_DESCRIPTION = "test_description"
TEST_TARGET_STATE = aiplatform.gapic.Execution.State.COMPLETE
TEST_TENSORBOARD = None
TEST_DELETE_BACKING_TENSORBOARD_RUNS = True
class TestVertexAICreateExperimentOperator:
@mock.patch(VERTEX_AI_PATH.format("ExperimentHook"))
def test_execute(self, mock_hook):
op = CreateExperimentOperator(
task_id=TASK_ID,
project_id=GCP_PROJECT,
location=GCP_LOCATION,
experiment_name=TEST_EXPERIMENT_NAME,
experiment_description=TEST_EXPERIMENT_DESCRIPTION,
gcp_conn_id=GCP_CONN_ID,
impersonation_chain=IMPERSONATION_CHAIN,
)
op.execute(context={"ti": mock.MagicMock()})
mock_hook.assert_called_once_with(
gcp_conn_id=GCP_CONN_ID,
impersonation_chain=IMPERSONATION_CHAIN,
)
mock_hook.return_value.create_experiment.assert_called_once_with(
project_id=GCP_PROJECT,
location=GCP_LOCATION,
experiment_name=TEST_EXPERIMENT_NAME,
experiment_description=TEST_EXPERIMENT_DESCRIPTION,
experiment_tensorboard=TEST_TENSORBOARD,
)
class TestVertexAIDeleteExperimentOperator:
@mock.patch(VERTEX_AI_PATH.format("ExperimentHook"))
def test_execute(self, mock_hook):
op = DeleteExperimentOperator(
task_id=TASK_ID,
project_id=GCP_PROJECT,
location=GCP_LOCATION,
experiment_name=TEST_EXPERIMENT_NAME,
gcp_conn_id=GCP_CONN_ID,
impersonation_chain=IMPERSONATION_CHAIN,
delete_backing_tensorboard_runs=TEST_DELETE_BACKING_TENSORBOARD_RUNS,
)
op.execute(context={"ti": mock.MagicMock()})
mock_hook.assert_called_once_with(
gcp_conn_id=GCP_CONN_ID,
impersonation_chain=IMPERSONATION_CHAIN,
)
mock_hook.return_value.delete_experiment.assert_called_once_with(
project_id=GCP_PROJECT,
location=GCP_LOCATION,
experiment_name=TEST_EXPERIMENT_NAME,
delete_backing_tensorboard_runs=TEST_DELETE_BACKING_TENSORBOARD_RUNS,
)
class TestVertexAICreateExperimentRunOperator:
@mock.patch(VERTEX_AI_PATH.format("ExperimentRunHook"))
def test_execute(self, mock_hook):
op = CreateExperimentRunOperator(
task_id=TASK_ID,
project_id=GCP_PROJECT,
location=GCP_LOCATION,
experiment_name=TEST_EXPERIMENT_NAME,
experiment_run_name=TEST_EXPERIMENT_RUN_NAME,
experiment_run_tensorboard=TEST_TENSORBOARD,
gcp_conn_id=GCP_CONN_ID,
impersonation_chain=IMPERSONATION_CHAIN,
)
op.execute(context={"ti": mock.MagicMock()})
mock_hook.assert_called_once_with(
gcp_conn_id=GCP_CONN_ID,
impersonation_chain=IMPERSONATION_CHAIN,
)
mock_hook.return_value.create_experiment_run.assert_called_once_with(
project_id=GCP_PROJECT,
location=GCP_LOCATION,
experiment_name=TEST_EXPERIMENT_NAME,
experiment_run_name=TEST_EXPERIMENT_RUN_NAME,
experiment_run_tensorboard=TEST_TENSORBOARD,
run_after_creation=False,
)
class TestVertexAIListExperimentRunsOperator:
@mock.patch(VERTEX_AI_PATH.format("ExperimentRunHook"))
def test_execute(self, mock_hook):
op = ListExperimentRunsOperator(
task_id=TASK_ID,
project_id=GCP_PROJECT,
location=GCP_LOCATION,
experiment_name=TEST_EXPERIMENT_NAME,
gcp_conn_id=GCP_CONN_ID,
impersonation_chain=IMPERSONATION_CHAIN,
)
op.execute(context={"ti": mock.MagicMock()})
mock_hook.assert_called_once_with(
gcp_conn_id=GCP_CONN_ID,
impersonation_chain=IMPERSONATION_CHAIN,
)
mock_hook.return_value.list_experiment_runs.assert_called_once_with(
project_id=GCP_PROJECT,
location=GCP_LOCATION,
experiment_name=TEST_EXPERIMENT_NAME,
)
class TestVertexAIUpdateExperimentRunStateOperator:
@mock.patch(VERTEX_AI_PATH.format("ExperimentRunHook"))
def test_execute(self, mock_hook):
op = UpdateExperimentRunStateOperator(
task_id=TASK_ID,
project_id=GCP_PROJECT,
location=GCP_LOCATION,
experiment_name=TEST_EXPERIMENT_NAME,
experiment_run_name=TEST_EXPERIMENT_RUN_NAME,
gcp_conn_id=GCP_CONN_ID,
impersonation_chain=IMPERSONATION_CHAIN,
new_state=TEST_TARGET_STATE,
)
op.execute(context={"ti": mock.MagicMock()})
mock_hook.assert_called_once_with(
gcp_conn_id=GCP_CONN_ID,
impersonation_chain=IMPERSONATION_CHAIN,
)
mock_hook.return_value.update_experiment_run_state.assert_called_once_with(
project_id=GCP_PROJECT,
location=GCP_LOCATION,
experiment_name=TEST_EXPERIMENT_NAME,
experiment_run_name=TEST_EXPERIMENT_RUN_NAME,
new_state=TEST_TARGET_STATE,
)
class TestVertexAIDeleteExperimentRunOperator:
@mock.patch(VERTEX_AI_PATH.format("ExperimentRunHook"))
def test_execute(self, mock_hook):
op = DeleteExperimentRunOperator(
task_id=TASK_ID,
project_id=GCP_PROJECT,
location=GCP_LOCATION,
experiment_name=TEST_EXPERIMENT_NAME,
experiment_run_name=TEST_EXPERIMENT_RUN_NAME,
gcp_conn_id=GCP_CONN_ID,
impersonation_chain=IMPERSONATION_CHAIN,
)
op.execute(context={"ti": mock.MagicMock()})
mock_hook.assert_called_once_with(
gcp_conn_id=GCP_CONN_ID,
impersonation_chain=IMPERSONATION_CHAIN,
)
mock_hook.return_value.delete_experiment_run.assert_called_once_with(
project_id=GCP_PROJECT,
location=GCP_LOCATION,
experiment_name=TEST_EXPERIMENT_NAME,
experiment_run_name=TEST_EXPERIMENT_RUN_NAME,
)
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/tests/unit/google/cloud/operators/vertex_ai/test_experiment_service.py",
"license": "Apache License 2.0",
"lines": 181,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/tests/unit/api_fastapi/common/db/test_dags.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime, timezone
import pytest
from airflow._shared.timezones.timezone import utcnow
from airflow.api_fastapi.common.db.dags import generate_dag_with_latest_run_query
from airflow.api_fastapi.common.parameters import SortParam
from airflow.models import DagModel
from airflow.models.dagrun import DagRun
from airflow.utils.state import DagRunState
from tests_common.test_utils.db import clear_db_dag_bundles, clear_db_dags, clear_db_runs
pytestmark = pytest.mark.db_test
class TestGenerateDagWithLatestRunQuery:
"""Unit tests for generate_dag_with_latest_run_query function."""
@staticmethod
def _clear_db():
clear_db_runs()
clear_db_dags()
clear_db_dag_bundles()
@pytest.fixture(autouse=True)
def setup_teardown(self):
"""Setup and teardown for each test."""
self._clear_db()
yield
self._clear_db()
@pytest.fixture
def dag_with_queued_run(self, session, testing_dag_bundle):
"""Returns a DAG with a QUEUED DagRun and null start_date."""
dag_id = "dag_with_queued_run"
# Create DagModel
dag_model = DagModel(
dag_id=dag_id,
bundle_name="testing",
is_stale=False,
is_paused=False,
fileloc="/tmp/dag.py",
)
session.add(dag_model)
session.flush()
# Create DagRun with start_date=None (QUEUED state)
dagrun = DagRun(
dag_id=dag_id,
run_id="manual__queued",
run_type="manual",
logical_date=utcnow(),
state=DagRunState.QUEUED,
start_date=None,
)
session.add(dagrun)
session.commit()
return dag_model, dagrun
@pytest.fixture
def dag_with_running_run(self, session):
"""Returns a DAG with a RUNNING DagRun and a valid start_date."""
dag_id = "dag_with_running_run"
# Create DagModel
dag_model = DagModel(
dag_id=dag_id,
bundle_name="testing",
is_stale=False,
is_paused=False,
fileloc="/tmp/dag2.py",
)
session.add(dag_model)
session.flush()
# Create DagRun with start_date set (RUNNING state)
start_time = utcnow()
dagrun = DagRun(
dag_id=dag_id,
run_id="manual__running",
run_type="manual",
logical_date=start_time,
state=DagRunState.RUNNING,
start_date=start_time,
)
session.add(dagrun)
session.commit()
return dag_model, dagrun
def test_includes_queued_run_without_start_date(self, dag_with_queued_run, session):
"""DAGs with QUEUED runs and null start_date should be included when no filters are applied, and joined DagRun state must not be None."""
dag_model, _ = dag_with_queued_run
query = generate_dag_with_latest_run_query(
max_run_filters=[],
order_by=SortParam(allowed_attrs=["dag_id"], model=DagModel).set_value(["dag_id"]),
)
# Also fetch joined DagRun's state and start_date
extended_query = query.add_columns(DagRun.state, DagRun.start_date)
result = session.execute(extended_query).fetchall()
dag_row = next((row for row in result if row[0].dag_id == dag_model.dag_id), None)
assert dag_row is not None
dagrun_state = dag_row[1]
assert dagrun_state is not None, "Joined DagRun state must not be None"
def test_includes_queued_run_when_ordering_by_state(
self, dag_with_queued_run, dag_with_running_run, session
):
"""DAGs with QUEUED runs and null start_date, and RUNNING runs must all have joined DagRun info not None."""
queued_dag_model, _ = dag_with_queued_run
running_dag_model, _ = dag_with_running_run
query = generate_dag_with_latest_run_query(
max_run_filters=[],
order_by=SortParam(allowed_attrs=["last_run_state"], model=DagModel).set_value(
["last_run_state"]
),
)
extended_query = query.add_columns(DagRun.state, DagRun.start_date)
result = session.execute(extended_query).fetchall()
# QUEUED DAG
queued_row = next((row for row in result if row[0].dag_id == queued_dag_model.dag_id), None)
assert queued_row is not None
assert queued_row[1] is not None, "Joined DagRun state for QUEUED DAG must not be None"
# RUNNING DAG
running_row = next((row for row in result if row[0].dag_id == running_dag_model.dag_id), None)
assert running_row is not None
assert running_row[1] is not None, "Joined DagRun state for RUNNING DAG must not be None"
assert running_row[2] is not None, "Joined DagRun start_date for RUNNING DAG must not be None"
def test_includes_queued_run_when_ordering_by_start_date(
self, dag_with_queued_run, dag_with_running_run, session
):
"""DAGs with QUEUED runs and RUNNING runs must all have joined DagRun info not None when ordering by start_date."""
queued_dag_model, _ = dag_with_queued_run
running_dag_model, _ = dag_with_running_run
query = generate_dag_with_latest_run_query(
max_run_filters=[],
order_by=SortParam(allowed_attrs=["last_run_start_date"], model=DagModel).set_value(
["last_run_start_date"]
),
)
extended_query = query.add_columns(DagRun.state, DagRun.start_date)
result = session.execute(extended_query).fetchall()
# QUEUED DAG
queued_row = next((row for row in result if row[0].dag_id == queued_dag_model.dag_id), None)
assert queued_row is not None
assert queued_row[1] is not None, "Joined DagRun state for QUEUED DAG must not be None"
# RUNNING DAG
running_row = next((row for row in result if row[0].dag_id == running_dag_model.dag_id), None)
assert running_row is not None
assert running_row[1] is not None, "Joined DagRun state for RUNNING DAG must not be None"
assert running_row[2] is not None, "Joined DagRun start_date for RUNNING DAG must not be None"
@pytest.mark.usefixtures("testing_dag_bundle")
def test_latest_queued_run_without_start_date_is_included(self, session):
"""Even if the latest DagRun is QUEUED+start_date=None, joined DagRun state must not be None."""
dag_id = "dag_with_multiple_runs"
dag_model = DagModel(
dag_id=dag_id,
bundle_name="testing",
is_stale=False,
is_paused=False,
fileloc="/tmp/dag3.py",
)
session.add(dag_model)
session.flush()
older_run = DagRun(
dag_id=dag_id,
run_id="manual__older",
run_type="manual",
logical_date=datetime(2025, 1, 1, tzinfo=timezone.utc),
state=DagRunState.SUCCESS,
start_date=datetime(2025, 1, 1, tzinfo=timezone.utc),
)
session.add(older_run)
newer_run = DagRun(
dag_id=dag_id,
run_id="manual__newer_queued",
run_type="manual",
logical_date=utcnow(),
state=DagRunState.QUEUED,
start_date=None,
)
session.add(newer_run)
session.commit()
query = generate_dag_with_latest_run_query(
max_run_filters=[],
order_by=SortParam(allowed_attrs=["last_run_state"], model=DagModel).set_value(
["last_run_state"]
),
)
extended_query = query.add_columns(DagRun.state, DagRun.start_date)
result = session.execute(extended_query).fetchall()
dag_row = next((row for row in result if row[0].dag_id == dag_id), None)
assert dag_row is not None
assert dag_row[1] is not None, (
"Even if latest DagRun is QUEUED+start_date=None, state must not be None"
)
def test_queued_runs_with_null_start_date_are_properly_joined(
self, dag_with_queued_run, dag_with_running_run, session
):
"""
Verifies that DAGs with null start_date are properly joined in the query.
If a WHERE clause filters out null start_dates, these DAGs would be excluded.
This test ensures they are still present and joined correctly.
"""
queued_dag_model, _ = dag_with_queued_run
running_dag_model, _ = dag_with_running_run
query = generate_dag_with_latest_run_query(
max_run_filters=[],
order_by=SortParam(allowed_attrs=["last_run_state"], model=DagModel).set_value(
["last_run_state"]
),
)
extended_query = query.add_columns(DagRun.state, DagRun.start_date)
result = session.execute(extended_query).fetchall()
# Find results for each DAG
queued_dag_result = None
running_dag_result = None
for row in result:
dag_model = row[0]
if dag_model.dag_id == queued_dag_model.dag_id:
queued_dag_result = row
elif dag_model.dag_id == running_dag_model.dag_id:
running_dag_result = row
# Assert both DAGs are present
assert queued_dag_result is not None, f"Queued DAG {queued_dag_model.dag_id} should be in results"
assert running_dag_result is not None, f"Running DAG {running_dag_model.dag_id} should be in results"
# if WHERE start_date IS NOT NULL is present,
# the queued DAG should have NO DagRun information joined (state=None, start_date=None)
# But the running DAG should have DagRun information joined
queued_dagrun_state = queued_dag_result[1]
running_dagrun_state = running_dag_result[1]
assert queued_dagrun_state is not None, (
"Queued DAG should have DagRun state joined, but got None. "
"This suggests the WHERE start_date IS NOT NULL condition is excluding it."
)
assert running_dagrun_state is not None, "Running DAG should have DagRun state joined"
@pytest.mark.usefixtures("testing_dag_bundle")
def test_filters_by_dag_ids_when_provided(self, session):
"""
Verify that when dag_ids is provided, only those DAGs and their runs are queried.
This is a performance optimization: both the main DAG query and the DagRun subquery
should only process accessible DAGs when the user has limited access.
"""
dag_ids = ["dag_accessible_1", "dag_accessible_2", "dag_inaccessible_3"]
for dag_id in dag_ids:
dag_model = DagModel(
dag_id=dag_id,
bundle_name="testing",
is_stale=False,
is_paused=False,
fileloc=f"/tmp/{dag_id}.py",
)
session.add(dag_model)
session.flush()
# Create 2 runs for each DAG
for run_idx in range(2):
dagrun = DagRun(
dag_id=dag_id,
run_id=f"manual__{run_idx}",
run_type="manual",
logical_date=datetime(2024, 1, 1 + run_idx, tzinfo=timezone.utc),
state=DagRunState.SUCCESS,
start_date=datetime(2024, 1, 1 + run_idx, 1, tzinfo=timezone.utc),
)
session.add(dagrun)
session.commit()
# User has access to only 2 DAGs
accessible_dag_ids = {"dag_accessible_1", "dag_accessible_2"}
# Query with dag_ids filter
query_filtered = generate_dag_with_latest_run_query(
max_run_filters=[],
order_by=SortParam(allowed_attrs=["last_run_state"], model=DagModel).set_value(
["last_run_state"]
),
dag_ids=accessible_dag_ids,
)
# Query without dag_ids filter
query_unfiltered = generate_dag_with_latest_run_query(
max_run_filters=[],
order_by=SortParam(allowed_attrs=["last_run_state"], model=DagModel).set_value(
["last_run_state"]
),
)
result_filtered = session.execute(query_filtered.add_columns(DagRun.state)).fetchall()
result_unfiltered = session.execute(query_unfiltered.add_columns(DagRun.state)).fetchall()
# Filtered query should only return accessible DAGs
filtered_dag_ids = {row[0].dag_id for row in result_filtered}
assert filtered_dag_ids == accessible_dag_ids
# Unfiltered query returns all DAGs
unfiltered_dag_ids = {row[0].dag_id for row in result_unfiltered}
assert unfiltered_dag_ids == set(dag_ids)
# All accessible DAGs should have DagRun info
filtered_dags_with_runs = {row[0].dag_id for row in result_filtered if row[1] is not None}
assert filtered_dags_with_runs == accessible_dag_ids
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/api_fastapi/common/db/test_dags.py",
"license": "Apache License 2.0",
"lines": 296,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/calendar.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime
from typing import Literal
from airflow.api_fastapi.core_api.base import BaseModel
from airflow.utils.state import DagRunState
class CalendarTimeRangeResponse(BaseModel):
"""Represents a summary of DAG runs for a specific calendar time range."""
date: datetime
state: Literal[
DagRunState.QUEUED,
DagRunState.RUNNING,
DagRunState.SUCCESS,
DagRunState.FAILED,
"planned",
]
count: int
class CalendarTimeRangeCollectionResponse(BaseModel):
"""Response model for calendar time range results."""
total_entries: int
dag_runs: list[CalendarTimeRangeResponse]
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/calendar.py",
"license": "Apache License 2.0",
"lines": 36,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/src/airflow/api_fastapi/core_api/routes/ui/calendar.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Annotated, Literal
from fastapi import Depends
from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity
from airflow.api_fastapi.common.dagbag import DagBagDep, get_latest_version_of_dag
from airflow.api_fastapi.common.db.common import SessionDep
from airflow.api_fastapi.common.parameters import RangeFilter, datetime_range_filter_factory
from airflow.api_fastapi.common.router import AirflowRouter
from airflow.api_fastapi.core_api.datamodels.ui.calendar import CalendarTimeRangeCollectionResponse
from airflow.api_fastapi.core_api.security import requires_access_dag
from airflow.api_fastapi.core_api.services.ui.calendar import CalendarService
from airflow.models.dagrun import DagRun
calendar_router = AirflowRouter(prefix="/calendar", tags=["Calendar"])
@calendar_router.get(
"/{dag_id}",
dependencies=[
Depends(
requires_access_dag(
method="GET",
access_entity=DagAccessEntity.TASK_INSTANCE,
)
),
Depends(
requires_access_dag(
method="GET",
access_entity=DagAccessEntity.RUN,
)
),
],
)
def get_calendar(
dag_id: str,
session: SessionDep,
dag_bag: DagBagDep,
logical_date: Annotated[RangeFilter, Depends(datetime_range_filter_factory("logical_date", DagRun))],
granularity: Literal["hourly", "daily"] = "daily",
) -> CalendarTimeRangeCollectionResponse:
"""Get calendar data for a DAG including historical and planned DAG runs."""
dag = get_latest_version_of_dag(dag_bag, dag_id, session)
calendar_service = CalendarService()
return calendar_service.get_calendar_data(
dag_id=dag_id,
session=session,
dag=dag,
logical_date=logical_date,
granularity=granularity,
)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/core_api/routes/ui/calendar.py",
"license": "Apache License 2.0",
"lines": 63,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/src/airflow/api_fastapi/core_api/services/ui/calendar.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import collections
from collections.abc import Iterator, Sequence
from datetime import datetime
from typing import Literal, cast
import sqlalchemy as sa
import structlog
from croniter.croniter import croniter
from pendulum import DateTime
from sqlalchemy.engine import Row
from sqlalchemy.orm import InstrumentedAttribute, Session
from airflow._shared.timezones import timezone
from airflow.api_fastapi.common.parameters import RangeFilter
from airflow.api_fastapi.core_api.datamodels.ui.calendar import (
CalendarTimeRangeCollectionResponse,
CalendarTimeRangeResponse,
)
from airflow.models.dagrun import DagRun
from airflow.serialization.definitions.dag import SerializedDAG
from airflow.timetables._cron import CronMixin
from airflow.timetables.base import DataInterval, TimeRestriction
from airflow.timetables.simple import ContinuousTimetable
from airflow.utils.sqlalchemy import get_dialect_name
log = structlog.get_logger(logger_name=__name__)
class CalendarService:
"""Service class for calendar-related operations."""
MAX_PLANNED_RUNS: int = 2000
def get_calendar_data(
self,
dag_id: str,
session: Session,
dag: SerializedDAG,
logical_date: RangeFilter,
granularity: Literal["hourly", "daily"] = "daily",
) -> CalendarTimeRangeCollectionResponse:
"""
Get calendar data for a DAG including historical and planned runs.
Args:
dag_id: The DAG ID
session: Database session
dag: The DAG object
logical_date: Date range filter
granularity: Time granularity ("hourly" or "daily")
Returns:
List of calendar time range results
"""
historical_data, raw_dag_states = self._get_historical_dag_runs(
dag_id,
session,
logical_date,
granularity,
)
planned_data = self._get_planned_dag_runs(dag, raw_dag_states, logical_date, granularity)
all_data = historical_data + planned_data
return CalendarTimeRangeCollectionResponse(
total_entries=len(all_data),
dag_runs=all_data,
)
def _get_historical_dag_runs(
self,
dag_id: str,
session: Session,
logical_date: RangeFilter,
granularity: Literal["hourly", "daily"],
) -> tuple[list[CalendarTimeRangeResponse], Sequence[Row]]:
"""Get historical DAG runs from the database."""
dialect = get_dialect_name(session)
time_expression = self._get_time_truncation_expression(DagRun.logical_date, granularity, dialect)
select_stmt = (
sa.select(
time_expression.label("datetime"),
DagRun.state,
sa.func.max(DagRun.data_interval_start).label("data_interval_start"),
sa.func.max(DagRun.data_interval_end).label("data_interval_end"),
sa.func.count("*").label("count"),
)
.where(DagRun.dag_id == dag_id)
.group_by(time_expression, DagRun.state)
.order_by(time_expression.asc())
)
select_stmt = logical_date.to_orm(select_stmt)
dag_states = session.execute(select_stmt).all()
calendar_results = [
CalendarTimeRangeResponse(
# ds.datetime in sqlite and mysql is a string, in postgresql it is a datetime
date=ds.datetime,
state=ds.state,
count=int(ds._mapping["count"]),
)
for ds in dag_states
]
return calendar_results, dag_states
def _get_planned_dag_runs(
self,
dag: SerializedDAG,
raw_dag_states: Sequence[Row],
logical_date: RangeFilter,
granularity: Literal["hourly", "daily"],
) -> list[CalendarTimeRangeResponse]:
"""Get planned DAG runs based on the DAG's timetable."""
if not self._should_calculate_planned_runs(dag, raw_dag_states):
return []
last_data_interval = self._get_last_data_interval(raw_dag_states)
if not last_data_interval:
return []
year = last_data_interval.end.year
restriction = TimeRestriction(
timezone.coerce_datetime(dag.start_date) if dag.start_date else None,
timezone.coerce_datetime(dag.end_date) if dag.end_date else None,
False,
)
if isinstance(dag.timetable, CronMixin):
return self._calculate_cron_planned_runs(dag, last_data_interval, year, logical_date, granularity)
return self._calculate_timetable_planned_runs(
dag, last_data_interval, year, restriction, logical_date, granularity
)
def _should_calculate_planned_runs(self, dag: SerializedDAG, raw_dag_states: Sequence[Row]) -> bool:
"""Check if we should calculate planned runs."""
return (
bool(raw_dag_states)
and bool(raw_dag_states[-1].data_interval_start)
and bool(raw_dag_states[-1].data_interval_end)
and not isinstance(dag.timetable, ContinuousTimetable)
)
def _get_last_data_interval(self, raw_dag_states: Sequence[Row]) -> DataInterval | None:
"""Extract the last data interval from raw database results."""
if not raw_dag_states:
return None
last_state = raw_dag_states[-1]
if not (last_state.data_interval_start and last_state.data_interval_end):
return None
return DataInterval(
timezone.coerce_datetime(last_state.data_interval_start),
timezone.coerce_datetime(last_state.data_interval_end),
)
def _calculate_cron_planned_runs(
self,
dag: SerializedDAG,
last_data_interval: DataInterval,
year: int,
logical_date: RangeFilter,
granularity: Literal["hourly", "daily"],
) -> list[CalendarTimeRangeResponse]:
"""Calculate planned runs for cron-based timetables."""
dates: dict[datetime, int] = collections.Counter()
dates_iter: Iterator[datetime | None] = croniter(
cast("CronMixin", dag.timetable)._expression,
start_time=last_data_interval.end,
ret_type=datetime,
)
for dt in dates_iter:
if dt is None or dt.year != year:
break
if dag.end_date and dt > dag.end_date:
break
if not self._is_date_in_range(dt, logical_date):
continue
dates[self._truncate_datetime_for_granularity(dt, granularity)] += 1
return [
CalendarTimeRangeResponse(date=dt, state="planned", count=count) for dt, count in dates.items()
]
def _calculate_timetable_planned_runs(
self,
dag: SerializedDAG,
last_data_interval: DataInterval,
year: int,
restriction: TimeRestriction,
logical_date: RangeFilter,
granularity: Literal["hourly", "daily"],
) -> list[CalendarTimeRangeResponse]:
"""Calculate planned runs for generic timetables."""
dates: dict[datetime, int] = collections.Counter()
prev_logical_date = DateTime.min
total_planned = 0
while total_planned < self.MAX_PLANNED_RUNS:
curr_info = dag.timetable.next_dagrun_info(
last_automated_data_interval=last_data_interval,
restriction=restriction,
)
if curr_info is None: # No more DAG runs to schedule
break
if not curr_info.logical_date:
# todo: AIP-76 this is likely a partitioned dag. needs implementation
break
if curr_info.logical_date <= prev_logical_date: # Timetable not progressing, stopping
break
if curr_info.logical_date.year != year: # Crossed year boundary
break
if not curr_info.data_interval:
# todo: AIP-76 this is likely a partitioned dag. needs implementation
break
if not self._is_date_in_range(curr_info.logical_date, logical_date):
last_data_interval = curr_info.data_interval
prev_logical_date = curr_info.logical_date
total_planned += 1
continue
last_data_interval = curr_info.data_interval
dt = self._truncate_datetime_for_granularity(curr_info.logical_date, granularity)
dates[dt] += 1
prev_logical_date = curr_info.logical_date
total_planned += 1
return [
CalendarTimeRangeResponse(date=dt, state="planned", count=count) for dt, count in dates.items()
]
def _get_time_truncation_expression(
self,
column: InstrumentedAttribute[datetime | None],
granularity: Literal["hourly", "daily"],
dialect: str | None,
) -> sa.sql.elements.ColumnElement:
"""
Get database-specific time truncation expression for SQLAlchemy.
We want to return always timestamp for both hourly and daily truncation.
Unfortunately different databases have different functions for truncating datetime, so we need to handle
them separately.
Args:
column: The datetime column to truncate
granularity: Either "hourly" or "daily"
dialect: Database dialect ("postgresql", "mysql", "sqlite")
Returns:
SQLAlchemy expression for time truncation
Raises:
ValueError: If the dialect is not supported
"""
if granularity == "hourly":
if dialect == "postgresql":
expression = sa.func.date_trunc("hour", column)
elif dialect == "mysql":
expression = sa.func.date_format(column, "%Y-%m-%dT%H:00:00Z")
elif dialect == "sqlite":
expression = sa.func.strftime("%Y-%m-%dT%H:00:00Z", column)
else:
raise ValueError(f"Unsupported dialect: {dialect}")
else:
if dialect == "postgresql":
expression = sa.func.timezone("UTC", sa.func.cast(sa.func.cast(column, sa.Date), sa.DateTime))
elif dialect == "mysql":
expression = sa.func.date_format(column, "%Y-%m-%dT%00:00:00Z")
elif dialect == "sqlite":
expression = sa.func.strftime("%Y-%m-%dT00:00:00Z", column)
else:
raise ValueError(f"Unsupported dialect: {dialect}")
return expression
def _truncate_datetime_for_granularity(
self,
dt: datetime,
granularity: Literal["hourly", "daily"],
) -> datetime:
"""
Truncate datetime based on granularity for planned tasks grouping.
Args:
dt: The datetime to truncate
granularity: Either "hourly" or "daily"
Returns:
Truncated datetime
"""
if granularity == "hourly":
return dt.replace(minute=0, second=0, microsecond=0)
return dt.replace(hour=0, minute=0, second=0, microsecond=0)
def _is_date_in_range(self, dt: datetime, logical_date: RangeFilter) -> bool:
"""Check if a date is within the specified range filter."""
if not logical_date.value:
return True
if logical_date.value.lower_bound_gte and dt < logical_date.value.lower_bound_gte:
return False
if logical_date.value.lower_bound_gt and dt <= logical_date.value.lower_bound_gt:
return False
if logical_date.value.upper_bound_lte and dt > logical_date.value.upper_bound_lte:
return False
if logical_date.value.upper_bound_lt and dt >= logical_date.value.upper_bound_lt:
return False
return True
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/core_api/services/ui/calendar.py",
"license": "Apache License 2.0",
"lines": 288,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/tests/unit/api_fastapi/core_api/routes/ui/test_calendar.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime
import pytest
from airflow.providers.standard.operators.empty import EmptyOperator
from airflow.utils.session import provide_session
from airflow.utils.state import DagRunState
from tests_common.test_utils.asserts import assert_queries_count
from tests_common.test_utils.db import clear_db_dags, clear_db_runs
pytestmark = pytest.mark.db_test
class TestCalendar:
DAG_NAME = "test_dag1"
@pytest.fixture(autouse=True)
@provide_session
def setup_dag_runs(self, dag_maker, session=None) -> None:
clear_db_runs()
clear_db_dags()
with dag_maker(
self.DAG_NAME,
schedule="0 0,1 * * *",
start_date=datetime(2025, 1, 1),
end_date=datetime(2025, 1, 3, 2),
catchup=True,
serialized=True,
session=session,
):
EmptyOperator(task_id="test_task1")
dag_maker.create_dagrun(run_id="run_1", state=DagRunState.FAILED, logical_date=datetime(2025, 1, 1))
dag_maker.create_dagrun(
run_id="run_2",
state=DagRunState.SUCCESS,
logical_date=datetime(2025, 1, 1, 1),
)
dag_maker.create_dagrun(run_id="run_3", state=DagRunState.RUNNING, logical_date=datetime(2025, 1, 2))
dag_maker.sync_dagbag_to_db()
session.commit()
def teardown_method(self) -> None:
clear_db_runs()
clear_db_dags()
@pytest.mark.parametrize(
("query_params", "result"),
[
(
{},
{
"total_entries": 5,
"dag_runs": [
{"date": "2025-01-01T00:00:00Z", "state": "failed", "count": 1},
{"date": "2025-01-01T00:00:00Z", "state": "success", "count": 1},
{"date": "2025-01-02T00:00:00Z", "state": "running", "count": 1},
{"date": "2025-01-02T00:00:00Z", "state": "planned", "count": 1},
{"date": "2025-01-03T00:00:00Z", "state": "planned", "count": 2},
],
},
),
(
{"logical_date_gte": "2025-01-01T00:00:00Z", "logical_date_lte": "2025-01-01T23:23:59Z"},
{
"total_entries": 2,
"dag_runs": [
{"date": "2025-01-01T00:00:00Z", "state": "failed", "count": 1},
{"date": "2025-01-01T00:00:00Z", "state": "success", "count": 1},
],
},
),
(
{"logical_date_gte": "2025-01-02T00:00:00Z", "logical_date_lte": "2025-01-02T23:23:59Z"},
{
"total_entries": 2,
"dag_runs": [
{"date": "2025-01-02T00:00:00Z", "state": "running", "count": 1},
{"date": "2025-01-02T00:00:00Z", "state": "planned", "count": 1},
],
},
),
],
)
def test_daily_calendar(self, test_client, query_params, result):
with assert_queries_count(4):
response = test_client.get(f"/calendar/{self.DAG_NAME}", params=query_params)
assert response.status_code == 200
body = response.json()
print(body)
assert body == result
@pytest.mark.parametrize(
("query_params", "result"),
[
(
{"granularity": "hourly"},
{
"total_entries": 6,
"dag_runs": [
{"date": "2025-01-01T00:00:00Z", "state": "failed", "count": 1},
{"date": "2025-01-01T01:00:00Z", "state": "success", "count": 1},
{"date": "2025-01-02T00:00:00Z", "state": "running", "count": 1},
{"date": "2025-01-02T01:00:00Z", "state": "planned", "count": 1},
{"date": "2025-01-03T00:00:00Z", "state": "planned", "count": 1},
{"date": "2025-01-03T01:00:00Z", "state": "planned", "count": 1},
],
},
),
(
{
"granularity": "hourly",
"logical_date_gte": "2025-01-02T00:00:00Z",
"logical_date_lte": "2025-01-02T23:23:59Z",
},
{
"total_entries": 2,
"dag_runs": [
{"date": "2025-01-02T00:00:00Z", "state": "running", "count": 1},
{"date": "2025-01-02T01:00:00Z", "state": "planned", "count": 1},
],
},
),
(
{
"granularity": "hourly",
"logical_date_gte": "2025-01-02T00:00:00Z",
"logical_date_lte": "2025-01-02T23:23:59Z",
"logical_date_gt": "2025-01-02T00:00:00Z",
"logical_date_lt": "2025-01-02T23:23:59Z",
},
{
"total_entries": 0,
"dag_runs": [],
},
),
(
{
"granularity": "hourly",
"logical_date_gte": "2025-01-02T00:00:00Z",
"logical_date_lte": "2025-01-02T23:23:59Z",
"logical_date_gt": "2025-01-01T23:00:00Z",
"logical_date_lt": "2025-01-03T00:00:00Z",
},
{
"total_entries": 2,
"dag_runs": [
{"date": "2025-01-02T00:00:00Z", "state": "running", "count": 1},
{"date": "2025-01-02T01:00:00Z", "state": "planned", "count": 1},
],
},
),
],
)
def test_hourly_calendar(self, setup_dag_runs, test_client, query_params, result):
with assert_queries_count(4):
response = test_client.get(f"/calendar/{self.DAG_NAME}", params=query_params)
assert response.status_code == 200
body = response.json()
assert body == result
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/api_fastapi/core_api/routes/ui/test_calendar.py",
"license": "Apache License 2.0",
"lines": 167,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/zendesk/src/airflow/providers/zendesk/version_compat.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
def get_base_airflow_version_tuple() -> tuple[int, int, int]:
from packaging.version import Version
from airflow import __version__
airflow_version = Version(__version__)
return airflow_version.major, airflow_version.minor, airflow_version.micro
AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
__all__ = [
"AIRFLOW_V_3_0_PLUS",
"AIRFLOW_V_3_1_PLUS",
]
| {
"repo_id": "apache/airflow",
"file_path": "providers/zendesk/src/airflow/providers/zendesk/version_compat.py",
"license": "Apache License 2.0",
"lines": 28,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/git/src/airflow/providers/git/version_compat.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS COPIED MANUALLY IN OTHER PROVIDERS DELIBERATELY TO AVOID ADDING UNNECESSARY
# DEPENDENCIES BETWEEN PROVIDERS. IF YOU WANT TO ADD CONDITIONAL CODE IN YOUR PROVIDER THAT DEPENDS
# ON AIRFLOW VERSION, PLEASE COPY THIS FILE TO THE ROOT PACKAGE OF YOUR PROVIDER AND IMPORT
# THOSE CONSTANTS FROM IT RATHER THAN IMPORTING THEM FROM ANOTHER PROVIDER OR TEST CODE
#
from __future__ import annotations
def get_base_airflow_version_tuple() -> tuple[int, int, int]:
from packaging.version import Version
from airflow import __version__
airflow_version = Version(__version__)
return airflow_version.major, airflow_version.minor, airflow_version.micro
AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
__all__ = ["AIRFLOW_V_3_0_PLUS", "AIRFLOW_V_3_1_PLUS"]
| {
"repo_id": "apache/airflow",
"file_path": "providers/git/src/airflow/providers/git/version_compat.py",
"license": "Apache License 2.0",
"lines": 31,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/sendgrid/src/airflow/providers/sendgrid/version_compat.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
def get_base_airflow_version_tuple() -> tuple[int, int, int]:
from packaging.version import Version
from airflow import __version__
airflow_version = Version(__version__)
return airflow_version.major, airflow_version.minor, airflow_version.micro
AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
__all__ = ["AIRFLOW_V_3_0_PLUS", "AIRFLOW_V_3_1_PLUS"]
| {
"repo_id": "apache/airflow",
"file_path": "providers/sendgrid/src/airflow/providers/sendgrid/version_compat.py",
"license": "Apache License 2.0",
"lines": 25,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/pagerduty/src/airflow/providers/pagerduty/version_compat.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS COPIED MANUALLY IN OTHER PROVIDERS DELIBERATELY TO AVOID ADDING UNNECESSARY
# DEPENDENCIES BETWEEN PROVIDERS. IF YOU WANT TO ADD CONDITIONAL CODE IN YOUR PROVIDER THAT DEPENDS
# ON AIRFLOW VERSION, PLEASE COPY THIS FILE TO THE ROOT PACKAGE OF YOUR PROVIDER AND IMPORT
# THOSE CONSTANTS FROM IT RATHER THAN IMPORTING THEM FROM ANOTHER PROVIDER OR TEST CODE
#
from __future__ import annotations
def get_base_airflow_version_tuple() -> tuple[int, int, int]:
from packaging.version import Version
from airflow import __version__
airflow_version = Version(__version__)
return airflow_version.major, airflow_version.minor, airflow_version.micro
AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
__all__ = [
"AIRFLOW_V_3_0_PLUS",
"AIRFLOW_V_3_1_PLUS",
]
| {
"repo_id": "apache/airflow",
"file_path": "providers/pagerduty/src/airflow/providers/pagerduty/version_compat.py",
"license": "Apache License 2.0",
"lines": 34,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/apache/pinot/src/airflow/providers/apache/pinot/version_compat.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS COPIED MANUALLY IN OTHER PROVIDERS DELIBERATELY TO AVOID ADDING UNNECESSARY
# DEPENDENCIES BETWEEN PROVIDERS. IF YOU WANT TO ADD CONDITIONAL CODE IN YOUR PROVIDER THAT DEPENDS
# ON AIRFLOW VERSION, PLEASE COPY THIS FILE TO THE ROOT PACKAGE OF YOUR PROVIDER AND IMPORT
# THOSE CONSTANTS FROM IT RATHER THAN IMPORTING THEM FROM ANOTHER PROVIDER OR TEST CODE
#
from __future__ import annotations
def get_base_airflow_version_tuple() -> tuple[int, int, int]:
from packaging.version import Version
from airflow import __version__
airflow_version = Version(__version__)
return airflow_version.major, airflow_version.minor, airflow_version.micro
AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
__all__ = [
"AIRFLOW_V_3_1_PLUS",
]
| {
"repo_id": "apache/airflow",
"file_path": "providers/apache/pinot/src/airflow/providers/apache/pinot/version_compat.py",
"license": "Apache License 2.0",
"lines": 32,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/amazon/src/airflow/providers/amazon/aws/bundles/s3.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from pathlib import Path
import structlog
from airflow.dag_processing.bundles.base import BaseDagBundle
from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
from airflow.providers.common.compat.sdk import AirflowException
class S3DagBundle(BaseDagBundle):
"""
S3 DAG bundle - exposes a directory in S3 as a DAG bundle.
This allows Airflow to load DAGs directly from an S3 bucket.
:param aws_conn_id: Airflow connection ID for AWS. Defaults to AwsBaseHook.default_conn_name.
:param bucket_name: The name of the S3 bucket containing the DAG files.
:param prefix: Optional subdirectory within the S3 bucket where the DAGs are stored.
If None, DAGs are assumed to be at the root of the bucket (Optional).
"""
supports_versioning = False
def __init__(
self,
*,
aws_conn_id: str = AwsBaseHook.default_conn_name,
bucket_name: str,
prefix: str = "",
**kwargs,
) -> None:
super().__init__(**kwargs)
self.aws_conn_id = aws_conn_id
self.bucket_name = bucket_name
self.prefix = prefix
# Local path where S3 DAGs are downloaded
self.s3_dags_dir: Path = self.base_dir
log = structlog.get_logger(__name__)
self._log = log.bind(
bundle_name=self.name,
version=self.version,
bucket_name=self.bucket_name,
prefix=self.prefix,
aws_conn_id=self.aws_conn_id,
)
self._s3_hook: S3Hook | None = None
def _initialize(self):
with self.lock():
if not self.s3_dags_dir.exists():
self._log.info("Creating local DAGs directory: %s", self.s3_dags_dir)
os.makedirs(self.s3_dags_dir)
if not self.s3_dags_dir.is_dir():
raise AirflowException(f"Local DAGs path: {self.s3_dags_dir} is not a directory.")
if not self.s3_hook.check_for_bucket(bucket_name=self.bucket_name):
raise AirflowException(f"S3 bucket '{self.bucket_name}' does not exist.")
if self.prefix:
# don't check when prefix is ""
if not self.s3_hook.check_for_prefix(
bucket_name=self.bucket_name, prefix=self.prefix, delimiter="/"
):
raise AirflowException(
f"S3 prefix 's3://{self.bucket_name}/{self.prefix}' does not exist."
)
self.refresh()
def initialize(self) -> None:
self._initialize()
super().initialize()
@property
def s3_hook(self):
if self._s3_hook is None:
try:
self._s3_hook: S3Hook = S3Hook(aws_conn_id=self.aws_conn_id) # Initialize S3 hook.
except AirflowException as e:
self._log.warning("Could not create S3Hook for connection %s: %s", self.aws_conn_id, e)
return self._s3_hook
def __repr__(self):
return (
f"<S3DagBundle("
f"name={self.name!r}, "
f"bucket_name={self.bucket_name!r}, "
f"prefix={self.prefix!r}, "
f"version={self.version!r}"
f")>"
)
def get_current_version(self) -> str | None:
"""Return the current version of the DAG bundle. Currently not supported."""
return None
@property
def path(self) -> Path:
"""Return the local path to the DAG files."""
return self.s3_dags_dir # Path where DAGs are downloaded.
def refresh(self) -> None:
"""Refresh the DAG bundle by re-downloading the DAGs from S3."""
if self.version:
raise AirflowException("Refreshing a specific version is not supported")
with self.lock():
self._log.debug(
"Downloading DAGs from s3://%s/%s to %s", self.bucket_name, self.prefix, self.s3_dags_dir
)
self.s3_hook.sync_to_local_dir(
bucket_name=self.bucket_name,
s3_prefix=self.prefix,
local_dir=self.s3_dags_dir,
delete_stale=True,
)
def view_url(self, version: str | None = None) -> str | None:
"""
Return a URL for viewing the DAGs in S3. Currently, versioning is not supported.
This method is deprecated and will be removed when the minimum supported Airflow version is 3.1.
Use `view_url_template` instead.
"""
return self.view_url_template()
def view_url_template(self) -> str | None:
"""Return a URL for viewing the DAGs in S3. Currently, versioning is not supported."""
if self.version:
raise AirflowException("S3 url with version is not supported")
if hasattr(self, "_view_url_template") and self._view_url_template:
# Because we use this method in the view_url method, we need to handle
# backward compatibility for Airflow versions that doesn't have the
# _view_url_template attribute. Should be removed when we drop support for Airflow 3.0
return self._view_url_template
# https://<bucket-name>.s3.<region>.amazonaws.com/<object-key>
url = f"https://{self.bucket_name}.s3"
if self.s3_hook.region_name:
url += f".{self.s3_hook.region_name}"
url += ".amazonaws.com"
if self.prefix:
url += f"/{self.prefix}"
return url
| {
"repo_id": "apache/airflow",
"file_path": "providers/amazon/src/airflow/providers/amazon/aws/bundles/s3.py",
"license": "Apache License 2.0",
"lines": 140,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/amazon/tests/unit/amazon/aws/bundles/test_s3.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from unittest.mock import MagicMock, call
import boto3
import pytest
from moto import mock_aws
import airflow.version
from airflow.models import Connection
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
from airflow.providers.common.compat.sdk import AirflowException
from tests_common.test_utils.config import conf_vars
AWS_CONN_ID_WITH_REGION = "s3_dags_connection"
AWS_CONN_ID_REGION = "eu-central-1"
AWS_CONN_ID_DEFAULT = "aws_default"
S3_BUCKET_NAME = "my-airflow-dags-bucket"
S3_BUCKET_PREFIX = "project1/dags"
if airflow.version.version.strip().startswith("3"):
from airflow.providers.amazon.aws.bundles.s3 import S3DagBundle
@pytest.fixture
def mocked_s3_resource():
with mock_aws():
yield boto3.resource("s3")
@pytest.fixture
def s3_client():
with mock_aws():
yield boto3.client("s3")
@pytest.fixture
def s3_bucket(mocked_s3_resource, s3_client):
bucket = mocked_s3_resource.create_bucket(Bucket=S3_BUCKET_NAME)
s3_client.put_object(Bucket=bucket.name, Key=S3_BUCKET_PREFIX + "/dag_01.py", Body=b"test data")
s3_client.put_object(Bucket=bucket.name, Key=S3_BUCKET_PREFIX + "/dag_02.py", Body=b"test data")
s3_client.put_object(
Bucket=bucket.name, Key=S3_BUCKET_PREFIX + "/subproject1/dag_a.py", Body=b"test data"
)
s3_client.put_object(
Bucket=bucket.name, Key=S3_BUCKET_PREFIX + "/subproject1/dag_b.py", Body=b"test data"
)
return bucket
@pytest.fixture(autouse=True)
def bundle_temp_dir(tmp_path):
with conf_vars({("dag_processor", "dag_bundle_storage_path"): str(tmp_path)}):
yield tmp_path
@pytest.mark.skipif(not airflow.version.version.strip().startswith("3"), reason="Airflow >=3.0.0 test")
class TestS3DagBundle:
@pytest.fixture(autouse=True)
def setup_connections(self, create_connection_without_db):
create_connection_without_db(
Connection(
conn_id=AWS_CONN_ID_DEFAULT,
conn_type="aws",
extra={
"config_kwargs": {"s3": {"bucket_name": S3_BUCKET_NAME}},
},
)
)
create_connection_without_db(
Connection(
conn_id=AWS_CONN_ID_WITH_REGION,
conn_type="aws",
extra={
"config_kwargs": {"s3": {"bucket_name": S3_BUCKET_NAME}},
"region_name": AWS_CONN_ID_REGION,
},
)
)
def test_view_url_generates_presigned_url(self):
bundle = S3DagBundle(
name="test", aws_conn_id=AWS_CONN_ID_DEFAULT, prefix="project1/dags", bucket_name=S3_BUCKET_NAME
)
url: str = bundle.view_url("test_version")
assert url.startswith("https://my-airflow-dags-bucket.s3.amazonaws.com/project1/dags")
def test_view_url_template_generates_presigned_url(self):
bundle = S3DagBundle(
name="test", aws_conn_id=AWS_CONN_ID_DEFAULT, prefix="project1/dags", bucket_name=S3_BUCKET_NAME
)
url: str = bundle.view_url_template()
assert url.startswith("https://my-airflow-dags-bucket.s3.amazonaws.com/project1/dags")
def test_supports_versioning(self):
bundle = S3DagBundle(
name="test", aws_conn_id=AWS_CONN_ID_DEFAULT, prefix="project1/dags", bucket_name=S3_BUCKET_NAME
)
assert S3DagBundle.supports_versioning is False
# set version, it's not supported
bundle.version = "test_version"
with pytest.raises(AirflowException, match="Refreshing a specific version is not supported"):
bundle.refresh()
with pytest.raises(AirflowException, match="S3 url with version is not supported"):
bundle.view_url("test_version")
def test_correct_bundle_path_used(self):
bundle = S3DagBundle(
name="test", aws_conn_id=AWS_CONN_ID_DEFAULT, prefix="project1_dags", bucket_name="airflow_dags"
)
assert str(bundle.base_dir) == str(bundle.s3_dags_dir)
def test_s3_bucket_and_prefix_validated(self, s3_bucket):
hook = S3Hook(aws_conn_id=AWS_CONN_ID_DEFAULT)
assert hook.check_for_bucket(s3_bucket.name) is True
bundle = S3DagBundle(
name="test",
aws_conn_id=AWS_CONN_ID_WITH_REGION,
prefix="project1_dags",
bucket_name="non-existing-bucket",
)
with pytest.raises(AirflowException, match="S3 bucket.*non-existing-bucket.*does not exist.*"):
bundle.initialize()
bundle = S3DagBundle(
name="test",
aws_conn_id=AWS_CONN_ID_WITH_REGION,
prefix="non-existing-prefix",
bucket_name=S3_BUCKET_NAME,
)
with pytest.raises(AirflowException, match="S3 prefix.*non-existing-prefix.*does not exist.*"):
bundle.initialize()
bundle = S3DagBundle(
name="test",
aws_conn_id=AWS_CONN_ID_WITH_REGION,
prefix=S3_BUCKET_PREFIX,
bucket_name=S3_BUCKET_NAME,
)
# initialize succeeds, with correct prefix and bucket
bundle.initialize()
assert bundle.s3_hook.region_name == AWS_CONN_ID_REGION
bundle = S3DagBundle(
name="test",
aws_conn_id=AWS_CONN_ID_WITH_REGION,
prefix="",
bucket_name=S3_BUCKET_NAME,
)
# initialize succeeds, with empty prefix
bundle.initialize()
assert bundle.s3_hook.region_name == AWS_CONN_ID_REGION
def _upload_fixtures(self, bucket: str, fixtures_dir: str) -> None:
client = boto3.client("s3")
fixtures_paths = [
os.path.join(path, filename) for path, _, files in os.walk(fixtures_dir) for filename in files
]
for path in fixtures_paths:
key = os.path.relpath(path, fixtures_dir)
client.upload_file(Filename=path, Bucket=bucket, Key=key)
def test_refresh(self, s3_bucket, s3_client):
bundle = S3DagBundle(
name="test",
aws_conn_id=AWS_CONN_ID_WITH_REGION,
prefix=S3_BUCKET_PREFIX,
bucket_name=S3_BUCKET_NAME,
)
bundle._log.debug = MagicMock()
# Create a pytest Call object to compare against the call_args_list of the _log.debug mock
download_log_call = call(
"Downloading DAGs from s3://%s/%s to %s", S3_BUCKET_NAME, S3_BUCKET_PREFIX, bundle.s3_dags_dir
)
bundle.initialize()
assert bundle._log.debug.call_count == 1
assert bundle._log.debug.call_args_list == [download_log_call]
bundle.refresh()
assert bundle._log.debug.call_count == 2
assert bundle._log.debug.call_args_list == [download_log_call, download_log_call]
bundle.refresh()
assert bundle._log.debug.call_count == 3
assert bundle._log.debug.call_args_list == [download_log_call, download_log_call, download_log_call]
def test_refresh_without_prefix(self, s3_bucket, s3_client):
bundle = S3DagBundle(
name="test",
aws_conn_id=AWS_CONN_ID_WITH_REGION,
bucket_name=S3_BUCKET_NAME,
)
bundle._log.debug = MagicMock()
download_log_call = call(
"Downloading DAGs from s3://%s/%s to %s", S3_BUCKET_NAME, "", bundle.s3_dags_dir
)
assert bundle.prefix == ""
bundle.initialize()
bundle.refresh()
assert bundle._log.debug.call_count == 2
assert bundle._log.debug.call_args_list == [download_log_call, download_log_call]
| {
"repo_id": "apache/airflow",
"file_path": "providers/amazon/tests/unit/amazon/aws/bundles/test_s3.py",
"license": "Apache License 2.0",
"lines": 190,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/google/src/airflow/providers/google/cloud/hooks/cloud_logging.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from collections.abc import Sequence
from typing import TYPE_CHECKING
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import (
CreateSinkRequest,
DeleteSinkRequest,
GetSinkRequest,
ListSinksRequest,
LogSink,
UpdateSinkRequest,
)
from airflow.providers.google.common.consts import CLIENT_INFO
from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook
if TYPE_CHECKING:
from google.protobuf.field_mask_pb2 import FieldMask
class CloudLoggingHook(GoogleBaseHook):
"""
Hook for Google Cloud Logging Log Sinks API.
:param gcp_conn_id: The connection ID to use when fetching connection info.
:param impersonation_chain: Optional service account to impersonate.
"""
def __init__(
self,
gcp_conn_id: str = "google_cloud_default",
impersonation_chain: str | Sequence[str] | None = None,
**kwargs,
) -> None:
super().__init__(gcp_conn_id=gcp_conn_id, impersonation_chain=impersonation_chain, **kwargs)
self._client: ConfigServiceV2Client | None = None
def get_conn(self) -> ConfigServiceV2Client:
"""Return the Google Cloud Logging Config client."""
if not self._client:
self._client = ConfigServiceV2Client(credentials=self.get_credentials(), client_info=CLIENT_INFO)
return self._client
def get_parent(self, project_id):
return f"projects/{project_id}"
@GoogleBaseHook.fallback_to_default_project_id
def create_sink(
self, sink: LogSink | dict, unique_writer_identity: bool = True, project_id: str = PROVIDE_PROJECT_ID
) -> LogSink:
if isinstance(sink, dict):
sink = LogSink(**sink)
request = CreateSinkRequest(
parent=self.get_parent(project_id), sink=sink, unique_writer_identity=unique_writer_identity
)
return self.get_conn().create_sink(request=request)
@GoogleBaseHook.fallback_to_default_project_id
def get_sink(self, sink_name: str, project_id: str = PROVIDE_PROJECT_ID) -> LogSink:
request = GetSinkRequest(sink_name=f"projects/{project_id}/sinks/{sink_name}")
return self.get_conn().get_sink(request=request)
@GoogleBaseHook.fallback_to_default_project_id
def list_sinks(self, page_size: int | None = None, project_id: str = PROVIDE_PROJECT_ID) -> list[LogSink]:
request = ListSinksRequest(parent=self.get_parent(project_id), page_size=page_size)
return list(self.get_conn().list_sinks(request=request))
@GoogleBaseHook.fallback_to_default_project_id
def delete_sink(self, sink_name: str, project_id: str = PROVIDE_PROJECT_ID) -> None:
request = DeleteSinkRequest(sink_name=f"projects/{project_id}/sinks/{sink_name}")
self.get_conn().delete_sink(request=request)
@GoogleBaseHook.fallback_to_default_project_id
def update_sink(
self,
sink_name: str,
sink: LogSink | dict,
unique_writer_identity: bool,
update_mask: FieldMask | dict,
project_id: str = PROVIDE_PROJECT_ID,
) -> LogSink:
if isinstance(sink, dict):
sink = LogSink(**sink)
request = UpdateSinkRequest(
sink_name=f"projects/{project_id}/sinks/{sink_name}",
sink=sink,
unique_writer_identity=unique_writer_identity,
update_mask=update_mask,
)
return self.get_conn().update_sink(request=request)
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/src/airflow/providers/google/cloud/hooks/cloud_logging.py",
"license": "Apache License 2.0",
"lines": 94,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/google/tests/system/google/cloud/cloud_logging_sink/example_cloud_logging_sink.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from datetime import datetime
# [START howto_operator_import_protobuf_obj]
from google.cloud.logging_v2.types import LogSink
from google.protobuf.field_mask_pb2 import FieldMask
# [END howto_operator_import_protobuf_obj]
from airflow.models.dag import DAG
from airflow.providers.google.cloud.operators.cloud_logging_sink import (
CloudLoggingCreateSinkOperator,
CloudLoggingDeleteSinkOperator,
CloudLoggingListSinksOperator,
CloudLoggingUpdateSinkOperator,
)
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
try:
from airflow.sdk import TriggerRule
except ImportError:
# Compatibility for Airflow < 3.1
from airflow.utils.trigger_rule import TriggerRule # type: ignore[no-redef,attr-defined]
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "gcp_cloud_logging_sink"
BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
SINK_NAME = "example-airflow-test-sink"
CONN_ID = "google_cloud_default"
with DAG(
dag_id=DAG_ID,
schedule="@once",
start_date=datetime(2024, 1, 1),
catchup=False,
tags=["example", "gcp", "cloud-logging"],
) as dag:
create_bucket = GCSCreateBucketOperator(task_id="create_bucket", bucket_name=BUCKET_NAME)
# [START howto_operator_cloud_logging_create_sink_native_obj]
create_sink = CloudLoggingCreateSinkOperator(
task_id="create_sink",
project_id=PROJECT_ID,
sink_config={
"name": SINK_NAME,
"destination": f"storage.googleapis.com/{BUCKET_NAME}",
"description": "Create with full sink_config",
"filter": "severity>=INFO",
"disabled": False,
"exclusions": [
{
"name": "exclude-debug",
"description": "Skip debug logs",
"filter": "severity=DEBUG",
"disabled": True,
},
{
"name": "exclude-cloudsql",
"description": "Skip CloudSQL logs",
"filter": 'resource.type="cloudsql_database"',
"disabled": False,
},
],
},
gcp_conn_id=CONN_ID,
)
# [END howto_operator_cloud_logging_create_sink_native_obj]
# [START howto_operator_cloud_logging_update_sink_protobuf_obj]
update_sink_config = CloudLoggingUpdateSinkOperator(
task_id="update_sink_config",
sink_name=SINK_NAME,
project_id=PROJECT_ID,
sink_config=LogSink(
{
"description": "Update #1: GCE logs only",
"filter": 'resource.type="gce_instance"',
"disabled": False,
}
),
update_mask=FieldMask(paths=["description", "filter", "disabled"]),
unique_writer_identity=True,
gcp_conn_id=CONN_ID,
)
# [END howto_operator_cloud_logging_update_sink_protobuf_obj]
# [START howto_operator_cloud_logging_list_sinks]
list_sinks_after = CloudLoggingListSinksOperator(
task_id="list_sinks_after_update",
project_id=PROJECT_ID,
gcp_conn_id=CONN_ID,
)
# [END howto_operator_cloud_logging_list_sinks]
# [START howto_operator_cloud_logging_delete_sink]
delete_sink = CloudLoggingDeleteSinkOperator(
task_id="delete_sink",
sink_name=SINK_NAME,
project_id=PROJECT_ID,
gcp_conn_id=CONN_ID,
)
# [END howto_operator_cloud_logging_delete_sink]
delete_bucket = GCSDeleteBucketOperator(
task_id="delete_bucket", bucket_name=BUCKET_NAME, trigger_rule=TriggerRule.ALL_DONE
)
(create_bucket >> create_sink >> update_sink_config >> list_sinks_after >> delete_sink >> delete_bucket)
from tests_common.test_utils.watcher import watcher
list(dag.tasks) >> watcher()
from tests_common.test_utils.system_tests import get_test_run # noqa: E402
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/tests/system/google/cloud/cloud_logging_sink/example_cloud_logging_sink.py",
"license": "Apache License 2.0",
"lines": 118,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/google/tests/unit/google/cloud/hooks/test_cloud_logging.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
import pytest
from google.cloud.logging_v2.types import (
CreateSinkRequest,
DeleteSinkRequest,
GetSinkRequest,
ListSinksRequest,
LogSink,
UpdateSinkRequest,
)
from airflow.providers.google.cloud.hooks.cloud_logging import CloudLoggingHook
from unit.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
BASE_STRING = "airflow.providers.google.common.hooks.base_google.{}"
CLOUDLOGGING_HOOK_CLIENT = "airflow.providers.google.cloud.hooks.cloud_logging.CloudLoggingHook.get_conn"
PROJECT_ID = "gcp-project-id"
SINK_NAME = "my-logs-sink"
UNIQUE_WRITER_IDENTITY = True
sink_config = {
"name": SINK_NAME,
"destination": "storage.googleapis.com/test-log-sink-af",
"description": "Create with full sink_config",
"filter": "severity>=INFO",
"disabled": False,
"exclusions": [
{
"name": "exclude-debug",
"description": "Skip debug logs",
"filter": "severity=DEBUG",
"disabled": True,
},
{
"name": "exclude-cloudsql",
"description": "Skip CloudSQL logs",
"filter": 'resource.type="cloudsql_database"',
"disabled": False,
},
],
}
GCP_CONN_ID = "google_cloud_default"
class TestCloudLoggingHook:
def setup_method(self):
with mock.patch(
BASE_STRING.format("GoogleBaseHook.__init__"),
new=mock_base_gcp_hook_default_project_id,
):
self.hook = CloudLoggingHook(gcp_conn_id=GCP_CONN_ID)
@mock.patch(
"airflow.providers.google.common.hooks.base_google.GoogleBaseHook.__init__",
new=mock_base_gcp_hook_default_project_id,
)
@mock.patch("airflow.providers.google.cloud.hooks.cloud_logging.ConfigServiceV2Client")
@mock.patch("airflow.providers.google.common.hooks.base_google.GoogleBaseHook.get_credentials")
def test_get_conn(self, mock_get_credentials, mock_client_class):
mock_credentials = mock.Mock()
mock_get_credentials.return_value = mock_credentials
hook = CloudLoggingHook(gcp_conn_id=GCP_CONN_ID)
conn = hook.get_conn()
mock_client_class.assert_called_once_with(credentials=mock_credentials, client_info=mock.ANY)
assert conn == mock_client_class.return_value
@mock.patch(CLOUDLOGGING_HOOK_CLIENT)
def test_create_sink(self, mock_config_client):
sink = LogSink(**sink_config)
expected_request = CreateSinkRequest(
parent=f"projects/{PROJECT_ID}", sink=sink, unique_writer_identity=UNIQUE_WRITER_IDENTITY
)
self.hook.create_sink(
sink=sink,
project_id=PROJECT_ID,
unique_writer_identity=UNIQUE_WRITER_IDENTITY,
)
mock_config_client.return_value.create_sink.assert_called_once_with(request=expected_request)
@mock.patch(CLOUDLOGGING_HOOK_CLIENT)
def test_get_sink(self, mock_config_client):
expected_request = GetSinkRequest(sink_name=f"projects/{PROJECT_ID}/sinks/{SINK_NAME}")
self.hook.get_sink(sink_name=SINK_NAME, project_id=PROJECT_ID)
mock_config_client.return_value.get_sink.assert_called_once_with(request=expected_request)
@mock.patch(CLOUDLOGGING_HOOK_CLIENT)
def test_list_sinks(self, mock_config_client):
expected_request = ListSinksRequest(parent=f"projects/{PROJECT_ID}")
self.hook.list_sinks(project_id=PROJECT_ID)
mock_config_client.return_value.list_sinks.assert_called_once_with(request=expected_request)
@mock.patch(CLOUDLOGGING_HOOK_CLIENT)
def test_delete_sink(self, mock_config_client):
expected_request = DeleteSinkRequest(sink_name=f"projects/{PROJECT_ID}/sinks/{SINK_NAME}")
self.hook.delete_sink(sink_name=SINK_NAME, project_id=PROJECT_ID)
mock_config_client.return_value.delete_sink.assert_called_once_with(request=expected_request)
@mock.patch(CLOUDLOGGING_HOOK_CLIENT)
def test_update_sink_success(self, mock_config_client):
sink_config = {
"destination": f"bigquery.googleapis.com/projects/{PROJECT_ID}/datasets/your_dataset",
"bigquery_options": {"use_partitioned_tables": True},
}
update_mask = {"paths": ["destination", "bigquery_options"]}
expected_request = UpdateSinkRequest(
sink_name=f"projects/{PROJECT_ID}/sinks/{SINK_NAME}",
sink=sink_config,
update_mask=update_mask,
unique_writer_identity=UNIQUE_WRITER_IDENTITY,
)
self.hook.update_sink(
sink_name=SINK_NAME,
sink=sink_config,
update_mask=update_mask,
unique_writer_identity=UNIQUE_WRITER_IDENTITY,
project_id=PROJECT_ID,
)
mock_config_client.return_value.update_sink.assert_called_once_with(request=expected_request)
@mock.patch(CLOUDLOGGING_HOOK_CLIENT)
def test_create_sink_dict_input(self, mock_config_client):
expected_sink = LogSink(**sink_config)
expected_request = CreateSinkRequest(
parent=f"projects/{PROJECT_ID}", sink=expected_sink, unique_writer_identity=UNIQUE_WRITER_IDENTITY
)
self.hook.create_sink(
sink=sink_config, unique_writer_identity=UNIQUE_WRITER_IDENTITY, project_id=PROJECT_ID
)
mock_config_client.return_value.create_sink.assert_called_once_with(request=expected_request)
def test_update_sink_invalid_dict_format(self):
with pytest.raises(ValueError, match="Unknown field for LogSink: invalid_key"):
self.hook.update_sink(
sink_name=SINK_NAME,
sink={"invalid_key": "value"},
update_mask={"paths": ["invalid_key"]},
unique_writer_identity=UNIQUE_WRITER_IDENTITY,
project_id=PROJECT_ID,
)
@mock.patch(CLOUDLOGGING_HOOK_CLIENT)
def test_update_sink_failure(self, mock_config_client):
updated_sink = LogSink(name=SINK_NAME, destination="storage.googleapis.com/new-bucket")
updated_mask = {"paths": ["name", "destination"]}
mock_config_client.return_value.update_sink.side_effect = Exception("Permission denied")
with pytest.raises(Exception, match="Permission denied"):
self.hook.update_sink(
sink_name=SINK_NAME,
sink=updated_sink,
update_mask=updated_mask,
unique_writer_identity=UNIQUE_WRITER_IDENTITY,
project_id=PROJECT_ID,
)
mock_config_client.return_value.update_sink.assert_called_once()
@mock.patch(CLOUDLOGGING_HOOK_CLIENT)
def test_list_sinks_empty(self, mock_config_client):
mock_config_client.return_value.list_sinks.return_value = []
sinks = self.hook.list_sinks(project_id=PROJECT_ID)
assert sinks == []
mock_config_client.return_value.list_sinks.assert_called_once()
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/tests/unit/google/cloud/hooks/test_cloud_logging.py",
"license": "Apache License 2.0",
"lines": 163,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/google/tests/unit/google/cloud/operators/test_cloud_logging_sink.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This module contains various unit tests for GCP Cloud Logging Sink Operators
"""
from __future__ import annotations
import re
from datetime import datetime
from unittest import mock
import pytest
from google.api_core.exceptions import AlreadyExists, GoogleAPICallError, InvalidArgument, NotFound
from google.cloud.exceptions import GoogleCloudError
from google.cloud.logging_v2.types import LogSink
from google.protobuf.field_mask_pb2 import FieldMask
from airflow import DAG
from airflow.providers.common.compat.sdk import AirflowException
from airflow.providers.google.cloud.operators.cloud_logging_sink import (
CloudLoggingCreateSinkOperator,
CloudLoggingDeleteSinkOperator,
CloudLoggingListSinksOperator,
CloudLoggingUpdateSinkOperator,
)
CLOUD_LOGGING_HOOK_PATH = "airflow.providers.google.cloud.operators.cloud_logging_sink.CloudLoggingHook"
TASK_ID = "test-task"
SINK_NAME = "test-sink"
PROJECT_ID = "test-project"
UNIQUE_WRITER_IDENTITY = True
create_test_cases = [
(
{
"name": SINK_NAME,
"description": "Creating sink with pubsub",
"destination": "pubsub.googleapis.com/projects/test-project/topics/test-topic",
"filter": "severity=INFO",
"exclusions": [
{
"name": "exclude-debug",
"description": "Skip debug logs",
"filter": "severity=DEBUG",
"disabled": True,
},
{
"name": "exclude-cloudsql",
"description": "Skip CloudSQL logs",
"filter": 'resource.type="cloudsql_database"',
"disabled": False,
},
],
}
),
(
{
"name": SINK_NAME,
"description": "Creating bq destination",
"destination": "bigquery.googleapis.com/projects/test-project/datasets/your_dataset",
"filter": "severity=ERROR",
"exclusions": [
{
"name": "exclude-healthchecks",
"description": "Exclude App Engine health check logs",
"filter": 'resource.type="gae_app" AND protoPayload.status.code=200 AND protoPayload.resource="/_ah/health"',
"disabled": False,
},
{
"name": "exclude-load-balancer-logs",
"description": "Exclude HTTP 200 logs from load balancer",
"filter": 'resource.type="http_load_balancer" AND httpRequest.status=200',
"disabled": False,
},
{
"name": "exclude-gke-events",
"description": "Exclude normal Kubernetes events",
"filter": 'resource.type="k8s_event" AND jsonPayload.reason="Scheduled"',
"disabled": False,
},
],
"bigquery_options": {"use_partitioned_tables": True},
}
),
]
create_test_ids = ["create_pubsub", "create_bq"]
update_test_cases = [
(
{
"name": "sink-1",
"destination": "storage.googleapis.com/my-bucket-1",
"filter": "severity>=ERROR",
"description": "Storage sink updated",
"disabled": False,
},
{"paths": ["filter", "description", "disabled"]},
),
(
{
"name": "sink-2",
"destination": "pubsub.googleapis.com/projects/my-project/topics/my-topic",
"filter": 'resource.type="gce_instance"',
"description": "Pub/Sub sink updated",
"disabled": True,
},
{"paths": ["destination", "disabled"]},
),
]
update_test_ids = ["update_storage_sink", "update_pubsub_sink"]
sink = LogSink(name=SINK_NAME, destination="pubsub.googleapis.com/projects/my-project/topics/my-topic")
def _assert_common_template_fields(template_fields):
assert "project_id" in template_fields
assert "gcp_conn_id" in template_fields
assert "impersonation_chain" in template_fields
class TestCloudLoggingCreateSinkOperator:
def test_template_fields(self):
operator = CloudLoggingCreateSinkOperator(task_id=TASK_ID, project_id=PROJECT_ID, sink_config=sink)
assert "sink_config" in operator.template_fields
assert "unique_writer_identity" in operator.template_fields
_assert_common_template_fields(operator.template_fields)
def test_missing_required_params(self):
with pytest.raises(AirflowException) as excinfo:
CloudLoggingCreateSinkOperator(
task_id=TASK_ID,
sink_config=None,
project_id=None,
).execute(context={})
assert "Required parameters are missing: ['project_id', 'sink_config']." in str(excinfo.value)
@mock.patch(CLOUD_LOGGING_HOOK_PATH)
@pytest.mark.parametrize("sink_config", create_test_cases, ids=create_test_ids)
def test_create_with_pubsub_sink(self, hook_mock, sink_config):
hook_instance = hook_mock.return_value
hook_instance.create_sink.return_value = LogSink(**sink_config)
operator = CloudLoggingCreateSinkOperator(
task_id=TASK_ID,
sink_config=sink_config,
project_id=PROJECT_ID,
unique_writer_identity=UNIQUE_WRITER_IDENTITY,
)
operator.execute(context=mock.MagicMock())
hook_instance.create_sink.assert_called_once_with(
project_id=PROJECT_ID, sink=sink_config, unique_writer_identity=UNIQUE_WRITER_IDENTITY
)
@mock.patch(CLOUD_LOGGING_HOOK_PATH)
@pytest.mark.parametrize("sink_config", create_test_cases, ids=create_test_ids)
def test_create_sink_already_exists(self, hook_mock, sink_config):
hook_instance = hook_mock.return_value
hook_instance.create_sink.side_effect = AlreadyExists("Sink already exists")
hook_instance.get_sink.return_value = LogSink(**sink_config)
operator = CloudLoggingCreateSinkOperator(
task_id=TASK_ID,
sink_config=sink_config,
project_id=PROJECT_ID,
)
result = operator.execute(context=mock.MagicMock())
hook_instance.create_sink.assert_called_once_with(
project_id=PROJECT_ID, sink=sink_config, unique_writer_identity=False
)
assert result["name"] == sink_config["name"]
assert result["destination"] == sink_config["destination"]
@mock.patch(CLOUD_LOGGING_HOOK_PATH)
@pytest.mark.parametrize("sink_config", create_test_cases, ids=create_test_ids)
def test_create_sink_raises_error(self, hook_mock, sink_config):
hook_instance = hook_mock.return_value
hook_instance.create_sink.side_effect = GoogleCloudError("Failed to create sink")
hook_instance.get_sink.return_value = sink_config
operator = CloudLoggingCreateSinkOperator(
task_id=TASK_ID,
sink_config=sink_config,
project_id=PROJECT_ID,
)
with pytest.raises(GoogleCloudError, match="Failed to create sink"):
operator.execute(context=mock.MagicMock())
hook_instance.create_sink.assert_called_once()
@mock.patch(CLOUD_LOGGING_HOOK_PATH)
@pytest.mark.parametrize(
"impersonation_chain",
[
["user1@project.iam.gserviceaccount.com", "user2@project.iam.gserviceaccount.com"],
"user2@project.iam.gserviceaccount.com",
],
)
def test_create_with_impersonation_chain(self, hook_mock, impersonation_chain):
hook_instance = hook_mock.return_value
hook_instance.create_sink.return_value = sink
operator = CloudLoggingCreateSinkOperator(
task_id=TASK_ID,
sink_config=sink,
impersonation_chain=impersonation_chain,
project_id=PROJECT_ID,
)
operator.execute(context=mock.MagicMock())
hook_mock.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=impersonation_chain,
)
def test_missing_rendered_field_raises(self):
with DAG(
dag_id="test_render_native",
start_date=datetime(1997, 9, 25),
render_template_as_native_obj=True,
) as dag:
operator = CloudLoggingCreateSinkOperator(
task_id=TASK_ID,
sink_config="{{ var.value.sink_config }}",
project_id="{{ var.value.project_id }}",
dag=dag,
)
context = {
"var": {"value": {"project_id": PROJECT_ID, "sink_config": None}},
}
operator.render_template_fields(context)
with pytest.raises(
AirflowException,
match=re.escape(
"Required parameters are missing: ['sink_config']. These must be passed as keyword parameters."
),
):
operator.execute(context)
@mock.patch(CLOUD_LOGGING_HOOK_PATH)
@pytest.mark.parametrize("sink_config", create_test_cases, ids=create_test_ids)
def test_template_rendering(self, hook_mock, sink_config):
with DAG(
dag_id="test_render_native",
start_date=datetime(2024, 1, 1),
render_template_as_native_obj=True,
) as dag:
operator = CloudLoggingCreateSinkOperator(
task_id=TASK_ID,
sink_config="{{ var.value.sink_config }}",
project_id="{{ var.value.project_id }}",
dag=dag,
)
context = {
"var": {"value": {"project_id": PROJECT_ID, "sink_config": sink_config}},
}
hook_instance = hook_mock.return_value
hook_instance.create_sink.return_value = LogSink(**sink_config)
operator.render_template_fields(context)
operator.execute(context)
assert isinstance(operator.sink_config, dict)
assert operator.sink_config == sink_config
@mock.patch(CLOUD_LOGGING_HOOK_PATH)
def test_create_with_empty_sink_name_raises(self, hook_mock):
sink.name = None
hook_instance = hook_mock.return_value
hook_instance.create_sink.side_effect = InvalidArgument("Required parameter 'sink.name' is empty")
with pytest.raises(
InvalidArgument,
match="400 Required parameter 'sink.name' is empty",
):
CloudLoggingCreateSinkOperator(task_id=TASK_ID, sink_config=sink, project_id=PROJECT_ID).execute(
context={}
)
class TestCloudLoggingDeleteSinkOperator:
def test_template_fields(self):
operator = CloudLoggingDeleteSinkOperator(
task_id=TASK_ID,
sink_name=SINK_NAME,
project_id=PROJECT_ID,
)
assert "sink_name" in operator.template_fields
_assert_common_template_fields(operator.template_fields)
def test_missing_required_params(self):
with pytest.raises(AirflowException) as excinfo:
CloudLoggingDeleteSinkOperator(
task_id=TASK_ID,
sink_name=None,
project_id=None,
).execute(context={})
assert "Required parameters are missing" in str(excinfo.value)
@mock.patch(CLOUD_LOGGING_HOOK_PATH)
def test_delete_sink_success(self, hook_mock):
hook_instance = hook_mock.return_value
hook_instance.delete_sink.return_value = None
operator = CloudLoggingDeleteSinkOperator(
task_id=TASK_ID,
sink_name=SINK_NAME,
project_id=PROJECT_ID,
)
context = mock.MagicMock()
operator.execute(context=context)
hook_instance.delete_sink.assert_called_once()
@mock.patch(CLOUD_LOGGING_HOOK_PATH)
def test_delete_sink_raises_error(self, hook_mock):
hook_instance = hook_mock.return_value
hook_instance.delete_sink.side_effect = GoogleCloudError("Internal Error")
operator = CloudLoggingDeleteSinkOperator(
task_id=TASK_ID,
sink_name=SINK_NAME,
project_id=PROJECT_ID,
)
with pytest.raises(GoogleCloudError):
operator.execute(context=mock.MagicMock())
hook_instance.delete_sink.assert_called_once()
@mock.patch(CLOUD_LOGGING_HOOK_PATH)
def test_missing_rendered_field_raises(self, hook_mock):
with DAG(
dag_id="test_render_native",
start_date=datetime(2024, 1, 1),
render_template_as_native_obj=True,
) as dag:
operator = CloudLoggingDeleteSinkOperator(
task_id=TASK_ID,
sink_name="{{ var.value.sink_name }}",
project_id="{{ var.value.project_id }}",
dag=dag,
)
context = {
"var": {"value": {"project_id": PROJECT_ID, "sink_name": None}},
}
operator.render_template_fields(context)
with pytest.raises(
AirflowException,
match=re.escape(
"Required parameters are missing: ['sink_name']. These must be passed as keyword parameters."
),
):
operator.execute(context)
@mock.patch(CLOUD_LOGGING_HOOK_PATH)
@pytest.mark.parametrize("sink_config", create_test_cases, ids=create_test_ids)
def test_template_rendering(self, hook_mock, sink_config):
with DAG(
dag_id="test_render_native",
start_date=datetime(2024, 1, 1),
render_template_as_native_obj=True,
) as dag:
operator = CloudLoggingDeleteSinkOperator(
task_id=TASK_ID,
sink_name="{{ var.value.sink_name }}",
project_id="{{ var.value.project_id }}",
dag=dag,
)
context = {
"var": {"value": {"project_id": PROJECT_ID, "sink_name": SINK_NAME}},
}
hook_instance = hook_mock.return_value
hook_instance.delete_sink.return_value = None
operator.render_template_fields(context)
operator.execute(context)
assert operator.project_id == PROJECT_ID
assert operator.sink_name == SINK_NAME
class TestCloudLoggingListSinksOperator:
def test_template_fields(self):
operator = CloudLoggingListSinksOperator(
task_id=TASK_ID,
project_id=PROJECT_ID,
)
assert "project_id" in operator.template_fields
def test_missing_required_params(self):
with pytest.raises(AirflowException) as excinfo:
CloudLoggingListSinksOperator(
task_id=TASK_ID,
project_id=None,
).execute(context={})
assert "Required parameters are missing" in str(excinfo.value)
@mock.patch(CLOUD_LOGGING_HOOK_PATH)
def test_list_sinks_success(self, hook_mock):
hook_instance = hook_mock.return_value
hook_instance.list_sinks.return_value = [sink, sink]
operator = CloudLoggingListSinksOperator(
task_id=TASK_ID,
project_id=PROJECT_ID,
page_size=50,
)
result = operator.execute(context=mock.MagicMock())
hook_mock.assert_called_once()
_, kwargs = hook_mock.call_args
assert kwargs == {
"gcp_conn_id": "google_cloud_default",
"impersonation_chain": None,
}
hook_instance.list_sinks.assert_called_once_with(
project_id=PROJECT_ID,
page_size=50,
)
assert result == [LogSink.to_dict(sink) for sink in [sink, sink]]
def test_negative_page_size_raises_exception(self):
with pytest.raises(
AirflowException, match="The page_size for the list sinks request must be greater than zero"
):
CloudLoggingListSinksOperator(task_id="fail-task", project_id=PROJECT_ID, page_size=-1).execute(
context={}
)
def test_missing_rendered_field_raises(self):
with DAG(
dag_id="test_render_native",
start_date=datetime(1997, 9, 25),
render_template_as_native_obj=True,
) as dag:
operator = CloudLoggingListSinksOperator(
task_id=TASK_ID, project_id="{{ var.value.project_id }}", dag=dag
)
context = {
"var": {"value": {"project_id": None}},
}
operator.render_template_fields(context)
with pytest.raises(
AirflowException,
match=re.escape(
"Required parameters are missing: ['project_id']. These must be passed as keyword parameters."
),
):
operator.execute(context)
@mock.patch(CLOUD_LOGGING_HOOK_PATH)
def test_template_rendering(self, hook_mock):
with DAG(
dag_id="test_render_native",
start_date=datetime(2024, 1, 1),
render_template_as_native_obj=True,
) as dag:
operator = CloudLoggingListSinksOperator(
task_id=TASK_ID, project_id="{{ var.value.project_id }}", dag=dag
)
context = {
"var": {"value": {"project_id": PROJECT_ID}},
}
hook_instance = hook_mock.return_value
hook_instance.list_sinks.return_value = [sink]
operator.render_template_fields(context)
operator.execute(context)
assert isinstance(operator.project_id, str)
assert operator.project_id == PROJECT_ID
class TestCloudLoggingUpdateSinksOperator:
@pytest.mark.parametrize(("sink_config", "update_mask"), update_test_cases, ids=update_test_ids)
def test_template_fields(self, sink_config, update_mask):
operator = CloudLoggingUpdateSinkOperator(
task_id=TASK_ID,
sink_name=SINK_NAME,
sink_config=sink_config,
update_mask=update_mask,
project_id=PROJECT_ID,
)
assert "sink_config" in operator.template_fields
assert "update_mask" in operator.template_fields
assert "sink_name" in operator.template_fields
_assert_common_template_fields(operator.template_fields)
def test_missing_required_params(self):
with pytest.raises(AirflowException) as excinfo:
CloudLoggingDeleteSinkOperator(
task_id=TASK_ID,
sink_name=None,
project_id=None,
).execute(context={})
assert "Required parameters are missing" in str(excinfo.value)
@mock.patch(CLOUD_LOGGING_HOOK_PATH)
@pytest.mark.parametrize(("sink_config", "update_mask"), update_test_cases, ids=update_test_ids)
def test_update_sink_success(self, hook_mock, sink_config, update_mask):
hook_instance = hook_mock.return_value
hook_instance.get_sink.return_value = sink
sink_ = LogSink(**sink_config)
hook_instance.update_sink.return_value = sink_
operator = CloudLoggingUpdateSinkOperator(
task_id=TASK_ID,
sink_name=SINK_NAME,
project_id=PROJECT_ID,
sink_config=sink_config,
update_mask=update_mask,
)
result = operator.execute(context=mock.MagicMock())
hook_instance.get_sink.assert_called_once_with(sink_name=SINK_NAME, project_id=PROJECT_ID)
hook_instance.update_sink.assert_called_once()
assert result == LogSink.to_dict(sink_)
@mock.patch(CLOUD_LOGGING_HOOK_PATH)
@pytest.mark.parametrize(("sink_config", "update_mask"), update_test_cases, ids=update_test_ids)
def test_update_sink_raises_not_found(self, hook_mock, sink_config, update_mask):
hook_instance = hook_mock.return_value
hook_instance.get_sink.side_effect = NotFound("not found")
operator = CloudLoggingUpdateSinkOperator(
task_id=TASK_ID,
sink_name=SINK_NAME,
sink_config=sink_config,
update_mask=update_mask,
project_id=PROJECT_ID,
)
with pytest.raises(NotFound, match="not found"):
operator.execute(context=mock.MagicMock())
hook_instance.get_sink.assert_called_once()
hook_instance.update_sink.assert_not_called()
@mock.patch(CLOUD_LOGGING_HOOK_PATH)
@pytest.mark.parametrize(("sink_config", "update_mask"), update_test_cases, ids=update_test_ids)
def test_update_sink_raises_generic_error(self, hook_mock, sink_config, update_mask):
hook_instance = hook_mock.return_value
hook_instance.get_sink.side_effect = GoogleAPICallError("something went wrong")
operator = CloudLoggingUpdateSinkOperator(
task_id=TASK_ID,
sink_name=SINK_NAME,
sink_config=sink_config,
update_mask=update_mask,
project_id=PROJECT_ID,
)
with pytest.raises(GoogleAPICallError, match="something went wrong"):
operator.execute(context=mock.MagicMock())
hook_instance.get_sink.assert_called_once()
hook_instance.update_sink.assert_not_called()
@mock.patch(CLOUD_LOGGING_HOOK_PATH)
@pytest.mark.parametrize(
"impersonation_chain",
[
["user1@project.iam.gserviceaccount.com", "user2@project.iam.gserviceaccount.com"],
"user2@project.iam.gserviceaccount.com",
],
)
def test_create_with_impersonation_chain(self, hook_mock, impersonation_chain):
hook_instance = hook_mock.return_value
hook_instance.get_sink.return_value = sink
hook_instance.update_sink.return_value = sink
operator = CloudLoggingUpdateSinkOperator(
task_id=TASK_ID,
sink_config=update_test_cases[0][0],
update_mask=update_test_cases[0][1],
sink_name=SINK_NAME,
impersonation_chain=impersonation_chain,
project_id=PROJECT_ID,
)
operator.execute(context=mock.MagicMock())
hook_mock.assert_called_once_with(
gcp_conn_id="google_cloud_default",
impersonation_chain=impersonation_chain,
)
def test_missing_rendered_field_raises(self):
with DAG(
dag_id="test_render_native",
start_date=datetime(1997, 9, 25),
render_template_as_native_obj=True,
) as dag:
operator = CloudLoggingUpdateSinkOperator(
task_id=TASK_ID,
sink_name="{{ var.value.sink_name }}",
sink_config="{{ var.value.sink_config }}",
update_mask="{{ var.value.update_mask }}",
project_id="{{ var.value.project_id }}",
dag=dag,
)
context = {
"var": {
"value": {
"project_id": PROJECT_ID,
"sink_name": None,
"sink_config": None,
"update_mask": None,
}
},
}
operator.render_template_fields(context)
with pytest.raises(
AirflowException,
match=re.escape(
"Required parameters are missing: ['sink_name', 'sink_config', 'update_mask']. These must be passed as keyword parameters."
),
):
operator.execute(context)
@mock.patch(CLOUD_LOGGING_HOOK_PATH)
@pytest.mark.parametrize(("sink_config", "update_mask"), update_test_cases, ids=update_test_ids)
def test_template_rendering(self, hook_mock, sink_config, update_mask):
with DAG(
dag_id="test_render_native",
start_date=datetime(2024, 1, 1),
render_template_as_native_obj=True,
) as dag:
operator = CloudLoggingUpdateSinkOperator(
task_id=TASK_ID,
sink_name="{{ var.value.sink_name }}",
update_mask="{{ var.value.update_mask }}",
sink_config="{{ var.value.sink_config }}",
project_id="{{ var.value.project_id }}",
unique_writer_identity="{{ var.value.unique_writer_identity }}",
dag=dag,
)
context = {
"var": {
"value": {
"project_id": PROJECT_ID,
"sink_config": sink_config,
"sink_name": SINK_NAME,
"update_mask": update_mask,
"unique_writer_identity": UNIQUE_WRITER_IDENTITY,
}
}
}
hook_instance = hook_mock.return_value
hook_instance.get_sink.return_value = LogSink(name=SINK_NAME)
hook_instance.update_sink.return_value = LogSink(**sink_config)
operator.render_template_fields(context)
result = operator.execute(context=mock.MagicMock())
# Assertions
assert isinstance(operator.sink_config, dict)
assert isinstance(operator.update_mask, dict)
assert isinstance(operator.unique_writer_identity, bool)
assert operator.sink_config["name"] == sink_config["name"]
assert result["name"] == sink_config["name"]
assert operator.update_mask == update_mask
hook_instance.update_sink.assert_called_once_with(
project_id=PROJECT_ID,
sink_name=SINK_NAME,
sink=sink_config,
update_mask=update_mask,
unique_writer_identity=UNIQUE_WRITER_IDENTITY,
)
@mock.patch(CLOUD_LOGGING_HOOK_PATH)
@pytest.mark.parametrize(("sink_config", "update_mask"), update_test_cases, ids=update_test_ids)
def test_template_rendering_with_proto(self, hook_mock, sink_config, update_mask):
sink_obj = LogSink(**sink_config)
mask_obj = FieldMask(paths=update_mask["paths"])
with DAG(
dag_id="test_render_native_proto",
start_date=datetime(2024, 1, 1),
render_template_as_native_obj=True,
) as dag:
operator = CloudLoggingUpdateSinkOperator(
task_id=TASK_ID,
sink_name="{{ var.value.sink_name }}",
update_mask="{{ var.value.update_mask }}",
sink_config="{{ var.value.sink_config }}",
project_id="{{ var.value.project_id }}",
unique_writer_identity="{{ var.value.unique_writer_identity }}",
dag=dag,
)
context = {
"var": {
"value": {
"project_id": PROJECT_ID,
"sink_name": SINK_NAME,
"sink_config": sink_obj,
"update_mask": mask_obj,
"unique_writer_identity": UNIQUE_WRITER_IDENTITY,
}
}
}
hook_instance = hook_mock.return_value
hook_instance.get_sink.return_value = LogSink(name=SINK_NAME)
hook_instance.update_sink.return_value = sink_obj
operator.render_template_fields(context)
result = operator.execute(context=mock.MagicMock())
assert isinstance(operator.sink_config, LogSink)
assert isinstance(operator.update_mask, FieldMask)
assert isinstance(operator.unique_writer_identity, bool)
assert operator.sink_config.name == sink_obj.name
assert result["name"] == sink_obj.name
assert operator.update_mask == mask_obj
assert operator.sink_config == sink_obj
hook_instance.update_sink.assert_called_once_with(
project_id=PROJECT_ID,
sink_name=SINK_NAME,
sink=sink_obj,
update_mask=mask_obj,
unique_writer_identity=UNIQUE_WRITER_IDENTITY,
)
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/tests/unit/google/cloud/operators/test_cloud_logging_sink.py",
"license": "Apache License 2.0",
"lines": 658,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/serialization/typing.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from dataclasses import is_dataclass
from typing import Any
def is_pydantic_model(cls: Any) -> bool:
"""
Return True if the class is a pydantic.main.BaseModel.
Checking is done by attributes as it is significantly faster than
using isinstance.
"""
# __pydantic_fields__ is always present on Pydantic V2 models and is a dict[str, FieldInfo]
# __pydantic_validator__ is an internal validator object, always set after model build
# Check if it is not a dataclass to prevent detecting pydantic dataclasses as pydantic models
return (
hasattr(cls, "__pydantic_fields__")
and hasattr(cls, "__pydantic_validator__")
and not is_dataclass(cls)
)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/serialization/typing.py",
"license": "Apache License 2.0",
"lines": 34,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/openlineage/tests/system/openlineage/example_openlineage_docs_file_dag.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Simple DAGs with md file as doc.
It checks:
- content of DocumentationJobFacet
"""
from __future__ import annotations
from datetime import datetime
from airflow import DAG
from airflow.providers.standard.operators.bash import BashOperator
from system.openlineage.expected_events import get_expected_event_file_path
from system.openlineage.operator import OpenLineageTestOperator
DAG_ID = "openlineage_docs_file_dag"
with DAG(
dag_id=DAG_ID,
start_date=datetime(2021, 1, 1),
schedule=None,
catchup=False,
doc_md="dag_doc.md",
default_args={"retries": 0},
) as dag:
do_nothing_task = BashOperator(task_id="do_nothing_task", bash_command="sleep 1;")
check_events = OpenLineageTestOperator(
task_id="check_events", file_path=get_expected_event_file_path(DAG_ID)
)
do_nothing_task >> check_events
from tests_common.test_utils.system_tests import get_test_run # noqa: E402
# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/tests/system/openlineage/example_openlineage_docs_file_dag.py",
"license": "Apache License 2.0",
"lines": 44,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/fab/src/airflow/providers/fab/version_compat.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS COPIED MANUALLY IN OTHER PROVIDERS DELIBERATELY TO AVOID ADDING UNNECESSARY
# DEPENDENCIES BETWEEN PROVIDERS. IF YOU WANT TO ADD CONDITIONAL CODE IN YOUR PROVIDER THAT DEPENDS
# ON AIRFLOW VERSION, PLEASE COPY THIS FILE TO THE ROOT PACKAGE OF YOUR PROVIDER AND IMPORT
# THOSE CONSTANTS FROM IT RATHER THAN IMPORTING THEM FROM ANOTHER PROVIDER OR TEST CODE
#
from __future__ import annotations
def get_base_airflow_version_tuple() -> tuple[int, int, int]:
from packaging.version import Version
from airflow import __version__
airflow_version = Version(__version__)
return airflow_version.major, airflow_version.minor, airflow_version.micro
AIRFLOW_V_3_1_PLUS = get_base_airflow_version_tuple() >= (3, 1, 0)
AIRFLOW_V_3_1_1_PLUS = get_base_airflow_version_tuple() >= (3, 1, 1)
AIRFLOW_V_3_2_PLUS = get_base_airflow_version_tuple() >= (3, 2, 0)
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/src/airflow/providers/fab/version_compat.py",
"license": "Apache License 2.0",
"lines": 31,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:task-sdk/src/airflow/sdk/bases/hook.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from typing import TYPE_CHECKING, Any
from airflow.sdk.definitions._internal.logging_mixin import LoggingMixin
if TYPE_CHECKING:
from airflow.sdk.definitions.connection import Connection
log = logging.getLogger(__name__)
class BaseHook(LoggingMixin):
"""
Abstract base class for hooks.
Hooks are meant as an interface to
interact with external systems. MySqlHook, HiveHook, PigHook return
object that can handle the connection and interaction to specific
instances of these systems, and expose consistent methods to interact
with them.
:param logger_name: Name of the logger used by the Hook to emit logs.
If set to `None` (default), the logger name will fall back to
`airflow.task.hooks.{class.__module__}.{class.__name__}` (e.g. DbApiHook will have
*airflow.task.hooks.airflow.providers.common.sql.hooks.sql.DbApiHook* as logger).
"""
def __init__(self, logger_name: str | None = None):
super().__init__()
self._log_config_logger_name = "airflow.task.hooks"
self._logger_name = logger_name
@classmethod
def get_connection(cls, conn_id: str) -> Connection:
"""
Get connection, given connection id.
:param conn_id: connection id
:return: connection
"""
from airflow.sdk.definitions.connection import Connection
conn = Connection.get(conn_id)
log.debug("Connection Retrieved '%s' (via task-sdk)", conn.conn_id)
return conn
@classmethod
async def aget_connection(cls, conn_id: str) -> Connection:
"""
Get connection (async), given connection id.
:param conn_id: connection id
:return: connection
"""
from airflow.sdk.definitions.connection import Connection
conn = await Connection.async_get(conn_id)
log.debug("Connection Retrieved '%s' (via task-sdk)", conn.conn_id)
return conn
@classmethod
def get_hook(cls, conn_id: str, hook_params: dict | None = None):
"""
Return default hook for this connection id.
:param conn_id: connection id
:param hook_params: hook parameters
:return: default hook for this connection
"""
connection = cls.get_connection(conn_id)
return connection.get_hook(hook_params=hook_params)
def get_conn(self) -> Any:
"""Return connection for the hook."""
raise NotImplementedError()
@classmethod
def get_connection_form_widgets(cls) -> dict[str, Any]:
return {}
@classmethod
def get_ui_field_behaviour(cls) -> dict[str, Any]:
return {}
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/bases/hook.py",
"license": "Apache License 2.0",
"lines": 81,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-ctl/src/airflowctl/ctl/commands/connection_command.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
from pathlib import Path
import rich
from airflowctl.api.client import NEW_API_CLIENT, ClientKind, provide_api_client
from airflowctl.api.datamodels.generated import (
BulkActionOnExistence,
BulkBodyConnectionBody,
BulkCreateActionConnectionBody,
ConnectionBody,
)
@provide_api_client(kind=ClientKind.CLI)
def import_(args, api_client=NEW_API_CLIENT) -> None:
"""Import connections from file."""
filepath = Path(args.file)
current_path = Path.cwd()
filepath = current_path / filepath if not filepath.is_absolute() else filepath
if not filepath.exists():
raise SystemExit(f"Missing connections file {args.file}")
with open(filepath) as file:
try:
connections_json = json.loads(file.read())
except Exception as e:
raise SystemExit(f"Error reading connections file {args.file}: {e}")
try:
connections_data = {
k: ConnectionBody(
connection_id=k,
conn_type=v.get("conn_type"),
host=v.get("host"),
login=v.get("login"),
password=v.get("password"),
port=v.get("port"),
extra=v.get("extra"),
description=v.get("description", ""),
)
for k, v in connections_json.items()
}
connection_create_action = BulkCreateActionConnectionBody(
action="create",
entities=list(connections_data.values()),
action_on_existence=BulkActionOnExistence("fail"),
)
response = api_client.connections.bulk(BulkBodyConnectionBody(actions=[connection_create_action]))
if response.create.errors:
rich.print(f"[red]Failed to import connections: {response.create.errors}[/red]")
raise SystemExit
rich.print(f"[green]Successfully imported {response.create.success} connection(s)[/green]")
except Exception as e:
rich.print(f"[red]Failed to import connections: {e}[/red]")
raise SystemExit
| {
"repo_id": "apache/airflow",
"file_path": "airflow-ctl/src/airflowctl/ctl/commands/connection_command.py",
"license": "Apache License 2.0",
"lines": 67,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-ctl/tests/airflow_ctl/ctl/commands/test_connections_command.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
from unittest.mock import patch
import pytest
from airflowctl.api.client import ClientKind
from airflowctl.api.datamodels.generated import (
BulkActionResponse,
BulkResponse,
ConnectionBody,
ConnectionCollectionResponse,
ConnectionResponse,
)
from airflowctl.ctl import cli_parser
from airflowctl.ctl.commands import connection_command
class TestCliConnectionCommands:
connection_id = "test_connection"
export_file_name = "exported_json.json"
parser = cli_parser.get_parser()
connection_collection_response = ConnectionCollectionResponse(
connections=[
ConnectionResponse(
connection_id=connection_id,
conn_type="test_type",
host="test_host",
login="test_login",
password="test_password",
port=1234,
extra="{}",
description="Test connection description",
)
],
total_entries=1,
)
bulk_response_success = BulkResponse(
create=BulkActionResponse(success=[connection_id], errors=[]), update=None, delete=None
)
bulk_response_error = BulkResponse(
create=BulkActionResponse(
success=[],
errors=[
{
"error": f"The connection with these connection_ids: {{'{connection_id}'}} already exist.",
"status_code": 409,
}
],
),
update=None,
delete=None,
)
def test_import_success(self, api_client_maker, tmp_path, monkeypatch):
api_client = api_client_maker(
path="/api/v2/connections",
response_json=self.bulk_response_success.model_dump(),
expected_http_status_code=200,
kind=ClientKind.CLI,
)
monkeypatch.chdir(tmp_path)
expected_json_path = tmp_path / self.export_file_name
connection_file = {
self.connection_id: {
"conn_type": "test_type",
"host": "test_host",
"login": "test_login",
"password": "test_password",
"port": 1234,
"extra": "{}",
"description": "Test connection description",
"connection_id": self.connection_id,
}
}
expected_json_path.write_text(json.dumps(connection_file))
connection_command.import_(
self.parser.parse_args(["connections", "import", expected_json_path.as_posix()]),
api_client=api_client,
)
def test_import_error(self, api_client_maker, tmp_path, monkeypatch):
api_client = api_client_maker(
path="/api/v2/connections",
response_json=self.bulk_response_error.model_dump(),
expected_http_status_code=200,
kind=ClientKind.CLI,
)
monkeypatch.chdir(tmp_path)
expected_json_path = tmp_path / self.export_file_name
connection_file = {
self.connection_id: {
"conn_type": "test_type",
"host": "test_host",
"login": "test_login",
"password": "test_password",
"port": 1234,
"extra": "{}",
"description": "Test connection description",
"connection_id": self.connection_id,
}
}
expected_json_path.write_text(json.dumps(connection_file))
with pytest.raises(SystemExit):
connection_command.import_(
self.parser.parse_args(["connections", "import", expected_json_path.as_posix()]),
api_client=api_client,
)
def test_import_without_extra_field(self, api_client_maker, tmp_path, monkeypatch):
"""Import succeeds when JSON omits the ``extra`` field (#62653).
Before the fix, ``v.get("extra", {})`` returned ``{}`` (a dict) when
the key was absent, but ``ConnectionBody.extra`` expects ``str | None``,
causing a Pydantic ``ValidationError``.
"""
api_client = api_client_maker(
path="/api/v2/connections",
response_json=self.bulk_response_success.model_dump(),
expected_http_status_code=200,
kind=ClientKind.CLI,
)
monkeypatch.chdir(tmp_path)
json_path = tmp_path / self.export_file_name
# Intentionally omit "extra" (and several other optional keys) to
# mirror a minimal real-world connection JSON export.
connection_file = {
self.connection_id: {
"conn_type": "test_type",
"host": "test_host",
}
}
json_path.write_text(json.dumps(connection_file))
with patch(
"airflowctl.ctl.commands.connection_command.ConnectionBody",
wraps=ConnectionBody,
) as mock_body:
connection_command.import_(
self.parser.parse_args(["connections", "import", json_path.as_posix()]),
api_client=api_client,
)
# Verify that ``extra`` was passed as None (not {} which would fail
# Pydantic validation) and all other absent keys default correctly.
mock_body.assert_called_once_with(
connection_id=self.connection_id,
conn_type="test_type",
host="test_host",
login=None,
password=None,
port=None,
extra=None,
description="",
)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-ctl/tests/airflow_ctl/ctl/commands/test_connections_command.py",
"license": "Apache License 2.0",
"lines": 161,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/microsoft/psrp/src/airflow/providers/microsoft/psrp/version_compat.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
def get_base_airflow_version_tuple() -> tuple[int, int, int]:
from packaging.version import Version
from airflow import __version__
airflow_version = Version(__version__)
return airflow_version.major, airflow_version.minor, airflow_version.micro
AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
__all__ = [
"AIRFLOW_V_3_0_PLUS",
"AIRFLOW_V_3_1_PLUS",
]
| {
"repo_id": "apache/airflow",
"file_path": "providers/microsoft/psrp/src/airflow/providers/microsoft/psrp/version_compat.py",
"license": "Apache License 2.0",
"lines": 28,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/apache/kylin/src/airflow/providers/apache/kylin/version_compat.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS COPIED MANUALLY IN OTHER PROVIDERS DELIBERATELY TO AVOID ADDING UNNECESSARY
# DEPENDENCIES BETWEEN PROVIDERS. IF YOU WANT TO ADD CONDITIONAL CODE IN YOUR PROVIDER THAT DEPENDS
# ON AIRFLOW VERSION, PLEASE COPY THIS FILE TO THE ROOT PACKAGE OF YOUR PROVIDER AND IMPORT
# THOSE CONSTANTS FROM IT RATHER THAN IMPORTING THEM FROM ANOTHER PROVIDER OR TEST CODE
#
from __future__ import annotations
def get_base_airflow_version_tuple() -> tuple[int, int, int]:
from packaging.version import Version
from airflow import __version__
airflow_version = Version(__version__)
return airflow_version.major, airflow_version.minor, airflow_version.micro
AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
__all__ = [
"AIRFLOW_V_3_0_PLUS",
]
| {
"repo_id": "apache/airflow",
"file_path": "providers/apache/kylin/src/airflow/providers/apache/kylin/version_compat.py",
"license": "Apache License 2.0",
"lines": 32,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/apache/beam/src/airflow/providers/apache/beam/version_compat.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Version compatibility for Apache Beam provider."""
from __future__ import annotations
def get_base_airflow_version_tuple() -> tuple[int, int, int]:
from packaging.version import Version
from airflow import __version__
airflow_version = Version(__version__)
return airflow_version.major, airflow_version.minor, airflow_version.micro
AIRFLOW_V_3_1_PLUS = get_base_airflow_version_tuple() >= (3, 1, 0)
__all__ = [
"AIRFLOW_V_3_1_PLUS",
]
| {
"repo_id": "apache/airflow",
"file_path": "providers/apache/beam/src/airflow/providers/apache/beam/version_compat.py",
"license": "Apache License 2.0",
"lines": 28,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/dingding/src/airflow/providers/dingding/version_compat.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
def get_base_airflow_version_tuple() -> tuple[int, int, int]:
from packaging.version import Version
from airflow import __version__
airflow_version = Version(__version__)
return airflow_version.major, airflow_version.minor, airflow_version.micro
AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
__all__ = [
"AIRFLOW_V_3_0_PLUS",
]
| {
"repo_id": "apache/airflow",
"file_path": "providers/dingding/src/airflow/providers/dingding/version_compat.py",
"license": "Apache License 2.0",
"lines": 26,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/singularity/src/airflow/providers/singularity/version_compat.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
def get_base_airflow_version_tuple() -> tuple[int, int, int]:
from packaging.version import Version
from airflow import __version__
airflow_version = Version(__version__)
return airflow_version.major, airflow_version.minor, airflow_version.micro
AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
__all__ = ["AIRFLOW_V_3_0_PLUS"]
| {
"repo_id": "apache/airflow",
"file_path": "providers/singularity/src/airflow/providers/singularity/version_compat.py",
"license": "Apache License 2.0",
"lines": 24,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/papermill/src/airflow/providers/papermill/version_compat.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
def get_base_airflow_version_tuple() -> tuple[int, int, int]:
from packaging.version import Version
from airflow import __version__
airflow_version = Version(__version__)
return airflow_version.major, airflow_version.minor, airflow_version.micro
AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
AIRFLOW_V_3_1_PLUS = get_base_airflow_version_tuple() >= (3, 1, 0)
__all__ = [
"AIRFLOW_V_3_0_PLUS",
"AIRFLOW_V_3_1_PLUS",
]
| {
"repo_id": "apache/airflow",
"file_path": "providers/papermill/src/airflow/providers/papermill/version_compat.py",
"license": "Apache License 2.0",
"lines": 28,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/apache/tinkerpop/src/airflow/providers/apache/tinkerpop/version_compat.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
def get_base_airflow_version_tuple() -> tuple[int, int, int]:
from packaging.version import Version
from airflow import __version__
airflow_version = Version(__version__)
return airflow_version.major, airflow_version.minor, airflow_version.micro
AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
__all__ = [
"AIRFLOW_V_3_0_PLUS",
"AIRFLOW_V_3_1_PLUS",
]
| {
"repo_id": "apache/airflow",
"file_path": "providers/apache/tinkerpop/src/airflow/providers/apache/tinkerpop/version_compat.py",
"license": "Apache License 2.0",
"lines": 28,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/apache/hive/src/airflow/providers/apache/hive/version_compat.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS COPIED MANUALLY IN OTHER PROVIDERS DELIBERATELY TO AVOID ADDING UNNECESSARY
# DEPENDENCIES BETWEEN PROVIDERS. IF YOU WANT TO ADD CONDITIONAL CODE IN YOUR PROVIDER THAT DEPENDS
# ON AIRFLOW VERSION, PLEASE COPY THIS FILE TO THE ROOT PACKAGE OF YOUR PROVIDER AND IMPORT
# THOSE CONSTANTS FROM IT RATHER THAN IMPORTING THEM FROM ANOTHER PROVIDER OR TEST CODE
#
from __future__ import annotations
# Re-export from common.compat for backward compatibility
from airflow.providers.common.compat.sdk import (
AIRFLOW_VAR_NAME_FORMAT_MAPPING,
BaseOperator,
BaseSensorOperator,
context_to_airflow_vars,
)
def get_base_airflow_version_tuple() -> tuple[int, int, int]:
from packaging.version import Version
from airflow import __version__
airflow_version = Version(__version__)
return airflow_version.major, airflow_version.minor, airflow_version.micro
AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
__all__ = [
"AIRFLOW_V_3_0_PLUS",
"AIRFLOW_V_3_1_PLUS",
"BaseOperator",
"BaseSensorOperator",
"AIRFLOW_VAR_NAME_FORMAT_MAPPING",
"context_to_airflow_vars",
]
| {
"repo_id": "apache/airflow",
"file_path": "providers/apache/hive/src/airflow/providers/apache/hive/version_compat.py",
"license": "Apache License 2.0",
"lines": 45,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/apache/livy/src/airflow/providers/apache/livy/version_compat.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS COPIED MANUALLY IN OTHER PROVIDERS DELIBERATELY TO AVOID ADDING UNNECESSARY
# DEPENDENCIES BETWEEN PROVIDERS. IF YOU WANT TO ADD CONDITIONAL CODE IN YOUR PROVIDER THAT DEPENDS
# ON AIRFLOW VERSION, PLEASE COPY THIS FILE TO THE ROOT PACKAGE OF YOUR PROVIDER AND IMPORT
# THOSE CONSTANTS FROM IT RATHER THAN IMPORTING THEM FROM ANOTHER PROVIDER OR TEST CODE
#
from __future__ import annotations
def get_base_airflow_version_tuple() -> tuple[int, int, int]:
from packaging.version import Version
from airflow import __version__
airflow_version = Version(__version__)
return airflow_version.major, airflow_version.minor, airflow_version.micro
AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
__all__ = ["AIRFLOW_V_3_0_PLUS"]
| {
"repo_id": "apache/airflow",
"file_path": "providers/apache/livy/src/airflow/providers/apache/livy/version_compat.py",
"license": "Apache License 2.0",
"lines": 30,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/keycloak/src/airflow/providers/keycloak/auth_manager/datamodels/token.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Annotated, Literal
from pydantic import Field, RootModel, model_validator
from airflow.api_fastapi.core_api.base import BaseModel, StrictBaseModel
from airflow.providers.keycloak.auth_manager.services.token import (
create_client_credentials_token,
create_token_for,
)
class TokenResponse(BaseModel):
"""Token serializer for responses."""
access_token: str
class TokenPasswordBody(StrictBaseModel):
"""Password grant token serializer for post bodies."""
grant_type: Literal["password"] = "password"
username: str = Field()
password: str = Field()
def create_token(self, expiration_time_in_seconds: int) -> str:
"""Create token using password grant."""
return create_token_for(
self.username, self.password, expiration_time_in_seconds=expiration_time_in_seconds
)
class TokenClientCredentialsBody(StrictBaseModel):
"""Client credentials grant token serializer for post bodies."""
grant_type: Literal["client_credentials"]
client_id: str = Field()
client_secret: str = Field()
def create_token(self, expiration_time_in_seconds: int) -> str:
"""Create token using client credentials grant."""
return create_client_credentials_token(
self.client_id, self.client_secret, expiration_time_in_seconds=expiration_time_in_seconds
)
TokenUnion = Annotated[
TokenPasswordBody | TokenClientCredentialsBody,
Field(discriminator="grant_type"),
]
class TokenBody(RootModel[TokenUnion]):
"""Token request body."""
@model_validator(mode="before")
@classmethod
def default_grant_type(cls, data):
"""Add default grant_type for discrimination."""
if "grant_type" not in data:
data["grant_type"] = "password"
return data
| {
"repo_id": "apache/airflow",
"file_path": "providers/keycloak/src/airflow/providers/keycloak/auth_manager/datamodels/token.py",
"license": "Apache License 2.0",
"lines": 61,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/keycloak/src/airflow/providers/keycloak/auth_manager/routes/token.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from fastapi import status
from airflow.api_fastapi.common.router import AirflowRouter
from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc
from airflow.providers.common.compat.sdk import conf
from airflow.providers.keycloak.auth_manager.datamodels.token import (
TokenBody,
TokenPasswordBody,
TokenResponse,
)
log = logging.getLogger(__name__)
token_router = AirflowRouter(tags=["KeycloakAuthManagerToken"])
@token_router.post(
"/token",
status_code=status.HTTP_201_CREATED,
responses=create_openapi_http_exception_doc([status.HTTP_400_BAD_REQUEST, status.HTTP_403_FORBIDDEN]),
)
def create_token(body: TokenBody) -> TokenResponse:
token = body.root.create_token(
expiration_time_in_seconds=int(conf.getint("api_auth", "jwt_expiration_time"))
)
return TokenResponse(access_token=token)
@token_router.post(
"/token/cli",
status_code=status.HTTP_201_CREATED,
responses=create_openapi_http_exception_doc([status.HTTP_400_BAD_REQUEST, status.HTTP_403_FORBIDDEN]),
)
def create_token_cli(body: TokenPasswordBody) -> TokenResponse:
token = body.create_token(
expiration_time_in_seconds=int(conf.getint("api_auth", "jwt_cli_expiration_time"))
)
return TokenResponse(access_token=token)
| {
"repo_id": "apache/airflow",
"file_path": "providers/keycloak/src/airflow/providers/keycloak/auth_manager/routes/token.py",
"license": "Apache License 2.0",
"lines": 49,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/keycloak/tests/unit/keycloak/auth_manager/routes/test_token.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import patch
import pytest
from airflow.api_fastapi.app import AUTH_MANAGER_FASTAPI_APP_PREFIX
from tests_common.test_utils.config import conf_vars
class TestTokenRouter:
token = "token"
token_body_dict = {"username": "username", "password": "password"}
@pytest.mark.parametrize(
"body",
[
{"username": "username", "password": "password"},
{"grant_type": "password", "username": "username", "password": "password"},
],
)
@conf_vars(
{
("api_auth", "jwt_expiration_time"): "10",
}
)
@patch("airflow.providers.keycloak.auth_manager.datamodels.token.create_token_for")
def test_create_token_password_grant(self, mock_create_token_for, client, body):
mock_create_token_for.return_value = self.token
response = client.post(
AUTH_MANAGER_FASTAPI_APP_PREFIX + "/token",
json=body,
)
assert response.status_code == 201
assert response.json() == {"access_token": self.token}
@conf_vars(
{
("api_auth", "jwt_cli_expiration_time"): "10",
("api_auth", "jwt_expiration_time"): "10",
}
)
@patch("airflow.providers.keycloak.auth_manager.datamodels.token.create_token_for")
def test_create_token_cli(self, mock_create_token_for, client):
mock_create_token_for.return_value = self.token
response = client.post(
AUTH_MANAGER_FASTAPI_APP_PREFIX + "/token/cli",
json=self.token_body_dict,
)
assert response.status_code == 201
assert response.json() == {"access_token": self.token}
@conf_vars(
{
("api_auth", "jwt_expiration_time"): "10",
}
)
@patch("airflow.providers.keycloak.auth_manager.datamodels.token.create_client_credentials_token")
def test_create_token_client_credentials(self, mock_create_client_credentials_token, client):
mock_create_client_credentials_token.return_value = self.token
response = client.post(
AUTH_MANAGER_FASTAPI_APP_PREFIX + "/token",
json={
"grant_type": "client_credentials",
"client_id": "client_id",
"client_secret": "client_secret",
},
)
assert response.status_code == 201
assert response.json() == {"access_token": self.token}
mock_create_client_credentials_token.assert_called_once_with(
"client_id", "client_secret", expiration_time_in_seconds=10
)
@pytest.mark.parametrize(
"body",
[
{"client_id": "client_id", "client_secret": "client_secret"},
{"grant_type": "password", "client_id": "client_id", "client_secret": "client_secret"},
{"grant_type": "password", "client_id": "client_id", "password": "password"},
{"grant_type": "password", "username": "username", "client_secret": "client_secret"},
{"grant_type": "client_credentials", "username": "username", "password": "password"},
{"grant_type": "client_credentials", "client_id": "client_id", "password": "password"},
{"grant_type": "client_credentials", "username": "username", "client_secret": "client_secret"},
],
)
@conf_vars(
{
("api_auth", "jwt_expiration_time"): "10",
}
)
@patch("airflow.providers.keycloak.auth_manager.datamodels.token.create_client_credentials_token")
def test_create_token_invalid_body(self, mock_create_client_credentials_token, client, body):
mock_create_client_credentials_token.return_value = self.token
response = client.post(
AUTH_MANAGER_FASTAPI_APP_PREFIX + "/token",
json=body,
)
assert response.status_code == 422
mock_create_client_credentials_token.assert_not_called()
| {
"repo_id": "apache/airflow",
"file_path": "providers/keycloak/tests/unit/keycloak/auth_manager/routes/test_token.py",
"license": "Apache License 2.0",
"lines": 107,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/teradata/src/airflow/providers/teradata/hooks/bteq.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
import socket
import subprocess
import tempfile
from contextlib import contextmanager
from paramiko import SSHException
from airflow.providers.common.compat.sdk import AirflowException
from airflow.providers.ssh.hooks.ssh import SSHHook
from airflow.providers.teradata.hooks.ttu import TtuHook
from airflow.providers.teradata.utils.bteq_util import (
get_remote_tmp_dir,
identify_os,
prepare_bteq_command_for_local_execution,
prepare_bteq_command_for_remote_execution,
transfer_file_sftp,
verify_bteq_installed,
verify_bteq_installed_remote,
)
from airflow.providers.teradata.utils.constants import Constants
from airflow.providers.teradata.utils.encryption_utils import (
decrypt_remote_file_to_string,
generate_encrypted_file_with_openssl,
generate_random_password,
)
class BteqHook(TtuHook):
"""
Hook for executing BTEQ (Basic Teradata Query) scripts.
This hook provides functionality to execute BTEQ scripts either locally or remotely via SSH.
It extends the `TtuHook` and integrates with Airflow's SSHHook for remote execution.
The BTEQ scripts are used to interact with Teradata databases, allowing users to perform
operations such as querying, data manipulation, and administrative tasks.
Features:
- Supports both local and remote execution of BTEQ scripts.
- Handles connection details, script preparation, and execution.
- Provides robust error handling and logging for debugging.
- Allows configuration of session parameters like output width and encoding.
.. seealso::
- :ref:`hook API connection <howto/connection:teradata>`
:param bteq_script: The BTEQ script to be executed. This can be a string containing the BTEQ commands.
:param remote_working_dir: Temporary directory location on the remote host (via SSH) where the BTEQ script will be transferred and executed. Defaults to `/tmp` if not specified. This is only applicable when `ssh_conn_id` is provided.
:param bteq_script_encoding: Character encoding for the BTEQ script file. Defaults to ASCII if not specified.
:param timeout: Timeout (in seconds) for executing the BTEQ command. Default is 600 seconds (10 minutes).
:param timeout_rc: Return code to use if the BTEQ execution fails due to a timeout. To allow DAG execution to continue after a timeout, include this value in `bteq_quit_rc`. If not specified, a timeout will raise an exception and stop the DAG.
:param bteq_session_encoding: Character encoding for the BTEQ session. Defaults to UTF-8 if not specified.
:param bteq_quit_rc: Accepts a single integer, list, or tuple of return codes. Specifies which BTEQ return codes should be treated as successful, allowing subsequent tasks to continue execution.
"""
def __init__(self, teradata_conn_id: str, ssh_conn_id: str | None = None, *args, **kwargs):
super().__init__(teradata_conn_id, *args, **kwargs)
self.ssh_conn_id = ssh_conn_id
self.ssh_hook = SSHHook(ssh_conn_id=ssh_conn_id) if ssh_conn_id else None
def execute_bteq_script(
self,
bteq_script: str,
remote_working_dir: str | None,
bteq_script_encoding: str | None,
timeout: int,
timeout_rc: int | None,
bteq_session_encoding: str | None,
bteq_quit_rc: int | list[int] | tuple[int, ...] | None,
temp_file_read_encoding: str | None,
) -> int | None:
"""Execute the BTEQ script either in local machine or on remote host based on ssh_conn_id."""
# Remote execution
if self.ssh_hook:
# Write script to local temp file
# Encrypt the file locally
return self.execute_bteq_script_at_remote(
bteq_script,
remote_working_dir,
bteq_script_encoding,
timeout,
timeout_rc,
bteq_session_encoding,
bteq_quit_rc,
temp_file_read_encoding,
)
return self.execute_bteq_script_at_local(
bteq_script,
bteq_script_encoding,
timeout,
timeout_rc,
bteq_quit_rc,
bteq_session_encoding,
temp_file_read_encoding,
)
def execute_bteq_script_at_remote(
self,
bteq_script: str,
remote_working_dir: str | None,
bteq_script_encoding: str | None,
timeout: int,
timeout_rc: int | None,
bteq_session_encoding: str | None,
bteq_quit_rc: int | list[int] | tuple[int, ...] | None,
temp_file_read_encoding: str | None,
) -> int | None:
with (
self.preferred_temp_directory() as tmp_dir,
):
file_path = os.path.join(tmp_dir, "bteq_script.txt")
with open(file_path, "w", encoding=str(temp_file_read_encoding or "UTF-8")) as f:
f.write(bteq_script)
return self._transfer_to_and_execute_bteq_on_remote(
file_path,
remote_working_dir,
bteq_script_encoding,
timeout,
timeout_rc,
bteq_quit_rc,
bteq_session_encoding,
tmp_dir,
)
def _transfer_to_and_execute_bteq_on_remote(
self,
file_path: str,
remote_working_dir: str | None,
bteq_script_encoding: str | None,
timeout: int,
timeout_rc: int | None,
bteq_quit_rc: int | list[int] | tuple[int, ...] | None,
bteq_session_encoding: str | None,
tmp_dir: str,
) -> int | None:
encrypted_file_path = None
remote_encrypted_path = None
try:
if self.ssh_hook and self.ssh_hook.get_conn():
with self.ssh_hook.get_conn() as ssh_client:
if ssh_client is None:
raise AirflowException(Constants.BTEQ_REMOTE_ERROR_MSG)
verify_bteq_installed_remote(ssh_client)
password = generate_random_password() # Encryption/Decryption password
encrypted_file_path = os.path.join(tmp_dir, "bteq_script.enc")
generate_encrypted_file_with_openssl(file_path, password, encrypted_file_path)
if not remote_working_dir:
remote_working_dir = get_remote_tmp_dir(ssh_client)
self.log.debug(
"Transferring encrypted BTEQ script to remote host: %s", remote_working_dir
)
remote_encrypted_path = os.path.join(remote_working_dir or "", "bteq_script.enc")
remote_encrypted_path = remote_encrypted_path.replace("/", "\\")
transfer_file_sftp(ssh_client, encrypted_file_path, remote_encrypted_path)
bteq_command_str = prepare_bteq_command_for_remote_execution(
timeout=timeout,
bteq_script_encoding=bteq_script_encoding or "",
bteq_session_encoding=bteq_session_encoding or "",
timeout_rc=timeout_rc or -1,
)
exit_status, stdout, stderr = decrypt_remote_file_to_string(
ssh_client,
remote_encrypted_path,
password,
bteq_command_str,
)
failure_message = None
password = None # Clear sensitive data
if "Failure" in stderr or "Error" in stderr:
failure_message = stderr
# Raising an exception if there is any failure in bteq and also user wants to fail the
# task otherwise just log the error message as warning to not fail the task.
if (
failure_message
and exit_status != 0
and exit_status
not in (
bteq_quit_rc
if isinstance(bteq_quit_rc, (list, tuple))
else [bteq_quit_rc if bteq_quit_rc is not None else 0]
)
):
raise AirflowException(f"Failed to execute BTEQ script : {failure_message}")
if failure_message:
self.log.warning(failure_message)
return exit_status
else:
raise AirflowException(Constants.BTEQ_REMOTE_ERROR_MSG)
except (OSError, socket.gaierror):
raise AirflowException(Constants.BTEQ_REMOTE_ERROR_MSG)
except SSHException as e:
raise AirflowException(f"{Constants.BTEQ_REMOTE_ERROR_MSG}: {str(e)}")
except AirflowException as e:
raise e
except Exception as e:
raise AirflowException(f"{Constants.BTEQ_REMOTE_ERROR_MSG}: {str(e)}")
finally:
# Remove the local script file
if encrypted_file_path and os.path.exists(encrypted_file_path):
os.remove(encrypted_file_path)
# Cleanup: Delete the remote temporary file
if remote_encrypted_path:
if self.ssh_hook and self.ssh_hook.get_conn():
with self.ssh_hook.get_conn() as ssh_client:
if ssh_client is None:
raise AirflowException(
"Failed to establish SSH connection. `ssh_client` is None."
)
# Detect OS
os_info = identify_os(ssh_client)
if "windows" in os_info:
cleanup_en_command = f'del /f /q "{remote_encrypted_path}"'
else:
cleanup_en_command = f"rm -f '{remote_encrypted_path}'"
self.log.debug("cleaning up remote file: %s", cleanup_en_command)
ssh_client.exec_command(cleanup_en_command)
def execute_bteq_script_at_local(
self,
bteq_script: str,
bteq_script_encoding: str | None,
timeout: int,
timeout_rc: int | None,
bteq_quit_rc: int | list[int] | tuple[int, ...] | None,
bteq_session_encoding: str | None,
temp_file_read_encoding: str | None,
) -> int | None:
verify_bteq_installed()
bteq_command_list = prepare_bteq_command_for_local_execution(
self.get_conn(),
timeout=timeout,
bteq_script_encoding=bteq_script_encoding or "",
bteq_session_encoding=bteq_session_encoding or "",
timeout_rc=timeout_rc or -1,
)
process = subprocess.Popen(
bteq_command_list,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
shell=False,
start_new_session=True,
)
encode_bteq_script = bteq_script.encode(str(temp_file_read_encoding or "UTF-8"))
stdout_data, _ = process.communicate(input=encode_bteq_script)
try:
# https://docs.python.org/3.10/library/subprocess.html#subprocess.Popen.wait timeout is in seconds
process.wait(timeout=timeout + 60) # Adding 1 minute extra for BTEQ script timeout
except subprocess.TimeoutExpired:
self.on_kill()
raise AirflowException(Constants.BTEQ_TIMEOUT_ERROR_MSG, timeout)
conn = self.get_conn()
conn["sp"] = process # For `on_kill` support
failure_message = None
if stdout_data is None:
raise AirflowException(Constants.BTEQ_UNEXPECTED_ERROR_MSG)
decoded_line = ""
for line in stdout_data.splitlines():
try:
decoded_line = line.decode("UTF-8").strip()
except UnicodeDecodeError:
self.log.warning("Failed to decode line: %s", line)
if "Failure" in decoded_line or "Error" in decoded_line:
failure_message = decoded_line
# Raising an exception if there is any failure in bteq and also user wants to fail the
# task otherwise just log the error message as warning to not fail the task.
if (
failure_message
and process.returncode != 0
and process.returncode
not in (
bteq_quit_rc
if isinstance(bteq_quit_rc, (list, tuple))
else [bteq_quit_rc if bteq_quit_rc is not None else 0]
)
):
raise AirflowException(f"{Constants.BTEQ_UNEXPECTED_ERROR_MSG}: {failure_message}")
if failure_message:
self.log.warning(failure_message)
return process.returncode
def on_kill(self):
"""Terminate the subprocess if running."""
conn = self.get_conn()
process = conn.get("sp")
if process:
try:
process.terminate()
process.wait(timeout=5)
except subprocess.TimeoutExpired:
self.log.warning("Subprocess did not terminate in time. Forcing kill...")
process.kill()
except Exception as e:
self.log.error("%s : %s", Constants.BTEQ_UNEXPECTED_ERROR_MSG, str(e))
def get_airflow_home_dir(self) -> str:
"""Get the AIRFLOW_HOME directory."""
return os.environ.get("AIRFLOW_HOME", "~/airflow")
@contextmanager
def preferred_temp_directory(self, prefix="bteq_"):
try:
temp_dir = tempfile.gettempdir()
if not os.path.isdir(temp_dir) or not os.access(temp_dir, os.W_OK):
raise OSError(
f"Failed to execute the BTEQ script due to Temporary directory {temp_dir} is not writable."
)
except Exception:
temp_dir = self.get_airflow_home_dir()
with tempfile.TemporaryDirectory(dir=temp_dir, prefix=prefix) as tmp:
yield tmp
| {
"repo_id": "apache/airflow",
"file_path": "providers/teradata/src/airflow/providers/teradata/hooks/bteq.py",
"license": "Apache License 2.0",
"lines": 312,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/teradata/src/airflow/providers/teradata/hooks/ttu.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import subprocess
from abc import ABC
from typing import Any
from airflow.providers.common.compat.sdk import AirflowException, BaseHook
class TtuHook(BaseHook, ABC):
"""
Abstract base hook for integrating Teradata Tools and Utilities (TTU) in Airflow.
This hook provides common connection handling, resource management, and lifecycle
support for TTU based operations such as BTEQ, TLOAD, and TPT.
It should not be used directly. Instead, it must be subclassed by concrete hooks
like `BteqHook`, `TloadHook`, or `TddlHook` that implement the actual TTU command logic.
Core Features:
- Establishes a reusable Teradata connection configuration.
- Provides context management for safe resource cleanup.
- Manages subprocess termination (e.g., for long-running TTU jobs).
Requirements:
- TTU command-line tools must be installed and accessible via PATH.
- A valid Airflow connection with Teradata credentials must be configured.
"""
def __init__(self, teradata_conn_id: str = "teradata_default", *args, **kwargs) -> None:
super().__init__()
self.teradata_conn_id = teradata_conn_id
self.conn: dict[str, Any] | None = None
def __enter__(self):
return self
def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
if self.conn is not None:
self.close_conn()
def get_conn(self) -> dict[str, Any]:
"""
Set up and return a Teradata connection dictionary.
This dictionary includes connection credentials and a subprocess placeholder.
Ensures connection is created only once per hook instance.
:return: Dictionary with connection details.
"""
if not self.conn:
connection = self.get_connection(self.teradata_conn_id)
if not connection.login or not connection.password or not connection.host:
raise AirflowException("Missing required connection parameters: login, password, or host.")
self.conn = dict(
login=connection.login,
password=connection.password,
host=connection.host,
database=connection.schema,
sp=None, # Subprocess placeholder
)
return self.conn
def close_conn(self):
"""Terminate any active TTU subprocess and clear the connection."""
if self.conn:
if self.conn.get("sp") and self.conn["sp"].poll() is None:
self.conn["sp"].terminate()
try:
self.conn["sp"].wait(timeout=5)
except subprocess.TimeoutExpired:
self.log.warning("Subprocess did not terminate in time. Forcing kill...")
self.conn["sp"].kill()
self.conn = None
| {
"repo_id": "apache/airflow",
"file_path": "providers/teradata/src/airflow/providers/teradata/hooks/ttu.py",
"license": "Apache License 2.0",
"lines": 76,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/teradata/src/airflow/providers/teradata/operators/bteq.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Literal
from airflow.providers.teradata.utils.bteq_util import (
is_valid_encoding,
is_valid_file,
is_valid_remote_bteq_script_file,
prepare_bteq_script_for_local_execution,
prepare_bteq_script_for_remote_execution,
read_file,
)
from airflow.providers.teradata.utils.constants import Constants
if TYPE_CHECKING:
from paramiko import SSHClient
from airflow.providers.common.compat.sdk import Context
from airflow.providers.common.compat.sdk import BaseOperator
from airflow.providers.ssh.hooks.ssh import SSHHook
from airflow.providers.teradata.hooks.bteq import BteqHook
from airflow.providers.teradata.hooks.teradata import TeradataHook
def contains_template(parameter_value):
# Check if the parameter contains Jinja templating syntax
return "{{" in parameter_value and "}}" in parameter_value
class BteqOperator(BaseOperator):
"""
Teradata Operator to execute SQL Statements or BTEQ (Basic Teradata Query) scripts using Teradata BTEQ utility.
This supports execution of BTEQ scripts either locally or remotely via SSH.
The BTEQ scripts are used to interact with Teradata databases, allowing users to perform
operations such as querying, data manipulation, and administrative tasks.
Features:
- Supports both local and remote execution of BTEQ scripts.
- Handles connection details, script preparation, and execution.
- Provides robust error handling and logging for debugging.
- Allows configuration of session parameters like session and BTEQ I/O encoding.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:BteqOperator`
:param sql: SQL statement(s) to be executed using BTEQ. (templated)
:param file_path: Optional path to an existing SQL or BTEQ script file. If provided, this file will be used instead of the `sql` content. This path represents remote file path when executing remotely via SSH, or local file path when executing locally.
:param teradata_conn_id: Reference to a specific Teradata connection.
:param ssh_conn_id: Optional SSH connection ID for remote execution. Used only when executing scripts remotely.
:param remote_working_dir: Temporary directory location on the remote host (via SSH) where the BTEQ script will be transferred and executed. Defaults to `/tmp` if not specified. This is only applicable when `ssh_conn_id` is provided.
:param bteq_session_encoding: Character set encoding for the BTEQ session. Defaults to ASCII if not specified.
:param bteq_script_encoding: Character encoding for the BTEQ script file. Defaults to ASCII if not specified.
:param bteq_quit_rc: Accepts a single integer, list, or tuple of return codes. Specifies which BTEQ return codes should be treated as successful, allowing subsequent tasks to continue execution.
:param timeout: Timeout (in seconds) for executing the BTEQ command. Default is 600 seconds (10 minutes).
:param timeout_rc: Return code to use if the BTEQ execution fails due to a timeout. To allow DAG execution to continue after a timeout, include this value in `bteq_quit_rc`. If not specified, a timeout will raise an exception and stop the DAG.
"""
template_fields = "sql"
ui_color = "#ff976d"
def __init__(
self,
*,
sql: str | None = None,
file_path: str | None = None,
teradata_conn_id: str = TeradataHook.default_conn_name,
ssh_conn_id: str | None = None,
remote_working_dir: str | None = None,
bteq_session_encoding: str | None = None,
bteq_script_encoding: str | None = None,
bteq_quit_rc: int | list[int] | tuple[int, ...] | None = None,
timeout: int | Literal[600] = 600, # Default to 10 minutes
timeout_rc: int | None = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.sql = sql
self.file_path = file_path
self.teradata_conn_id = teradata_conn_id
self.ssh_conn_id = ssh_conn_id
self.remote_working_dir = remote_working_dir
self.timeout = timeout
self.timeout_rc = timeout_rc
self.bteq_session_encoding = bteq_session_encoding
self.bteq_script_encoding = bteq_script_encoding
self.bteq_quit_rc = bteq_quit_rc
self._hook: BteqHook | None = None
self._ssh_hook: SSHHook | None = None
self.temp_file_read_encoding = "UTF-8"
def execute(self, context: Context) -> int | None:
"""Execute BTEQ code using the BteqHook."""
if not self.sql and not self.file_path:
raise ValueError(Constants.BTEQ_MISSED_PARAMS)
self._hook = BteqHook(teradata_conn_id=self.teradata_conn_id, ssh_conn_id=self.ssh_conn_id)
self._ssh_hook = SSHHook(ssh_conn_id=self.ssh_conn_id) if self.ssh_conn_id else None
# Validate and set BTEQ session and script encoding
if not self.bteq_session_encoding or self.bteq_session_encoding == "ASCII":
self.bteq_session_encoding = ""
if self.bteq_script_encoding == "UTF8":
self.temp_file_read_encoding = "UTF-8"
elif self.bteq_script_encoding == "UTF16":
self.temp_file_read_encoding = "UTF-16"
self.bteq_script_encoding = ""
elif self.bteq_session_encoding == "UTF8" and (
not self.bteq_script_encoding or self.bteq_script_encoding == "ASCII"
):
self.bteq_script_encoding = "UTF8"
elif self.bteq_session_encoding == "UTF16":
if not self.bteq_script_encoding or self.bteq_script_encoding == "ASCII":
self.bteq_script_encoding = "UTF8"
# for file reading in python. Mapping BTEQ encoding to Python encoding
if self.bteq_script_encoding == "UTF8":
self.temp_file_read_encoding = "UTF-8"
elif self.bteq_script_encoding == "UTF16":
self.temp_file_read_encoding = "UTF-16"
# Handling execution on local:
if not self._ssh_hook:
if self.sql:
bteq_script = prepare_bteq_script_for_local_execution(
sql=self.sql,
)
return self._hook.execute_bteq_script(
bteq_script,
self.remote_working_dir,
self.bteq_script_encoding,
self.timeout,
self.timeout_rc,
self.bteq_session_encoding,
self.bteq_quit_rc,
self.temp_file_read_encoding,
)
if self.file_path:
if not is_valid_file(self.file_path):
raise ValueError(Constants.BTEQ_INVALID_PATH % self.file_path)
try:
is_valid_encoding(self.file_path, self.temp_file_read_encoding or "UTF-8")
except UnicodeDecodeError as e:
errmsg = Constants.BTEQ_INVALID_CHARSET % (self.file_path, "UTF-8")
if self.bteq_script_encoding:
errmsg = Constants.BTEQ_INVALID_CHARSET % (self.file_path, self.bteq_script_encoding)
raise ValueError(errmsg) from e
return self._handle_local_bteq_file(
file_path=self.file_path,
context=context,
)
# Execution on Remote machine
elif self._ssh_hook:
# When sql statement is provided as input through sql parameter, Preparing the bteq script
if self.sql:
bteq_script = prepare_bteq_script_for_remote_execution(
conn=self._hook.get_conn(),
sql=self.sql,
)
return self._hook.execute_bteq_script(
bteq_script,
self.remote_working_dir,
self.bteq_script_encoding,
self.timeout,
self.timeout_rc,
self.bteq_session_encoding,
self.bteq_quit_rc,
self.temp_file_read_encoding,
)
if self.file_path:
with self._ssh_hook.get_conn() as ssh_client:
# When .sql or .bteq remote file path is provided as input through file_path parameter, executing on remote machine
if self.file_path and is_valid_remote_bteq_script_file(ssh_client, self.file_path):
return self._handle_remote_bteq_file(
ssh_client=self._ssh_hook.get_conn(),
file_path=self.file_path,
context=context,
)
raise ValueError(Constants.BTEQ_REMOTE_FILE_PATH_INVALID % self.file_path)
else:
raise ValueError(Constants.BTEQ_MISSED_PARAMS)
return None
def _handle_remote_bteq_file(
self,
ssh_client: SSHClient,
file_path: str | None,
context: Context,
) -> int | None:
if file_path:
with ssh_client:
sftp = ssh_client.open_sftp()
try:
with sftp.open(file_path, "r") as remote_file:
original_content = remote_file.read().decode(self.temp_file_read_encoding or "UTF-8")
finally:
sftp.close()
rendered_content = original_content
if contains_template(original_content):
rendered_content = self.render_template(original_content, context)
if self._hook:
bteq_script = prepare_bteq_script_for_remote_execution(
conn=self._hook.get_conn(),
sql=rendered_content,
)
return self._hook.execute_bteq_script_at_remote(
bteq_script,
self.remote_working_dir,
self.bteq_script_encoding,
self.timeout,
self.timeout_rc,
self.bteq_session_encoding,
self.bteq_quit_rc,
self.temp_file_read_encoding,
)
return None
raise ValueError(Constants.BTEQ_MISSED_PARAMS)
def _handle_local_bteq_file(
self,
file_path: str,
context: Context,
) -> int | None:
if file_path and is_valid_file(file_path):
file_content = read_file(file_path, encoding=str(self.temp_file_read_encoding or "UTF-8"))
# Manually render using operator's context
rendered_content = file_content
if contains_template(file_content):
rendered_content = self.render_template(file_content, context)
bteq_script = prepare_bteq_script_for_local_execution(
sql=rendered_content,
)
if self._hook:
result = self._hook.execute_bteq_script(
bteq_script,
self.remote_working_dir,
self.bteq_script_encoding,
self.timeout,
self.timeout_rc,
self.bteq_session_encoding,
self.bteq_quit_rc,
self.temp_file_read_encoding,
)
return result
return None
def on_kill(self) -> None:
"""Handle task termination by invoking the on_kill method of BteqHook."""
if self._hook:
self._hook.on_kill()
else:
self.log.warning("BteqHook was not initialized. Nothing to terminate.")
| {
"repo_id": "apache/airflow",
"file_path": "providers/teradata/src/airflow/providers/teradata/operators/bteq.py",
"license": "Apache License 2.0",
"lines": 248,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/teradata/src/airflow/providers/teradata/utils/bteq_util.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
import shutil
import stat
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from paramiko import SSHClient
from airflow.providers.common.compat.sdk import AirflowException
def identify_os(ssh_client: SSHClient) -> str:
stdin, stdout, stderr = ssh_client.exec_command("uname || ver")
return stdout.read().decode().lower()
def verify_bteq_installed():
"""Verify if BTEQ is installed and available in the system's PATH."""
if shutil.which("bteq") is None:
raise AirflowException("BTEQ is not installed or not available in the system's PATH.")
def verify_bteq_installed_remote(ssh_client: SSHClient):
"""Verify if BTEQ is installed on the remote machine."""
# Detect OS
os_info = identify_os(ssh_client)
if "windows" in os_info:
check_cmd = "where bteq"
elif "darwin" in os_info:
# Check if zsh exists first
stdin, stdout, stderr = ssh_client.exec_command("command -v zsh")
zsh_path = stdout.read().strip()
if zsh_path:
check_cmd = 'zsh -l -c "which bteq"'
else:
check_cmd = "which bteq"
else:
check_cmd = "which bteq"
stdin, stdout, stderr = ssh_client.exec_command(check_cmd)
exit_status = stdout.channel.recv_exit_status()
output = stdout.read().strip()
error = stderr.read().strip()
if exit_status != 0 or not output:
raise AirflowException(
f"BTEQ is not installed or not available in PATH. stderr: {error.decode() if error else 'N/A'}"
)
def transfer_file_sftp(ssh_client, local_path, remote_path):
sftp = ssh_client.open_sftp()
sftp.put(local_path, remote_path)
sftp.close()
def get_remote_tmp_dir(ssh_client):
os_info = identify_os(ssh_client)
if "windows" in os_info:
# Try getting Windows temp dir
stdin, stdout, stderr = ssh_client.exec_command("echo %TEMP%")
tmp_dir = stdout.read().decode().strip()
if not tmp_dir:
tmp_dir = "C:\\Temp"
else:
tmp_dir = "/tmp"
return tmp_dir
# We can not pass host details with bteq command when executing on remote machine. Instead, we will prepare .logon in bteq script itself to avoid risk of
# exposing sensitive information
def prepare_bteq_script_for_remote_execution(conn: dict[str, Any], sql: str) -> str:
"""Build a BTEQ script with necessary connection and session commands."""
script_lines = []
host = conn["host"]
login = conn["login"]
password = conn["password"]
script_lines.append(f" .LOGON {host}/{login},{password}")
return _prepare_bteq_script(script_lines, sql)
def prepare_bteq_script_for_local_execution(
sql: str,
) -> str:
"""Build a BTEQ script with necessary connection and session commands."""
script_lines: list[str] = []
return _prepare_bteq_script(script_lines, sql)
def _prepare_bteq_script(script_lines: list[str], sql: str) -> str:
script_lines.append(sql.strip())
script_lines.append(".EXIT")
return "\n".join(script_lines)
def _prepare_bteq_command(
timeout: int,
bteq_script_encoding: str,
bteq_session_encoding: str,
timeout_rc: int,
) -> list[str]:
cmd = ["bteq"]
if bteq_session_encoding and bteq_script_encoding:
cmd.extend(["-e", bteq_script_encoding])
cmd.extend(["-c", bteq_session_encoding])
script_parts = [f".SET EXITONDELAY ON MAXREQTIME {timeout}"]
if timeout_rc is not None and timeout_rc >= 0:
script_parts.append(f"RC {timeout_rc}")
script_parts.append(";")
# Airflow doesn't display the script of BTEQ in UI but only in log so WIDTH is 500 enough
script_parts.append(".SET WIDTH 500;")
cmd.append(" ".join(script_parts))
return cmd
def prepare_bteq_command_for_remote_execution(
timeout: int,
bteq_script_encoding: str,
bteq_session_encoding: str,
timeout_rc: int,
) -> str:
"""Prepare the BTEQ command with necessary parameters."""
cmd = _prepare_bteq_command(timeout, bteq_script_encoding, bteq_session_encoding, timeout_rc)
cmd.append('"')
return " ".join(cmd)
def prepare_bteq_command_for_local_execution(
conn: dict[str, Any],
timeout: int,
bteq_script_encoding: str,
bteq_session_encoding: str,
timeout_rc: int,
) -> list[str]:
"""Prepare the BTEQ command with necessary parameters."""
cmd = _prepare_bteq_command(timeout, bteq_script_encoding, bteq_session_encoding, timeout_rc)
host = conn["host"]
login = conn["login"]
password = conn["password"]
cmd[-1] += f" .LOGON {host}/{login},{password}"
return cmd
def is_valid_file(file_path: str) -> bool:
return os.path.isfile(file_path)
def is_valid_encoding(file_path: str, encoding: str = "UTF-8") -> bool:
"""
Check if the file can be read with the specified encoding.
:param file_path: Path to the file to be checked.
:param encoding: Encoding to use for reading the file.
:return: True if the file can be read with the specified encoding, False otherwise.
"""
with open(file_path, encoding=encoding) as f:
f.read()
return True
def read_file(file_path: str, encoding: str = "UTF-8") -> str:
"""
Read the content of a file with the specified encoding.
:param file_path: Path to the file to be read.
:param encoding: Encoding to use for reading the file.
:return: Content of the file as a string.
"""
if not os.path.isfile(file_path):
raise FileNotFoundError(f"The file {file_path} does not exist.")
with open(file_path, encoding=encoding) as f:
return f.read()
def is_valid_remote_bteq_script_file(ssh_client: SSHClient, remote_file_path: str, logger=None) -> bool:
"""Check if the given remote file path is a valid BTEQ script file."""
if remote_file_path:
sftp_client = ssh_client.open_sftp()
try:
# Get file metadata
file_stat = sftp_client.stat(remote_file_path)
if file_stat.st_mode:
is_regular_file = stat.S_ISREG(file_stat.st_mode)
return is_regular_file
return False
except FileNotFoundError:
if logger:
logger.error("File does not exist on remote at : %s", remote_file_path)
return False
finally:
sftp_client.close()
else:
return False
| {
"repo_id": "apache/airflow",
"file_path": "providers/teradata/src/airflow/providers/teradata/utils/bteq_util.py",
"license": "Apache License 2.0",
"lines": 175,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/teradata/src/airflow/providers/teradata/utils/encryption_utils.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import secrets
import string
import subprocess
def generate_random_password(length=12):
# Define the character set: letters, digits, and special characters
characters = string.ascii_letters + string.digits + string.punctuation
# Generate a random password
password = "".join(secrets.choice(characters) for _ in range(length))
return password
def generate_encrypted_file_with_openssl(file_path: str, password: str, out_file: str):
# Write plaintext temporarily to file
# Run openssl enc with AES-256-CBC, pbkdf2, salt
cmd = [
"openssl",
"enc",
"-aes-256-cbc",
"-salt",
"-pbkdf2",
"-pass",
f"pass:{password}",
"-in",
file_path,
"-out",
out_file,
]
subprocess.run(cmd, check=True)
def decrypt_remote_file_to_string(ssh_client, remote_enc_file, password, bteq_command_str):
# Run openssl decrypt command on remote machine
quoted_password = shell_quote_single(password)
decrypt_cmd = (
f"openssl enc -d -aes-256-cbc -salt -pbkdf2 -pass pass:{quoted_password} -in {remote_enc_file} | "
+ bteq_command_str
)
# Clear password to prevent lingering sensitive data
password = None
quoted_password = None
stdin, stdout, stderr = ssh_client.exec_command(decrypt_cmd)
# Wait for command to finish
exit_status = stdout.channel.recv_exit_status()
output = stdout.read().decode()
err = stderr.read().decode()
return exit_status, output, err
def shell_quote_single(s):
# Escape single quotes in s, then wrap in single quotes
# In shell, to include a single quote inside single quotes, close, add '\'' and reopen
return "'" + s.replace("'", "'\\''") + "'"
| {
"repo_id": "apache/airflow",
"file_path": "providers/teradata/src/airflow/providers/teradata/utils/encryption_utils.py",
"license": "Apache License 2.0",
"lines": 64,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/teradata/tests/system/teradata/example_bteq.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example Airflow DAG to show usage of BteqOperator.
This DAG assumes Airflow Connection with connection id `TTU_DEFAULT` already exists in locally. It
shows how to use Teradata BTEQ commands with BteqOperator as tasks in
airflow dags using BteqeOperator.
"""
from __future__ import annotations
import datetime
import os
import pytest
from airflow import DAG
try:
from airflow.providers.teradata.operators.bteq import BteqOperator
except ImportError:
pytest.skip("TERADATA provider not available", allow_module_level=True)
# [START bteq_operator_howto_guide]
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
DAG_ID = "example_bteq"
CONN_ID = "teradata_default"
SSH_CONN_ID = "ssh_default"
host = os.environ.get("host", "localhost")
username = os.environ.get("username", "temp")
password = os.environ.get("password", "temp")
params = {
"host": host,
"username": username,
"password": password,
"DATABASE_NAME": "airflow",
"TABLE_NAME": "my_employees",
"DB_TABLE_NAME": "airflow.my_employees",
}
with DAG(
dag_id=DAG_ID,
start_date=datetime.datetime(2020, 2, 2),
schedule="@once",
catchup=False,
default_args={"teradata_conn_id": CONN_ID, "params": params},
) as dag:
# [START bteq_operator_howto_guide_create_table]
create_table = BteqOperator(
task_id="create_table",
sql=r"""
CREATE SET TABLE {{params.DB_TABLE_NAME}} (
emp_id INT,
emp_name VARCHAR(100),
dept VARCHAR(50)
) PRIMARY INDEX (emp_id);
""",
bteq_quit_rc=[0, 4],
timeout=20,
bteq_session_encoding="UTF8",
bteq_script_encoding="UTF8",
params=params,
)
# [END bteq_operator_howto_guide_create_table]
# [START bteq_operator_howto_guide_populate_table]
populate_table = BteqOperator(
task_id="populate_table",
sql=r"""
INSERT INTO {{params.DB_TABLE_NAME}} VALUES (1, 'John Doe', 'IT');
INSERT INTO {{params.DB_TABLE_NAME}} VALUES (2, 'Jane Smith', 'HR');
""",
params=params,
bteq_session_encoding="UTF8",
bteq_quit_rc=0,
)
# [END bteq_operator_howto_guide_populate_table]
# [START bteq_operator_howto_guide_export_data_to_a_file]
export_to_a_file = BteqOperator(
task_id="export_to_a_file",
sql=r"""
.EXPORT FILE = employees_output.txt;
SELECT * FROM {{params.DB_TABLE_NAME}};
.EXPORT RESET;
""",
bteq_session_encoding="UTF16",
)
# [END bteq_operator_howto_guide_export_data_to_a_file]
# [START bteq_operator_howto_guide_get_it_employees]
get_it_employees = BteqOperator(
task_id="get_it_employees",
sql=r"""
SELECT * FROM {{params.DB_TABLE_NAME}} WHERE dept = 'IT';
""",
bteq_session_encoding="ASCII",
)
# [END bteq_operator_howto_guide_get_it_employees]
# [START bteq_operator_howto_guide_conditional_logic]
cond_logic = BteqOperator(
task_id="cond_logic",
sql=r"""
.IF ERRORCODE <> 0 THEN .GOTO handle_error;
SELECT COUNT(*) FROM {{params.DB_TABLE_NAME}};
.LABEL handle_error;
""",
bteq_script_encoding="UTF8",
)
# [END bteq_operator_howto_guide_conditional_logic]
# [START bteq_operator_howto_guide_error_handling]
error_handling = BteqOperator(
task_id="error_handling",
sql=r"""
DROP TABLE my_temp;
.IF ERRORCODE = 3807 THEN .GOTO table_not_found;
SELECT 'Table dropped successfully.';
.GOTO end;
.LABEL table_not_found;
SELECT 'Table not found - continuing execution';
.LABEL end;
.LOGOFF;
.QUIT 0;
""",
bteq_script_encoding="UTF16",
)
# [END bteq_operator_howto_guide_error_handling]
# [START bteq_operator_howto_guide_drop_table]
drop_table = BteqOperator(
task_id="drop_table",
sql=r"""
DROP TABLE {{params.DB_TABLE_NAME}};
.IF ERRORCODE = 3807 THEN .GOTO end;
.LABEL end;
.LOGOFF;
.QUIT 0;
""",
bteq_script_encoding="ASCII",
)
# [END bteq_operator_howto_guide_drop_table]
# [START bteq_operator_howto_guide_bteq_file_input]
execute_bteq_file = BteqOperator(
task_id="execute_bteq_file",
file_path="providers/teradata/tests/system/teradata/script.bteq",
params=params,
)
# [END bteq_operator_howto_guide_bteq_file_input]
# [START bteq_operator_howto_guide_bteq_file_utf8_input]
execute_bteq_utf8_file = BteqOperator(
task_id="execute_bteq_utf8_file",
file_path="providers/teradata/tests/system/teradata/script.bteq",
params=params,
bteq_script_encoding="UTF8",
)
# [END bteq_operator_howto_guide_bteq_file_utf8_input]
# [START bteq_operator_howto_guide_bteq_file_utf8_session_ascii_input]
execute_bteq_utf8_session_ascii_file = BteqOperator(
task_id="execute_bteq_utf8_session_ascii_file",
file_path="providers/teradata/tests/system/teradata/script.bteq",
params=params,
bteq_script_encoding="UTF8",
bteq_session_encoding="ASCII",
)
# [END bteq_operator_howto_guide_bteq_file_utf8_session_ascii_input]
# [START bteq_operator_howto_guide_bteq_file_utf8_session_utf8_input]
execute_bteq_utf8_session_utf8_file = BteqOperator(
task_id="execute_bteq_utf8_session_utf8_file",
file_path="providers/teradata/tests/system/teradata/script.bteq",
params=params,
bteq_script_encoding="UTF8",
bteq_session_encoding="UTF8",
)
# [END bteq_operator_howto_guide_bteq_file_utf8_session_utf8_input]
# [START bteq_operator_howto_guide_bteq_file_utf8_session_utf16_input]
execute_bteq_utf8_session_utf16_file = BteqOperator(
task_id="execute_bteq_utf8_session_utf16_file",
file_path="providers/teradata/tests/system/teradata/script.bteq",
params=params,
bteq_script_encoding="UTF8",
bteq_session_encoding="UTF16",
)
# [END bteq_operator_howto_guide_bteq_file_utf8_session_utf16_input]
# [START bteq_operator_howto_guide_bteq_file_utf16_input]
execute_bteq_utf16_file = BteqOperator(
task_id="execute_bteq_utf16_file",
file_path="providers/teradata/tests/system/teradata/script_utf16.bteq",
params=params,
bteq_script_encoding="UTF16",
)
# [END bteq_operator_howto_guide_bteq_file_utf16_input]
# [START bteq_operator_howto_guide_bteq_file_utf16_input]
execute_bteq_utf16_session_ascii_file = BteqOperator(
task_id="execute_bteq_utf16_session_ascii_file",
file_path="providers/teradata/tests/system/teradata/script_utf16.bteq",
params=params,
bteq_script_encoding="UTF16",
bteq_session_encoding="ASCII",
)
# [END bteq_operator_howto_guide_bteq_file_utf16_input]
# [START bteq_operator_howto_guide_bteq_file_utf16_session_utf8_input]
execute_bteq_utf16_session_utf8_file = BteqOperator(
task_id="execute_bteq_utf16_session_utf8_file",
file_path="providers/teradata/tests/system/teradata/script_utf16.bteq",
params=params,
bteq_script_encoding="UTF16",
bteq_session_encoding="UTF8",
)
# [END bteq_operator_howto_guide_bteq_file_utf16_session_utf8_input]
# [START bteq_operator_howto_guide_bteq_file_utf16_session_utf8_input]
execute_bteq_utf16_session_utf16_file = BteqOperator(
task_id="execute_bteq_utf16_session_utf16_file",
file_path="providers/teradata/tests/system/teradata/script_utf16.bteq",
params=params,
bteq_script_encoding="UTF16",
bteq_session_encoding="UTF16",
)
# [END bteq_operator_howto_guide_bteq_file_utf16_session_utf8_input]
(
create_table
>> populate_table
>> export_to_a_file
>> get_it_employees
>> cond_logic
>> error_handling
>> drop_table
>> execute_bteq_file
>> execute_bteq_utf8_file
>> execute_bteq_utf8_session_ascii_file
>> execute_bteq_utf8_session_utf8_file
>> execute_bteq_utf8_session_utf16_file
>> execute_bteq_utf16_file
>> execute_bteq_utf16_session_ascii_file
>> execute_bteq_utf16_session_utf8_file
>> execute_bteq_utf16_session_utf16_file
)
# [END bteq_operator_howto_guide]
from tests_common.test_utils.watcher import watcher
# This test needs watcher in order to properly mark success/failure
# when "tearDown" task with trigger rule is part of the DAG
list(dag.tasks) >> watcher()
from tests_common.test_utils.system_tests import get_test_run # noqa: E402
# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/teradata/tests/system/teradata/example_bteq.py",
"license": "Apache License 2.0",
"lines": 248,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/teradata/tests/system/teradata/example_remote_bteq.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example Airflow DAG to show usage of BteqOperator.
This DAG assumes Airflow Connection with connection id `TTU_DEFAULT` already exists in locally. It
shows how to use Teradata BTEQ commands with BteqOperator as tasks in
airflow dags using BteqeOperator.
"""
from __future__ import annotations
import datetime
import os
import pytest
from airflow import DAG
try:
from airflow.providers.teradata.operators.bteq import BteqOperator
except ImportError:
pytest.skip("TERADATA provider not available", allow_module_level=True)
# [START bteq_operator_howto_guide]
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
DAG_ID = "example_remote_bteq"
CONN_ID = "teradata_default"
SSH_CONN_ID = "ssh_default"
host = os.environ.get("host", "localhost")
username = os.environ.get("username", "temp")
password = os.environ.get("password", "temp")
params = {
"host": host,
"username": username,
"password": password,
"DATABASE_NAME": "airflow",
"TABLE_NAME": "my_employees",
"DB_TABLE_NAME": "airflow.my_employees",
}
with DAG(
dag_id=DAG_ID,
start_date=datetime.datetime(2020, 2, 2),
schedule="@once",
catchup=False,
default_args={"teradata_conn_id": CONN_ID, "params": params, "ssh_conn_id": SSH_CONN_ID},
) as dag:
# [START bteq_operator_howto_guide_create_table]
create_table = BteqOperator(
task_id="create_table",
sql=r"""
CREATE SET TABLE {{params.DB_TABLE_NAME}} (
emp_id INT,
emp_name VARCHAR(100),
dept VARCHAR(50)
) PRIMARY INDEX (emp_id);
""",
bteq_quit_rc=[0, 4],
timeout=20,
bteq_session_encoding="UTF8",
bteq_script_encoding="UTF8",
params=params,
)
# [END bteq_operator_howto_guide_create_table]
# [START bteq_operator_howto_guide_populate_table]
populate_table = BteqOperator(
task_id="populate_table",
sql=r"""
INSERT INTO {{params.DB_TABLE_NAME}} VALUES (1, 'John Doe', 'IT');
INSERT INTO {{params.DB_TABLE_NAME}} VALUES (2, 'Jane Smith', 'HR');
""",
params=params,
bteq_session_encoding="UTF8",
bteq_quit_rc=0,
)
# [END bteq_operator_howto_guide_populate_table]
# [START bteq_operator_howto_guide_export_data_to_a_file]
export_to_a_file = BteqOperator(
task_id="export_to_a_file",
sql=r"""
.EXPORT FILE = employees_output.txt;
SELECT * FROM {{params.DB_TABLE_NAME}};
.EXPORT RESET;
""",
bteq_session_encoding="UTF16",
)
# [END bteq_operator_howto_guide_export_data_to_a_file]
# [START bteq_operator_howto_guide_get_it_employees]
get_it_employees = BteqOperator(
task_id="get_it_employees",
sql=r"""
SELECT * FROM {{params.DB_TABLE_NAME}} WHERE dept = 'IT';
""",
bteq_session_encoding="ASCII",
)
# [END bteq_operator_howto_guide_get_it_employees]
# [START bteq_operator_howto_guide_conditional_logic]
cond_logic = BteqOperator(
task_id="cond_logic",
sql=r"""
.IF ERRORCODE <> 0 THEN .GOTO handle_error;
SELECT COUNT(*) FROM {{params.DB_TABLE_NAME}};
.LABEL handle_error;
""",
bteq_script_encoding="UTF8",
)
# [END bteq_operator_howto_guide_conditional_logic]
# [START bteq_operator_howto_guide_error_handling]
error_handling = BteqOperator(
task_id="error_handling",
sql=r"""
DROP TABLE my_temp;
.IF ERRORCODE = 3807 THEN .GOTO table_not_found;
SELECT 'Table dropped successfully.';
.GOTO end;
.LABEL table_not_found;
SELECT 'Table not found - continuing execution';
.LABEL end;
.LOGOFF;
.QUIT 0;
""",
bteq_script_encoding="UTF16",
)
# [END bteq_operator_howto_guide_error_handling]
# [START bteq_operator_howto_guide_drop_table]
drop_table = BteqOperator(
task_id="drop_table",
sql=r"""
DROP TABLE {{params.DB_TABLE_NAME}};
.IF ERRORCODE = 3807 THEN .GOTO end;
.LABEL end;
.LOGOFF;
.QUIT 0;
""",
bteq_script_encoding="ASCII",
)
# [END bteq_operator_howto_guide_drop_table]
# [START bteq_operator_howto_guide_bteq_file_input]
execute_bteq_file = BteqOperator(
task_id="execute_bteq_file",
file_path="/home/devtools/satish/airflow/script.bteq",
params=params,
)
# [END bteq_operator_howto_guide_bteq_file_input]
# [START bteq_operator_howto_guide_bteq_file_utf8_input]
execute_bteq_utf8_file = BteqOperator(
task_id="execute_bteq_utf8_file",
file_path="/home/devtools/satish/airflow/script.bteq",
params=params,
bteq_script_encoding="UTF8",
)
# [END bteq_operator_howto_guide_bteq_file_utf8_input]
# [START bteq_operator_howto_guide_bteq_file_utf8_session_ascii_input]
execute_bteq_utf8_session_ascii_file = BteqOperator(
task_id="execute_bteq_utf8_session_ascii_file",
file_path="/home/devtools/satish/airflow/script.bteq",
params=params,
bteq_script_encoding="UTF8",
bteq_session_encoding="ASCII",
)
# [END bteq_operator_howto_guide_bteq_file_utf8_session_ascii_input]
# [START bteq_operator_howto_guide_bteq_file_utf8_session_utf8_input]
execute_bteq_utf8_session_utf8_file = BteqOperator(
task_id="execute_bteq_utf8_session_utf8_file",
file_path="/home/devtools/satish/airflow/script.bteq",
params=params,
bteq_script_encoding="UTF8",
bteq_session_encoding="UTF8",
)
# [END bteq_operator_howto_guide_bteq_file_utf8_session_utf8_input]
# [START bteq_operator_howto_guide_bteq_file_utf8_session_utf16_input]
execute_bteq_utf8_session_utf16_file = BteqOperator(
task_id="execute_bteq_utf8_session_utf16_file",
file_path="/home/devtools/satish/airflow/script.bteq",
params=params,
bteq_script_encoding="UTF8",
bteq_session_encoding="UTF16",
)
# [END bteq_operator_howto_guide_bteq_file_utf8_session_utf16_input]
# [START bteq_operator_howto_guide_bteq_file_utf16_input]
execute_bteq_utf16_file = BteqOperator(
task_id="execute_bteq_utf16_file",
file_path="/home/devtools/satish/airflow/script_utf16.bteq",
params=params,
bteq_script_encoding="UTF16",
)
# [END bteq_operator_howto_guide_bteq_file_utf16_input]
# [START bteq_operator_howto_guide_bteq_file_utf16_input]
execute_bteq_utf16_session_ascii_file = BteqOperator(
task_id="execute_bteq_utf16_session_ascii_file",
file_path="/home/devtools/satish/airflow/script_utf16.bteq",
params=params,
bteq_script_encoding="UTF16",
bteq_session_encoding="ASCII",
)
# [END bteq_operator_howto_guide_bteq_file_utf16_input]
# [START bteq_operator_howto_guide_bteq_file_utf16_session_utf8_input]
execute_bteq_utf16_session_utf8_file = BteqOperator(
task_id="execute_bteq_utf16_session_utf8_file",
file_path="/home/devtools/satish/airflow/script_utf16.bteq",
params=params,
bteq_script_encoding="UTF16",
bteq_session_encoding="UTF8",
)
# [END bteq_operator_howto_guide_bteq_file_utf16_session_utf8_input]
# [START bteq_operator_howto_guide_bteq_file_utf16_session_utf8_input]
execute_bteq_utf16_session_utf16_file = BteqOperator(
task_id="execute_bteq_utf16_session_utf16_file",
file_path="/home/devtools/satish/airflow/script_utf16.bteq",
params=params,
bteq_script_encoding="UTF16",
bteq_session_encoding="UTF16",
)
# [END bteq_operator_howto_guide_bteq_file_utf16_session_utf8_input]
(
create_table
>> populate_table
>> export_to_a_file
>> get_it_employees
>> cond_logic
>> error_handling
>> drop_table
>> execute_bteq_file
>> execute_bteq_utf8_file
>> execute_bteq_utf8_session_ascii_file
>> execute_bteq_utf8_session_utf8_file
>> execute_bteq_utf8_session_utf16_file
>> execute_bteq_utf16_file
>> execute_bteq_utf16_session_ascii_file
>> execute_bteq_utf16_session_utf8_file
>> execute_bteq_utf16_session_utf16_file
)
# [END bteq_operator_howto_guide]
from tests_common.test_utils.watcher import watcher
# This test needs watcher in order to properly mark success/failure
# when "tearDown" task with trigger rule is part of the DAG
list(dag.tasks) >> watcher()
from tests_common.test_utils.system_tests import get_test_run # noqa: E402
# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/teradata/tests/system/teradata/example_remote_bteq.py",
"license": "Apache License 2.0",
"lines": 248,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/teradata/tests/unit/teradata/hooks/test_bteq.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
import subprocess
from unittest.mock import MagicMock, patch
import pytest
from airflow.providers.common.compat.sdk import AirflowException
from airflow.providers.teradata.hooks.bteq import BteqHook
@pytest.fixture
def dummy_bteq_script():
return "SELECT * FROM dbc.tables;"
@pytest.fixture
def dummy_remote_dir():
return "/tmp"
@pytest.fixture
def dummy_encoding():
return "utf-8"
@pytest.fixture
def dummy_password():
return "dummy_password"
@pytest.fixture
def hook_without_ssh():
return BteqHook(ssh_conn_id=None, teradata_conn_id="teradata_conn")
@patch("airflow.providers.teradata.hooks.bteq.SSHHook")
def test_init_sets_ssh_hook(mock_ssh_hook_class):
mock_ssh_instance = MagicMock()
mock_ssh_hook_class.return_value = mock_ssh_instance
hook = BteqHook(ssh_conn_id="ssh_conn_id", teradata_conn_id="teradata_conn")
# Validate the call and assignment
mock_ssh_hook_class.assert_called_once_with(ssh_conn_id="ssh_conn_id")
assert hook.ssh_hook == mock_ssh_instance
@patch("subprocess.Popen")
@patch.object(
BteqHook,
"get_conn",
return_value={
"host": "localhost",
"login": "user",
"password": "pass",
"sp": None,
},
)
@patch("airflow.providers.teradata.utils.bteq_util.verify_bteq_installed")
@patch("airflow.providers.teradata.utils.bteq_util.prepare_bteq_command_for_local_execution")
def test_execute_bteq_script_at_local_timeout(
mock_prepare_cmd,
mock_verify_bteq,
mock_get_conn,
mock_popen,
):
hook = BteqHook(ssh_conn_id=None, teradata_conn_id="teradata_conn")
# Create mock process with timeout simulation
mock_process = MagicMock()
mock_process.communicate.return_value = (b"some output", None)
mock_process.wait.side_effect = subprocess.TimeoutExpired(cmd="bteq_command", timeout=5)
mock_process.returncode = None
mock_popen.return_value = mock_process
mock_prepare_cmd.return_value = "bteq_command"
with pytest.raises(AirflowException):
hook.execute_bteq_script_at_local(
bteq_script="SELECT * FROM test;",
bteq_script_encoding="utf-8",
timeout=5,
timeout_rc=None,
bteq_quit_rc=0,
bteq_session_encoding=None,
temp_file_read_encoding=None,
)
@patch("subprocess.Popen")
@patch.object(
BteqHook,
"get_conn",
return_value={
"host": "localhost",
"login": "user",
"password": "pass",
"sp": None,
},
)
@patch("airflow.providers.teradata.hooks.bteq.verify_bteq_installed")
@patch("airflow.providers.teradata.hooks.bteq.prepare_bteq_command_for_local_execution")
def test_execute_bteq_script_at_local_success(
mock_prepare_cmd,
mock_verify_bteq,
mock_get_conn,
mock_popen,
):
hook = BteqHook(teradata_conn_id="teradata_conn")
mock_process = MagicMock()
mock_process.communicate.return_value = (b"Output line 1\nOutput line 2\n", None)
mock_process.wait.return_value = 0
mock_process.returncode = 0
mock_popen.return_value = mock_process
mock_prepare_cmd.return_value = "bteq_command"
ret_code = hook.execute_bteq_script_at_local(
bteq_script="SELECT * FROM test;",
bteq_script_encoding="utf-8",
timeout=10,
timeout_rc=None,
bteq_quit_rc=0,
bteq_session_encoding=None,
temp_file_read_encoding=None,
)
mock_verify_bteq.assert_called_once()
mock_prepare_cmd.assert_called_once()
mock_popen.assert_called_once_with(
"bteq_command",
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
shell=False,
start_new_session=True,
)
assert ret_code == 0
@patch("subprocess.Popen")
@patch.object(
BteqHook,
"get_conn",
return_value={
"host": "localhost",
"login": "user",
"password": "pass",
"sp": None,
},
)
@patch("airflow.providers.teradata.hooks.bteq.verify_bteq_installed")
@patch("airflow.providers.teradata.hooks.bteq.prepare_bteq_command_for_local_execution")
def test_execute_bteq_script_at_local_failure_raises(
mock_prepare_cmd,
mock_verify_bteq,
mock_get_conn,
mock_popen,
):
hook = BteqHook(ssh_conn_id=None, teradata_conn_id="teradata_conn")
failure_message = "Failure: some error occurred"
mock_process = MagicMock()
# The output contains "Failure"
mock_process.communicate.return_value = (failure_message.encode("utf-8"), None)
mock_process.wait.return_value = 1
mock_process.returncode = 1
mock_popen.return_value = mock_process
mock_prepare_cmd.return_value = "bteq_command"
with pytest.raises(
AirflowException,
match="Failure while executing BTEQ script due to unexpected error.: Failure: some error occurred",
):
hook.execute_bteq_script_at_local(
bteq_script="SELECT * FROM test;",
bteq_script_encoding="utf-8",
timeout=10,
timeout_rc=None,
bteq_quit_rc=0, # 1 is not allowed here
bteq_session_encoding=None,
temp_file_read_encoding=None,
)
@pytest.fixture(autouse=False)
def patch_ssh_hook_class():
# Patch SSHHook where bteq.py imports it
with patch("airflow.providers.teradata.hooks.bteq.SSHHook") as mock_ssh_hook_class:
mock_ssh_instance = MagicMock()
mock_ssh_hook_class.return_value = mock_ssh_instance
yield mock_ssh_hook_class
@pytest.fixture
def hook_with_ssh(patch_ssh_hook_class):
# Now the BteqHook() call will use the patched SSHHook
return BteqHook(ssh_conn_id="ssh_conn_id", teradata_conn_id="teradata_conn")
@patch("airflow.providers.teradata.hooks.bteq.SSHHook")
@patch("airflow.providers.teradata.hooks.bteq.verify_bteq_installed_remote")
@patch("airflow.providers.teradata.hooks.bteq.generate_random_password", return_value="test_password")
@patch("airflow.providers.teradata.hooks.bteq.generate_encrypted_file_with_openssl")
@patch("airflow.providers.teradata.hooks.bteq.transfer_file_sftp")
@patch(
"airflow.providers.teradata.hooks.bteq.prepare_bteq_command_for_remote_execution",
return_value="bteq_command",
)
@patch(
"airflow.providers.teradata.hooks.bteq.decrypt_remote_file_to_string", return_value=(0, ["output"], [])
)
def test_execute_bteq_script_at_remote_success(
mock_decrypt,
mock_prepare_cmd,
mock_transfer,
mock_encrypt,
mock_password,
mock_verify,
mock_ssh_hook_class,
):
# Mock SSHHook instance and its get_conn() context manager
mock_ssh_hook = MagicMock()
mock_ssh_client = MagicMock()
mock_ssh_hook.get_conn.return_value.__enter__.return_value = mock_ssh_client
mock_ssh_hook_class.return_value = mock_ssh_hook
# Mock exec_command to simulate 'uname || ver'
mock_stdin = MagicMock()
mock_stdout = MagicMock()
mock_stderr = MagicMock()
mock_stdout.read.return_value = b"Linux\n"
mock_ssh_client.exec_command.return_value = (mock_stdin, mock_stdout, mock_stderr)
# Instantiate BteqHook
hook = BteqHook(ssh_conn_id="ssh_conn_id", teradata_conn_id="teradata_conn")
# Call method under test
ret_code = hook.execute_bteq_script_at_remote(
bteq_script="SELECT 1;",
remote_working_dir="/tmp",
bteq_script_encoding="utf-8",
timeout=10,
timeout_rc=None,
bteq_session_encoding="utf-8",
bteq_quit_rc=0,
temp_file_read_encoding=None,
)
# Assert mocks called as expected
mock_verify.assert_called_once_with(mock_ssh_client)
mock_password.assert_called_once()
mock_encrypt.assert_called_once()
mock_transfer.assert_called_once()
mock_prepare_cmd.assert_called_once()
mock_decrypt.assert_called_once()
# Assert the return code is what decrypt_remote_file_to_string returns (0 here)
assert ret_code == 0
def test_on_kill_terminates_process(hook_without_ssh):
process_mock = MagicMock()
# Patch the hook's get_conn method to return a dict with the mocked process
with patch.object(hook_without_ssh, "get_conn", return_value={"sp": process_mock}):
hook_without_ssh.on_kill()
process_mock.terminate.assert_called_once()
process_mock.wait.assert_called_once()
def test_on_kill_no_process(hook_without_ssh):
# Mock get_connection to avoid AirflowNotFoundException
with patch.object(hook_without_ssh, "get_connection", return_value={"host": "dummy_host"}):
# Provide a dummy conn dict to avoid errors
with patch.object(hook_without_ssh, "get_conn", return_value={"sp": None}):
# This should not raise any exceptions even if sp (process) is None
hook_without_ssh.on_kill()
@patch("airflow.providers.teradata.hooks.bteq.verify_bteq_installed_remote")
def test_transfer_to_and_execute_bteq_on_remote_ssh_failure(mock_verify, hook_with_ssh):
# Patch get_conn to simulate SSH failure by returning None
hook_with_ssh.ssh_hook.get_conn = MagicMock(return_value=None)
# Patch helper functions used in the tested function to avoid side effects
with (
patch("airflow.providers.teradata.hooks.bteq.generate_random_password", return_value="password"),
patch("airflow.providers.teradata.hooks.bteq.generate_encrypted_file_with_openssl"),
patch("airflow.providers.teradata.hooks.bteq.transfer_file_sftp"),
patch(
"airflow.providers.teradata.hooks.bteq.prepare_bteq_command_for_remote_execution",
return_value="cmd",
),
patch(
"airflow.providers.teradata.hooks.bteq.decrypt_remote_file_to_string", return_value=(0, [], [])
),
):
with pytest.raises(AirflowException) as excinfo:
hook_with_ssh._transfer_to_and_execute_bteq_on_remote(
file_path="/tmp/fakefile",
remote_working_dir="/tmp",
bteq_script_encoding="utf-8",
timeout=10,
timeout_rc=None,
bteq_quit_rc=0,
bteq_session_encoding="utf-8",
tmp_dir="/tmp",
)
assert (
"Failed to establish a SSH connection to the remote machine for executing the BTEQ script."
in str(excinfo.value)
)
@patch("airflow.providers.teradata.hooks.bteq.verify_bteq_installed_remote")
@patch("airflow.providers.teradata.hooks.bteq.generate_random_password", return_value="testpass")
@patch("airflow.providers.teradata.hooks.bteq.generate_encrypted_file_with_openssl")
@patch("airflow.providers.teradata.hooks.bteq.transfer_file_sftp")
@patch(
"airflow.providers.teradata.hooks.bteq.prepare_bteq_command_for_remote_execution",
return_value="bteq_remote_command",
)
@patch(
"airflow.providers.teradata.hooks.bteq.decrypt_remote_file_to_string",
side_effect=Exception("mocked exception"),
)
def test_remote_execution_cleanup_on_exception(
mock_decrypt,
mock_prepare,
mock_transfer,
mock_generate_enc,
mock_generate_pass,
mock_verify_remote,
hook_with_ssh,
):
temp_dir = "/tmp"
local_file_path = os.path.join(temp_dir, "bteq_script.txt")
remote_working_dir = temp_dir
encrypted_file_path = os.path.join(temp_dir, "bteq_script.enc")
# Create dummy local encrypted file
with open(encrypted_file_path, "w") as f:
f.write("dummy")
# Simulate decrypt failing
mock_decrypt.side_effect = Exception("mocked exception")
# Patch exec_command for remote cleanup (identify_os, rm)
ssh_client = hook_with_ssh.ssh_hook.get_conn.return_value.__enter__.return_value
mock_stdin = MagicMock()
mock_stdout = MagicMock()
mock_stderr = MagicMock()
# For identify_os ("uname || ver")
mock_stdout.read.return_value = b"Linux\n"
ssh_client.exec_command.return_value = (mock_stdin, mock_stdout, mock_stderr)
# Run the test
with pytest.raises(AirflowException, match="mocked exception"):
hook_with_ssh._transfer_to_and_execute_bteq_on_remote(
file_path=local_file_path,
remote_working_dir=remote_working_dir,
bteq_script_encoding="utf-8",
timeout=5,
timeout_rc=None,
bteq_quit_rc=0,
bteq_session_encoding="utf-8",
tmp_dir=temp_dir,
)
# After exception, encrypted file should be deleted
assert not os.path.exists(encrypted_file_path)
| {
"repo_id": "apache/airflow",
"file_path": "providers/teradata/tests/unit/teradata/hooks/test_bteq.py",
"license": "Apache License 2.0",
"lines": 333,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/teradata/tests/unit/teradata/hooks/test_ttu.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import subprocess
from unittest import mock
import pytest
from airflow.providers.common.compat.sdk import AirflowException
from airflow.providers.teradata.hooks.ttu import TtuHook
class TestTtuHook:
@mock.patch("airflow.providers.teradata.hooks.ttu.TtuHook.get_connection")
def test_get_conn_with_valid_params(self, mock_get_connection):
# Setup
mock_conn = mock.MagicMock()
mock_conn.login = "test_user"
mock_conn.password = "test_pass"
mock_conn.host = "test_host"
mock_conn.extra_dejson = {}
mock_get_connection.return_value = mock_conn
# Execute
hook = TtuHook()
conn = hook.get_conn()
# Assert
assert conn["login"] == "test_user"
assert conn["password"] == "test_pass"
assert conn["host"] == "test_host"
@mock.patch("airflow.providers.teradata.hooks.ttu.TtuHook.get_connection")
def test_get_conn_missing_params(self, mock_get_connection):
# Setup
mock_conn = mock.MagicMock()
mock_conn.login = None
mock_conn.password = "test_pass"
mock_conn.host = "test_host"
mock_conn.extra_dejson = {}
mock_get_connection.return_value = mock_conn
# Execute and Assert
hook = TtuHook()
with pytest.raises(AirflowException, match="Missing required connection parameters"):
hook.get_conn()
@mock.patch("subprocess.Popen")
@mock.patch("airflow.providers.teradata.hooks.ttu.TtuHook.get_connection")
def test_close_conn_subprocess_running(self, mock_get_connection, mock_popen):
# Setup
mock_conn = mock.MagicMock()
mock_conn.login = "test_user"
mock_conn.password = "test_pass"
mock_conn.host = "test_host"
mock_conn.extra_dejson = {}
mock_get_connection.return_value = mock_conn
mock_process = mock.MagicMock()
mock_process.poll.return_value = None
mock_popen.return_value = mock_process
# Execute
hook = TtuHook()
conn = hook.get_conn()
conn["sp"] = mock_process
hook.close_conn()
# Assert
mock_process.terminate.assert_called_once()
mock_process.wait.assert_called_once_with(timeout=5)
assert hook.conn is None
@mock.patch("subprocess.Popen")
@mock.patch("airflow.providers.teradata.hooks.ttu.TtuHook.get_connection")
def test_close_conn_subprocess_timeout(self, mock_get_connection, mock_popen):
# Setup
mock_conn = mock.MagicMock()
mock_conn.login = "test_user"
mock_conn.password = "test_pass"
mock_conn.host = "test_host"
mock_conn.extra_dejson = {}
mock_get_connection.return_value = mock_conn
mock_process = mock.MagicMock()
mock_process.poll.return_value = None
mock_process.wait.side_effect = subprocess.TimeoutExpired(cmd="test", timeout=5)
mock_popen.return_value = mock_process
# Execute
hook = TtuHook()
conn = hook.get_conn()
conn["sp"] = mock_process
hook.close_conn()
# Assert
mock_process.terminate.assert_called_once()
mock_process.wait.assert_called_once()
mock_process.kill.assert_called_once()
assert hook.conn is None
@mock.patch("airflow.providers.teradata.hooks.ttu.TtuHook.__exit__")
@mock.patch("airflow.providers.teradata.hooks.ttu.TtuHook.__enter__")
def test_hook_context_manager(self, mock_enter, mock_exit):
# Setup
hook = TtuHook()
mock_enter.return_value = hook
# Execute
with hook as h:
assert h == hook
# Assert
mock_exit.assert_called_once()
# Ensure the exit method was called with the correct parameters
# Context manager's __exit__ is called with (exc_type, exc_val, exc_tb)
args = mock_exit.call_args[0]
assert len(args) == 3 # Verify we have the correct number of arguments
assert args[0] is None # type should be None
assert args[1] is None # value should be None
assert args[2] is None # traceback should be None
| {
"repo_id": "apache/airflow",
"file_path": "providers/teradata/tests/unit/teradata/hooks/test_ttu.py",
"license": "Apache License 2.0",
"lines": 116,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/teradata/tests/unit/teradata/operators/test_bteq.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
import tempfile
import unittest
from unittest import mock
import pytest
from airflow.providers.teradata.hooks.bteq import BteqHook
from airflow.providers.teradata.operators.bteq import BteqOperator
log = logging.getLogger(__name__)
class TestBteqOperator:
@mock.patch.object(BteqHook, "execute_bteq_script")
@mock.patch.object(BteqHook, "__init__", return_value=None)
def test_execute(self, mock_hook_init, mock_execute_bteq):
task_id = "test_bteq_operator"
sql = "SELECT * FROM my_table;"
teradata_conn_id = "teradata_default"
mock_context = {}
# Given
expected_result = "BTEQ execution result"
mock_execute_bteq.return_value = expected_result
operator = BteqOperator(
task_id=task_id,
sql=sql,
teradata_conn_id=teradata_conn_id,
)
# When
result = operator.execute(mock_context)
# Then
mock_hook_init.assert_called_once_with(teradata_conn_id=teradata_conn_id, ssh_conn_id=None)
mock_execute_bteq.assert_called_once_with(sql + "\n.EXIT", None, "", 600, None, "", None, "UTF-8")
assert result == "BTEQ execution result"
@mock.patch.object(BteqHook, "execute_bteq_script")
@mock.patch.object(BteqHook, "__init__", return_value=None)
def test_execute_sql_only(self, mock_hook_init, mock_execute_bteq):
# Arrange
task_id = "test_bteq_operator"
sql = "SELECT * FROM my_table;"
teradata_conn_id = "teradata_default"
mock_context = {}
expected_result = "BTEQ execution result"
mock_execute_bteq.return_value = expected_result
operator = BteqOperator(
task_id=task_id,
sql=sql,
teradata_conn_id=teradata_conn_id,
)
# Manually set _hook since we bypassed __init__
operator._hook = mock.MagicMock()
operator._hook.execute_bteq_script = mock_execute_bteq
# Act
result = operator.execute(mock_context)
# Assert
mock_hook_init.assert_called_once_with(teradata_conn_id=teradata_conn_id, ssh_conn_id=None)
mock_execute_bteq.assert_called_once_with(
sql + "\n.EXIT", # Assuming the prepare_bteq_script_for_local_execution appends ".EXIT"
None, # default remote_working_dir
"", # bteq_script_encoding (default ASCII => empty string)
600, # timeout default
None, # timeout_rc
"", # bteq_session_encoding
None, # bteq_quit_rc
"UTF-8",
)
assert result == expected_result
@mock.patch("airflow.providers.teradata.operators.bteq.BteqHook.execute_bteq_script")
@mock.patch("airflow.providers.teradata.operators.bteq.BteqHook.__init__", return_value=None)
def test_execute_sql_local(self, mock_hook_init, mock_execute_script):
sql = "SELECT * FROM test_table;"
expected_result = 0
mock_execute_script.return_value = expected_result
context = {}
op = BteqOperator(
task_id="test_local_sql",
sql=sql,
teradata_conn_id="td_conn",
)
op._hook = mock.Mock()
op._hook.execute_bteq_script = mock_execute_script
result = op.execute(context)
mock_hook_init.assert_called_once_with(teradata_conn_id="td_conn", ssh_conn_id=None)
mock_execute_script.assert_called_once()
assert result == expected_result
@mock.patch.object(BteqHook, "on_kill")
def test_on_kill(self, mock_on_kill):
task_id = "test_bteq_operator"
sql = "SELECT * FROM my_table;"
# Given
operator = BteqOperator(
task_id=task_id,
sql=sql,
)
operator._hook = BteqHook(None)
# When
operator.on_kill()
# Then
mock_on_kill.assert_called_once()
def test_on_kill_not_initialized(self):
task_id = "test_bteq_operator"
sql = "SELECT * FROM my_table;"
# Given
operator = BteqOperator(
task_id=task_id,
sql=sql,
)
operator._hook = None
# When/Then (no exception should be raised)
operator.on_kill()
def test_template_fields(self):
# Verify template fields are defined correctly
print(BteqOperator.template_fields)
assert BteqOperator.template_fields == "sql"
def test_execute_raises_if_no_sql_or_file(self):
op = BteqOperator(task_id="fail_case", teradata_conn_id="td_conn")
with pytest.raises(
ValueError,
match="Failed to execute BTEQ script due to missing required parameters: either 'sql' or 'file_path' must be provided.",
):
op.execute({})
@mock.patch("airflow.providers.teradata.operators.bteq.is_valid_file", return_value=False)
def test_invalid_file_path(self, mock_is_valid_file):
op = BteqOperator(
task_id="fail_invalid_file",
file_path="/invalid/path.sql",
teradata_conn_id="td_conn",
)
with pytest.raises(ValueError, match="Failed to execute BTEQ script due to invalid file path"):
op.execute({})
@mock.patch("airflow.providers.teradata.operators.bteq.is_valid_file", return_value=True)
@mock.patch(
"airflow.providers.teradata.operators.bteq.is_valid_encoding",
side_effect=UnicodeDecodeError("utf8", b"", 0, 1, "error"),
)
def test_file_encoding_error(self, mock_encoding, mock_valid_file):
op = BteqOperator(
task_id="encoding_fail",
file_path="/tmp/test.sql",
bteq_script_encoding="UTF-8",
teradata_conn_id="td_conn",
)
with pytest.raises(
ValueError,
match="Failed to execute BTEQ script because the provided file.*encoding differs from the specified BTEQ I/O encoding",
):
op.execute({})
@mock.patch("airflow.providers.teradata.operators.bteq.BteqHook.execute_bteq_script")
@mock.patch("airflow.providers.teradata.operators.bteq.is_valid_file", return_value=True)
@mock.patch("airflow.providers.teradata.operators.bteq.is_valid_encoding")
@mock.patch("airflow.providers.teradata.operators.bteq.read_file")
def test_execute_local_file(
self,
mock_read_file,
mock_valid_encoding,
mock_valid_file,
mock_execute_bteq_script,
):
mock_execute_bteq_script.return_value = 0
sql_content = "SELECT * FROM table_name;"
mock_read_file.return_value = sql_content
with tempfile.NamedTemporaryFile("w+", suffix=".sql", delete=False) as tmp_file:
tmp_file.write(sql_content)
tmp_file_path = tmp_file.name
op = BteqOperator(
task_id="test_bteq_local_file",
file_path=tmp_file_path,
teradata_conn_id="teradata_default",
)
result = op.execute(context={})
assert result == 0
mock_execute_bteq_script.assert_called_once()
def test_on_kill_calls_hook(self):
op = BteqOperator(task_id="kill_test", teradata_conn_id="td_conn")
op._hook = mock.Mock()
op.on_kill()
op._hook.on_kill.assert_called_once()
def test_on_kill_logs_if_no_hook(self):
op = BteqOperator(task_id="kill_no_hook", teradata_conn_id="td_conn")
op._hook = None
with mock.patch.object(op.log, "warning") as mock_log_info:
op.on_kill()
mock_log_info.assert_called_once_with("BteqHook was not initialized. Nothing to terminate.")
@mock.patch("airflow.providers.teradata.operators.bteq.BteqHook.execute_bteq_script")
@mock.patch("airflow.providers.teradata.operators.bteq.BteqHook.get_conn")
@mock.patch("airflow.providers.teradata.operators.bteq.SSHHook")
@mock.patch("airflow.providers.teradata.operators.bteq.BteqHook.__init__", return_value=None)
def test_remote_execution_with_sql(
self,
mock_bteq_hook_init,
mock_ssh_hook_class,
mock_get_conn,
mock_execute_bteq_script,
):
mock_execute_bteq_script.return_value = 0
mock_ssh_hook_instance = mock.Mock()
mock_ssh_hook_class.return_value = mock_ssh_hook_instance
op = BteqOperator(
task_id="test_remote_sql",
sql="SELECT * FROM customers;",
ssh_conn_id="ssh_default",
teradata_conn_id="teradata_default",
)
result = op.execute(context={})
mock_bteq_hook_init.assert_called_once_with(
teradata_conn_id="teradata_default", ssh_conn_id="ssh_default"
)
mock_execute_bteq_script.assert_called_once()
assert result == 0
@mock.patch("airflow.providers.common.compat.sdk.BaseOperator.render_template")
def test_render_template_in_sql(self, mock_render):
op = BteqOperator(task_id="render_test", sql="SELECT * FROM {{ params.table }};")
mock_render.return_value = "SELECT * FROM my_table;"
rendered_sql = op.render_template("sql", op.sql, context={"params": {"table": "my_table"}})
assert rendered_sql == "SELECT * FROM my_table;"
@mock.patch("airflow.providers.teradata.operators.bteq.BteqHook.execute_bteq_script", return_value=99)
@mock.patch("airflow.providers.teradata.operators.bteq.BteqHook.__init__", return_value=None)
def test_bteq_timeout_with_custom_rc(self, mock_hook_init, mock_exec):
op = BteqOperator(
task_id="timeout_case",
sql="SELECT 1",
teradata_conn_id="td_conn",
timeout=30,
timeout_rc=99,
bteq_quit_rc=[99],
)
result = op.execute({})
assert result == 99
mock_exec.assert_called_once()
@mock.patch("airflow.providers.teradata.operators.bteq.BteqHook.execute_bteq_script", return_value=42)
@mock.patch("airflow.providers.teradata.operators.bteq.BteqHook.__init__", return_value=None)
def test_bteq_return_code_not_in_quit_rc(self, mock_hook_init, mock_exec):
op = BteqOperator(
task_id="rc_not_allowed", sql="SELECT 1", teradata_conn_id="td_conn", bteq_quit_rc=[0, 1]
)
result = op.execute({})
assert result == 42 # still returns, but caller can fail on RC if desired
if __name__ == "__main__":
unittest.main()
| {
"repo_id": "apache/airflow",
"file_path": "providers/teradata/tests/unit/teradata/operators/test_bteq.py",
"license": "Apache License 2.0",
"lines": 252,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/teradata/tests/unit/teradata/utils/test_bteq_util.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
import stat
import unittest
from unittest.mock import MagicMock, call, patch
import pytest
from airflow.providers.common.compat.sdk import AirflowException
from airflow.providers.teradata.utils.bteq_util import (
identify_os,
is_valid_encoding,
is_valid_file,
is_valid_remote_bteq_script_file,
prepare_bteq_script_for_local_execution,
prepare_bteq_script_for_remote_execution,
read_file,
transfer_file_sftp,
verify_bteq_installed,
verify_bteq_installed_remote,
)
class TestBteqUtils:
def test_identify_os_linux(self):
# Arrange
ssh_client = MagicMock()
stdout_mock = MagicMock()
stdout_mock.read.return_value = b"Linux\n"
ssh_client.exec_command.return_value = (MagicMock(), stdout_mock, MagicMock())
# Act
os_info = identify_os(ssh_client)
# Assert
ssh_client.exec_command.assert_called_once_with("uname || ver")
assert os_info == "linux\n"
def test_identify_os_windows(self):
# Arrange
ssh_client = MagicMock()
stdout_mock = MagicMock()
stdout_mock.read.return_value = b"Microsoft Windows [Version 10.0.19045.3324]\n"
ssh_client.exec_command.return_value = (MagicMock(), stdout_mock, MagicMock())
# Act
os_info = identify_os(ssh_client)
# Assert
ssh_client.exec_command.assert_called_once_with("uname || ver")
assert "windows" in os_info
def test_identify_os_macos(self):
# Arrange
ssh_client = MagicMock()
stdout_mock = MagicMock()
stdout_mock.read.return_value = b"Darwin\n"
ssh_client.exec_command.return_value = (MagicMock(), stdout_mock, MagicMock())
# Act
os_info = identify_os(ssh_client)
# Assert
ssh_client.exec_command.assert_called_once_with("uname || ver")
assert os_info == "darwin\n"
def test_identify_os_empty_response(self):
# Arrange
ssh_client = MagicMock()
stdout_mock = MagicMock()
stdout_mock.read.return_value = b""
ssh_client.exec_command.return_value = (MagicMock(), stdout_mock, MagicMock())
# Act
os_info = identify_os(ssh_client)
# Assert
ssh_client.exec_command.assert_called_once_with("uname || ver")
assert os_info == ""
@patch("shutil.which")
def test_verify_bteq_installed_success(self, mock_which):
mock_which.return_value = "/usr/bin/bteq"
# Should not raise
verify_bteq_installed()
mock_which.assert_called_with("bteq")
@patch("shutil.which")
def test_verify_bteq_installed_fail(self, mock_which):
mock_which.return_value = None
with pytest.raises(AirflowException):
verify_bteq_installed()
def test_prepare_bteq_script_for_remote_execution(self):
conn = {"host": "myhost", "login": "user", "password": "pass"}
sql = "SELECT * FROM DUAL;"
script = prepare_bteq_script_for_remote_execution(conn, sql)
assert ".LOGON myhost/user,pass" in script
assert "SELECT * FROM DUAL;" in script
assert ".EXIT" in script
def test_prepare_bteq_script_for_local_execution(self):
sql = "SELECT 1;"
script = prepare_bteq_script_for_local_execution(sql)
assert "SELECT 1;" in script
assert ".EXIT" in script
@patch("airflow.providers.teradata.utils.bteq_util.identify_os", return_value="linux")
def test_verify_bteq_installed_remote_linux(self, mock_os):
ssh_client = MagicMock()
stdout_mock = MagicMock()
stdout_mock.read.return_value = b"/usr/bin/bteq"
stdout_mock.channel.recv_exit_status.return_value = 0
ssh_client.exec_command.return_value = (MagicMock(), stdout_mock, MagicMock())
verify_bteq_installed_remote(ssh_client)
ssh_client.exec_command.assert_called_once_with("which bteq")
@patch("airflow.providers.teradata.utils.bteq_util.identify_os", return_value="windows")
def test_verify_bteq_installed_remote_windows(self, mock_os):
ssh_client = MagicMock()
stdout_mock = MagicMock()
stdout_mock.read.return_value = b"C:\\Program Files\\bteq.exe"
stdout_mock.channel.recv_exit_status.return_value = 0
ssh_client.exec_command.return_value = (MagicMock(), stdout_mock, MagicMock())
verify_bteq_installed_remote(ssh_client)
ssh_client.exec_command.assert_called_once_with("where bteq")
@patch("airflow.providers.teradata.utils.bteq_util.identify_os", return_value="darwin")
def test_verify_bteq_installed_remote_macos(self, mock_os):
ssh_client = MagicMock()
stdout_mock = MagicMock()
stdout_mock.read.return_value = b"/usr/local/bin/bteq"
stdout_mock.channel.recv_exit_status.return_value = 0
ssh_client.exec_command.return_value = (MagicMock(), stdout_mock, MagicMock())
verify_bteq_installed_remote(ssh_client)
ssh_client.exec_command.assert_has_calls(
[
call("command -v zsh"),
call('zsh -l -c "which bteq"'),
]
)
@patch("airflow.providers.teradata.utils.bteq_util.identify_os", return_value="darwin")
def test_verify_bteq_installed_remote_macos_which_called_when_no_zsh(self, mock_os):
ssh_client = MagicMock()
# Mock for "command -v zsh" returning empty (no zsh)
stdin_mock_1 = MagicMock()
stdout_mock_1 = MagicMock()
stderr_mock_1 = MagicMock()
stdout_mock_1.read.return_value = b"" # No zsh path found
stderr_mock_1.read.return_value = b"" # Return empty bytes here!
ssh_client.exec_command.side_effect = [
(stdin_mock_1, stdout_mock_1, stderr_mock_1), # command -v zsh
(MagicMock(), MagicMock(), MagicMock()), # which bteq
]
# Mock for "which bteq" command response
stdin_mock_2 = MagicMock()
stdout_mock_2 = MagicMock()
stderr_mock_2 = MagicMock()
stdout_mock_2.channel.recv_exit_status.return_value = 0
stdout_mock_2.read.return_value = b"/usr/local/bin/bteq"
stderr_mock_2.read.return_value = b"" # Also return bytes here
# Since side_effect was already assigned, override second call manually
ssh_client.exec_command.side_effect = [
(stdin_mock_1, stdout_mock_1, stderr_mock_1), # command -v zsh
(stdin_mock_2, stdout_mock_2, stderr_mock_2), # which bteq
]
verify_bteq_installed_remote(ssh_client)
ssh_client.exec_command.assert_has_calls(
[
call("command -v zsh"),
call("which bteq"),
]
)
@patch("airflow.providers.teradata.utils.bteq_util.identify_os", return_value="darwin")
def test_verify_bteq_installed_remote_macos_which_fails_no_zsh(self, mock_os):
ssh_client = MagicMock()
# Mock for "command -v zsh" returning empty (no zsh)
stdin_mock_1 = MagicMock()
stdout_mock_1 = MagicMock()
stderr_mock_1 = MagicMock()
stdout_mock_1.read.return_value = b"" # No zsh path found
ssh_client.exec_command.side_effect = [
(stdin_mock_1, stdout_mock_1, stderr_mock_1), # command -v zsh
(MagicMock(), MagicMock(), MagicMock()), # which bteq
]
# For which bteq failure
ssh_client.exec_command.return_value[1].channel.recv_exit_status.return_value = 1
ssh_client.exec_command.return_value[1].read.return_value = b""
ssh_client.exec_command.return_value[2].read.return_value = b"command not found"
with pytest.raises(AirflowException) as exc_info:
verify_bteq_installed_remote(ssh_client)
assert "BTEQ is not installed or not available in PATH" in str(exc_info.value)
ssh_client.exec_command.assert_has_calls(
[
call("command -v zsh"),
call("which bteq"),
]
)
@patch("airflow.providers.teradata.utils.bteq_util.identify_os", return_value="linux")
def test_verify_bteq_installed_remote_fail(self, mock_os):
ssh_client = MagicMock()
stdout_mock = MagicMock()
stderr_mock = MagicMock()
stdout_mock.read.return_value = b""
stderr_mock.read.return_value = b"command not found"
stdout_mock.channel.recv_exit_status.return_value = 1
ssh_client.exec_command.return_value = (MagicMock(), stdout_mock, stderr_mock)
with pytest.raises(AirflowException, match="BTEQ is not installed or not available in PATH"):
verify_bteq_installed_remote(ssh_client)
ssh_client.exec_command.assert_called_once_with("which bteq")
@patch("paramiko.SSHClient.exec_command")
def test_verify_bteq_installed_remote_success(self, mock_exec):
mock_stdin = MagicMock()
mock_stdout = MagicMock()
mock_stderr = MagicMock()
mock_stdout.channel.recv_exit_status.return_value = 0
mock_stdout.read.return_value = b"/usr/bin/bteq"
mock_stderr.read.return_value = b""
mock_exec.return_value = (mock_stdin, mock_stdout, mock_stderr)
ssh_client = MagicMock()
ssh_client.exec_command = mock_exec
# Should not raise
verify_bteq_installed_remote(ssh_client)
@patch("paramiko.SSHClient.open_sftp")
def test_transfer_file_sftp(self, mock_open_sftp):
mock_sftp = MagicMock()
mock_open_sftp.return_value = mock_sftp
ssh_client = MagicMock()
ssh_client.open_sftp = mock_open_sftp
transfer_file_sftp(ssh_client, "local_file.txt", "remote_file.txt")
mock_open_sftp.assert_called_once()
mock_sftp.put.assert_called_once_with("local_file.txt", "remote_file.txt")
mock_sftp.close.assert_called_once()
def test_is_valid_file(self):
# create temp file
with open("temp_test_file.txt", "w") as f:
f.write("hello")
assert is_valid_file("temp_test_file.txt") is True
assert is_valid_file("non_existent_file.txt") is False
os.remove("temp_test_file.txt")
def test_is_valid_encoding(self):
# Write a file with UTF-8 encoding
with open("temp_utf8_file.txt", "w", encoding="utf-8") as f:
f.write("hello world")
# Should return True
assert is_valid_encoding("temp_utf8_file.txt", encoding="utf-8") is True
# Cleanup
os.remove("temp_utf8_file.txt")
def test_read_file_success(self):
content = "Sample content"
with open("temp_read_file.txt", "w") as f:
f.write(content)
read_content = read_file("temp_read_file.txt")
assert read_content == content
os.remove("temp_read_file.txt")
def test_read_file_file_not_found(self):
with pytest.raises(FileNotFoundError):
read_file("non_existent_file.txt")
@patch("paramiko.SSHClient.open_sftp")
def test_is_valid_remote_bteq_script_file_exists(self, mock_open_sftp):
mock_sftp = MagicMock()
mock_open_sftp.return_value = mock_sftp
# Mock stat to return a regular file mode
mock_stat = MagicMock()
mock_stat.st_mode = stat.S_IFREG
mock_sftp.stat.return_value = mock_stat
ssh_client = MagicMock()
ssh_client.open_sftp = mock_open_sftp
result = is_valid_remote_bteq_script_file(ssh_client, "/remote/path/to/file")
assert result is True
mock_sftp.close.assert_called_once()
@patch("paramiko.SSHClient.open_sftp")
def test_is_valid_remote_bteq_script_file_not_exists(self, mock_open_sftp):
mock_sftp = MagicMock()
mock_open_sftp.return_value = mock_sftp
# Raise FileNotFoundError for stat
mock_sftp.stat.side_effect = FileNotFoundError
ssh_client = MagicMock()
ssh_client.open_sftp = mock_open_sftp
result = is_valid_remote_bteq_script_file(ssh_client, "/remote/path/to/file")
assert result is False
mock_sftp.close.assert_called_once()
def test_is_valid_remote_bteq_script_file_none_path(self):
ssh_client = MagicMock()
result = is_valid_remote_bteq_script_file(ssh_client, None)
assert result is False
if __name__ == "__main__":
unittest.main()
| {
"repo_id": "apache/airflow",
"file_path": "providers/teradata/tests/unit/teradata/utils/test_bteq_util.py",
"license": "Apache License 2.0",
"lines": 282,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/teradata/tests/unit/teradata/utils/test_encryption_utils.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import string
import unittest
from unittest.mock import MagicMock, patch
from airflow.providers.teradata.utils.encryption_utils import (
decrypt_remote_file_to_string,
generate_encrypted_file_with_openssl,
generate_random_password,
shell_quote_single,
)
class TestEncryptionUtils:
def test_generate_random_password_length(self):
pwd = generate_random_password(16)
assert len(pwd) == 16
# Check characters are in allowed set
allowed_chars = string.ascii_letters + string.digits + string.punctuation
assert (all(c in allowed_chars for c in pwd)) is True
@patch("subprocess.run")
def test_generate_encrypted_file_with_openssl_calls_subprocess(self, mock_run):
file_path = "/tmp/plain.txt"
password = "testpass"
out_file = "/tmp/encrypted.enc"
generate_encrypted_file_with_openssl(file_path, password, out_file)
mock_run.assert_called_once_with(
[
"openssl",
"enc",
"-aes-256-cbc",
"-salt",
"-pbkdf2",
"-pass",
f"pass:{password}",
"-in",
file_path,
"-out",
out_file,
],
check=True,
)
def test_shell_quote_single_simple(self):
s = "simple"
quoted = shell_quote_single(s)
assert quoted == "'simple'"
def test_shell_quote_single_with_single_quote(self):
s = "O'Reilly"
quoted = shell_quote_single(s)
assert quoted == "'O'\\''Reilly'"
def test_decrypt_remote_file_to_string(self):
password = "mysecret"
remote_enc_file = "/remote/encrypted.enc"
bteq_command_str = "bteq -c UTF-8"
ssh_client = MagicMock()
mock_stdin = MagicMock()
mock_stdout = MagicMock()
mock_stderr = MagicMock()
# Setup mock outputs and exit code
mock_stdout.channel.recv_exit_status.return_value = 0
mock_stdout.read.return_value = b"decrypted output"
mock_stderr.read.return_value = b""
ssh_client.exec_command.return_value = (mock_stdin, mock_stdout, mock_stderr)
exit_status, output, err = decrypt_remote_file_to_string(
ssh_client, remote_enc_file, password, bteq_command_str
)
quoted_password = shell_quote_single(password)
expected_cmd = (
f"openssl enc -d -aes-256-cbc -salt -pbkdf2 -pass pass:{quoted_password} -in {remote_enc_file} | "
+ bteq_command_str
)
ssh_client.exec_command.assert_called_once_with(expected_cmd)
assert exit_status == 0
assert output == "decrypted output"
assert err == ""
if __name__ == "__main__":
unittest.main()
| {
"repo_id": "apache/airflow",
"file_path": "providers/teradata/tests/unit/teradata/utils/test_encryption_utils.py",
"license": "Apache License 2.0",
"lines": 90,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.