sample_id stringlengths 21 196 | text stringlengths 105 936k | metadata dict | category stringclasses 6
values |
|---|---|---|---|
ansible/ansible:test/lib/ansible_test/_internal/processes.py | """Wrappers around `ps` for querying running processes."""
from __future__ import annotations
import collections
import dataclasses
import os
import pathlib
import shlex
from ansible_test._internal.util import raw_command
@dataclasses.dataclass(frozen=True)
class ProcessData:
    """Raw `ps` output fields describing a single running process."""

    pid: int  # process ID
    ppid: int  # parent process ID
    command: str  # full command line as reported by `ps`
@dataclasses.dataclass(frozen=True)
class Process:
    """A node in the process tree."""

    pid: int
    command: str
    parent: Process | None = None
    children: tuple[Process, ...] = dataclasses.field(default_factory=tuple)

    @property
    def args(self) -> list[str]:
        """The command line split into its individual arguments."""
        return shlex.split(self.command)

    @property
    def path(self) -> pathlib.Path:
        """The path to the process executable (the first argument)."""
        return pathlib.Path(self.args[0])
def get_process_data(pids: list[int] | None = None) -> list[ProcessData]:
    """Return data about running processes, optionally limited to `pids`."""
    if pids:
        ps_args = ['-p', ','.join(str(pid) for pid in pids)]
    else:
        ps_args = ['-A']  # all processes

    stdout = raw_command(['ps'] + ps_args + ['-o', 'pid,ppid,command'], capture=True)[0]
    records: list[ProcessData] = []

    for line in stdout.splitlines()[1:]:  # skip the header line
        pid, ppid, command = line.split(maxsplit=2)
        records.append(ProcessData(pid=int(pid), ppid=int(ppid), command=command))

    return records
def get_process_tree() -> dict[int, Process]:
    """Return the process tree, keyed by PID."""
    process_data = get_process_data()

    nodes: dict[int, Process] = {}
    children_by_ppid: dict[int, list[Process]] = collections.defaultdict(list)

    # First pass: create bare nodes and group them under their parent PID.
    for entry in process_data:
        node = Process(pid=entry.pid, command=entry.command)
        nodes[entry.pid] = node

        if entry.ppid:
            children_by_ppid[entry.ppid].append(node)

    # Second pass: rebuild each node with its parent and children attached.
    for entry in process_data:
        nodes[entry.pid] = dataclasses.replace(
            nodes[entry.pid],
            parent=nodes.get(entry.ppid),
            children=tuple(children_by_ppid[entry.pid]),
        )

    return nodes
def get_current_process() -> Process:
    """Return the current process along with its ancestors and descendants."""
    tree = get_process_tree()

    return tree[os.getpid()]
| {
"repo_id": "ansible/ansible",
"file_path": "test/lib/ansible_test/_internal/processes.py",
"license": "GNU General Public License v3.0",
"lines": 57,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:test/integration/targets/ansible-doc/library/bogus_facts.py | DOCUMENTATION = """
module: bogus_facts
short_description: bad facts returned
version_added: historical
description: bogus facts for testing
"""
| {
"repo_id": "ansible/ansible",
"file_path": "test/integration/targets/ansible-doc/library/bogus_facts.py",
"license": "GNU General Public License v3.0",
"lines": 6,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:lib/ansible/_internal/_ansiballz/_builder.py | from __future__ import annotations
import dataclasses
import json
import typing as t
from ansible.module_utils._internal._ansiballz import _extensions
from ansible.module_utils._internal._ansiballz._extensions import _debugpy, _pydevd, _coverage
from ansible.constants import config
class ExtensionManager:
"""AnsiballZ extension manager."""
def __init__(
self,
pydevd: _pydevd.Options | None = None,
debugpy: _debugpy.Options | None = None,
coverage: _coverage.Options | None = None,
) -> None:
options = dict(
_pydevd=pydevd,
_debugpy=debugpy,
_coverage=coverage,
)
self._pydevd = pydevd
self._debugpy = debugpy
self._coverage = coverage
self._extension_names = tuple(name for name, option in options.items() if option)
self._module_names = tuple(f'{_extensions.__name__}.{name}' for name in self._extension_names)
self.source_mapping: dict[str, str] = {}
@property
def debugger_enabled(self) -> bool:
"""Returns True if the debugger extension is enabled, otherwise False."""
return bool(self._pydevd or self._debugpy)
@property
def extension_names(self) -> tuple[str, ...]:
"""Names of extensions to include in the AnsiballZ payload."""
return self._extension_names
@property
def module_names(self) -> tuple[str, ...]:
"""Python module names of extensions to include in the AnsiballZ payload."""
return self._module_names
def get_extensions(self) -> dict[str, dict[str, object]]:
"""Return the configured extensions and their options."""
extension_options: dict[str, t.Any] = {}
if self._debugpy:
extension_options['_debugpy'] = dataclasses.replace(
self._debugpy,
source_mapping=self._get_source_mapping(self._debugpy.source_mapping),
)
if self._pydevd:
extension_options['_pydevd'] = dataclasses.replace(
self._pydevd,
source_mapping=self._get_source_mapping(self._pydevd.source_mapping),
)
if self._coverage:
extension_options['_coverage'] = self._coverage
extensions = {extension: dataclasses.asdict(options) for extension, options in extension_options.items()}
return extensions
def _get_source_mapping(self, debugger_mapping: dict[str, str]) -> dict[str, str]:
"""Get the source mapping, adjusting the source root as needed."""
if debugger_mapping:
source_mapping = {self._translate_path(key, debugger_mapping): value for key, value in self.source_mapping.items()}
else:
source_mapping = self.source_mapping
return source_mapping
@staticmethod
def _translate_path(path: str, debugger_mapping: dict[str, str]) -> str:
"""Translate a local path to a foreign path."""
for replace, match in debugger_mapping.items():
if path.startswith(match):
return replace + path[len(match) :]
return path
@classmethod
def create(cls, task_vars: dict[str, object]) -> t.Self:
"""Create an instance using the provided task vars."""
return cls(
pydevd=cls._get_options('_ANSIBALLZ_PYDEVD_CONFIG', _pydevd.Options, task_vars),
debugpy=cls._get_options('_ANSIBALLZ_DEBUGPY_CONFIG', _debugpy.Options, task_vars),
coverage=cls._get_options('_ANSIBALLZ_COVERAGE_CONFIG', _coverage.Options, task_vars),
)
@classmethod
def _get_options[T](cls, name: str, config_type: type[T], task_vars: dict[str, object]) -> T | None:
"""Parse configuration from the named environment variable as the specified type, or None if not configured."""
if (value := config.get_config_value(name, variables=task_vars)) is None:
return None
data = json.loads(value) if isinstance(value, str) else value
options = config_type(**data)
return options
| {
"repo_id": "ansible/ansible",
"file_path": "lib/ansible/_internal/_ansiballz/_builder.py",
"license": "GNU General Public License v3.0",
"lines": 85,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
ansible/ansible:lib/ansible/module_utils/_internal/_ansiballz/_extensions/_coverage.py | from __future__ import annotations
import atexit
import dataclasses
import importlib.util
import os
import sys
import typing as t
@dataclasses.dataclass(frozen=True)
class Options:
    """Code coverage options."""

    config: str  # path to the coverage configuration file
    output: str | None  # output path prefix, or None to only verify `coverage` is available
def run(args: dict[str, t.Any]) -> None:  # pragma: nocover
    """Bootstrap `coverage` for the current Ansible module invocation."""
    options = Options(**args)

    if not options.output:
        # Verify coverage is available without importing it.
        # This will detect when a module would fail with coverage enabled with minimal overhead.
        if importlib.util.find_spec('coverage') is None:
            raise RuntimeError('Could not find the `coverage` Python module.')

        return

    # Enable code coverage analysis of the module.
    # This feature is for internal testing and may change without notice.
    version_label = '.'.join(str(part) for part in sys.version_info[:2])
    os.environ['COVERAGE_FILE'] = f'{options.output}=python-{version_label}=coverage'

    import coverage

    cov = coverage.Coverage(config_file=options.config)

    def flush_coverage() -> None:
        # Stop and persist collected data when the module process exits.
        cov.stop()
        cov.save()

    atexit.register(flush_coverage)
    cov.start()
| {
"repo_id": "ansible/ansible",
"file_path": "lib/ansible/module_utils/_internal/_ansiballz/_extensions/_coverage.py",
"license": "GNU General Public License v3.0",
"lines": 32,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ansible/ansible:lib/ansible/module_utils/_internal/_ansiballz/_extensions/_pydevd.py | """
Remote debugging support for AnsiballZ modules.
To use with PyCharm:
1) Choose an available port for PyCharm to listen on (e.g. 5678).
2) Create a Python Debug Server using that port.
3) Start the Python Debug Server.
4) Ensure the correct version of `pydevd-pycharm` is installed for the interpreter(s) which will run the code being debugged.
5) Configure Ansible with the `_ANSIBALLZ_PYDEVD_CONFIG` option.
See `Options` below for the structure of the debugger configuration.
Example configuration using an environment variable:
export _ANSIBLE_ANSIBALLZ_PYDEVD_CONFIG='{"module": "pydevd_pycharm", "settrace": {"host": "localhost", "port": 5678, "suspend": false}}'
6) Set any desired breakpoints.
7) Run Ansible commands.
"""
from __future__ import annotations
import dataclasses
import importlib
import json
import os
import pathlib
import typing as t
@dataclasses.dataclass(frozen=True)
class Options:
    """Debugger options for pydevd and its derivatives."""

    module: str = 'pydevd'
    """The Python module which will be imported and which provides the `settrace` method."""

    settrace: dict[str, object] = dataclasses.field(default_factory=dict)
    """The options to pass to the `{module}.settrace` method."""

    source_mapping: dict[str, str] = dataclasses.field(default_factory=dict)
    """
    A mapping of source paths to provide to pydevd.
    This setting is used internally by AnsiballZ and is not required unless Ansible CLI commands are run from a different system than your IDE.
    In that scenario, use this setting instead of configuring source mapping in your IDE.
    The key is a path known to the IDE.
    The value is the same path as known to the Ansible CLI.
    Both file paths and directories are supported.
    """


def run(args: dict[str, t.Any]) -> None:  # pragma: nocover
    """Enable remote debugging."""
    options = Options(**args)

    # The AnsiballZ temp dir is six levels above this file; remote paths are anchored there.
    temp_dir = pathlib.Path(__file__).parent.parent.parent.parent.parent.parent
    path_mapping = [[ide_path, str(temp_dir / local_path)] for ide_path, local_path in options.source_mapping.items()]
    os.environ['PATHS_FROM_ECLIPSE_TO_PYTHON'] = json.dumps(path_mapping)

    debugger = importlib.import_module(options.module)
    debugger.settrace(**options.settrace)

    pass  # when suspend is True, execution pauses here -- it's also a convenient place to put a breakpoint
| {
"repo_id": "ansible/ansible",
"file_path": "lib/ansible/module_utils/_internal/_ansiballz/_extensions/_pydevd.py",
"license": "GNU General Public License v3.0",
"lines": 46,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
ansible/ansible:lib/ansible/module_utils/_internal/_ansiballz/_respawn.py | from __future__ import annotations
import inspect
import sys
from ... import basic
from . import _respawn_wrapper
def create_payload() -> str:
"""Create and return an AnsiballZ payload for respawning a module."""
# The payload is the respawn wrapper module's own source followed by a __main__ guard
# which re-invokes the module with the original parameters.
main = sys.modules['__main__']
# NOTE(review): assumes __main__ carries `_module_fqn` and `_modlib_path` attributes
# (presumably set by the AnsiballZ loader) -- confirm against the loader before reuse.
code = inspect.getsource(_respawn_wrapper)
args = dict(
module_fqn=main._module_fqn,
modlib_path=main._modlib_path,
profile=basic._ANSIBLE_PROFILE,
json_params=basic._ANSIBLE_ARGS,
)
# Each argument is rendered with repr() so the generated source reproduces the values verbatim.
args_string = '\n'.join(f'{key}={value!r},' for key, value in args.items())
# Do not insert anything into the template below; its text is emitted into the payload verbatim.
wrapper = f"""{code}
if __name__ == "__main__":
_respawn_main(
{args_string}
)
"""
return wrapper
| {
"repo_id": "ansible/ansible",
"file_path": "lib/ansible/module_utils/_internal/_ansiballz/_respawn.py",
"license": "GNU General Public License v3.0",
"lines": 23,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ansible/ansible:lib/ansible/module_utils/_internal/_ansiballz/_respawn_wrapper.py | from __future__ import annotations
def _respawn_main(
    json_params: bytes,
    profile: str,
    module_fqn: str,
    modlib_path: str,
) -> None:
    """Bootstrap a respawned AnsiballZ module: make `modlib_path` importable, then run the module."""
    import sys

    sys.path.insert(0, modlib_path)

    # Deferred import: the loader only becomes importable after the sys.path insertion above.
    from ansible.module_utils._internal._ansiballz import _loader

    _loader.run_module(
        json_params=json_params,
        profile=profile,
        module_fqn=module_fqn,
        modlib_path=modlib_path,
        extensions={},
        init_globals={'_respawned': True},
    )
| {
"repo_id": "ansible/ansible",
"file_path": "lib/ansible/module_utils/_internal/_ansiballz/_respawn_wrapper.py",
"license": "GNU General Public License v3.0",
"lines": 18,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ansible/ansible:lib/ansible/_internal/_templating/_template_vars.py | from __future__ import annotations as _annotations
import datetime as _datetime
import os as _os
import pwd as _pwd
import time as _time
from ansible import constants as _constants
from ansible.module_utils._internal import _datatag
def generate_ansible_template_vars(
    path: str,
    fullpath: str | None = None,
    dest_path: str | None = None,
    include_ansible_managed: bool = True,
) -> dict[str, object]:
    """
    Generate and return a dictionary with variable metadata about the template specified by `fullpath`.
    If `fullpath` is `None`, `path` will be used instead.
    """
    # deprecated: description="update the ansible.windows collection to inline this logic instead of calling this internal function" core_version="2.23"
    if fullpath is None:
        fullpath = _os.path.abspath(path)

    template_stat = _os.stat(fullpath)

    template_uid: int | str

    try:
        # Prefer the symbolic owner name; fall back to the numeric uid when it cannot be resolved.
        template_uid = _pwd.getpwuid(template_stat.st_uid).pw_name
    except KeyError:
        template_uid = template_stat.st_uid

    template_vars: dict[str, object] = dict(
        template_host=_os.uname()[1],
        template_path=path,
        template_mtime=_datetime.datetime.fromtimestamp(template_stat.st_mtime),
        template_uid=template_uid,
        template_run_date=_datetime.datetime.now(),
        template_destpath=dest_path,
        template_fullpath=fullpath,
    )

    if include_ansible_managed:  # only inject the config default value if the variable wasn't set
        template_vars['ansible_managed'] = _generate_ansible_managed(template_stat)

    return template_vars
def _generate_ansible_managed(template_stat: _os.stat_result) -> str:
    """Generate and return the `ansible_managed` variable."""
    # deprecated: description="remove the `_generate_ansible_managed` function and use a constant instead" core_version="2.23"
    from ansible.template import trust_as_template

    managed_default = _constants.config.get_config_value('DEFAULT_MANAGED_STR')

    managed_str = managed_default.format(
        # IMPORTANT: These values must be constant strings to avoid template injection.
        # Use Jinja template expressions where variables are needed.
        host="{{ template_host }}",
        uid="{{ template_uid }}",
        file="{{ template_path }}",
    )

    # Expand strftime placeholders against the template's mtime, then restore tags and trust.
    stamped = _time.strftime(managed_str, _time.localtime(template_stat.st_mtime))
    stamped = _datatag.AnsibleTagHelper.tag_copy(managed_default, stamped)

    return trust_as_template(stamped)
| {
"repo_id": "ansible/ansible",
"file_path": "lib/ansible/_internal/_templating/_template_vars.py",
"license": "GNU General Public License v3.0",
"lines": 55,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ansible/ansible:test/units/_internal/_yaml/test_dumper.py | from __future__ import annotations
import pytest
from ansible.errors import AnsibleUndefinedVariable
from ansible.parsing.utils.yaml import from_yaml
from ansible.parsing.vault import EncryptedString
from ansible.template import Templar, trust_as_template, is_trusted_as_template
from units.mock.vault_helper import VaultTestHelper
# A vault-encrypted value for which no secret is available; it can round-trip but never decrypt.
undecryptable_value = EncryptedString(
ciphertext="$ANSIBLE_VAULT;1.1;AES256\n"
"35323961353038346165643738646465376139363061353835303739663538343266303232326635336535366264623635666532313563363065623"
"8316530640a663362363763633436373439663031336634333830373964386564646364336538373763613136383663623330373239613163643633"
"633835616438623261650a6361643765343766613931343266623263623231313739643139616233653833",
)
# Both YAML filters share the same round-trip semantics; run the suite against each.
@pytest.mark.parametrize("filter_name", (
"to_yaml",
"to_nice_yaml",
))
def test_yaml_dump(filter_name: str, _vault_secrets_context: VaultTestHelper) -> None:
"""Verify YAML dumping round-trips only values which are expected to be supported."""
payload = dict(
trusted=trust_as_template('trusted'),
untrusted="untrusted",
decryptable=VaultTestHelper().make_encrypted_string("hi mom"),
undecryptable=undecryptable_value,
)
original = dict(a_list=[payload])
templar = Templar(variables=dict(original=original))
# Dump via the filter under test, then parse the YAML back for comparison.
result = templar.template(trust_as_template(f"{{{{ original | {filter_name} }}}}"))
data = from_yaml(trust_as_template(result))
assert len(data) == len(original)
result_item = data['a_list'][0]
original_item = original['a_list'][0]
assert result_item['trusted'] == original_item['trusted']
assert result_item['untrusted'] == original_item['untrusted']
assert result_item['decryptable'] == original_item['decryptable']
assert is_trusted_as_template(result_item['trusted'])
assert is_trusted_as_template(result_item['untrusted'])  # round-tripping trust is NOT supported
assert not is_trusted_as_template(result_item['decryptable'])
# Encrypted values round-trip as ciphertext, regardless of decryptability.
assert result_item['decryptable']._ciphertext == original_item['decryptable']._ciphertext
assert result_item['undecryptable']._ciphertext == original_item['undecryptable']._ciphertext
assert not is_trusted_as_template(result_item['undecryptable'])
def test_yaml_dump_undefined() -> None:
# Dumping a container holding an undefined variable must raise, not serialize a placeholder.
templar = Templar(variables=dict(dict_with_undefined=dict(undefined_value=trust_as_template("{{ bogus }}"))))
with pytest.raises(AnsibleUndefinedVariable):
templar.template(trust_as_template("{{ dict_with_undefined | to_yaml }}"))
@pytest.mark.parametrize("value, expected", (
((1, 2, 3), "[1, 2, 3]\n"),
({1, 2, 3}, "!!set {1: null, 2: null, 3: null}\n"),
("abc", "abc\n"),
(b"abc", "!!binary |\n YWJj\n"),
))
def test_yaml_dump_iterables(value: object, expected: object) -> None:
# Each supported iterable/scalar type has a fixed YAML representation.
result = Templar(variables=dict(value=value)).template(trust_as_template("{{ value | to_yaml }}"))
assert result == expected
| {
"repo_id": "ansible/ansible",
"file_path": "test/units/_internal/_yaml/test_dumper.py",
"license": "GNU General Public License v3.0",
"lines": 54,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:lib/ansible/_internal/_ssh/_agent_launch.py | from __future__ import annotations
import atexit
import os
import subprocess
from ansible import constants as C
from ansible._internal._errors import _alarm_timeout
from ansible._internal._ssh._ssh_agent import SshAgentClient
from ansible.cli import display
from ansible.errors import AnsibleError
from ansible.module_utils.common.process import get_bin_path
_SSH_AGENT_STDOUT_READ_TIMEOUT = 5 # seconds
def launch_ssh_agent() -> None:
    """If configured via `SSH_AGENT`, launch an ssh-agent for Ansible's use and/or verify access to an existing one."""
    try:
        _launch_ssh_agent()
    except Exception as ex:
        # Normalize any failure into a single user-facing error, preserving the cause chain.
        raise AnsibleError("Failed to launch ssh agent.") from ex
def _launch_ssh_agent() -> None:
# Resolve the SSH_AGENT config: 'none' (disabled), 'auto' (launch one), or a path to an existing auth socket.
ssh_agent_cfg = C.config.get_config_value('SSH_AGENT')
match ssh_agent_cfg:
case 'none':
display.debug('SSH_AGENT set to none')
return
case 'auto':
# Launch a dedicated agent bound to a socket under Ansible's local temp dir.
try:
ssh_agent_bin = get_bin_path(C.config.get_config_value('SSH_AGENT_EXECUTABLE'))
except ValueError as e:
raise AnsibleError('SSH_AGENT set to auto, but cannot find ssh-agent binary.') from e
ssh_agent_dir = os.path.join(C.DEFAULT_LOCAL_TMP, 'ssh_agent')
os.mkdir(ssh_agent_dir, 0o700)  # restrictive perms: the socket grants access to loaded keys
sock = os.path.join(ssh_agent_dir, 'agent.sock')
display.vvv('SSH_AGENT: starting...')
try:
p = subprocess.Popen(
[ssh_agent_bin, '-D', '-s', '-a', sock],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
)
except OSError as e:
raise AnsibleError('Could not start ssh-agent.') from e
atexit.register(p.terminate)  # the agent runs in the foreground (-D); ensure it dies with this process
help_text = f'The ssh-agent {ssh_agent_bin!r} might be an incompatible agent.'
# A compatible agent prints 'SSH_AUTH_SOCK' first; read with a timeout in case it never writes.
expected_stdout = 'SSH_AUTH_SOCK'
try:
with _alarm_timeout.AnsibleTimeoutError.alarm_timeout(_SSH_AGENT_STDOUT_READ_TIMEOUT):
stdout = p.stdout.read(len(expected_stdout))
except _alarm_timeout.AnsibleTimeoutError as e:
display.error_as_warning(
msg=f'Timed out waiting for expected stdout {expected_stdout!r} from ssh-agent.',
exception=e,
help_text=help_text,
)
else:
if stdout != expected_stdout:
display.warning(
msg=f'The ssh-agent output {stdout!r} did not match expected {expected_stdout!r}.',
help_text=help_text,
)
if p.poll() is not None:
raise AnsibleError(
message='The ssh-agent terminated prematurely.',
help_text=f'{help_text}\n\nReturn Code: {p.returncode}\nStandard Error:\n{p.stderr.read()}',
)
display.vvv(f'SSH_AGENT: ssh-agent[{p.pid}] started and bound to {sock}')
case _:
# Any other value is treated as the path to an existing agent's auth socket.
sock = ssh_agent_cfg
# Verify the agent at `sock` is reachable, then expose it to child processes via the environment.
try:
with SshAgentClient(sock) as client:
client.list()
except Exception as e:
raise AnsibleError(f'Could not communicate with ssh-agent using auth sock {sock!r}.') from e
os.environ['SSH_AUTH_SOCK'] = os.environ['ANSIBLE_SSH_AGENT'] = sock
| {
"repo_id": "ansible/ansible",
"file_path": "lib/ansible/_internal/_ssh/_agent_launch.py",
"license": "GNU General Public License v3.0",
"lines": 74,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
ansible/ansible:test/units/module_utils/common/warnings/test_error_as_warning.py | from __future__ import annotations
import pytest
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common import warnings
from ansible.module_utils.common.warnings import error_as_warning
from ansible.module_utils.testing import patch_module_args
pytestmark = pytest.mark.usefixtures("as_target", "module_env_mocker")
def test_error_as_warning() -> None:
# The module-level helper should record one warning combining the message and the exception text.
try:
raise Exception('hello')
except Exception as ex:
error_as_warning('Warning message', ex)
assert warnings.get_warning_messages() == ('Warning message: hello',)
assert len(warnings.get_warnings()) == 1
def test_error_as_warning_via_module() -> None:
# Same behavior, exercised through the AnsibleModule method instead of the free function.
with patch_module_args():
am = AnsibleModule(argument_spec={})
try:
raise Exception('hello')
except Exception as ex:
am.error_as_warning('Warning message', ex)
assert warnings.get_warning_messages() == ('Warning message: hello',)
assert len(warnings.get_warnings()) == 1
| {
"repo_id": "ansible/ansible",
"file_path": "test/units/module_utils/common/warnings/test_error_as_warning.py",
"license": "GNU General Public License v3.0",
"lines": 23,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:lib/ansible/_internal/_errors/_alarm_timeout.py | from __future__ import annotations
import contextlib
import signal
import types
import typing as _t
from ansible.module_utils import datatag
class AnsibleTimeoutError(BaseException):
    """A general purpose timeout."""

    _MAX_TIMEOUT = 100_000_000
    """
    The maximum supported timeout value.
    This value comes from BSD's alarm limit, which is due to that function using setitimer.
    """

    def __init__(self, timeout: int) -> None:
        self.timeout = timeout

        super().__init__(f"Timed out after {timeout} second(s).")

    @classmethod
    @contextlib.contextmanager
    def alarm_timeout(cls, timeout: int | None) -> _t.Iterator[None]:
        """
        Context for running code under an optional timeout.
        Raises an instance of this class if the timeout occurs.
        New usages of this timeout mechanism are discouraged.
        """
        if timeout is not None:
            if not isinstance(timeout, int):
                raise TypeError(f"Timeout requires 'int' argument, not {datatag.native_type_name(timeout)!r}.")

            if not 0 <= timeout <= cls._MAX_TIMEOUT:
                # On BSD based systems, alarm is implemented using setitimer.
                # If out-of-bounds values are passed to alarm, they will return -1, which would be interpreted as an existing timer being set.
                # To avoid that, bounds checking is performed in advance.
                raise ValueError(f'Timeout {timeout} is invalid, it must be between 0 and {cls._MAX_TIMEOUT}.')

        if not timeout:
            # No timeout (None or 0) -- just run the body with no alarm involved.
            yield
            return

        def handle_alarm(_signal: int, _frame: types.FrameType) -> None:
            raise cls(timeout)

        previous_handler = signal.signal(signal.SIGALRM, handle_alarm)

        if previous_handler:
            raise RuntimeError("An existing alarm handler was present.")

        try:
            try:
                if signal.alarm(timeout):
                    raise RuntimeError("An existing alarm was set.")

                yield  # execute the context manager's body
            finally:
                # Disable the alarm.
                # If the alarm fires inside this finally block, the alarm is still disabled.
                # This guarantees the cleanup code in the outer finally block runs without risk of encountering the `TaskTimeoutError` from the alarm.
                signal.alarm(0)
        finally:
            signal.signal(signal.SIGALRM, signal.SIG_DFL)
| {
"repo_id": "ansible/ansible",
"file_path": "lib/ansible/_internal/_errors/_alarm_timeout.py",
"license": "GNU General Public License v3.0",
"lines": 51,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
ansible/ansible:lib/ansible/_internal/_errors/_task_timeout.py | from __future__ import annotations
from collections import abc as _c
from ansible._internal._errors._alarm_timeout import AnsibleTimeoutError
from ansible._internal._errors._error_utils import ContributesToTaskResult
from ansible.module_utils.datatag import deprecate_value
class TaskTimeoutError(AnsibleTimeoutError, ContributesToTaskResult):
    """
    A task-specific timeout.
    This exception provides a result dictionary via the ContributesToTaskResult mixin.
    """

    @property
    def result_contribution(self) -> _c.Mapping[str, object]:
        """Task result entries describing the timeout; the `frame` key is deprecated."""
        help_text = "Configure `DISPLAY_TRACEBACK` to see a traceback on timeout errors."

        deprecated_frame = deprecate_value(
            value=help_text,
            msg="The `timedout.frame` task result key is deprecated.",
            help_text=help_text,
            version="2.23",
        )

        return dict(timedout=dict(frame=deprecated_frame, period=self.timeout))
| {
"repo_id": "ansible/ansible",
"file_path": "lib/ansible/_internal/_errors/_task_timeout.py",
"license": "GNU General Public License v3.0",
"lines": 20,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ansible/ansible:test/units/_internal/_errors/test_alarm_timeout.py | from __future__ import annotations
import contextlib
import signal
import time
import typing as t
import pytest
from ansible._internal._errors import _alarm_timeout
from ansible._internal._errors._alarm_timeout import AnsibleTimeoutError
pytestmark = pytest.mark.usefixtures("assert_sigalrm_state")
@pytest.fixture
def assert_sigalrm_state() -> t.Iterator[None]:
"""Fixture to ensure that SIGALRM state is as-expected before and after each test."""
assert signal.alarm(0) == 0 # disable alarm before resetting the default handler
assert signal.signal(signal.SIGALRM, signal.SIG_DFL) == signal.SIG_DFL
try:
yield
finally:
# verify the test under observation restored the default alarm state
assert signal.alarm(0) == 0
assert signal.signal(signal.SIGALRM, signal.SIG_DFL) == signal.SIG_DFL
@pytest.mark.parametrize("timeout", (0, 1, None))
def test_alarm_timeout_success(timeout: int | None) -> None:
"""Validate a non-timeout success scenario."""
ran = False
with _alarm_timeout.AnsibleTimeoutError.alarm_timeout(timeout):
time.sleep(0.01)
ran = True
assert ran
def test_alarm_timeout_timeout() -> None:
"""Validate a happy-path timeout scenario."""
ran = False
timeout_sec = 1
with pytest.raises(AnsibleTimeoutError) as error:
with _alarm_timeout.AnsibleTimeoutError.alarm_timeout(timeout_sec):
time.sleep(timeout_sec + 1)
ran = True # pragma: nocover
assert not ran
assert error.value.timeout == timeout_sec
@pytest.mark.parametrize("timeout,expected_error_type,expected_error_pattern", (
(-1, ValueError, "Timeout.*invalid.*between"),
(100_000_001, ValueError, "Timeout.*invalid.*between"),
(0.1, TypeError, "requires 'int' argument.*'float'"),
("1", TypeError, "requires 'int' argument.*'str'"),
))
def test_alarm_timeout_bad_values(timeout: t.Any, expected_error_type: type[Exception], expected_error_pattern: str) -> None:
"""Validate behavior for invalid inputs."""
ran = False
with pytest.raises(expected_error_type, match=expected_error_pattern):
with _alarm_timeout.AnsibleTimeoutError.alarm_timeout(timeout):
ran = True # pragma: nocover
assert not ran
def test_alarm_timeout_bad_state() -> None:
"""Validate alarm state error handling."""
def call_it():
# expect alarm_timeout to refuse to run when SIGALRM state is already in use
ran = False
with pytest.raises(RuntimeError, match="existing alarm"):
with _alarm_timeout.AnsibleTimeoutError.alarm_timeout(1):
ran = True # pragma: nocover
assert not ran
try:
# non-default SIGALRM handler present
signal.signal(signal.SIGALRM, lambda _s, _f: None)
call_it()
finally:
signal.signal(signal.SIGALRM, signal.SIG_DFL)
try:
# alarm already set
signal.alarm(10000)
call_it()
finally:
signal.signal(signal.SIGALRM, signal.SIG_DFL)
ran_outer = ran_inner = False
# nested alarm_timeouts
with pytest.raises(RuntimeError, match="existing alarm"):
with _alarm_timeout.AnsibleTimeoutError.alarm_timeout(1):
ran_outer = True
with _alarm_timeout.AnsibleTimeoutError.alarm_timeout(1):
ran_inner = True # pragma: nocover
assert not ran_inner
assert ran_outer
def test_alarm_timeout_raise():
"""Ensure that an exception raised in the wrapped scope propagates correctly."""
with pytest.raises(NotImplementedError):
with _alarm_timeout.AnsibleTimeoutError.alarm_timeout(1):
raise NotImplementedError()
def test_alarm_timeout_escape_broad_exception():
"""Ensure that the timeout exception can escape a broad exception handler in the wrapped scope."""
with pytest.raises(AnsibleTimeoutError):
with _alarm_timeout.AnsibleTimeoutError.alarm_timeout(1):
with contextlib.suppress(Exception):
time.sleep(3)
| {
"repo_id": "ansible/ansible",
"file_path": "test/units/_internal/_errors/test_alarm_timeout.py",
"license": "GNU General Public License v3.0",
"lines": 90,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:test/units/_internal/_errors/test_error_utils.py | from __future__ import annotations
import collections.abc as c
import typing as t
import pytest
from ansible._internal._errors import _error_utils
from ansible.module_utils._internal import _messages
from units.mock.error_helper import raise_exceptions
# Helper exceptions implementing the ContributesToTaskResult protocol for the cases under test.
class _TestContributesError(Exception, _error_utils.ContributesToTaskResult):
@property
def result_contribution(self) -> c.Mapping[str, object]:
return dict(some_flag=True)
class _TestContributesUnreachable(Exception, _error_utils.ContributesToTaskResult):
# marks the task unreachable instead of failed (the failed key is omitted)
@property
def omit_failed_key(self) -> bool:
return True
@property
def result_contribution(self) -> c.Mapping[str, object]:
return dict(unreachable=True)
class _TestContributesMsg(Exception, _error_utils.ContributesToTaskResult):
# overrides the result `msg` instead of using the joined exception messages
@property
def result_contribution(self) -> c.Mapping[str, object]:
return dict(msg="contributed msg")
# Each case raises a chain of exceptions and states the expected merged task result dict.
@pytest.mark.parametrize("exceptions,expected", (
(
(Exception("e0"), _TestContributesError("e1"), ValueError("e2")),
dict(failed=True, some_flag=True, msg="e0: e1: e2"),
),
(
(Exception("e0"), ValueError("e1"), _TestContributesError("e2")),
dict(failed=True, some_flag=True, msg="e0: e1: e2"),
),
(
(Exception("e0"), _TestContributesUnreachable("e1")),
dict(unreachable=True, msg="e0: e1"),
),
(
(Exception("e0"), _TestContributesMsg()),
dict(failed=True, msg="contributed msg"),
),
))
def test_exception_result_contribution(exceptions: t.Sequence[BaseException], expected: dict[str, t.Any]) -> None:
"""Validate result dict augmentation by exceptions conforming to the ContributeToTaskResult protocol."""
with pytest.raises(Exception) as error:
raise_exceptions(exceptions)
result = _error_utils.result_dict_from_exception(error.value, accept_result_contribution=True)
# The exception summary is always attached; remove it before comparing the remaining keys.
summary = result.pop('exception')
assert isinstance(summary, _messages.ErrorSummary)
assert result == expected
| {
"repo_id": "ansible/ansible",
"file_path": "test/units/_internal/_errors/test_error_utils.py",
"license": "GNU General Public License v3.0",
"lines": 48,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:test/units/_internal/_errors/test_task_timeout.py | from __future__ import annotations
from ansible._internal._errors._task_timeout import TaskTimeoutError
from ansible.module_utils._internal._datatag._tags import Deprecated
def test_task_timeout_result_contribution() -> None:
    """Validate the result contribution shape."""
    try:
        raise TaskTimeoutError(99)
    except TaskTimeoutError as exc:
        contribution = exc.result_contribution
    assert isinstance(contribution, dict)
    timedout_value = contribution.get('timedout')
    assert isinstance(timedout_value, dict)
    frame_value = timedout_value.get('frame')
    assert isinstance(frame_value, str)
    assert Deprecated.is_tagged_on(frame_value)
    assert timedout_value.get('period') == 99
| {
"repo_id": "ansible/ansible",
"file_path": "test/units/_internal/_errors/test_task_timeout.py",
"license": "GNU General Public License v3.0",
"lines": 17,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:test/units/mock/error_helper.py | from __future__ import annotations
import collections.abc as c
def raise_exceptions(exceptions: c.Sequence[BaseException]) -> None:
    """
    Raise a chain of exceptions from the given exception list.
    Exceptions will be raised starting from the end of the list, with each earlier
    exception raised `from` the one after it. The sequence must be non-empty; entries
    may be any `BaseException`, not just `Exception` subclasses.
    """
    if len(exceptions) > 1:
        try:
            raise_exceptions(exceptions[1:])
        except BaseException as ex:  # BaseException, not Exception: the sequence may contain e.g. KeyboardInterrupt/SystemExit
            raise exceptions[0] from ex
    raise exceptions[0]
| {
"repo_id": "ansible/ansible",
"file_path": "test/units/mock/error_helper.py",
"license": "GNU General Public License v3.0",
"lines": 13,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:test/units/utils/test_errors.py | from __future__ import annotations
import pytest
from ansible import errors
from units.test_utils.controller.display import emits_warnings
@pytest.mark.parametrize("name", (
    "AnsibleFilterTypeError",
    "_AnsibleActionDone",
))
def test_deprecated(name: str) -> None:
    """Accessing deprecated attributes of `ansible.errors` emits a deprecation warning."""
    with emits_warnings(deprecation_pattern='is deprecated'):
        getattr(errors, name)
def test_deprecated_attribute_error() -> None:
    """Accessing an unknown attribute of `ansible.errors` raises AttributeError."""
    with pytest.raises(AttributeError):
        getattr(errors, 'bogus')
| {
"repo_id": "ansible/ansible",
"file_path": "test/units/utils/test_errors.py",
"license": "GNU General Public License v3.0",
"lines": 14,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:test/integration/targets/display-newline/library/noisy.py | from __future__ import annotations
from ansible.module_utils.basic import AnsibleModule
def main() -> None:
    """Entry point: emit one warning mixing CRLF, CR and LF newline styles, then exit successfully."""
    m = AnsibleModule({})
    m.warn("Hello\r\nNew\rAnsible\nWorld")
    m.exit_json()
if __name__ == '__main__':
    main()
| {
"repo_id": "ansible/ansible",
"file_path": "test/integration/targets/display-newline/library/noisy.py",
"license": "GNU General Public License v3.0",
"lines": 8,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:lib/ansible/_internal/_errors/_error_factory.py | from __future__ import annotations as _annotations
from ansible.module_utils._internal import _errors, _messages
class ControllerEventFactory(_errors.EventFactory):
    """Factory for creating `Event` instances from `BaseException` instances on the controller."""
    def _get_msg(self, exception: BaseException) -> str | None:
        """Return the message for `exception`, preferring the original (stripped) message on `AnsibleError`."""
        from ansible.errors import AnsibleError  # deferred import to avoid a circular import
        if not isinstance(exception, AnsibleError):
            return super()._get_msg(exception)
        return exception._original_message.strip()
    def _get_formatted_source_context(self, exception: BaseException) -> str | None:
        """Return the formatted source context carried by `AnsibleError`, or defer to the base implementation."""
        from ansible.errors import AnsibleError  # deferred import to avoid a circular import
        if not isinstance(exception, AnsibleError):
            return super()._get_formatted_source_context(exception)
        return exception._formatted_source_context
    def _get_help_text(self, exception: BaseException) -> str | None:
        """Return the help text carried by `AnsibleError`, or defer to the base implementation."""
        from ansible.errors import AnsibleError  # deferred import to avoid a circular import
        if not isinstance(exception, AnsibleError):
            return super()._get_help_text(exception)
        return exception._help_text
    def _get_chain(self, exception: BaseException) -> _messages.EventChain | None:
        """Return the chain for `exception`; captured errors carry their own pre-built cause event."""
        from ansible._internal._errors import _captured  # avoid circular import due to AnsibleError import
        if isinstance(exception, _captured.AnsibleCapturedError):
            # a captured error provides its own cause event, it never has a normal __cause__
            return _messages.EventChain(
                msg_reason=_errors.MSG_REASON_DIRECT_CAUSE,
                traceback_reason=f'The above {exception.context} exception was the direct cause of the following controller exception:',
                event=exception._event,
            )
        return super()._get_chain(exception)
    def _follow_cause(self, exception: BaseException) -> bool:
        """Return True if the cause chain should be followed; `AnsibleError` may opt out via `_include_cause_message`."""
        from ansible.errors import AnsibleError  # deferred import to avoid a circular import
        return not isinstance(exception, AnsibleError) or exception._include_cause_message
    def _get_cause(self, exception: BaseException) -> BaseException | None:
        """Return the cause of `exception`, honoring the legacy `orig_exc` argument on `AnsibleError` (with warnings)."""
        # deprecated: description='remove support for orig_exc (deprecated in 2.23)' core_version='2.27'
        cause = super()._get_cause(exception)
        from ansible.errors import AnsibleError  # deferred import to avoid a circular import
        if not isinstance(exception, AnsibleError):
            return cause
        try:
            from ansible.utils.display import _display
        except Exception:  # pylint: disable=broad-except # if config is broken, this can raise things other than ImportError
            _display = None
        if cause:
            # `raise ... from` wins over `orig_exc`; warn when both were given but disagree
            if exception.orig_exc and exception.orig_exc is not cause and _display:
                _display.warning(
                    msg=f"The `orig_exc` argument to `{type(exception).__name__}` was given, but differed from the cause given by `raise ... from`.",
                )
            return cause
        if exception.orig_exc:
            if _display:
                # encourage the use of `raise ... from` before deprecating `orig_exc`
                _display.warning(
                    msg=f"The `orig_exc` argument to `{type(exception).__name__}` was given without using `raise ... from orig_exc`.",
                )
            return exception.orig_exc
        return None
    def _get_events(self, exception: BaseException) -> tuple[_messages.Event, ...] | None:
        """Return converted sub-events for exception groups, or None for ordinary exceptions."""
        if isinstance(exception, BaseExceptionGroup):
            return tuple(self._convert_exception(ex) for ex in exception.exceptions)
        return None
| {
"repo_id": "ansible/ansible",
"file_path": "lib/ansible/_internal/_errors/_error_factory.py",
"license": "GNU General Public License v3.0",
"lines": 60,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
ansible/ansible:lib/ansible/_internal/_event_formatting.py | from __future__ import annotations as _annotations
import collections.abc as _c
import textwrap as _textwrap
from ansible.module_utils._internal import _event_utils, _messages
def format_event(event: _messages.Event, include_traceback: bool) -> str:
    """Format an event into a verbose message and traceback."""
    text = format_event_verbose_message(event)
    if include_traceback:
        text = f'{text}\n{format_event_traceback(event)}'
    text = text.strip()
    # multi-line output gets an extra trailing newline as a visual separator
    return text + ('\n\n' if '\n' in text else '\n')
def format_event_traceback(event: _messages.Event) -> str:
    """Format an event into a traceback."""
    parts: list[str] = []
    current = event
    while current:
        text = current.formatted_traceback or '(traceback missing)\n'
        if current.events:
            # nested tracebacks from sub-events are rendered in a bordered section
            text += _format_event_children("Sub-Traceback", [format_event_traceback(sub) for sub in current.events])
        parts.append(text)
        if not current.chain:
            break
        parts.append(f'\n{current.chain.traceback_reason}\n\n')
        current = current.chain.event
    # the chain is walked newest-first, but tracebacks read oldest-first
    parts.reverse()
    return ''.join(parts)
def format_event_verbose_message(event: _messages.Event) -> str:
    """
    Format an event into a verbose message.
    Help text, contextual information and sub-events will be included.
    """
    segments: list[str] = []
    original_event = event  # retained for the brief summary line emitted when multiple segments result
    while event:
        messages = [event.msg]
        chain: _messages.EventChain = event.chain
        # walk the chain, collapsing chained events into this segment when no detail would be lost
        while chain and chain.follow:
            if chain.event.events:
                break  # do not collapse a chained event with sub-events, since they would be lost
            if chain.event.formatted_source_context or chain.event.help_text:
                if chain.event.formatted_source_context != event.formatted_source_context or chain.event.help_text != event.help_text:
                    break  # do not collapse a chained event with different details, since they would be lost
            if chain.event.chain and chain.msg_reason != chain.event.chain.msg_reason:
                break  # do not collapse a chained event which has a chain with a different msg_reason
            messages.append(chain.event.msg)
            chain = chain.event.chain
        msg = _event_utils.deduplicate_message_parts(messages)
        segment = '\n'.join(_get_message_lines(msg, event.help_text, event.formatted_source_context)) + '\n'
        if event.events:
            child_msgs = [format_event_verbose_message(child) for child in event.events]
            segment += _format_event_children("Sub-Event", child_msgs)
        segments.append(segment)
        if chain and chain.follow:
            # resume formatting from the first chained event that could not be collapsed
            segments.append(f'\n{chain.msg_reason}\n\n')
            event = chain.event
        else:
            event = None
    if len(segments) > 1:
        # lead with a brief one-line summary when the verbose output contains multiple segments
        segments.insert(0, _event_utils.format_event_brief_message(original_event) + '\n\n')
    return ''.join(segments)
def _format_event_children(label: str, children: _c.Iterable[str]) -> str:
    """Format the given list of child messages into a single string."""
    materialized = list(children)
    total = len(materialized)
    parts: list[str] = ['\n']
    for position, child in enumerate(materialized, start=1):
        parts.append(f'+--[ {label} {position} of {total} ]---\n')
        # indent every line, including blank ones, with the "| " gutter
        parts.append(_textwrap.indent(f"\n{child}\n", "| ", lambda _line: True))
        parts.append(f'+--[ End {label} ]---\n')
    return ''.join(parts)
def _get_message_lines(message: str, help_text: str | None, formatted_source_context: str | None) -> list[str]:
    """Return a list of message lines constructed from the given message, help text and formatted source context."""
    collapsible = help_text and not formatted_source_context and '\n' not in message and '\n' not in help_text
    if collapsible:
        # prefer a single-line message with help text when there is no source context
        return [f'{message} {help_text}']
    lines = [message]
    if formatted_source_context:
        lines.append(formatted_source_context)
    if help_text:
        # blank line visually separates the help text from the message/context
        lines.extend(['', help_text])
    return lines
| {
"repo_id": "ansible/ansible",
"file_path": "lib/ansible/_internal/_event_formatting.py",
"license": "GNU General Public License v3.0",
"lines": 85,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
ansible/ansible:lib/ansible/module_utils/_internal/_event_utils.py | from __future__ import annotations as _annotations
import typing as _t
from ansible.module_utils._internal import _text_utils, _messages
def deduplicate_message_parts(message_parts: list[str]) -> str:
    """Format the given list of messages into a brief message, while deduplicating repeated elements."""
    ordered = list(reversed(message_parts))
    message = ordered[0]
    for part in ordered[1:]:
        # avoid duplicate messages where the cause was already concatenated to the exception message
        if part.endswith(message):
            message = part
        else:
            message = _text_utils.concat_message(part, message)
    return message
def format_event_brief_message(event: _messages.Event) -> str:
    """
    Format an event into a brief message.
    Help text, contextual information and sub-events will be omitted.
    """
    message_parts: list[str] = [event.msg]
    # collect messages along the followable chain, then deduplicate overlaps
    while event.chain and event.chain.follow:
        event = event.chain.event
        message_parts.append(event.msg)
    return deduplicate_message_parts(message_parts)
def deprecation_as_dict(deprecation: _messages.DeprecationSummary) -> _t.Dict[str, _t.Any]:
    """Returns a dictionary representation of the deprecation object in the format exposed to playbooks."""
    from ansible.module_utils._internal._deprecator import INDETERMINATE_DEPRECATOR  # circular import from messages
    deprecator = deprecation.deprecator
    if deprecator and deprecator != INDETERMINATE_DEPRECATOR:
        # namespace.collection is the first two dotted components of the resolved plugin name
        collection_name = '.'.join(deprecator.resolved_name.split('.')[:2])
    else:
        collection_name = None
    result: _t.Dict[str, _t.Any] = dict(
        msg=format_event_brief_message(deprecation.event),
        collection_name=collection_name,
    )
    # a deprecation is keyed by either a removal date or a removal version, never both
    if deprecation.date:
        result.update(date=deprecation.date)
    else:
        result.update(version=deprecation.version)
    return result
| {
"repo_id": "ansible/ansible",
"file_path": "lib/ansible/module_utils/_internal/_event_utils.py",
"license": "GNU General Public License v3.0",
"lines": 42,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ansible/ansible:lib/ansible/module_utils/_internal/_stack.py | from __future__ import annotations as _annotations
import inspect as _inspect
import typing as _t
def caller_frame() -> _inspect.FrameInfo | None:
    """Return the caller stack frame, skipping any marked with the `_skip_stackwalk` local."""
    _skip_stackwalk = True  # sentinel local: iter_stack() skips frames that define this name, including this one
    return next(iter_stack(), None)
def iter_stack() -> _t.Generator[_inspect.FrameInfo]:
    """Iterate over stack frames, skipping any marked with the `_skip_stackwalk` local."""
    _skip_stackwalk = True  # sentinel local: marks this frame (and any other frame defining it) as infrastructure to skip
    # NOTE: the filtering deliberately stays in this generator's own frame (no inner genexp),
    # so the sentinel above is visible to _inspect.stack() and this frame is excluded from results.
    for candidate in _inspect.stack():
        if '_skip_stackwalk' in candidate.frame.f_locals:
            continue  # infrastructure frame, not a real caller
        yield candidate
| {
"repo_id": "ansible/ansible",
"file_path": "lib/ansible/module_utils/_internal/_stack.py",
"license": "GNU General Public License v3.0",
"lines": 14,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ansible/ansible:test/integration/targets/gather_facts-errors/library/fail1.py | from __future__ import annotations
from ansible.module_utils.basic import AnsibleModule
def main() -> None:
    """Entry point: fail unconditionally with a fixed error message."""
    AnsibleModule({}).fail_json("the fail1 module went bang")
if __name__ == '__main__':
    main()
| {
"repo_id": "ansible/ansible",
"file_path": "test/integration/targets/gather_facts-errors/library/fail1.py",
"license": "GNU General Public License v3.0",
"lines": 7,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:test/integration/targets/gather_facts-errors/library/fail2.py | from __future__ import annotations
from ansible.module_utils.basic import AnsibleModule
def main() -> None:
    """Entry point: fail unconditionally with a fixed error message."""
    AnsibleModule({}).fail_json("the fail2 module went bang")
if __name__ == '__main__':
    main()
| {
"repo_id": "ansible/ansible",
"file_path": "test/integration/targets/gather_facts-errors/library/fail2.py",
"license": "GNU General Public License v3.0",
"lines": 7,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:test/integration/targets/gather_facts-errors/library/success1.py | from __future__ import annotations
from ansible.module_utils.basic import AnsibleModule
def main() -> None:
    """Entry point: exit successfully without producing any result data."""
    AnsibleModule({}).exit_json()
if __name__ == '__main__':
    main()
| {
"repo_id": "ansible/ansible",
"file_path": "test/integration/targets/gather_facts-errors/library/success1.py",
"license": "GNU General Public License v3.0",
"lines": 7,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:test/units/_internal/test_event_formatting.py | from __future__ import annotations
import traceback
from ansible._internal._errors import _error_factory
from ansible._internal._event_formatting import format_event_traceback
from units.mock.error_helper import raise_exceptions
import pytest
def test_traceback_formatting() -> None:
    """Verify our traceback formatting mimics the Python traceback formatting."""
    exceptions = tuple(Exception(label) for label in 'abcd')
    with pytest.raises(Exception) as error:
        raise_exceptions(exceptions)
    event = _error_factory.ControllerEventFactory.from_exception(error.value, True)
    ansible_tb = format_event_traceback(event)
    python_tb = ''.join(traceback.format_exception(error.value))
    assert ansible_tb == python_tb
| {
"repo_id": "ansible/ansible",
"file_path": "test/units/_internal/test_event_formatting.py",
"license": "GNU General Public License v3.0",
"lines": 19,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:test/units/plugins/test/test_all.py | from __future__ import annotations
import collections
import dataclasses
import math
import pathlib
import tempfile
import typing as t
import pytest
from ansible.parsing.vault import EncryptedString
from ansible.plugins.loader import test_loader
from ansible.plugins.test import AnsibleJinja2Test
from ansible.template import Templar, trust_as_template
from units.test_utils.controller.display import emits_warnings
@dataclasses.dataclass
class Extra:
    """Optional per-case configuration for entries in TEST_DATA_SET."""
    variables: dict[str, t.Any] | None = None  # template variables made available to the expression
    args: list[t.Any] | None = None  # positional arguments rendered into the test invocation
    kwargs: dict[str, t.Any] | None = None  # keyword arguments rendered into the test invocation
    func: t.Callable[[Extra], None] | None = None  # optional callback; not referenced by the visible tests
class MakeLink:
    """Callable helper that creates a symlink inside a fresh temporary directory and returns its path."""
    _tempdir: tempfile.TemporaryDirectory[str] | None = None
    def __call__(self, *args, **kwargs) -> str:
        # a new temporary directory is created on every call; __del__ cleans up the most recent one
        self._tempdir = tempfile.TemporaryDirectory()
        link_path = pathlib.Path(self._tempdir.name) / 'a_symlink'
        link_path.symlink_to('something')
        return str(link_path)
    def __del__(self) -> None:
        if self._tempdir:
            self._tempdir.cleanup()
    def __repr__(self) -> str:
        return 'MakeLink'
TEST_DATA_SET: tuple[tuple[t.Any, str, bool, Extra | None], ...] = (
# core
(dict(failed=1), 'failed', True, None),
(dict(failed=0), 'failed', False, None),
(dict(), 'failed', False, None),
(dict(failed=1), 'success', False, None),
(dict(failed=0), 'success', True, None),
(dict(), 'success', True, None),
(dict(unreachable=1), 'reachable', False, None),
(dict(unreachable=0), 'reachable', True, None),
(dict(), 'reachable', True, None),
(dict(unreachable=0), 'unreachable', False, None),
(dict(unreachable=1), 'unreachable', True, None),
(dict(), 'unreachable', False, None),
(dict(timedout=dict(period=99)), 'timedout', True, None),
# (dict(timedout=1), 'timedout', False, None), # oops, bug
(dict(timedout=0), 'timedout', False, None),
(dict(), 'timedout', False, None),
(dict(changed=1), 'changed', True, None),
(dict(changed=0), 'changed', False, None),
(dict(), 'changed', False, None),
# (dict(results=[]), 'changed', True, None), # oops, bug
(dict(results=[dict(changed=1)]), 'changed', True, None),
(dict(results=[dict(changed=0)]), 'changed', False, None),
(dict(), 'changed', False, None),
(dict(skipped=1), 'skipped', True, None),
(dict(skipped=0), 'skipped', False, None),
(dict(), 'skipped', False, None),
(dict(finished=1), 'finished', True, None),
(dict(finished=0), 'finished', False, None),
(dict(), 'finished', True, None),
(dict(started=1), 'started', True, None),
(dict(started=0), 'started', False, None),
(dict(), 'started', True, None),
('"foo"', 'match', True, Extra(args=['"foo"'])),
('"foo"', 'match', False, Extra(args=['"bar"'])),
('"xxfooxx"', 'search', True, Extra(args=['"foo"'])),
('"xxfooxx"', 'search', False, Extra(args=['"bar"'])),
('"fooxx"', 'regex', True, Extra(args=['"FOO"'], kwargs=dict(ignorecase=True, multiline=True, match_type='"match"'))),
('"fooxx"', 'regex', False, Extra(args=['"BAR"'], kwargs=dict(ignorecase=True, multiline=True, match_type='"match"'))),
('1.1', 'version_compare', True, Extra(args=['1.1', '"eq"'])),
('1.1', 'version_compare', False, Extra(args=['1.0', '"eq"'])),
([0], 'any', False, None),
([1], 'any', True, None),
([0], 'all', False, None),
([1], 'all', True, None),
(1, 'truthy', True, None),
(0, 'truthy', False, None),
(1, 'falsy', False, None),
(0, 'falsy', True, None),
('foo', 'vault_encrypted', True, Extra(variables=dict(foo=EncryptedString(ciphertext='$ANSIBLE_VAULT;1.1;BLAH')))),
('foo', 'vault_encrypted', False, Extra(variables=dict(foo='not_encrypted'))),
(repr(str(pathlib.Path(__file__).parent / "dummy_vault.txt")), 'vaulted_file', True, None),
(repr(__file__), 'vaulted_file', False, None),
('q', 'defined', True, None),
('not_defined', 'defined', False, None),
('q', 'undefined', False, None),
('not_defined', 'undefined', True, None),
# files
('"/"', 'directory', True, None),
(repr(__file__), 'directory', False, None),
(repr(__file__), 'file', True, None),
('"/"', 'file', False, None),
('make_link()', 'link', True, Extra(variables=dict(make_link=MakeLink()))),
('"/"', 'link', False, None),
('"/"', 'exists', True, None),
('"/does_not_exist"', 'exists', False, None),
('"/"', 'link_exists', True, None),
('"/does_not_exist"', 'link_exists', False, None),
('"/absolute"', 'abs', True, None),
('"relative"', 'abs', False, None),
('"/"', 'same_file', True, Extra(args=['"/"'])),
(repr(__file__), 'same_file', False, Extra(args=['"/"'])),
('"/"', 'mount', True, None),
('"/not_a_mount_point"', 'mount', False, None),
# mathstuff
([1], 'subset', True, Extra(args=[[1]])),
([0], 'subset', False, Extra(args=[[1]])),
([1], 'superset', True, Extra(args=[[1]])),
([0], 'superset', False, Extra(args=[[1]])),
([0], 'contains', True, Extra(args=[0])),
([1], 'contains', False, Extra(args=[0])),
('nan', 'nan', True, Extra(variables=dict(nan=math.nan))),
('"a string"', 'nan', False, None),
# uri
('"https://ansible.com/"', 'uri', True, None),
(1, 'uri', False, None),
('"https://ansible.com/"', 'url', True, None),
(1, 'url', False, None),
('"urn:https://ansible.com/"', 'urn', True, None),
(1, 'urn', False, None),
)
@pytest.mark.parametrize("value,test,expected,extra", TEST_DATA_SET, ids=str)
def test_truthy_inputs(value: object, test: str, expected: bool, extra: Extra | None) -> None:
    """Ensure test plugins return the expected bool result, not just a truthy/falsey value."""
    test_invocation = test
    if extra:
        # copy the args list; the original aliased extra.args and then mutated it, corrupting the
        # shared module-level Extra instances in TEST_DATA_SET on repeated runs of the same case
        test_args = list(extra.args or [])
        test_args.extend(f'{k}={v}' for k, v in (extra.kwargs or {}).items())
        test_invocation += '(' + ', '.join(str(arg) for arg in test_args) + ')'
    expression = f'{value} is {test_invocation}'
    with emits_warnings(deprecation_pattern=[]):
        result = Templar(variables=extra.variables if extra else None).evaluate_expression(trust_as_template(expression))
    # identity check: the plugin must return an actual bool, not merely a truthy/falsey value
    assert result is expected
def test_ensure_all_plugins_tested() -> None:
    """Ensure all plugins have at least one entry in the test data set, accounting for functions which have multiple names."""
    builtin_tests: list[AnsibleJinja2Test] = [plugin for plugin in test_loader.all() if plugin.ansible_name.startswith('ansible.builtin.')]
    # group aliases by their underlying function, since one implementation may be registered under several names
    aliases_by_function: dict[t.Any, set[str]] = collections.defaultdict(set)
    for plugin in builtin_tests:
        aliases_by_function[plugin.j2_function].add(plugin.ansible_name)
    problems: list[str] = []
    for alias_names in aliases_by_function.values():
        # each plugin (under any alias) must be exercised for both a True and a False outcome
        covered = {entry_expected for _value, entry_test, entry_expected, _extra in TEST_DATA_SET if f'ansible.builtin.{entry_test}' in alias_names}
        uncovered = {True, False} - covered
        if uncovered:  # pragma: nocover
            problems.append(f'{alias_names}: {uncovered}')
    assert not problems
| {
"repo_id": "ansible/ansible",
"file_path": "test/units/plugins/test/test_all.py",
"license": "GNU General Public License v3.0",
"lines": 148,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:lib/ansible/_internal/_testing.py | """
Testing utilities for use in integration tests, not unit tests or non-test code.
Provides better error behavior than Python's `assert` statement.
"""
from __future__ import annotations
import contextlib
import typing as t
class _Checker:
    """Assertion helper handed out by `hard_fail_context`."""
    @staticmethod
    def check(value: object, msg: str | None = 'Value is not truthy.') -> None:
        """Raise an `AssertionError` if the given `value` is not truthy."""
        if value:
            return
        raise AssertionError(msg)
@contextlib.contextmanager
def hard_fail_context(msg: str) -> t.Generator[_Checker]:
    """Enter a context which converts all exceptions to `BaseException` and provides a `Checker` instance for making assertions."""
    try:
        yield _Checker()
    except BaseException as ex:
        # re-raise as a bare BaseException so broad `except Exception` handlers in code under test cannot swallow the failure
        raise BaseException(f"Hard failure: {msg}") from ex
| {
"repo_id": "ansible/ansible",
"file_path": "lib/ansible/_internal/_testing.py",
"license": "GNU General Public License v3.0",
"lines": 20,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ansible/ansible:test/integration/targets/inventory/doc_fragments/fragment_with_expression.py | from __future__ import annotations
class ModuleDocFragment:
    # doc fragment whose option default is an expression string ("2 + 2") rather than a literal value
    DOCUMENTATION = """
    options:
      fragment_expression:
        description: a fragment hosted expression that must be trusted whose default resolves to 4
        default: 2 + 2
    """
| {
"repo_id": "ansible/ansible",
"file_path": "test/integration/targets/inventory/doc_fragments/fragment_with_expression.py",
"license": "GNU General Public License v3.0",
"lines": 8,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:lib/ansible/module_utils/_internal/_deprecator.py | from __future__ import annotations
import re
import pathlib
import sys
import typing as t
from ansible.module_utils._internal import _stack, _messages, _validation, _plugin_info
def deprecator_from_collection_name(collection_name: str | None) -> _messages.PluginInfo | None:
    """Returns an instance with the special `collection` type to refer to a non-plugin or ambiguous caller within a collection."""
    # CAUTION: This function is exposed in public API as ansible.module_utils.datatag.deprecator_from_collection_name.
    if not collection_name:
        return None
    _validation.validate_collection_name(collection_name)
    # `type=None` marks this as a whole-collection (non-plugin) deprecator
    return _messages.PluginInfo(resolved_name=collection_name, type=None)
def get_best_deprecator(*, deprecator: _messages.PluginInfo | None = None, collection_name: str | None = None) -> _messages.PluginInfo:
    """Return the best-available `PluginInfo` for the caller of this method."""
    _skip_stackwalk = True  # sentinel local which hides this frame from `_stack` stack walks
    if deprecator and collection_name:
        raise ValueError('Specify only one of `deprecator` or `collection_name`.')
    # fallback chain: explicit deprecator, then collection name, then the calling plugin, then indeterminate
    if deprecator:
        return deprecator
    if collection_deprecator := deprecator_from_collection_name(collection_name):
        return collection_deprecator
    return get_caller_plugin_info() or INDETERMINATE_DEPRECATOR
def get_caller_plugin_info() -> _messages.PluginInfo | None:
    """Try to get `PluginInfo` for the caller of this method, ignoring marked infrastructure stack frames."""
    _skip_stackwalk = True  # sentinel local: excludes this frame from the `_stack` stack walk
    if frame_info := _stack.caller_frame():
        return _path_as_plugininfo(frame_info.filename)
    return None  # pragma: nocover
def _path_as_plugininfo(path: str) -> _messages.PluginInfo | None:
    """Return a `PluginInfo` instance if the provided `path` refers to a plugin."""
    # core paths take precedence; fall back to collection paths
    core_info = _path_as_core_plugininfo(path)
    if core_info:
        return core_info
    return _path_as_collection_plugininfo(path)
def _path_as_core_plugininfo(path: str) -> _messages.PluginInfo | None:
    """Return a `PluginInfo` instance if the provided `path` refers to a core plugin."""
    try:
        relpath = str(pathlib.Path(path).relative_to(_ANSIBLE_MODULE_BASE_PATH))
    except ValueError:
        return None  # not ansible-core
    namespace = 'ansible.builtin'
    if match := re.match(r'plugins/(?P<plugin_type>\w+)/(?P<plugin_name>\w+)', relpath):
        # regular plugin under the core plugins/<type>/<name> layout
        plugin_name = match.group("plugin_name")
        plugin_type = _plugin_info.normalize_plugin_type(match.group("plugin_type"))
        if plugin_type not in _DEPRECATOR_PLUGIN_TYPES:
            # The plugin type isn't a known deprecator type, so we have to assume the caller is intermediate code.
            # We have no way of knowing if the intermediate code is deprecating its own feature, or acting on behalf of another plugin.
            # Callers in this case need to identify the deprecating plugin name, otherwise only ansible-core will be reported.
            # Reporting ansible-core is never wrong, it just may be missing an additional detail (plugin name) in the "on behalf of" case.
            return ANSIBLE_CORE_DEPRECATOR
        if plugin_name == '__init__':
            # The plugin type is known, but the caller isn't a specific plugin -- instead, it's core plugin infrastructure (the base class).
            return _messages.PluginInfo(resolved_name=namespace, type=plugin_type)
    elif match := re.match(r'modules/(?P<module_name>\w+)', relpath):
        # AnsiballZ Python package for core modules
        plugin_name = match.group("module_name")
        plugin_type = _messages.PluginType.MODULE
    elif match := re.match(r'legacy/(?P<module_name>\w+)', relpath):
        # AnsiballZ Python package for non-core library/role modules
        namespace = 'ansible.legacy'
        plugin_name = match.group("module_name")
        plugin_type = _messages.PluginType.MODULE
    else:
        return ANSIBLE_CORE_DEPRECATOR  # non-plugin core path, safe to use ansible-core for the same reason as the non-deprecator plugin type case above
    name = f'{namespace}.{plugin_name}'
    return _messages.PluginInfo(resolved_name=name, type=plugin_type)
def _path_as_collection_plugininfo(path: str) -> _messages.PluginInfo | None:
    """Return a `PluginInfo` instance if the provided `path` refers to a collection plugin."""
    # match .../ansible_collections/<ns>/<coll>/plugins/<plugin_type>/<plugin_name> anywhere in the path
    if not (match := re.search(r'/ansible_collections/(?P<ns>\w+)/(?P<coll>\w+)/plugins/(?P<plugin_type>\w+)/(?P<plugin_name>\w+)', path)):
        return None
    plugin_type = _plugin_info.normalize_plugin_type(match.group('plugin_type'))
    if plugin_type in _AMBIGUOUS_DEPRECATOR_PLUGIN_TYPES:
        # We're able to detect the namespace, collection and plugin type -- but we have no way to identify the plugin name currently.
        # To keep things simple we'll fall back to just identifying the namespace and collection.
        # In the future we could improve the detection and/or make it easier for a caller to identify the plugin name.
        return deprecator_from_collection_name('.'.join((match.group('ns'), match.group('coll'))))
    if plugin_type not in _DEPRECATOR_PLUGIN_TYPES:
        # The plugin type isn't a known deprecator type, so we have to assume the caller is intermediate code.
        # We have no way of knowing if the intermediate code is deprecating its own feature, or acting on behalf of another plugin.
        # Callers in this case need to identify the deprecator to avoid ambiguity, since it could be the same collection or another collection.
        return INDETERMINATE_DEPRECATOR
    name = '.'.join((match.group('ns'), match.group('coll'), match.group('plugin_name')))
    # DTFIX-FUTURE: deprecations from __init__ will be incorrectly attributed to a plugin of that name
    return _messages.PluginInfo(resolved_name=name, type=plugin_type)
_ANSIBLE_MODULE_BASE_PATH: t.Final = pathlib.Path(sys.modules['ansible'].__file__).parent
"""Runtime-detected base path of the `ansible` Python package to distinguish between Ansible-owned and external code."""
ANSIBLE_CORE_DEPRECATOR: t.Final = deprecator_from_collection_name('ansible.builtin')
"""Singleton `PluginInfo` instance for ansible-core callers where the plugin can/should not be identified in messages."""
INDETERMINATE_DEPRECATOR: t.Final = _messages.PluginInfo(resolved_name=None, type=None)
"""Singleton `PluginInfo` instance for indeterminate deprecator."""
_DEPRECATOR_PLUGIN_TYPES: t.Final = frozenset(
{
_messages.PluginType.ACTION,
_messages.PluginType.BECOME,
_messages.PluginType.CACHE,
_messages.PluginType.CALLBACK,
_messages.PluginType.CLICONF,
_messages.PluginType.CONNECTION,
# DOC_FRAGMENTS - no code execution
# FILTER - basename inadequate to identify plugin
_messages.PluginType.HTTPAPI,
_messages.PluginType.INVENTORY,
_messages.PluginType.LOOKUP,
_messages.PluginType.MODULE, # only for collections
_messages.PluginType.NETCONF,
_messages.PluginType.SHELL,
_messages.PluginType.STRATEGY,
_messages.PluginType.TERMINAL,
# TEST - basename inadequate to identify plugin
_messages.PluginType.VARS,
}
)
"""Plugin types which are valid for identifying a deprecator for deprecation purposes."""
_AMBIGUOUS_DEPRECATOR_PLUGIN_TYPES: t.Final = frozenset(
{
_messages.PluginType.FILTER,
_messages.PluginType.TEST,
}
)
"""Plugin types for which basename cannot be used to identify the plugin name."""
| {
"repo_id": "ansible/ansible",
"file_path": "lib/ansible/module_utils/_internal/_deprecator.py",
"license": "GNU General Public License v3.0",
"lines": 117,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
ansible/ansible:lib/ansible/module_utils/_internal/_plugin_info.py | from __future__ import annotations
import typing as t
from . import _messages
class HasPluginInfo(t.Protocol):
    """Protocol to type-annotate and expose PluginLoader-set values."""

    # structural (duck-typed) protocol: any object exposing these properties matches,
    # no inheritance from this class required

    @property
    def ansible_name(self) -> str | None:
        """Fully resolved plugin name."""

    @property
    def plugin_type(self) -> str:
        """Plugin type name."""
def get_plugin_info(value: HasPluginInfo) -> _messages.PluginInfo:
    """Build a `PluginInfo` from any object satisfying the `HasPluginInfo` protocol."""
    resolved_type = normalize_plugin_type(value.plugin_type)

    return _messages.PluginInfo(resolved_name=value.ansible_name, type=resolved_type)
def normalize_plugin_type(value: str) -> _messages.PluginType | None:
    """Normalize value and return it as a PluginType, or None if the value does not match any known plugin type."""
    value = value.lower()

    # the plugin loader uses the plural 'modules'; the PluginType enum uses the singular form
    if value == 'modules':
        value = 'module'

    try:
        return _messages.PluginType(value)
    except ValueError:
        # unknown plugin type; callers treat None as "could not be determined"
        return None
| {
"repo_id": "ansible/ansible",
"file_path": "lib/ansible/module_utils/_internal/_plugin_info.py",
"license": "GNU General Public License v3.0",
"lines": 26,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ansible/ansible:lib/ansible/module_utils/_internal/_validation.py | from __future__ import annotations
import keyword
def validate_collection_name(collection_name: object, name: str = 'collection_name') -> None:
    """Validate a collection name of the form ``namespace.collection``, raising TypeError or ValueError on failure."""
    if not isinstance(collection_name, str):
        raise TypeError(f"{name} must be {str} instead of {type(collection_name)}")

    def _valid_part(part: str) -> bool:
        # each dotted component must be a legal, non-reserved Python identifier
        return part.isidentifier() and not keyword.iskeyword(part)

    namespace, dot, collection = collection_name.partition('.')

    if not dot or '.' in collection or not (_valid_part(namespace) and _valid_part(collection)):
        raise ValueError(f"{name} must consist of two non-keyword identifiers separated by '.'")
| {
"repo_id": "ansible/ansible",
"file_path": "lib/ansible/module_utils/_internal/_validation.py",
"license": "GNU General Public License v3.0",
"lines": 9,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ansible/ansible:test/integration/targets/ansible-test-sanity-pylint/ansible_collections/ns/col/plugins/action/do_deprecated_stuff.py | from __future__ import annotations
from ansible.module_utils.datatag import deprecator_from_collection_name
from ansible.plugins.action import ActionBase
from ansible.utils.display import _display
# extra lines below to allow for adding more imports without shifting the line numbers of the code that follows
#
#
#
#
#
class ActionModule(ActionBase):  # fixture: each deprecated() call below names the pylint check it is expected to trigger
    def run(self, tmp=None, task_vars=None):  # NOTE(review): fixture looks line-number sensitive (see "extra lines" header comment) -- comments appended to existing lines only
        result = super(ActionModule, self).run(tmp, task_vars)
        deprecator = deprecator_from_collection_name('ns.col')  # PluginInfo deprecator for this collection
        # ansible-deprecated-version - only ansible-core can encounter this
        _display.deprecated(msg='ansible-deprecated-no-version')
        # ansible-invalid-deprecated-version - only ansible-core can encounter this
        _display.deprecated(msg='collection-deprecated-version', version='1.0.0')
        _display.deprecated(msg='collection-invalid-deprecated-version', version='not-a-version')
        # ansible-deprecated-no-collection-name - only a module_utils can encounter this
        _display.deprecated(msg='wrong-collection-deprecated', collection_name='ns.wrong', version='3.0.0')
        _display.deprecated(msg='ansible-expired-deprecated-date', date='2000-01-01')
        _display.deprecated(msg='ansible-invalid-deprecated-date', date='not-a-date')
        _display.deprecated(msg='ansible-deprecated-both-version-and-date', version='3.0.0', date='2099-01-01')
        _display.deprecated(msg='removal-version-must-be-major', version='3.1.0')
        # ansible-deprecated-date-not-permitted - only ansible-core can encounter this
        _display.deprecated(msg='ansible-deprecated-unnecessary-collection-name', deprecator=deprecator, version='3.0.0')
        # ansible-deprecated-collection-name-not-permitted - only ansible-core can encounter this
        _display.deprecated(msg='ansible-deprecated-both-collection-name-and-deprecator', collection_name='ns.col', deprecator=deprecator, version='3.0.0')
        return result
| {
"repo_id": "ansible/ansible",
"file_path": "test/integration/targets/ansible-test-sanity-pylint/ansible_collections/ns/col/plugins/action/do_deprecated_stuff.py",
"license": "GNU General Public License v3.0",
"lines": 30,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:test/integration/targets/ansible-test-sanity-pylint/ansible_collections/ns/col/plugins/module_utils/deprecated_utils.py | from __future__ import annotations
from ansible.module_utils.datatag import deprecator_from_collection_name
from ansible.module_utils.common.warnings import deprecate
# extra lines below to allow for adding more imports without shifting the line numbers of the code that follows
#
#
#
#
#
#
#
#
def do_stuff() -> None:  # fixture: each deprecate() call names the pylint check it is expected to trigger (line positions appear significant -- comments appended only)
    deprecator = deprecator_from_collection_name('ns.col')  # PluginInfo deprecator for this collection
    # ansible-deprecated-version - only ansible-core can encounter this
    deprecate(msg='ansible-deprecated-no-version', collection_name='ns.col')
    # ansible-invalid-deprecated-version - only ansible-core can encounter this
    deprecate(msg='collection-deprecated-version', collection_name='ns.col', version='1.0.0')
    deprecate(msg='collection-invalid-deprecated-version', collection_name='ns.col', version='not-a-version')
    # ansible-deprecated-no-collection-name - module_utils cannot encounter this
    deprecate(msg='wrong-collection-deprecated', collection_name='ns.wrong', version='3.0.0')
    deprecate(msg='ansible-expired-deprecated-date', collection_name='ns.col', date='2000-01-01')
    deprecate(msg='ansible-invalid-deprecated-date', collection_name='ns.col', date='not-a-date')
    deprecate(msg='ansible-deprecated-both-version-and-date', collection_name='ns.col', version='3.0.0', date='2099-01-01')
    deprecate(msg='removal-version-must-be-major', collection_name='ns.col', version='3.1.0')
    # ansible-deprecated-date-not-permitted - only ansible-core can encounter this
    # ansible-deprecated-unnecessary-collection-name - module_utils cannot encounter this
    # ansible-deprecated-collection-name-not-permitted - only ansible-core can encounter this
    deprecate(msg='ansible-deprecated-both-collection-name-and-deprecator', collection_name='ns.col', deprecator=deprecator, version='3.0.0')
| {
"repo_id": "ansible/ansible",
"file_path": "test/integration/targets/ansible-test-sanity-pylint/ansible_collections/ns/col/plugins/module_utils/deprecated_utils.py",
"license": "GNU General Public License v3.0",
"lines": 29,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:test/integration/targets/ansible-test-sanity-pylint/deprecated_thing.py | """
This file is not used by the integration test, but serves a related purpose.
It triggers sanity test failures that can only occur for ansible-core, which need to be ignored to ensure the pylint plugin is functioning properly.
"""
from __future__ import annotations
from ansible.module_utils.datatag import deprecator_from_collection_name
from ansible.module_utils.common.warnings import deprecate
def do_stuff() -> None:  # fixture: triggers ansible-core-only checks which must appear in the sanity ignores (see module docstring)
    deprecator = deprecator_from_collection_name('ansible.builtin')  # PluginInfo deprecator for ansible-core
    deprecate(msg='ansible-deprecated-version', version='2.18')
    deprecate(msg='ansible-deprecated-no-version')
    deprecate(msg='ansible-invalid-deprecated-version', version='not-a-version')
    # collection-deprecated-version - ansible-core cannot encounter this
    # collection-invalid-deprecated-version - ansible-core cannot encounter this
    # ansible-deprecated-no-collection-name - ansible-core cannot encounter this
    # wrong-collection-deprecated - ansible-core cannot encounter this
    # ansible-expired-deprecated-date - ansible-core cannot encounter this
    # ansible-invalid-deprecated-date - ansible-core cannot encounter this
    # ansible-deprecated-both-version-and-date - ansible-core cannot encounter this
    # removal-version-must-be-major - ansible-core cannot encounter this
    deprecate(msg='ansible-deprecated-date-not-permitted', date='2099-01-01')
    deprecate(msg='ansible-deprecated-unnecessary-collection-name', deprecator=deprecator, version='2.99')
    deprecate(msg='ansible-deprecated-collection-name-not-permitted', collection_name='ansible.builtin', version='2.99')
    # ansible-deprecated-both-collection-name-and-deprecator - ansible-core cannot encounter this
| {
"repo_id": "ansible/ansible",
"file_path": "test/integration/targets/ansible-test-sanity-pylint/deprecated_thing.py",
"license": "GNU General Public License v3.0",
"lines": 24,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:test/integration/targets/deprecations/collections/ansible_collections/foo/bar/plugins/action/noisy_action.py | from __future__ import annotations
from ansible.plugins.action import ActionBase
from ansible.module_utils.common.warnings import deprecate
from ..module_utils.shared_deprecation import get_deprecation_kwargs, get_deprecated_value
class ActionModule(ActionBase):  # fixture action plugin that emits every shared deprecation plus a deprecated return value
    def run(self, tmp=None, task_vars=None):
        result = super().run(tmp, task_vars)
        for deprecate_kw in get_deprecation_kwargs():  # one call per deprecator-identification style being exercised
            deprecate(**deprecate_kw)  # pylint: disable=ansible-deprecated-no-version
        result.update(deprecated_result=get_deprecated_value())  # value produced via deprecate_value
        return result
| {
"repo_id": "ansible/ansible",
"file_path": "test/integration/targets/deprecations/collections/ansible_collections/foo/bar/plugins/action/noisy_action.py",
"license": "GNU General Public License v3.0",
"lines": 11,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:test/integration/targets/deprecations/collections/ansible_collections/foo/bar/plugins/module_utils/shared_deprecation.py | from __future__ import annotations
from ansible.module_utils.datatag import deprecate_value, deprecator_from_collection_name
def get_deprecation_kwargs() -> list[dict[str, object]]:  # kwargs sets exercising each supported way of identifying the deprecator
    return [
        dict(msg="Deprecation that passes collection_name, version, and help_text.", version='9999.9', collection_name='bla.bla', help_text="Help text."),
        dict(
            msg="Deprecation that passes deprecator and datetime.date.",
            date='2034-01-02',  # NOTE(review): msg says datetime.date but a str is passed -- confirm intent
            deprecator=deprecator_from_collection_name('bla.bla'),
        ),
        dict(msg="Deprecation that passes deprecator and string date.", date='2034-01-02', deprecator=deprecator_from_collection_name('bla.bla')),
        dict(msg="Deprecation that passes no deprecator, collection name, or date/version."),
    ]
def get_deprecated_value() -> str:  # fixture: a value passed through deprecate_value with explicit collection_name/version
    return deprecate_value(  # pylint: disable=ansible-deprecated-unnecessary-collection-name,ansible-deprecated-collection-name-not-permitted
        value='a deprecated value',
        msg="value is deprecated",
        collection_name='foo.bar',
        version='9999.9',
    )
| {
"repo_id": "ansible/ansible",
"file_path": "test/integration/targets/deprecations/collections/ansible_collections/foo/bar/plugins/module_utils/shared_deprecation.py",
"license": "GNU General Public License v3.0",
"lines": 20,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:test/integration/targets/deprecations/collections/ansible_collections/foo/bar/plugins/modules/noisy.py | from __future__ import annotations
from ansible.module_utils.basic import AnsibleModule
from ..module_utils.shared_deprecation import get_deprecation_kwargs, get_deprecated_value
def main() -> None:  # fixture module: emit a warning, all shared deprecations, then a deprecated result value
    m = AnsibleModule({})  # no options needed; this module exists only to produce warnings/deprecations
    m.warn("This is a warning.")
    for deprecate_kw in get_deprecation_kwargs():  # one call per deprecator-identification style being exercised
        m.deprecate(**deprecate_kw)  # pylint: disable=ansible-deprecated-no-version
    m.exit_json(deprecated_result=get_deprecated_value())


if __name__ == '__main__':
    main()
| {
"repo_id": "ansible/ansible",
"file_path": "test/integration/targets/deprecations/collections/ansible_collections/foo/bar/plugins/modules/noisy.py",
"license": "GNU General Public License v3.0",
"lines": 11,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated_calls.py | """Ansible-specific pylint plugin for checking deprecation calls."""
# (c) 2018, Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import dataclasses
import datetime
import functools
import pathlib
import re
import astroid.bases
import astroid.exceptions
import astroid.nodes
import astroid.typing
import astroid.util
import pylint.lint
import pylint.checkers
import pylint.checkers.utils
import ansible.release
from ansible.module_utils._internal._deprecator import INDETERMINATE_DEPRECATOR, _path_as_collection_plugininfo
from ansible.module_utils.compat.version import StrictVersion
from ansible.utils.version import SemanticVersion
@dataclasses.dataclass(frozen=True, kw_only=True)
class DeprecationCallArgs:
    """Arguments passed to a deprecation function."""

    # Each field holds the literal value extracted from the call (constants are unwrapped),
    # an astroid node when the argument is a dynamic expression, or None when omitted.
    msg: object = None
    version: object = None
    date: object = None
    collection_name: object = None
    deprecator: object = None
    help_text: object = None  # only on Display.deprecated, warnings.deprecate and deprecate_value
    obj: object = None  # only on Display.deprecated and warnings.deprecate
    removed: object = None  # only on Display.deprecated
    value: object = None  # only on deprecate_value

    def all_args_dynamic(self) -> bool:
        """True if all args are dynamic or None, otherwise False."""
        return all(arg is None or isinstance(arg, astroid.nodes.NodeNG) for arg in dataclasses.asdict(self).values())
class AnsibleDeprecatedChecker(pylint.checkers.BaseChecker):
"""Checks for deprecated calls to ensure proper usage."""
name = 'deprecated-calls'
msgs = {
'E9501': (
"Deprecated version %r found in call to %r",
"ansible-deprecated-version",
None,
),
'E9502': (
"Found %r call without a version or date",
"ansible-deprecated-no-version",
None,
),
'E9503': (
"Invalid deprecated version %r found in call to %r",
"ansible-invalid-deprecated-version",
None,
),
'E9504': (
"Deprecated version %r found in call to %r",
"collection-deprecated-version",
None,
),
'E9505': (
"Invalid deprecated version %r found in call to %r",
"collection-invalid-deprecated-version",
None,
),
'E9506': (
"No collection_name or deprecator found in call to %r",
"ansible-deprecated-no-collection-name",
None,
),
'E9507': (
"Wrong collection_name %r found in call to %r",
"wrong-collection-deprecated",
None,
),
'E9508': (
"Expired date %r found in call to %r",
"ansible-expired-deprecated-date",
None,
),
'E9509': (
"Invalid date %r found in call to %r",
"ansible-invalid-deprecated-date",
None,
),
'E9510': (
"Both version and date found in call to %r",
"ansible-deprecated-both-version-and-date",
None,
),
'E9511': (
"Removal version %r must be a major release, not a minor or patch release, see https://semver.org/",
"removal-version-must-be-major",
None,
),
'E9512': (
"Passing date is not permitted in call to %r for ansible-core, use a version instead",
"ansible-deprecated-date-not-permitted",
None,
),
'E9513': (
"Unnecessary %r found in call to %r",
"ansible-deprecated-unnecessary-collection-name",
None,
),
'E9514': (
"Passing collection_name not permitted in call to %r for ansible-core, use deprecator instead",
"ansible-deprecated-collection-name-not-permitted",
None,
),
'E9515': (
"Both collection_name and deprecator found in call to %r",
"ansible-deprecated-both-collection-name-and-deprecator",
None,
),
}
options = (
(
'collection-name',
dict(
default=None,
type='string',
metavar='<name>',
help="The name of the collection to check.",
),
),
(
'collection-version',
dict(
default=None,
type='string',
metavar='<version>',
help="The version of the collection to check.",
),
),
(
'collection-path',
dict(
default=None,
type='string',
metavar='<path>',
help="The path of the collection to check.",
),
),
)
ANSIBLE_VERSION = StrictVersion(re.match('[0-9.]*[0-9]', ansible.release.__version__)[0])
"""The current ansible-core X.Y.Z version."""
DEPRECATION_MODULE_FUNCTIONS: dict[tuple[str, str], tuple[str, ...]] = {
('ansible.module_utils.common.warnings', 'deprecate'): ('msg', 'version', 'date', 'collection_name'),
('ansible.module_utils.datatag', 'deprecate_value'): ('value', 'msg'),
('ansible.module_utils.basic', 'AnsibleModule.deprecate'): ('msg', 'version', 'date', 'collection_name'),
('ansible.utils.display', 'Display.deprecated'): ('msg', 'version', 'removed', 'date', 'collection_name'),
}
"""Mapping of deprecation module+function and their positional arguments."""
DEPRECATION_MODULES = frozenset(key[0] for key in DEPRECATION_MODULE_FUNCTIONS)
"""Modules which contain deprecation functions."""
DEPRECATION_FUNCTIONS = {'.'.join(key): value for key, value in DEPRECATION_MODULE_FUNCTIONS.items()}
"""Mapping of deprecation functions and their positional arguments."""
def __init__(self, *args, **kwargs) -> None:
    super().__init__(*args, **kwargs)
    # cache of imported modules keyed by module name; populated and read by get_module
    self.module_cache: dict[str, astroid.nodes.Module] = {}
@functools.cached_property
def collection_name(self) -> str | None:
    """Return the collection name, or None if ansible-core is being tested."""
    # `or None` normalizes an unset/empty config value to None
    return self.linter.config.collection_name or None
@functools.cached_property
def collection_path(self) -> pathlib.Path:
    """Return the collection path. Not valid when ansible-core is being tested."""
    # Path(None) would raise; callers must only use this when a collection is under test
    return pathlib.Path(self.linter.config.collection_path)
@functools.cached_property
def collection_version(self) -> SemanticVersion | None:
    """Return the collection version, or None if ansible-core is being tested."""
    if not self.linter.config.collection_version:
        return None

    sem_ver = SemanticVersion(self.linter.config.collection_version)
    # clearing prerelease makes comparisons consider only major.minor.patch
    sem_ver.prerelease = ()  # ignore pre-release for version comparison to catch issues before the final release is cut

    return sem_ver
@functools.cached_property
def is_ansible_core(self) -> bool:
    """Whether the code under test belongs to ansible-core rather than a collection."""
    # collection_name is a str when testing a collection and None for ansible-core
    return self.collection_name is None
@functools.cached_property
def today_utc(self) -> datetime.date:
    """Today's date in UTC."""
    # cached so every date comparison during the run sees a consistent "today"
    return datetime.datetime.now(tz=datetime.timezone.utc).date()
def is_deprecator_required(self) -> bool | None:
    """Determine if a `collection_name` or `deprecator` is required (True), unnecessary (False) or optional (None)."""
    if self.is_ansible_core:
        return False  # in ansible-core, never provide the deprecator -- if it really is needed, disable the sanity test inline for that line of code

    plugin_info = _path_as_collection_plugininfo(self.linter.current_file)

    if plugin_info is INDETERMINATE_DEPRECATOR:
        return True  # deprecator cannot be detected, caller must provide deprecator

    # deprecated: description='deprecate collection_name/deprecator now that detection is widely available' core_version='2.23'
    # When this deprecation triggers, change the return type here to False.
    # At that point, callers should be able to omit the collection_name/deprecator in all but a few cases (inline ignores can be used for those cases)
    return None
@pylint.checkers.utils.only_required_for_messages(*(msgs.keys()))
def visit_call(self, node: astroid.nodes.Call) -> None:
    """Visit a call node."""
    # only calls whose callee resolves to one of the known deprecation functions are checked
    if inferred := self.infer(node.func):
        name = self.get_fully_qualified_name(inferred)

        if args := self.DEPRECATION_FUNCTIONS.get(name):
            self.check_call(node, name, args)
def infer(self, node: astroid.nodes.NodeNG) -> astroid.nodes.NodeNG | None:
"""Return the inferred node from the given node, or `None` if it cannot be unambiguously inferred."""
names: list[str] = []
target: astroid.nodes.NodeNG | None = node
inferred: astroid.typing.InferenceResult | None = None
while target:
if inferred := astroid.util.safe_infer(target):
break
if isinstance(target, astroid.nodes.Call):
inferred = self.infer(target.func)
break
if isinstance(target, astroid.nodes.FunctionDef):
inferred = target
break
if isinstance(target, astroid.nodes.Name):
target = self.infer_name(target)
elif isinstance(target, astroid.nodes.AssignName) and isinstance(target.parent, astroid.nodes.Assign):
target = target.parent.value
elif isinstance(target, astroid.nodes.Attribute):
names.append(target.attrname)
target = target.expr
else:
break
for name in reversed(names):
if isinstance(inferred, astroid.bases.Instance):
try:
attr = next(iter(inferred.getattr(name)), None)
except astroid.exceptions.AttributeInferenceError:
break
if isinstance(attr, astroid.nodes.AssignAttr):
inferred = self.get_ansible_module(attr)
continue
if isinstance(attr, astroid.nodes.FunctionDef):
inferred = attr
continue
if not isinstance(inferred, (astroid.nodes.Module, astroid.nodes.ClassDef)):
inferred = None
break
try:
inferred = inferred[name]
except KeyError:
inferred = None
else:
inferred = self.infer(inferred)
if isinstance(inferred, astroid.nodes.FunctionDef) and isinstance(inferred.parent, astroid.nodes.ClassDef):
inferred = astroid.bases.BoundMethod(inferred, inferred.parent)
return inferred
def infer_name(self, node: astroid.nodes.Name) -> astroid.nodes.NodeNG | None:
"""Infer the node referenced by the given name, or `None` if it cannot be unambiguously inferred."""
scope = node.scope()
inferred: astroid.nodes.NodeNG | None = None
name = node.name
while scope:
try:
assignment = scope[name]
except KeyError:
scope = scope.parent.scope() if scope.parent else None
continue
if isinstance(assignment, astroid.nodes.AssignName) and isinstance(assignment.parent, astroid.nodes.Assign):
inferred = assignment.parent.value
elif (
isinstance(scope, astroid.nodes.FunctionDef)
and isinstance(assignment, astroid.nodes.AssignName)
and isinstance(assignment.parent, astroid.nodes.Arguments)
and assignment.parent.annotations
):
idx, _node = assignment.parent.find_argname(name)
if idx is not None:
try:
annotation = assignment.parent.annotations[idx]
except IndexError:
pass
else:
if isinstance(annotation, astroid.nodes.Name):
name = annotation.name
continue
elif isinstance(assignment, astroid.nodes.ClassDef):
inferred = assignment
elif isinstance(assignment, astroid.nodes.ImportFrom):
if module := self.get_module(assignment):
name = assignment.real_name(name)
scope = module.scope()
continue
break
return inferred
def get_module(self, node: astroid.nodes.ImportFrom) -> astroid.nodes.Module | None:
    """Import the requested module if possible and cache the result."""
    module_name = pylint.checkers.utils.get_import_name(node, node.modname)

    # only modules known to contain deprecation functions are worth importing
    if module_name not in self.DEPRECATION_MODULES:
        return None  # avoid unnecessary import overhead

    if module := self.module_cache.get(module_name):
        return module

    module = node.do_import_module()

    # sanity check: astroid should resolve to exactly the module that was requested
    if module.name != module_name:
        raise RuntimeError(f'Attempted to import {module_name!r} but found {module.name!r} instead.')

    self.module_cache[module_name] = module

    return module
@staticmethod
def get_fully_qualified_name(node: astroid.nodes.NodeNG) -> str | None:
    """Return the fully qualified name of the given inferred node."""
    parent = node.parent
    parts: tuple[str, ...] | None

    if isinstance(node, astroid.nodes.FunctionDef) and isinstance(parent, astroid.nodes.Module):
        # module-level function: module.function
        parts = (parent.name, node.name)
    elif isinstance(node, astroid.bases.BoundMethod) and isinstance(parent, astroid.nodes.ClassDef) and isinstance(parent.parent, astroid.nodes.Module):
        # bound method: module.Class.method
        parts = (parent.parent.name, parent.name, node.name)
    else:
        parts = None  # anything else has no usable qualified name

    return '.'.join(parts) if parts else None
def check_call(self, node: astroid.nodes.Call, name: str, args: tuple[str, ...]) -> None:
    """Check the given deprecation call node for valid arguments."""
    call_args = self.get_deprecation_call_args(node, args)

    self.check_collection_name(node, name, call_args)

    # every deprecation must state when removal happens, via version or date
    if not call_args.version and not call_args.date:
        self.add_message('ansible-deprecated-no-version', node=node, args=(name,))
        return

    # ansible-core deprecations must use a version, never a date
    if call_args.date and self.is_ansible_core:
        self.add_message('ansible-deprecated-date-not-permitted', node=node, args=(name,))
        return

    if call_args.all_args_dynamic():
        # assume collection maintainers know what they're doing if all args are dynamic
        return

    # version and date are mutually exclusive
    if call_args.version and call_args.date:
        self.add_message('ansible-deprecated-both-version-and-date', node=node, args=(name,))
        return

    if call_args.date:
        self.check_date(node, name, call_args)

    if call_args.version:
        self.check_version(node, name, call_args)
@staticmethod
def get_deprecation_call_args(node: astroid.nodes.Call, args: tuple[str, ...]) -> DeprecationCallArgs:
    """Get the deprecation call arguments from the given node."""
    fields: dict[str, object] = {}

    # map positional arguments to field names using the supplied signature order
    for idx, arg in enumerate(node.args):
        field = args[idx]
        fields[field] = arg

    for keyword in node.keywords:
        if keyword.arg is not None:  # a None arg is a **kwargs expansion, which has no usable name
            fields[keyword.arg] = keyword.value

    # unwrap constants so checks can inspect literal values; dynamic expressions remain astroid nodes
    for key, value in fields.items():
        if isinstance(value, astroid.nodes.Const):
            fields[key] = value.value

    return DeprecationCallArgs(**fields)
def check_collection_name(self, node: astroid.nodes.Call, name: str, args: DeprecationCallArgs) -> None:
"""Check the collection name provided to the given call node."""
deprecator_requirement = self.is_deprecator_required()
if self.is_ansible_core and args.collection_name:
self.add_message('ansible-deprecated-collection-name-not-permitted', node=node, args=(name,))
return
if args.collection_name and args.deprecator:
self.add_message('ansible-deprecated-both-collection-name-and-deprecator', node=node, args=(name,))
if deprecator_requirement is True:
if not args.collection_name and not args.deprecator:
self.add_message('ansible-deprecated-no-collection-name', node=node, args=(name,))
return
elif deprecator_requirement is False:
if args.collection_name:
self.add_message('ansible-deprecated-unnecessary-collection-name', node=node, args=('collection_name', name,))
return
if args.deprecator:
self.add_message('ansible-deprecated-unnecessary-collection-name', node=node, args=('deprecator', name,))
return
else:
# collection_name may be needed for backward compat with 2.18 and earlier, since it is only detected in 2.19 and later
if args.deprecator:
# Unlike collection_name, which is needed for backward compat, deprecator is generally not needed by collections.
# For the very rare cases where this is needed by collections, an inline pylint ignore can be used to silence it.
self.add_message('ansible-deprecated-unnecessary-collection-name', node=node, args=('deprecator', name,))
return
if args.all_args_dynamic():
# assume collection maintainers know what they're doing if all args are dynamic
return
expected_collection_name = 'ansible.builtin' if self.is_ansible_core else self.collection_name
if args.collection_name and args.collection_name != expected_collection_name:
self.add_message('wrong-collection-deprecated', node=node, args=(args.collection_name, name))
def check_version(self, node: astroid.nodes.Call, name: str, args: DeprecationCallArgs) -> None:
    """Dispatch version validation to the collection or core checker as appropriate."""
    checker = self.check_collection_version if self.collection_name else self.check_core_version
    checker(node, name, args)
def check_core_version(self, node: astroid.nodes.Call, name: str, args: DeprecationCallArgs) -> None:
    """Check the core version provided to the given call node."""
    try:
        if not isinstance(args.version, str) or not args.version:
            raise ValueError()  # route non-string/empty versions into the invalid-version path below

        strict_version = StrictVersion(args.version)
    except ValueError:
        self.add_message('ansible-invalid-deprecated-version', node=node, args=(args.version, name))
        return

    # a removal version at or below the current core version means the deprecation is overdue
    if self.ANSIBLE_VERSION >= strict_version:
        self.add_message('ansible-deprecated-version', node=node, args=(args.version, name))
def check_collection_version(self, node: astroid.nodes.Call, name: str, args: DeprecationCallArgs) -> None:
    """Check the collection version provided to the given call node."""
    try:
        if not isinstance(args.version, str) or not args.version:
            raise ValueError()  # route non-string/empty versions into the invalid-version path below

        semantic_version = SemanticVersion(args.version)
    except ValueError:
        self.add_message('collection-invalid-deprecated-version', node=node, args=(args.version, name))
        return

    # comparison uses the collection version with pre-release stripped (see collection_version)
    if self.collection_version >= semantic_version:
        self.add_message('collection-deprecated-version', node=node, args=(args.version, name))

    # removals must land on a major release boundary (X.0.0); 0.x versions are exempt
    if semantic_version.major != 0 and (semantic_version.minor != 0 or semantic_version.patch != 0):
        self.add_message('removal-version-must-be-major', node=node, args=(args.version,))
def check_date(self, node: astroid.nodes.Call, name: str, args: DeprecationCallArgs) -> None:
    """Check the date provided to the given call node."""
    try:
        # ValueError for malformed ISO strings, TypeError for non-string values
        date_parsed = self.parse_isodate(args.date)
    except (ValueError, TypeError):
        self.add_message('ansible-invalid-deprecated-date', node=node, args=(args.date, name))
    else:
        # a removal date in the past means the deprecation is overdue
        if date_parsed < self.today_utc:
            self.add_message('ansible-expired-deprecated-date', node=node, args=(args.date, name))
@staticmethod
def parse_isodate(value: object) -> datetime.date:
    """Parse an ISO 8601 date string, raising TypeError for non-string input."""
    if not isinstance(value, str):
        raise TypeError(type(value))

    return datetime.date.fromisoformat(value)
def get_ansible_module(self, node: astroid.nodes.AssignAttr) -> astroid.bases.Instance | None:
    """Infer an AnsibleModule instance node from the given assignment."""
    if isinstance(node.parent, astroid.nodes.Assign) and isinstance(node.parent.type_annotation, astroid.nodes.Name):
        # assignment carrying a type annotation comment: follow the annotated type name
        inferred = self.infer_name(node.parent.type_annotation)
    elif (isinstance(node.parent, astroid.nodes.Assign) and isinstance(node.parent.parent, astroid.nodes.FunctionDef) and
          isinstance(node.parent.value, astroid.nodes.Name)):
        # plain assignment from a name inside a function: follow the assigned name
        inferred = self.infer_name(node.parent.value)
    elif isinstance(node.parent, astroid.nodes.AnnAssign) and isinstance(node.parent.annotation, astroid.nodes.Name):
        # annotated assignment: follow the annotation name
        inferred = self.infer_name(node.parent.annotation)
    else:
        inferred = None

    # only an AnsibleModule class is of interest; synthesize an instance node for it
    if isinstance(inferred, astroid.nodes.ClassDef) and inferred.name == 'AnsibleModule':
        return inferred.instantiate_class()

    return None
def register(self) -> None:
    """Register this plugin."""
    self.linter.register_checker(self)
def register(linter: pylint.lint.PyLinter) -> None:
    """Required method to auto-register this checker."""
    # module-level entry point invoked by pylint when the plugin is loaded
    AnsibleDeprecatedChecker(linter).register()
| {
"repo_id": "ansible/ansible",
"file_path": "test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated_calls.py",
"license": "GNU General Public License v3.0",
"lines": 443,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated_comment.py | """Ansible-specific pylint plugin for checking deprecation comments."""
# (c) 2018, Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import shlex
import tokenize
import pylint.checkers
import pylint.lint
import ansible.release
from ansible.module_utils.compat.version import LooseVersion
class AnsibleDeprecatedCommentChecker(pylint.checkers.BaseTokenChecker):
"""Checks for ``# deprecated:`` comments to ensure that the ``version`` has not passed or met the time for removal."""
name = 'deprecated-comment'
msgs = {
'E9601': (
"Deprecated core version (%r) found: %s",
"ansible-deprecated-version-comment",
None,
),
'E9602': (
"Deprecated comment contains invalid keys %r",
"ansible-deprecated-version-comment-invalid-key",
None,
),
'E9603': (
"Deprecated comment missing version",
"ansible-deprecated-version-comment-missing-version",
None,
),
'E9604': (
"Deprecated python version (%r) found: %s",
"ansible-deprecated-python-version-comment",
None,
),
'E9605': (
"Deprecated comment contains invalid version %r: %s",
"ansible-deprecated-version-comment-invalid-version",
None,
),
}
ANSIBLE_VERSION = LooseVersion('.'.join(ansible.release.__version__.split('.')[:3]))
"""The current ansible-core X.Y.Z version."""
def process_tokens(self, tokens: list[tokenize.TokenInfo]) -> None:
    """Inspect every comment token of the module for deprecation annotations."""
    for token in tokens:
        if token.type == tokenize.COMMENT:
            self._process_comment(token)
def _deprecated_string_to_dict(self, token: tokenize.TokenInfo, string: str) -> dict[str, str]:
valid_keys = {'description', 'core_version', 'python_version'}
data = dict.fromkeys(valid_keys)
for opt in shlex.split(string):
if '=' not in opt:
data[opt] = None
continue
key, _sep, value = opt.partition('=')
data[key] = value
if not any((data['core_version'], data['python_version'])):
self.add_message(
'ansible-deprecated-version-comment-missing-version',
line=token.start[0],
col_offset=token.start[1],
)
bad = set(data).difference(valid_keys)
if bad:
self.add_message(
'ansible-deprecated-version-comment-invalid-key',
line=token.start[0],
col_offset=token.start[1],
args=(','.join(bad),),
)
return data
def _process_python_version(self, token: tokenize.TokenInfo, data: dict[str, str]) -> None:
check_version = '.'.join(map(str, self.linter.config.py_version)) # minimum supported Python version provided by ansible-test
try:
if LooseVersion(check_version) > LooseVersion(data['python_version']):
self.add_message(
'ansible-deprecated-python-version-comment',
line=token.start[0],
col_offset=token.start[1],
args=(
data['python_version'],
data['description'] or 'description not provided',
),
)
except (ValueError, TypeError) as exc:
self.add_message(
'ansible-deprecated-version-comment-invalid-version',
line=token.start[0],
col_offset=token.start[1],
args=(data['python_version'], exc),
)
def _process_core_version(self, token: tokenize.TokenInfo, data: dict[str, str]) -> None:
try:
if self.ANSIBLE_VERSION >= LooseVersion(data['core_version']):
self.add_message(
'ansible-deprecated-version-comment',
line=token.start[0],
col_offset=token.start[1],
args=(
data['core_version'],
data['description'] or 'description not provided',
),
)
except (ValueError, TypeError) as exc:
self.add_message(
'ansible-deprecated-version-comment-invalid-version',
line=token.start[0],
col_offset=token.start[1],
args=(data['core_version'], exc),
)
def _process_comment(self, token: tokenize.TokenInfo) -> None:
if token.string.startswith('# deprecated:'):
data = self._deprecated_string_to_dict(token, token.string[13:].strip())
if data['core_version']:
self._process_core_version(token, data)
if data['python_version']:
self._process_python_version(token, data)
def register(linter: pylint.lint.PyLinter) -> None:
    """Pylint plugin entry point: register the deprecated-comment checker with the linter."""
    checker = AnsibleDeprecatedCommentChecker(linter)
    linter.register_checker(checker)
| {
"repo_id": "ansible/ansible",
"file_path": "test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated_comment.py",
"license": "GNU General Public License v3.0",
"lines": 119,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ansible/ansible:test/units/module_utils/_internal/test_deprecator.py | from __future__ import annotations
import importlib.abc
import importlib.util
import ansible
import pathlib
import pytest
from ansible.module_utils._internal import _messages
from ansible.module_utils._internal import _deprecator
class FakePathLoader(importlib.abc.SourceLoader):
    """Loader that fabricates module paths so callers of various kinds can be simulated without files on disk."""

    def get_filename(self, fullname):
        """Map a fully-qualified module name to a fake filesystem path (real ansible tree for ansible.* names)."""
        prefix = pathlib.Path(ansible.__file__).parent.parent if fullname.startswith('ansible.') else '/x/y'
        return f'{prefix}/{fullname.replace(".", "/")}'

    def get_data(self, path):
        """Every generated module gets the same stub source that reports its caller plugin info."""
        return b'''
from ansible.module_utils._internal import _deprecator
def do_stuff():
    return _deprecator.get_caller_plugin_info()
'''

    def exec_module(self, module):
        """Delegate to SourceLoader's standard compile-and-exec behavior."""
        return super().exec_module(module)
@pytest.mark.parametrize("python_fq_name,expected_plugin_info", (
    # legacy module callers
    ('ansible.legacy.blah', _messages.PluginInfo(resolved_name='ansible.legacy.blah', type=_messages.PluginType.MODULE)),
    # core callers
    ('ansible.modules.ping', _messages.PluginInfo(resolved_name='ansible.builtin.ping', type=_messages.PluginType.MODULE)),
    ('ansible.plugins.filter.core', _deprecator.ANSIBLE_CORE_DEPRECATOR),
    ('ansible.plugins.test.core', _deprecator.ANSIBLE_CORE_DEPRECATOR),
    ('ansible.nonplugin_something', _deprecator.ANSIBLE_CORE_DEPRECATOR),
    # collections plugin callers
    ('ansible_collections.foo.bar.plugins.modules.module_thing', _messages.PluginInfo(resolved_name='foo.bar.module_thing', type=_messages.PluginType.MODULE)),
    ('ansible_collections.foo.bar.plugins.filter.somefilter', _messages.PluginInfo(resolved_name='foo.bar', type=None)),
    ('ansible_collections.foo.bar.plugins.test.sometest', _messages.PluginInfo(resolved_name='foo.bar', type=None)),
    # indeterminate callers (e.g. collection module_utils- must specify since they might be calling on behalf of another
    ('ansible_collections.foo.bar.plugins.module_utils.something', _deprecator.INDETERMINATE_DEPRECATOR),
    # other callers
    ('something.else', None),
    ('ansible_collections.foo.bar.nonplugin_something', None),
))
def test_get_caller_plugin_info(python_fq_name: str, expected_plugin_info: _messages.PluginInfo):
    """Validates the expected `PluginInfo` values received from various types of core/non-core/collection callers."""
    # Synthesize a module whose __file__ corresponds to the requested FQ python name,
    # then ask the stub inside it what get_caller_plugin_info() observes.
    fake_loader = FakePathLoader()
    module_spec = importlib.util.spec_from_loader(name=python_fq_name, loader=fake_loader)
    generated_module = importlib.util.module_from_spec(module_spec)
    fake_loader.exec_module(generated_module)
    observed: _messages.PluginInfo = generated_module.do_stuff()
    assert observed == expected_plugin_info
| {
"repo_id": "ansible/ansible",
"file_path": "test/units/module_utils/_internal/test_deprecator.py",
"license": "GNU General Public License v3.0",
"lines": 52,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/common/sql/src/airflow/providers/common/sql/decorators/analytics.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from collections.abc import Callable, Collection, Mapping, Sequence
from typing import TYPE_CHECKING, Any, ClassVar
from airflow.providers.common.compat.sdk import (
AIRFLOW_V_3_0_PLUS,
DecoratedOperator,
TaskDecorator,
context_merge,
task_decorator_factory,
)
from airflow.providers.common.sql.operators.analytics import AnalyticsOperator
from airflow.utils.operator_helpers import determine_kwargs
if AIRFLOW_V_3_0_PLUS:
from airflow.sdk.definitions._internal.types import SET_DURING_EXECUTION
else:
from airflow.utils.types import NOTSET as SET_DURING_EXECUTION # type: ignore[attr-defined,no-redef]
if TYPE_CHECKING:
from airflow.providers.common.compat.sdk import Context
class _AnalyticsDecoratedOperator(DecoratedOperator, AnalyticsOperator):
    """
    Operator produced by ``@task.analytics``: runs a Python callable and executes the SQL it returns.

    :param python_callable: A reference to an object that is callable.
    :param op_kwargs: A dictionary of keyword arguments that will get unpacked (templated).
    :param op_args: A list of positional arguments that will get unpacked (templated).
    """

    # Combine the templated fields/renderers of both parent classes.
    template_fields: Sequence[str] = (
        *DecoratedOperator.template_fields,
        *AnalyticsOperator.template_fields,
    )
    template_fields_renderers: ClassVar[dict[str, str]] = {
        **DecoratedOperator.template_fields_renderers,
        **AnalyticsOperator.template_fields_renderers,
    }
    overwrite_rtif_after_execution: bool = True
    custom_operator_name: str = "@task.analytics"

    def __init__(
        self,
        python_callable: Callable,
        op_args: Collection[Any] | None = None,
        op_kwargs: Mapping[str, Any] | None = None,
        **kwargs,
    ) -> None:
        # The SQL is unknown until the callable runs, so queries are deferred to execution time.
        super().__init__(
            python_callable=python_callable,
            op_args=op_args,
            op_kwargs=op_kwargs,
            queries=SET_DURING_EXECUTION,
            **kwargs,
        )

    @property
    def xcom_push(self) -> bool:
        """Compatibility alias: BaseDecorator reads `xcom_push`, the operator stores `do_xcom_push`."""
        return self.do_xcom_push

    @xcom_push.setter
    def xcom_push(self, value: bool) -> None:
        """Compatibility alias: writes through to `do_xcom_push`."""
        self.do_xcom_push = value

    @staticmethod
    def _is_valid_queries_value(value: Any) -> bool:
        """True only for a non-empty string or a non-empty list of non-empty strings."""
        if isinstance(value, str):
            return bool(value.strip())
        if isinstance(value, list):
            return bool(value) and all(isinstance(item, str) and item.strip() for item in value)
        return False

    def execute(self, context: Context) -> Any:
        """
        Resolve the wrapped callable into SQL and delegate execution to AnalyticsOperator.

        :param context: Airflow context.
        :return: Any
        """
        context_merge(context, self.op_kwargs)
        call_kwargs = determine_kwargs(self.python_callable, self.op_args, context)

        # Invoke the user's callable to obtain the SQL to run.
        returned = self.python_callable(*self.op_args, **call_kwargs)

        if not self._is_valid_queries_value(returned):
            raise TypeError(
                "The returned value from the @task.analytics callable must be a non-empty string "
                "or a non-empty list of non-empty strings."
            )

        # AnalyticsOperator expects queries as a list of strings.
        if isinstance(returned, str):
            self.queries = [returned]
        else:
            self.queries = returned
        self.render_template_fields(context)
        return AnalyticsOperator.execute(self, context)
def analytics_task(python_callable=None, **kwargs) -> TaskDecorator:
    """
    Expose a Python function as an AnalyticsOperator via the ``@task.analytics`` decorator.

    :param python_callable: Function to decorate.

    :meta private:
    """
    return task_decorator_factory(
        decorated_operator_class=_AnalyticsDecoratedOperator,
        python_callable=python_callable,
        **kwargs,
    )
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/sql/src/airflow/providers/common/sql/decorators/analytics.py",
"license": "Apache License 2.0",
"lines": 111,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/common/sql/tests/unit/common/sql/decorators/test_analytics.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import MagicMock, patch
import pytest
from airflow.providers.common.sql.config import DataSourceConfig
from airflow.providers.common.sql.decorators.analytics import _AnalyticsDecoratedOperator
# Shared fixture: a single file-backed parquet datasource used by every test in this module.
# NOTE(review): conn_id is deliberately empty — presumably the operator treats "" as "no connection"; verify.
DATASOURCE_CONFIGS = [
    DataSourceConfig(conn_id="", table_name="users_data", uri="file:///path/to/", format="parquet")
]
class TestAnalyticsDecoratedOperator:
    """Unit tests for _AnalyticsDecoratedOperator (the @task.analytics operator)."""

    def test_custom_operator_name(self):
        # Pin the decorator's display name.
        assert _AnalyticsDecoratedOperator.custom_operator_name == "@task.analytics"

    # AnalyticsOperator.execute is patched (autospec) so no real SQL work is performed;
    # the decorated operator is expected to populate self.queries and then delegate to it.
    @patch(
        "airflow.providers.common.sql.operators.analytics.AnalyticsOperator.execute",
        autospec=True,
    )
    def test_execute_calls_callable_and_sets_queries_from_list(self, mock_execute):
        """The callable return value (list) becomes self.queries."""
        mock_execute.return_value = "mocked output"

        def get_user_queries():
            return ["SELECT * FROM users_data", "SELECT count(*) FROM users_data"]

        op = _AnalyticsDecoratedOperator(
            task_id="test",
            python_callable=get_user_queries,
            datasource_configs=DATASOURCE_CONFIGS,
        )
        result = op.execute(context={})
        # The patched parent execute's return value is passed straight through.
        assert result == "mocked output"
        assert op.queries == ["SELECT * FROM users_data", "SELECT count(*) FROM users_data"]
        mock_execute.assert_called_once()

    @patch(
        "airflow.providers.common.sql.operators.analytics.AnalyticsOperator.execute",
        autospec=True,
    )
    def test_execute_wraps_single_string_into_list(self, mock_execute):
        """A single string return value is wrapped into a list for self.queries."""
        mock_execute.return_value = "mocked output"

        def get_single_query():
            return "SELECT 1"

        op = _AnalyticsDecoratedOperator(
            task_id="test",
            python_callable=get_single_query,
            datasource_configs=DATASOURCE_CONFIGS,
        )
        op.execute(context={})
        assert op.queries == ["SELECT 1"]

    # Every invalid return shape must be rejected before any execution is attempted.
    @pytest.mark.parametrize(
        "return_value",
        [42, "", " ", None, [], [""], ["SELECT 1", ""], ["SELECT 1", " "], [42]],
        ids=[
            "non-string",
            "empty-string",
            "whitespace-string",
            "none",
            "empty-list",
            "list-with-empty-string",
            "list-with-one-valid-one-empty",
            "list-with-one-valid-one-whitespace",
            "list-with-non-string",
        ],
    )
    def test_execute_raises_on_invalid_return_value(self, return_value):
        """TypeError when the callable returns an invalid value."""
        op = _AnalyticsDecoratedOperator(
            task_id="test",
            python_callable=lambda: return_value,
            datasource_configs=DATASOURCE_CONFIGS,
        )
        with pytest.raises(TypeError, match="non-empty string"):
            op.execute(context={})

    @patch(
        "airflow.providers.common.sql.operators.analytics.AnalyticsOperator.execute",
        autospec=True,
    )
    def test_execute_merges_op_kwargs_into_callable(self, mock_execute):
        """op_kwargs are forwarded to the callable to build queries."""
        mock_execute.return_value = "mocked output"

        def get_queries_for_table(table_name):
            return [f"SELECT * FROM {table_name}", f"SELECT count(*) FROM {table_name}"]

        op = _AnalyticsDecoratedOperator(
            task_id="test",
            python_callable=get_queries_for_table,
            datasource_configs=DATASOURCE_CONFIGS,
            op_kwargs={"table_name": "orders"},
        )
        # A task_instance is supplied in the context — presumably required by templating; verify.
        op.execute(context={"task_instance": MagicMock()})
        assert op.queries == ["SELECT * FROM orders", "SELECT count(*) FROM orders"]
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/sql/tests/unit/common/sql/decorators/test_analytics.py",
"license": "Apache License 2.0",
"lines": 102,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/executors/workloads/base.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""ORM models and Pydantic schemas for BaseWorkload."""
from __future__ import annotations
import os
from abc import ABC
from typing import TYPE_CHECKING
from pydantic import BaseModel, ConfigDict
if TYPE_CHECKING:
from airflow.api_fastapi.auth.tokens import JWTGenerator
class BaseWorkload:
    """
    Mixin for ORM models that can be scheduled as workloads.

    Defines the routing interface the scheduler relies on; concrete workloads
    (TaskInstance, ExecutorCallback, etc.) must override both methods:

    - get_dag_id() -> str | None
    - get_executor_name() -> str | None
    """

    def get_dag_id(self) -> str | None:
        """Return the DAG ID used for scheduler routing; subclasses must override."""
        raise NotImplementedError(f"{type(self).__name__} must implement get_dag_id()")

    def get_executor_name(self) -> str | None:
        """Return the executor name used for scheduler routing; subclasses must override."""
        raise NotImplementedError(f"{type(self).__name__} must implement get_executor_name()")
class BundleInfo(BaseModel):
    """Schema for telling task which bundle to run with."""

    # Name of the DAG bundle the workload's files belong to.
    name: str
    # Bundle version to use; None presumably means "current/tracking version" — verify against bundle manager.
    version: str | None = None
class BaseWorkloadSchema(BaseModel):
    """Base Pydantic schema shared by all executor workload DTOs."""

    model_config = ConfigDict(populate_by_name=True)

    token: str
    """The identity token for this workload"""

    @staticmethod
    def generate_token(sub_id: str, generator: JWTGenerator | None = None) -> str:
        """Return a signed identity token for ``sub_id``, or an empty string when no generator is supplied."""
        if not generator:
            return ""
        return generator.generate({"sub": sub_id})
class BaseDagBundleWorkload(BaseWorkloadSchema, ABC):
    """Base class for Workloads that are associated with a DAG bundle (adds file/bundle/log routing fields)."""

    dag_rel_path: os.PathLike[str]  # Filepath where the DAG can be found (likely prefixed with `DAG_FOLDER/`)
    bundle_info: BundleInfo  # Which bundle (name/version) provides the files for this workload.
    log_path: str | None  # Rendered relative log filename template the task logs should be written to.
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/executors/workloads/base.py",
"license": "Apache License 2.0",
"lines": 62,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/src/airflow/executors/workloads/callback.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Callback workload schemas for executor communication."""
from __future__ import annotations
from enum import Enum
from importlib import import_module
from pathlib import Path
from typing import TYPE_CHECKING, Literal
from uuid import UUID
import structlog
from pydantic import BaseModel, Field, field_validator
from airflow.executors.workloads.base import BaseDagBundleWorkload, BundleInfo
if TYPE_CHECKING:
from airflow.api_fastapi.auth.tokens import JWTGenerator
from airflow.models import DagRun
from airflow.models.callback import Callback as CallbackModel, CallbackKey
# Module-level structured logger for this module.
log = structlog.get_logger(__name__)
class CallbackFetchMethod(str, Enum):
    """Methods used to fetch callback at runtime."""

    # The str mixin makes each member a plain string value, so it serializes cleanly.
    # For future use once Dag Processor callbacks (on_success_callback/on_failure_callback) get moved to executors
    DAG_ATTRIBUTE = "dag_attribute"
    # For deadline callbacks since they import callbacks through the import path
    IMPORT_PATH = "import_path"
class CallbackDTO(BaseModel):
    """Schema for Callback with minimal required fields needed for Executors and Task SDK."""

    id: str  # A uuid.UUID stored as a string
    fetch_method: CallbackFetchMethod
    data: dict

    @field_validator("id", mode="before")
    @classmethod
    def validate_id(cls, v):
        """Coerce a UUID instance to its string form before validation."""
        return str(v) if isinstance(v, UUID) else v

    @property
    def key(self) -> CallbackKey:
        """Return callback ID as key (CallbackKey = str)."""
        return self.id
class ExecuteCallback(BaseDagBundleWorkload):
    """Execute the given Callback."""

    callback: CallbackDTO
    type: Literal["ExecuteCallback"] = Field(init=False, default="ExecuteCallback")

    @classmethod
    def make(
        cls,
        callback: CallbackModel,
        dag_run: DagRun,
        dag_rel_path: Path | None = None,
        generator: JWTGenerator | None = None,
        bundle_info: BundleInfo | None = None,
    ) -> ExecuteCallback:
        """Create an ExecuteCallback workload from a Callback ORM model."""
        # Default the bundle to the one recorded on the dag run's DAG model.
        bundle = bundle_info or BundleInfo(
            name=dag_run.dag_model.bundle_name,
            version=dag_run.bundle_version,
        )
        log_path = f"executor_callbacks/{callback.id}"  # TODO: better log file template
        return cls(
            callback=CallbackDTO.model_validate(callback, from_attributes=True),
            dag_rel_path=dag_rel_path or Path(dag_run.dag_model.relative_fileloc or ""),
            token=cls.generate_token(str(callback.id), generator),
            log_path=log_path,
            bundle_info=bundle,
        )
def execute_callback_workload(
    callback: CallbackDTO,
    log,
) -> tuple[bool, str | None]:
    """
    Execute a callback function by importing and calling it, returning the success state.

    Supports two patterns:
    1. Functions - called directly with kwargs
    2. Classes that return callable instances (like BaseNotifier) - instantiated then called with context

    Example:
        # Function callback
        callback.data = {"path": "my_module.alert_func", "kwargs": {"msg": "Alert!", "context": {...}}}
        execute_callback_workload(callback, log)  # Calls alert_func(msg="Alert!", context={...})

        # Notifier callback
        callback.data = {"path": "airflow.providers.slack...SlackWebhookNotifier", "kwargs": {"text": "Alert!", "context": {...}}}
        execute_callback_workload(callback, log)  # SlackWebhookNotifier(text=..., context=...) then calls instance(context)

    :param callback: The Callback schema containing path and kwargs
    :param log: Logger instance for recording execution
    :return: Tuple of (success: bool, error_message: str | None)
    """
    callback_path = callback.data.get("path")
    callback_kwargs = callback.data.get("kwargs", {})

    if not callback_path:
        return False, "Callback path not found in data."

    try:
        # Import the callback callable.
        # Expected format: "module.path.to.function_or_class"
        # (a path without a dot raises ValueError, handled below like any other failure)
        module_path, function_name = callback_path.rsplit(".", 1)
        module = import_module(module_path)
        callback_callable = getattr(module, function_name)

        log.debug("Executing callback %s(%s)...", callback_path, callback_kwargs)
        # If the callback is a callable, call it. If it is a class, instantiate it.
        result = callback_callable(**callback_kwargs)

        if callable(result):
            # The target was a class (e.g. a notifier): the constructor returned a callable
            # instance, which is now invoked with the task context. The return value is
            # intentionally discarded — only success/failure is reported.
            context = callback_kwargs.get("context", {})
            log.debug("Calling result with context for %s", callback_path)
            result(context)

        log.info("Callback %s executed successfully.", callback_path)
        return True, None
    except Exception as e:
        error_msg = f"Callback execution failed: {type(e).__name__}: {e}"
        log.exception("Callback %s(%s) execution failed: %s", callback_path, callback_kwargs, error_msg)
        return False, error_msg
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/executors/workloads/callback.py",
"license": "Apache License 2.0",
"lines": 124,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/src/airflow/executors/workloads/task.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Task workload schemas for executor communication."""
from __future__ import annotations
import uuid
from pathlib import Path
from typing import TYPE_CHECKING, Literal
from pydantic import BaseModel, Field
from airflow.executors.workloads.base import BaseDagBundleWorkload, BundleInfo
if TYPE_CHECKING:
from airflow.api_fastapi.auth.tokens import JWTGenerator
from airflow.models.taskinstance import TaskInstance as TIModel
from airflow.models.taskinstancekey import TaskInstanceKey
class TaskInstanceDTO(BaseModel):
    """Schema for TaskInstance with minimal required fields needed for Executors and Task SDK."""

    id: uuid.UUID
    dag_version_id: uuid.UUID
    task_id: str
    dag_id: str
    run_id: str
    try_number: int
    map_index: int = -1  # -1 presumably marks an unmapped task instance — confirm against TI model
    pool_slots: int
    queue: str
    priority_weight: int
    # exclude=True keeps executor_config out of serialized output.
    executor_config: dict | None = Field(default=None, exclude=True)
    # Carrier dict copied from the dag run in ExecuteTask.make.
    parent_context_carrier: dict | None = None
    context_carrier: dict | None = None

    # TODO: Task-SDK: Can we replace TaskInstanceKey with just the uuid across the codebase?
    @property
    def key(self) -> TaskInstanceKey:
        """Build the classic TaskInstanceKey from this DTO's identifying fields."""
        # Local import; presumably avoids an import cycle at module load — verify.
        from airflow.models.taskinstancekey import TaskInstanceKey

        return TaskInstanceKey(
            dag_id=self.dag_id,
            task_id=self.task_id,
            run_id=self.run_id,
            try_number=self.try_number,
            map_index=self.map_index,
        )
class ExecuteTask(BaseDagBundleWorkload):
    """Execute the given Task."""

    ti: TaskInstanceDTO
    sentry_integration: str = ""  # Empty string means no Sentry integration configured.
    type: Literal["ExecuteTask"] = Field(init=False, default="ExecuteTask")

    @classmethod
    def make(
        cls,
        ti: TIModel,
        dag_rel_path: Path | None = None,
        generator: JWTGenerator | None = None,
        bundle_info: BundleInfo | None = None,
        sentry_integration: str = "",
    ) -> ExecuteTask:
        """Create an ExecuteTask workload from a TaskInstance ORM model."""
        from airflow.utils.helpers import log_filename_template_renderer

        # Convert the ORM TaskInstance into the minimal DTO sent to the executor.
        ser_ti = TaskInstanceDTO.model_validate(ti, from_attributes=True)
        # Copy the dag run's context carrier onto the DTO (presumably for tracing — confirm).
        ser_ti.parent_context_carrier = ti.dag_run.context_carrier
        if not bundle_info:
            # Default to the bundle recorded on the DAG model / dag run.
            bundle_info = BundleInfo(
                name=ti.dag_model.bundle_name,
                version=ti.dag_run.bundle_version,
            )
        # Render the relative log file path for this try up front.
        fname = log_filename_template_renderer()(ti=ti)
        return cls(
            ti=ser_ti,
            dag_rel_path=dag_rel_path or Path(ti.dag_model.relative_fileloc or ""),
            token=cls.generate_token(str(ti.id), generator),
            log_path=fname,
            bundle_info=bundle_info,
            sentry_integration=sentry_integration,
        )
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/executors/workloads/task.py",
"license": "Apache License 2.0",
"lines": 85,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/src/airflow/executors/workloads/trigger.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Trigger workload schemas for executor communication."""
from __future__ import annotations
from datetime import datetime
from typing import Literal
from pydantic import BaseModel, Field
# Using noqa because Ruff wants this in a TYPE_CHECKING block but Pydantic fails if it is.
from airflow.executors.workloads.task import TaskInstanceDTO # noqa: TCH001
class RunTrigger(BaseModel):
    """
    Execute an async "trigger" process that yields events.

    Consumers of this Workload must perform their own validation of the classpath input.
    """

    # Integer id of the trigger — presumably the Trigger table's primary key; verify.
    id: int
    ti: TaskInstanceDTO | None  # Could be none for asset-based triggers.
    classpath: str  # Dot-separated name of the module+fn to import and run this workload.
    # Serialized trigger kwargs, stored encrypted; decryption is the consumer's job (not shown here).
    encrypted_kwargs: str
    timeout_after: datetime | None = None  # None presumably means no timeout — confirm consumer behavior.
    type: Literal["RunTrigger"] = Field(init=False, default="RunTrigger")
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/executors/workloads/trigger.py",
"license": "Apache License 2.0",
"lines": 34,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/src/airflow/executors/workloads/types.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Type aliases for Workloads."""
from __future__ import annotations
from typing import TYPE_CHECKING, TypeAlias
from airflow.models.callback import ExecutorCallback
from airflow.models.taskinstance import TaskInstance
if TYPE_CHECKING:
from airflow.models.callback import CallbackKey
from airflow.models.taskinstancekey import TaskInstanceKey
from airflow.utils.state import CallbackState, TaskInstanceState
# Type aliases for workload keys and states (used by executor layer)
WorkloadKey: TypeAlias = TaskInstanceKey | CallbackKey
WorkloadState: TypeAlias = TaskInstanceState | CallbackState
# Type alias for executor workload results (used by executor implementations)
# Shape: (key identifying the workload, its resulting state, optional exception info).
WorkloadResultType: TypeAlias = tuple[WorkloadKey, WorkloadState, Exception | None]
# Type alias for scheduler workloads (ORM models that can be routed to executors)
# Must be outside TYPE_CHECKING for use in function signatures
SchedulerWorkload: TypeAlias = TaskInstance | ExecutorCallback
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/executors/workloads/types.py",
"license": "Apache License 2.0",
"lines": 33,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-ctl/tests/airflow_ctl/ctl/test_console_formatting.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import Mock
import pytest
from rich.box import ASCII_DOUBLE_HEAD
from airflowctl.ctl import console_formatting
class _StdoutWithIsAtty:
def __init__(self, is_tty: bool):
self._is_tty = is_tty
def isatty(self) -> bool:
return self._is_tty
class _StdoutWithoutIsAtty:
pass
def test_is_tty_returns_false_without_isatty(monkeypatch):
monkeypatch.setattr(console_formatting.sys, "stdout", _StdoutWithoutIsAtty())
assert console_formatting.is_tty() is False
@pytest.mark.parametrize("is_tty_value", [True, False])
def test_is_tty_returns_stdout_isatty_value(monkeypatch, is_tty_value):
monkeypatch.setattr(console_formatting.sys, "stdout", _StdoutWithIsAtty(is_tty_value))
assert console_formatting.is_tty() is is_tty_value
def test_is_data_sequence_returns_true_for_dict_sequence():
assert console_formatting.is_data_sequence([{"a": 1}, {"b": 2}])
def test_is_data_sequence_returns_false_for_mixed_sequence():
assert not console_formatting.is_data_sequence([{"a": 1}, 2])
def test_console_width_is_forced_when_not_tty(monkeypatch):
monkeypatch.setattr(console_formatting, "is_tty", lambda: False)
console = console_formatting.AirflowConsole(record=True)
assert console._width == 200
def test_console_width_is_not_forced_when_tty(monkeypatch):
monkeypatch.setattr(console_formatting, "is_tty", lambda: True)
console = console_formatting.AirflowConsole(width=123, record=True)
assert console._width == 123
@pytest.mark.parametrize(
("value", "output", "expected"),
[
((1, 2), "table", "1,2"),
([1, 2], "json", ["1", "2"]),
({"a": 1}, "json", {"a": "1"}),
({"a": 1}, "table", "{'a': 1}"),
(None, "json", None),
],
)
def test_normalize_data(value, output, expected):
console = console_formatting.AirflowConsole(record=True)
assert console._normalize_data(value, output) == expected
def test_print_as_raises_for_unknown_output():
console = console_formatting.AirflowConsole(record=True)
with pytest.raises(ValueError, match="Unknown formatter"):
console.print_as([{"a": 1}], output="xml")
def test_print_as_raises_for_non_dict_without_mapper():
console = console_formatting.AirflowConsole(record=True)
with pytest.raises(ValueError, match="mapper"):
console.print_as([1, 2], output="json")
def test_print_as_uses_mapper(monkeypatch):
console = console_formatting.AirflowConsole(record=True)
renderer_mock = Mock()
monkeypatch.setattr(console, "print_as_json", renderer_mock)
console.print_as([1, 2], output="json", mapper=lambda value: {"value": value})
renderer_mock.assert_called_once_with([{"value": "1"}, {"value": "2"}])
def test_print_as_normalizes_dict_data(monkeypatch):
console = console_formatting.AirflowConsole(record=True)
renderer_mock = Mock()
monkeypatch.setattr(console, "print_as_json", renderer_mock)
console.print_as([{"a": 1, "b": None}], output="json")
renderer_mock.assert_called_once_with([{"a": "1", "b": None}])
def test_print_as_table_prints_no_data_for_empty_input(monkeypatch):
console = console_formatting.AirflowConsole(record=True)
print_mock = Mock()
monkeypatch.setattr(console, "print", print_mock)
console.print_as_table([])
print_mock.assert_called_once_with("No data found")
def test_print_as_plain_table_prints_no_data_for_empty_input(monkeypatch):
console = console_formatting.AirflowConsole(record=True)
print_mock = Mock()
monkeypatch.setattr(console, "print", print_mock)
console.print_as_plain_table([])
print_mock.assert_called_once_with("No data found")
def test_print_as_plain_table_prints_headers_and_values():
console = console_formatting.AirflowConsole(record=True)
console.print_as_plain_table([{"name": "alpha", "state": "ok"}])
output = console.export_text()
assert "name" in output
assert "state" in output
assert "alpha" in output
assert "ok" in output
def test_simple_table_has_expected_defaults():
table = console_formatting.SimpleTable()
assert table.show_edge is False
assert table.pad_edge is False
assert table.box == ASCII_DOUBLE_HEAD
assert table.show_header is False
assert table.title_style == "bold green"
assert table.title_justify == "left"
assert table.caption == " "
def test_simple_table_add_column_smoke():
table = console_formatting.SimpleTable()
table.add_column("column_1")
assert table.columns[0].header == "column_1"
| {
"repo_id": "apache/airflow",
"file_path": "airflow-ctl/tests/airflow_ctl/ctl/test_console_formatting.py",
"license": "Apache License 2.0",
"lines": 113,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/common/ai/src/airflow/providers/common/ai/decorators/llm_sql.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
TaskFlow decorator for LLM SQL generation.
The user writes a function that **returns the prompt**. The decorator handles
the LLM call, schema introspection, and safety validation. The decorated task's
XCom output is the generated SQL string.
"""
from __future__ import annotations
from collections.abc import Callable, Collection, Mapping, Sequence
from typing import TYPE_CHECKING, Any, ClassVar
from airflow.providers.common.ai.operators.llm_sql import LLMSQLQueryOperator
from airflow.providers.common.compat.sdk import (
DecoratedOperator,
TaskDecorator,
context_merge,
task_decorator_factory,
)
from airflow.sdk.definitions._internal.types import SET_DURING_EXECUTION
from airflow.utils.operator_helpers import determine_kwargs
if TYPE_CHECKING:
from airflow.sdk import Context
class _LLMSQLDecoratedOperator(DecoratedOperator, LLMSQLQueryOperator):
"""
Wraps a callable that returns a prompt for LLM SQL generation.
The user function is called at execution time to produce the prompt string.
All other parameters (``llm_conn_id``, ``db_conn_id``, ``table_names``, etc.)
are passed through to :class:`~airflow.providers.common.ai.operators.llm_sql.LLMSQLQueryOperator`.
:param python_callable: A reference to a callable that returns the prompt string.
:param op_args: Positional arguments for the callable.
:param op_kwargs: Keyword arguments for the callable.
"""
template_fields: Sequence[str] = (
*DecoratedOperator.template_fields,
*LLMSQLQueryOperator.template_fields,
)
template_fields_renderers: ClassVar[dict[str, str]] = {
**DecoratedOperator.template_fields_renderers,
}
custom_operator_name: str = "@task.llm_sql"
def __init__(
self,
*,
python_callable: Callable,
op_args: Collection[Any] | None = None,
op_kwargs: Mapping[str, Any] | None = None,
**kwargs,
) -> None:
super().__init__(
python_callable=python_callable,
op_args=op_args,
op_kwargs=op_kwargs,
prompt=SET_DURING_EXECUTION,
**kwargs,
)
def execute(self, context: Context) -> Any:
context_merge(context, self.op_kwargs)
kwargs = determine_kwargs(self.python_callable, self.op_args, context)
self.prompt = self.python_callable(*self.op_args, **kwargs)
if not isinstance(self.prompt, str) or not self.prompt.strip():
raise TypeError("The returned value from the @task.llm_sql callable must be a non-empty string.")
self.render_template_fields(context)
# Call LLMSQLQueryOperator.execute directly, not super().execute(),
# because we need to skip DecoratedOperator.execute — the callable
# invocation is already handled above.
return LLMSQLQueryOperator.execute(self, context)
def llm_sql_task(
python_callable: Callable | None = None,
**kwargs,
) -> TaskDecorator:
"""
Wrap a function that returns a natural language prompt into an LLM SQL task.
The function body constructs the prompt (can use Airflow context, XCom, etc.).
The decorator handles: LLM connection, schema introspection, SQL generation,
and safety validation.
Usage::
@task.llm_sql(
llm_conn_id="openai_default",
db_conn_id="postgres_default",
table_names=["customers", "orders"],
)
def build_query(ds=None):
return f"Find top 10 customers by revenue in {ds}"
:param python_callable: Function to decorate.
"""
return task_decorator_factory(
python_callable=python_callable,
decorated_operator_class=_LLMSQLDecoratedOperator,
**kwargs,
)
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/ai/src/airflow/providers/common/ai/decorators/llm_sql.py",
"license": "Apache License 2.0",
"lines": 104,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/common/ai/src/airflow/providers/common/ai/example_dags/example_llm_sql.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAGs demonstrating LLMSQLQueryOperator usage."""
from __future__ import annotations
from airflow.providers.common.ai.operators.llm_sql import LLMSQLQueryOperator
from airflow.providers.common.compat.sdk import dag, task
from airflow.providers.common.sql.config import DataSourceConfig
# [START howto_operator_llm_sql_basic]
@dag
def example_llm_sql_basic():
LLMSQLQueryOperator(
task_id="generate_sql",
prompt="Find the top 10 customers by total revenue",
llm_conn_id="pydantic_ai_default",
schema_context=(
"Table: customers\n"
"Columns: id INT, name TEXT, email TEXT\n\n"
"Table: orders\n"
"Columns: id INT, customer_id INT, total DECIMAL, created_at TIMESTAMP"
),
)
# [END howto_operator_llm_sql_basic]
example_llm_sql_basic()
# [START howto_operator_llm_sql_schema]
@dag
def example_llm_sql_schema_introspection():
LLMSQLQueryOperator(
task_id="generate_sql",
prompt="Calculate monthly revenue for 2024",
llm_conn_id="pydantic_ai_default",
db_conn_id="postgres_default",
table_names=["orders", "customers"],
dialect="postgres",
)
# [END howto_operator_llm_sql_schema]
example_llm_sql_schema_introspection()
# [START howto_decorator_llm_sql]
@dag
def example_llm_sql_decorator():
@task.llm_sql(
llm_conn_id="pydantic_ai_default",
schema_context="Table: users\nColumns: id INT, name TEXT, signup_date DATE",
)
def build_churn_query(ds=None):
return f"Find users who signed up before {ds} and have no orders"
build_churn_query()
# [END howto_decorator_llm_sql]
example_llm_sql_decorator()
# [START howto_operator_llm_sql_expand]
@dag
def example_llm_sql_expand():
LLMSQLQueryOperator.partial(
task_id="generate_sql",
llm_conn_id="pydantic_ai_default",
schema_context=(
"Table: orders\nColumns: id INT, customer_id INT, total DECIMAL, created_at TIMESTAMP"
),
).expand(
prompt=[
"Total revenue by month",
"Top 10 customers by order count",
"Average order value by day of week",
]
)
# [END howto_operator_llm_sql_expand]
example_llm_sql_expand()
# [START howto_operator_llm_sql_with_object_storage]
@dag
def example_llm_sql_with_object_storage():
datasource_config = DataSourceConfig(
conn_id="aws_default",
table_name="sales_data",
uri="s3://my-bucket/data/sales/",
format="parquet",
)
LLMSQLQueryOperator(
task_id="generate_sql",
prompt="Find the top 5 products by total sales amount",
llm_conn_id="pydantic_ai_default",
datasource_config=datasource_config,
)
# [END howto_operator_llm_sql_with_object_storage]
example_llm_sql_with_object_storage()
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/ai/src/airflow/providers/common/ai/example_dags/example_llm_sql.py",
"license": "Apache License 2.0",
"lines": 97,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/common/ai/src/airflow/providers/common/ai/operators/llm_sql.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Operator for generating SQL queries from natural language using LLMs."""
from __future__ import annotations
from collections.abc import Sequence
from functools import cached_property
from typing import TYPE_CHECKING, Any
try:
from airflow.providers.common.ai.utils.sql_validation import (
DEFAULT_ALLOWED_TYPES,
validate_sql as _validate_sql,
)
from airflow.providers.common.sql.datafusion.engine import DataFusionEngine
except ImportError as e:
from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
raise AirflowOptionalProviderFeatureException(e)
from airflow.providers.common.ai.operators.llm import LLMOperator
from airflow.providers.common.compat.sdk import BaseHook
if TYPE_CHECKING:
from sqlglot import exp
from airflow.providers.common.sql.config import DataSourceConfig
from airflow.providers.common.sql.hooks.sql import DbApiHook
from airflow.sdk import Context
# SQLAlchemy dialect_name → sqlglot dialect mapping for names that differ.
_SQLALCHEMY_TO_SQLGLOT_DIALECT: dict[str, str] = {
"postgresql": "postgres",
"mssql": "tsql",
}
class LLMSQLQueryOperator(LLMOperator):
"""
Generate SQL queries from natural language using an LLM.
Inherits from :class:`~airflow.providers.common.ai.operators.llm.LLMOperator`
for LLM access and optionally uses a
:class:`~airflow.providers.common.sql.hooks.sql.DbApiHook`
for schema introspection. The operator generates SQL but does not execute it —
the generated SQL is returned as XCom and can be passed to
``SQLExecuteQueryOperator`` or used in downstream tasks.
When ``system_prompt`` is provided, it is appended to the built-in SQL safety
instructions — use it for domain-specific guidance (e.g. "prefer CTEs over
subqueries", "always use LEFT JOINs").
:param prompt: Natural language description of the desired query.
:param llm_conn_id: Connection ID for the LLM provider.
:param model_id: Model identifier (e.g. ``"openai:gpt-4o"``).
Overrides the model stored in the connection's extra field.
:param system_prompt: Additional instructions appended to the built-in SQL
safety prompt. Use for domain-specific guidance.
:param agent_params: Additional keyword arguments passed to the pydantic-ai
``Agent`` constructor (e.g. ``retries``, ``model_settings``).
:param db_conn_id: Connection ID for database schema introspection.
The connection must resolve to a ``DbApiHook``.
:param table_names: Tables to include in the LLM's schema context.
Used with ``db_conn_id`` for automatic introspection.
:param schema_context: Manual schema context string. When provided,
this is used instead of ``db_conn_id`` introspection.
:param validate_sql: Whether to validate generated SQL via AST parsing.
Default ``True`` (safe by default).
:param allowed_sql_types: SQL statement types to allow.
Default: ``(Select, Union, Intersect, Except)``.
:param dialect: SQL dialect for parsing (``postgres``, ``mysql``, etc.).
Auto-detected from the database hook if not set.
"""
template_fields: Sequence[str] = (
*LLMOperator.template_fields,
"db_conn_id",
"table_names",
"schema_context",
)
def __init__(
self,
*,
db_conn_id: str | None = None,
table_names: list[str] | None = None,
schema_context: str | None = None,
validate_sql: bool = True,
allowed_sql_types: tuple[type[exp.Expression], ...] = DEFAULT_ALLOWED_TYPES,
dialect: str | None = None,
datasource_config: DataSourceConfig | None = None,
**kwargs: Any,
) -> None:
kwargs.pop("output_type", None) # SQL operator always returns str
super().__init__(**kwargs)
self.db_conn_id = db_conn_id
self.table_names = table_names
self.schema_context = schema_context
self.validate_sql = validate_sql
self.allowed_sql_types = allowed_sql_types
self.dialect = dialect
self.datasource_config = datasource_config
@cached_property
def db_hook(self) -> DbApiHook | None:
"""Return DbApiHook for the configured database connection, or None."""
if not self.db_conn_id:
return None
from airflow.providers.common.sql.hooks.sql import DbApiHook
connection = BaseHook.get_connection(self.db_conn_id)
hook = connection.get_hook()
if not isinstance(hook, DbApiHook):
raise ValueError(
f"Connection {self.db_conn_id!r} does not provide a DbApiHook. Got {type(hook).__name__}."
)
return hook
def execute(self, context: Context) -> str:
schema_info = self._get_schema_context()
full_system_prompt = self._build_system_prompt(schema_info)
agent = self.llm_hook.create_agent(
output_type=str, instructions=full_system_prompt, **self.agent_params
)
result = agent.run_sync(self.prompt)
sql = self._strip_llm_output(result.output)
if self.validate_sql:
_validate_sql(sql, allowed_types=self.allowed_sql_types, dialect=self._resolved_dialect)
self.log.info("Generated SQL:\n%s", sql)
return sql
@staticmethod
def _strip_llm_output(raw: str) -> str:
"""Strip whitespace and markdown code fences from LLM output."""
text = raw.strip()
if text.startswith("```"):
lines = text.split("\n")
# Remove opening fence (```sql, ```, etc.) and closing fence
if len(lines) >= 2:
end = -1 if lines[-1].strip().startswith("```") else len(lines)
text = "\n".join(lines[1:end]).strip()
return text
def _get_schema_context(self) -> str:
"""Return schema context from manual override or database introspection."""
if self.schema_context:
return self.schema_context
if (self.db_hook and self.table_names) or self.datasource_config:
return self._introspect_schemas()
return ""
def _introspect_schemas(self) -> str:
"""Build schema context by introspecting tables via the database hook."""
parts: list[str] = []
for table in self.table_names or []:
columns = self.db_hook.get_table_schema(table) # type: ignore[union-attr]
if not columns:
self.log.warning("Table %r returned no columns — it may not exist.", table)
continue
col_info = ", ".join(f"{c['name']} {c['type']}" for c in columns)
parts.append(f"Table: {table}\nColumns: {col_info}")
if not parts and self.table_names:
raise ValueError(
f"None of the requested tables ({self.table_names}) returned schema information. "
"Check that the table names are correct and the database connection has access."
)
if self.datasource_config:
object_storage_schema = self._introspect_object_storage_schema()
parts.append(f"Table: {self.datasource_config.table_name}\nColumns: {object_storage_schema}")
return "\n\n".join(parts)
def _introspect_object_storage_schema(self):
"""Use DataFusion Engine to get the schema of object stores."""
engine = DataFusionEngine()
engine.register_datasource(self.datasource_config)
return engine.get_schema(self.datasource_config.table_name)
def _build_system_prompt(self, schema_info: str) -> str:
"""Construct the system prompt for the LLM."""
dialect_label = self._resolved_dialect or "SQL"
prompt = (
f"You are a {dialect_label} expert. "
"Generate a single SQL query based on the user's request.\n"
"Return ONLY the SQL query, no explanation or markdown.\n"
)
if schema_info:
prompt += f"\nAvailable schema:\n{schema_info}\n"
prompt += (
"\nRules:\n"
"- Generate only SELECT queries (including CTEs, JOINs, subqueries, UNION)\n"
"- Never generate data modification statements "
"(INSERT, UPDATE, DELETE, DROP, etc.)\n"
"- Use proper syntax for the specified dialect\n"
)
if self.system_prompt:
prompt += f"\nAdditional instructions:\n{self.system_prompt}\n"
return prompt
@cached_property
def _resolved_dialect(self) -> str | None:
"""
Resolve the SQL dialect from explicit parameter or database hook.
Normalizes SQLAlchemy dialect names to sqlglot equivalents
(e.g. ``postgresql`` → ``postgres``).
"""
raw = self.dialect
if not raw and self.db_hook and hasattr(self.db_hook, "dialect_name"):
raw = self.db_hook.dialect_name
if raw:
return _SQLALCHEMY_TO_SQLGLOT_DIALECT.get(raw, raw)
return None
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/ai/src/airflow/providers/common/ai/operators/llm_sql.py",
"license": "Apache License 2.0",
"lines": 202,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/common/ai/src/airflow/providers/common/ai/utils/sql_validation.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
SQL safety validation for LLM-generated queries.
Uses an allowlist approach: only explicitly permitted statement types pass.
This is safer than a denylist because new/unexpected statement types
(INSERT, UPDATE, MERGE, TRUNCATE, COPY, etc.) are blocked by default.
"""
from __future__ import annotations
import sqlglot
from sqlglot import exp
from sqlglot.errors import ErrorLevel
# Allowlist: only these top-level statement types pass validation by default.
# - Select: plain queries and CTE-wrapped queries (WITH ... AS ... SELECT is parsed
# as Select with a `with` clause property — still a Select node at the top level)
# - Union/Intersect/Except: set operations on SELECT results
DEFAULT_ALLOWED_TYPES: tuple[type[exp.Expression], ...] = (
exp.Select,
exp.Union,
exp.Intersect,
exp.Except,
)
class SQLSafetyError(Exception):
"""Generated SQL failed safety validation."""
def validate_sql(
sql: str,
*,
allowed_types: tuple[type[exp.Expression], ...] | None = None,
dialect: str | None = None,
allow_multiple_statements: bool = False,
) -> list[exp.Expression]:
"""
Parse SQL and verify all statements are in the allowed types list.
By default, only a single SELECT-family statement is allowed. Multi-statement
SQL (separated by semicolons) is rejected unless ``allow_multiple_statements=True``,
because multi-statement inputs can hide dangerous operations after a benign SELECT.
Returns parsed statements on success, raises :class:`SQLSafetyError` on violation.
:param sql: SQL string to validate.
:param allowed_types: Tuple of sqlglot expression types to permit.
Defaults to ``(Select, Union, Intersect, Except)``.
:param dialect: SQL dialect for parsing (``postgres``, ``mysql``, etc.).
:param allow_multiple_statements: Whether to allow multiple semicolon-separated
statements. Default ``False``.
:return: List of parsed sqlglot Expression objects.
:raises SQLSafetyError: If the SQL is empty, contains disallowed statement types,
or has multiple statements when not permitted.
"""
if not sql or not sql.strip():
raise SQLSafetyError("Empty SQL input.")
types = allowed_types or DEFAULT_ALLOWED_TYPES
try:
statements = sqlglot.parse(sql, dialect=dialect, error_level=ErrorLevel.RAISE)
except sqlglot.errors.ParseError as e:
raise SQLSafetyError(f"SQL parse error: {e}") from e
# sqlglot.parse can return [None] for empty input
parsed: list[exp.Expression] = [s for s in statements if s is not None]
if not parsed:
raise SQLSafetyError("Empty SQL input.")
if not allow_multiple_statements and len(parsed) > 1:
raise SQLSafetyError(
f"Multiple statements detected ({len(parsed)}). Only single statements are allowed by default."
)
for stmt in parsed:
if not isinstance(stmt, types):
allowed_names = ", ".join(t.__name__ for t in types)
raise SQLSafetyError(
f"Statement type '{type(stmt).__name__}' is not allowed. Allowed types: {allowed_names}"
)
return parsed
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/ai/src/airflow/providers/common/ai/utils/sql_validation.py",
"license": "Apache License 2.0",
"lines": 83,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/common/ai/tests/unit/common/ai/decorators/test_llm_sql.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import MagicMock, patch
import pytest
from airflow.providers.common.ai.decorators.llm_sql import _LLMSQLDecoratedOperator
class TestLLMSQLDecoratedOperator:
def test_custom_operator_name(self):
assert _LLMSQLDecoratedOperator.custom_operator_name == "@task.llm_sql"
@patch("airflow.providers.common.ai.operators.llm.PydanticAIHook", autospec=True)
def test_execute_calls_callable_and_uses_result_as_prompt(self, mock_hook_cls):
"""The user's callable return value becomes the LLM prompt."""
mock_agent = MagicMock(spec=["run_sync"])
mock_result = MagicMock(spec=["output"])
mock_result.output = "SELECT 1"
mock_agent.run_sync.return_value = mock_result
mock_hook_cls.return_value.create_agent.return_value = mock_agent
def my_prompt_fn():
return "Get all users"
op = _LLMSQLDecoratedOperator(task_id="test", python_callable=my_prompt_fn, llm_conn_id="my_llm")
result = op.execute(context={})
assert result == "SELECT 1"
assert op.prompt == "Get all users"
mock_agent.run_sync.assert_called_once_with("Get all users")
@pytest.mark.parametrize(
"return_value",
[42, "", " ", None],
ids=["non-string", "empty", "whitespace", "none"],
)
def test_execute_raises_on_invalid_prompt(self, return_value):
"""TypeError when the callable returns a non-string or blank string."""
op = _LLMSQLDecoratedOperator(
task_id="test",
python_callable=lambda: return_value,
llm_conn_id="my_llm",
)
with pytest.raises(TypeError, match="non-empty string"):
op.execute(context={})
@patch("airflow.providers.common.ai.operators.llm.PydanticAIHook", autospec=True)
def test_execute_merges_op_kwargs_into_callable(self, mock_hook_cls):
"""op_kwargs are resolved by the callable to build the prompt."""
mock_agent = MagicMock(spec=["run_sync"])
mock_result = MagicMock(spec=["output"])
mock_result.output = "SELECT 1"
mock_agent.run_sync.return_value = mock_result
mock_hook_cls.return_value.create_agent.return_value = mock_agent
def my_prompt_fn(table_name):
return f"Get all rows from {table_name}"
op = _LLMSQLDecoratedOperator(
task_id="test",
python_callable=my_prompt_fn,
llm_conn_id="my_llm",
op_kwargs={"table_name": "users"},
)
op.execute(context={"task_instance": MagicMock()})
assert op.prompt == "Get all rows from users"
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/ai/tests/unit/common/ai/decorators/test_llm_sql.py",
"license": "Apache License 2.0",
"lines": 70,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/common/ai/tests/unit/common/ai/operators/test_llm_sql.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import MagicMock, PropertyMock, patch
import pytest
from airflow.providers.common.ai.operators.llm_sql import LLMSQLQueryOperator
from airflow.providers.common.ai.utils.sql_validation import SQLSafetyError
from airflow.providers.common.sql.config import DataSourceConfig
def _make_mock_agent(output: str):
"""Create a mock agent that returns the given output string."""
mock_result = MagicMock(spec=["output"])
mock_result.output = output
mock_agent = MagicMock(spec=["run_sync"])
mock_agent.run_sync.return_value = mock_result
return mock_agent
class TestStripLLMOutput:
@pytest.mark.parametrize(
("raw", "expected"),
(
pytest.param("SELECT 1", "SELECT 1", id="plain_sql"),
pytest.param(" SELECT 1 ", "SELECT 1", id="leading_trailing_whitespace"),
pytest.param("```sql\nSELECT 1\n```", "SELECT 1", id="sql_code_fence"),
pytest.param("```\nSELECT 1\n```", "SELECT 1", id="bare_code_fence"),
pytest.param("```SQL\nSELECT 1\n```", "SELECT 1", id="uppercase_language_tag"),
pytest.param(
"```sql\nSELECT id\nFROM users\nWHERE active\n```",
"SELECT id\nFROM users\nWHERE active",
id="multiline_query",
),
pytest.param(
"```sql\nSELECT 1\n",
"SELECT 1",
id="missing_closing_fence",
),
),
)
def test_strip_llm_output(self, raw, expected):
assert LLMSQLQueryOperator._strip_llm_output(raw) == expected
class TestLLMSQLQueryOperator:
    """Core behavior of ``LLMSQLQueryOperator``: inheritance, template fields, execution, and SQL safety validation."""

    def test_inherits_from_llm_operator(self):
        # The SQL operator builds on the general-purpose LLMOperator.
        from airflow.providers.common.ai.operators.llm import LLMOperator

        assert issubclass(LLMSQLQueryOperator, LLMOperator)

    def test_template_fields_include_parent_and_sql_specific(self):
        # Parent (LLMOperator) fields plus the SQL-specific fields must all be templated.
        expected = {
            "prompt",
            "llm_conn_id",
            "model_id",
            "system_prompt",
            "agent_params",
            "db_conn_id",
            "table_names",
            "schema_context",
        }
        assert set(LLMSQLQueryOperator.template_fields) == expected

    @patch("airflow.providers.common.ai.operators.llm.PydanticAIHook", autospec=True)
    def test_execute_with_schema_context(self, mock_hook_cls):
        """Operator uses schema_context and returns generated SQL."""
        mock_agent = _make_mock_agent("SELECT id, name FROM users WHERE active = true")
        mock_hook_cls.return_value.create_agent.return_value = mock_agent
        op = LLMSQLQueryOperator(
            task_id="test",
            prompt="Get active users",
            llm_conn_id="my_llm",
            schema_context="Table: users\nColumns: id INT, name TEXT, active BOOLEAN",
        )
        result = op.execute(context=MagicMock())
        assert result == "SELECT id, name FROM users WHERE active = true"
        # The user prompt is forwarded to the agent unchanged.
        mock_agent.run_sync.assert_called_once_with("Get active users")

    @patch("airflow.providers.common.ai.operators.llm.PydanticAIHook", autospec=True)
    def test_execute_validation_blocks_unsafe_sql(self, mock_hook_cls):
        """Validation catches unsafe SQL generated by the LLM."""
        mock_hook_cls.return_value.create_agent.return_value = _make_mock_agent("DROP TABLE users")
        op = LLMSQLQueryOperator(task_id="test", prompt="Delete everything", llm_conn_id="my_llm")
        with pytest.raises(SQLSafetyError, match="not allowed"):
            op.execute(context=MagicMock())

    @patch("airflow.providers.common.ai.operators.llm.PydanticAIHook", autospec=True)
    def test_execute_validation_disabled(self, mock_hook_cls):
        """When validate_sql=False, unsafe SQL is returned without checks."""
        mock_hook_cls.return_value.create_agent.return_value = _make_mock_agent("DROP TABLE users")
        op = LLMSQLQueryOperator(task_id="test", prompt="Drop it", llm_conn_id="my_llm", validate_sql=False)
        result = op.execute(context=MagicMock())
        assert result == "DROP TABLE users"

    @patch("airflow.providers.common.ai.operators.llm.PydanticAIHook", autospec=True)
    def test_execute_passes_agent_params(self, mock_hook_cls):
        """agent_params inherited from LLMOperator are unpacked into create_agent."""
        mock_hook_cls.return_value.create_agent.return_value = _make_mock_agent("SELECT 1")
        op = LLMSQLQueryOperator(
            task_id="test",
            prompt="test",
            llm_conn_id="my_llm",
            agent_params={"retries": 3, "model_settings": {"temperature": 0}},
        )
        op.execute(context=MagicMock())
        # call_args[1] holds the keyword arguments of the create_agent call.
        create_agent_call = mock_hook_cls.return_value.create_agent.call_args
        assert create_agent_call[1]["retries"] == 3
        assert create_agent_call[1]["model_settings"] == {"temperature": 0}

    @patch("airflow.providers.common.ai.operators.llm.PydanticAIHook", autospec=True)
    def test_system_prompt_appended_to_sql_instructions(self, mock_hook_cls):
        """User-provided system_prompt is appended to built-in SQL safety prompt."""
        mock_hook_cls.return_value.create_agent.return_value = _make_mock_agent("SELECT 1")
        op = LLMSQLQueryOperator(
            task_id="test",
            prompt="test",
            llm_conn_id="my_llm",
            system_prompt="Always use LEFT JOINs.",
        )
        op.execute(context=MagicMock())
        instructions = mock_hook_cls.return_value.create_agent.call_args[1]["instructions"]
        assert "Always use LEFT JOINs." in instructions
        # Built-in SQL safety prompt should still be present
        assert "Generate only SELECT queries" in instructions
        assert "Never generate data modification" in instructions
class TestLLMSQLQueryOperatorSchemaIntrospection:
    """Schema discovery paths: DbApiHook table introspection, explicit schema_context, and DataFusion-backed object storage datasources."""

    @patch("airflow.providers.common.ai.operators.llm.PydanticAIHook", autospec=True)
    def test_introspect_schemas_via_db_hook(self, mock_hook_cls):
        """db_conn_id + table_names triggers schema introspection."""
        mock_agent = _make_mock_agent("SELECT id FROM users")
        mock_hook_cls.return_value.create_agent.return_value = mock_agent
        mock_db_hook = MagicMock(spec=["get_table_schema", "dialect_name"])
        mock_db_hook.get_table_schema.return_value = [
            {"name": "id", "type": "INTEGER"},
            {"name": "name", "type": "VARCHAR"},
        ]
        mock_db_hook.dialect_name = "postgresql"
        op = LLMSQLQueryOperator(
            task_id="test",
            prompt="Get user IDs",
            llm_conn_id="my_llm",
            db_conn_id="pg_default",
            table_names=["users"],
        )
        # db_hook is a property, so patch it on the type with a PropertyMock.
        with patch.object(type(op), "db_hook", new_callable=PropertyMock, return_value=mock_db_hook):
            result = op.execute(context=MagicMock())
        assert result == "SELECT id FROM users"
        mock_db_hook.get_table_schema.assert_called_once_with("users")
        # Verify the system prompt contains the schema info
        instructions = mock_hook_cls.return_value.create_agent.call_args[1]["instructions"]
        assert "users" in instructions
        assert "id INTEGER" in instructions

    def test_introspect_raises_when_no_tables_found(self):
        """Raise ValueError when all requested tables return empty columns."""
        mock_db_hook = MagicMock(spec=["get_table_schema", "dialect_name"])
        mock_db_hook.get_table_schema.return_value = []
        op = LLMSQLQueryOperator(
            task_id="test",
            prompt="test",
            llm_conn_id="my_llm",
            db_conn_id="pg_default",
            table_names=["nonexistent_table"],
        )
        with patch.object(type(op), "db_hook", new_callable=PropertyMock, return_value=mock_db_hook):
            with pytest.raises(ValueError, match="None of the requested tables"):
                op._introspect_schemas()

    def test_schema_context_overrides_introspection(self):
        """schema_context takes priority over db_conn_id introspection."""
        op = LLMSQLQueryOperator(
            task_id="test",
            prompt="test",
            llm_conn_id="my_llm",
            db_conn_id="pg_default",
            table_names=["users"],
            schema_context="My custom schema info",
        )
        # No db_hook patching needed: introspection must not even be attempted.
        assert op._get_schema_context() == "My custom schema info"

    @patch(
        "airflow.providers.common.ai.operators.llm_sql.DataFusionEngine",
        autospec=True,
    )
    def test_introspect_object_storage_schema(self, mock_engine_cls):
        """_introspect_object_storage_schema registers datasource and returns schema."""
        mock_engine = mock_engine_cls.return_value
        schema_text = "cust_id: int64\nname: string\namount: float64"
        mock_engine.get_schema.return_value = schema_text
        ds_config = DataSourceConfig(
            conn_id="aws_default",
            table_name="sales",
            uri="s3://bucket/data/",
            format="parquet",
        )
        op = LLMSQLQueryOperator(
            task_id="test",
            prompt="test",
            llm_conn_id="my_llm",
            datasource_config=ds_config,
        )
        result = op._introspect_object_storage_schema()
        # The datasource must be registered before its schema is queried.
        mock_engine.register_datasource.assert_called_once_with(ds_config)
        mock_engine.get_schema.assert_called_once_with("sales")
        assert result == schema_text

    @patch(
        "airflow.providers.common.ai.operators.llm_sql.DataFusionEngine",
        autospec=True,
    )
    def test_introspect_schemas_with_db_and_datasource_config(self, mock_engine_cls):
        """_introspect_schemas includes both db table and object storage schema."""
        mock_engine = mock_engine_cls.return_value
        object_schema = "col_a: int64\ncol_b: string"
        mock_engine.get_schema.return_value = object_schema
        ds_config = DataSourceConfig(
            conn_id="aws_default",
            table_name="remote_table",
            uri="s3://bucket/path/",
            format="csv",
        )
        mock_db_hook = MagicMock(spec=["get_table_schema", "dialect_name"])
        mock_db_hook.get_table_schema.return_value = [
            {"name": "id", "type": "INTEGER"},
        ]
        op = LLMSQLQueryOperator(
            task_id="test",
            prompt="test",
            llm_conn_id="my_llm",
            db_conn_id="pg_default",
            table_names=["local_table"],
            datasource_config=ds_config,
        )
        with patch.object(type(op), "db_hook", new_callable=PropertyMock, return_value=mock_db_hook):
            result = op._introspect_schemas()
        # Both sources must be merged into a single schema description.
        assert "Table: local_table" in result
        assert "id INTEGER" in result
        assert "Table: remote_table" in result
        assert object_schema in result

    @patch(
        "airflow.providers.common.ai.operators.llm_sql.DataFusionEngine",
        autospec=True,
    )
    def test_introspect_schemas_datasource_config_without_db_tables(self, mock_engine_cls):
        """_introspect_schemas works when only datasource_config is provided (no db tables)."""
        mock_engine = mock_engine_cls.return_value
        mock_engine.get_schema.return_value = "ts: TIMESTAMP\nvalue: DOUBLE"
        ds_config = DataSourceConfig(
            conn_id="aws_default",
            table_name="s3_data",
            uri="s3://bucket/metrics/",
            format="parquet",
        )
        op = LLMSQLQueryOperator(
            task_id="test",
            prompt="test",
            llm_conn_id="my_llm",
            db_conn_id="pg_default",
            table_names=[],
            datasource_config=ds_config,
        )
        mock_db_hook = MagicMock(spec=["get_table_schema", "dialect_name"])
        mock_db_hook.get_table_schema.return_value = []
        with patch.object(type(op), "db_hook", new_callable=PropertyMock, return_value=mock_db_hook):
            result = op._introspect_schemas()
        assert "Table: s3_data" in result
        assert "ts: TIMESTAMP\nvalue: DOUBLE" in result

    @patch(
        "airflow.providers.common.ai.operators.llm_sql.DataFusionEngine",
        autospec=True,
    )
    def test_introspect_schemas_raises_when_no_tables_and_no_datasource(self, mock_engine_cls):
        """ValueError is raised when no db tables return schema and no datasource_config is set."""
        mock_db_hook = MagicMock(spec=["get_table_schema", "dialect_name"])
        mock_db_hook.get_table_schema.return_value = []
        op = LLMSQLQueryOperator(
            task_id="test",
            prompt="test",
            llm_conn_id="my_llm",
            db_conn_id="pg_default",
            table_names=["missing_table"],
        )
        with patch.object(type(op), "db_hook", new_callable=PropertyMock, return_value=mock_db_hook):
            with pytest.raises(ValueError, match="None of the requested tables"):
                op._introspect_schemas()

    @patch("airflow.providers.common.ai.operators.llm.PydanticAIHook", autospec=True)
    @patch(
        "airflow.providers.common.ai.operators.llm_sql.DataFusionEngine",
        autospec=True,
    )
    def test_execute_with_datasource_config_and_db_tables(self, mock_engine_cls, mock_hook_cls):
        """Full execute flow with both db tables and object storage datasource."""
        # NOTE: mock args are bottom-up — the innermost @patch (DataFusionEngine) comes first.
        mock_engine = mock_engine_cls.return_value
        mock_engine.get_schema.return_value = "event: TEXT\nts: TIMESTAMP"
        mock_agent = _make_mock_agent("SELECT u.id, e.event FROM users u JOIN events e ON u.id = e.user_id")
        mock_hook_cls.return_value.create_agent.return_value = mock_agent
        ds_config = DataSourceConfig(
            conn_id="aws_default",
            table_name="events",
            uri="s3://bucket/events/",
            format="parquet",
        )
        mock_db_hook = MagicMock(spec=["get_table_schema", "dialect_name"])
        mock_db_hook.get_table_schema.return_value = [
            {"name": "id", "type": "INTEGER"},
            {"name": "name", "type": "VARCHAR"},
        ]
        mock_db_hook.dialect_name = "postgresql"
        op = LLMSQLQueryOperator(
            task_id="test",
            prompt="Join users with events",
            llm_conn_id="my_llm",
            db_conn_id="pg_default",
            table_names=["users"],
            datasource_config=ds_config,
        )
        with patch.object(type(op), "db_hook", new_callable=PropertyMock, return_value=mock_db_hook):
            result = op.execute(context=MagicMock())
        assert "SELECT" in result
        # Both table schemas must have reached the agent's instructions.
        instructions = mock_hook_cls.return_value.create_agent.call_args[1]["instructions"]
        assert "users" in instructions
        assert "events" in instructions
        assert "event: TEXT\nts: TIMESTAMP" in instructions
class TestLLMSQLQueryOperatorDialect:
    """How the operator resolves the SQL dialect used for validation."""

    def test_resolved_dialect_from_param(self):
        # An explicit dialect parameter wins outright.
        operator = LLMSQLQueryOperator(task_id="test", prompt="test", llm_conn_id="my_llm", dialect="mysql")
        assert operator._resolved_dialect == "mysql"

    def test_resolved_dialect_from_db_hook_normalized(self):
        """SQLAlchemy's 'postgresql' is normalized to sqlglot's 'postgres'."""
        db_hook_stub = MagicMock(spec=["dialect_name"])
        db_hook_stub.dialect_name = "postgresql"
        operator = LLMSQLQueryOperator(task_id="test", prompt="test", llm_conn_id="my_llm", db_conn_id="pg_default")
        patcher = patch.object(type(operator), "db_hook", new_callable=PropertyMock, return_value=db_hook_stub)
        with patcher:
            assert operator._resolved_dialect == "postgres"

    def test_resolved_dialect_none_when_nothing_set(self):
        # Neither a dialect param nor a db connection: nothing to resolve.
        operator = LLMSQLQueryOperator(task_id="test", prompt="test", llm_conn_id="my_llm")
        assert operator._resolved_dialect is None
class TestLLMSQLQueryOperatorDbHook:
    """Resolution of the optional database hook from ``db_conn_id``."""

    @patch("airflow.providers.common.ai.operators.llm_sql.BaseHook", autospec=True)
    def test_db_hook_returns_none_without_conn_id(self, mock_base_hook):
        # Without db_conn_id there is no hook to build and no connection lookup.
        op = LLMSQLQueryOperator(task_id="test", prompt="test", llm_conn_id="my_llm")
        assert op.db_hook is None
        mock_base_hook.get_connection.assert_not_called()

    @patch("airflow.providers.common.ai.operators.llm_sql.BaseHook", autospec=True)
    def test_db_hook_raises_for_non_dbapi_hook(self, mock_base_hook):
        # A connection whose hook is not a DbApiHook cannot be used for introspection.
        mock_conn = MagicMock(spec=["get_hook"])
        mock_conn.get_hook.return_value = MagicMock()  # Not a DbApiHook
        mock_base_hook.get_connection.return_value = mock_conn
        op = LLMSQLQueryOperator(task_id="test", prompt="test", llm_conn_id="my_llm", db_conn_id="bad_conn")
        with pytest.raises(ValueError, match="does not provide a DbApiHook"):
            _ = op.db_hook
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/ai/tests/unit/common/ai/operators/test_llm_sql.py",
"license": "Apache License 2.0",
"lines": 348,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/common/ai/tests/unit/common/ai/utils/test_sql_validation.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from sqlglot import exp
from airflow.providers.common.ai.utils.sql_validation import SQLSafetyError, validate_sql
class TestValidateSQLAllowed:
    """Read-only statements that pass validation under the default settings."""

    def test_simple_select(self):
        parsed = validate_sql("SELECT 1")
        assert len(parsed) == 1
        assert isinstance(parsed[0], exp.Select)

    def test_select_from_table(self):
        parsed = validate_sql("SELECT id, name FROM users WHERE active = true")
        assert len(parsed) == 1
        assert isinstance(parsed[0], exp.Select)

    def test_select_with_join(self):
        parsed = validate_sql("SELECT u.name, o.total FROM users u JOIN orders o ON u.id = o.user_id")
        assert len(parsed) == 1

    def test_select_with_cte(self):
        parsed = validate_sql("WITH top_users AS (SELECT id FROM users LIMIT 10) SELECT * FROM top_users")
        assert len(parsed) == 1
        assert isinstance(parsed[0], exp.Select)

    def test_select_with_subquery(self):
        parsed = validate_sql("SELECT * FROM users WHERE id IN (SELECT user_id FROM orders)")
        assert len(parsed) == 1

    def test_union(self):
        parsed = validate_sql("SELECT 1 UNION SELECT 2")
        assert len(parsed) == 1
        assert isinstance(parsed[0], exp.Union)

    def test_union_all(self):
        parsed = validate_sql("SELECT 1 UNION ALL SELECT 2")
        assert len(parsed) == 1
        assert isinstance(parsed[0], exp.Union)

    def test_intersect(self):
        parsed = validate_sql("SELECT 1 INTERSECT SELECT 1")
        assert len(parsed) == 1
        assert isinstance(parsed[0], exp.Intersect)

    def test_except(self):
        parsed = validate_sql("SELECT 1 EXCEPT SELECT 2")
        assert len(parsed) == 1
        assert isinstance(parsed[0], exp.Except)
class TestValidateSQLBlocked:
    """Statements that should be blocked with default settings."""

    # Each `match` argument is a regex checked against the SQLSafetyError message.

    def test_insert_blocked(self):
        with pytest.raises(SQLSafetyError, match="Insert.*not allowed"):
            validate_sql("INSERT INTO users (name) VALUES ('test')")

    def test_update_blocked(self):
        with pytest.raises(SQLSafetyError, match="Update.*not allowed"):
            validate_sql("UPDATE users SET name = 'test' WHERE id = 1")

    def test_delete_blocked(self):
        with pytest.raises(SQLSafetyError, match="Delete.*not allowed"):
            validate_sql("DELETE FROM users WHERE id = 1")

    def test_drop_blocked(self):
        with pytest.raises(SQLSafetyError, match="Drop.*not allowed"):
            validate_sql("DROP TABLE users")

    def test_create_blocked(self):
        with pytest.raises(SQLSafetyError, match="Create.*not allowed"):
            validate_sql("CREATE TABLE test (id INT)")

    def test_alter_blocked(self):
        with pytest.raises(SQLSafetyError, match="Alter.*not allowed"):
            validate_sql("ALTER TABLE users ADD COLUMN email TEXT")

    def test_truncate_blocked(self):
        # Only the generic "not allowed" suffix is asserted for TRUNCATE.
        with pytest.raises(SQLSafetyError, match="not allowed"):
            validate_sql("TRUNCATE TABLE users")
class TestValidateSQLMultiStatement:
    """Multi-statement SQL should be blocked by default."""

    def test_multiple_statements_blocked_by_default(self):
        with pytest.raises(SQLSafetyError, match="Multiple statements detected"):
            validate_sql("SELECT 1; SELECT 2")

    def test_multiple_statements_allowed_when_opted_in(self):
        # Opting in returns one parsed expression per statement.
        result = validate_sql("SELECT 1; SELECT 2", allow_multiple_statements=True)
        assert len(result) == 2

    def test_dangerous_hidden_after_select(self):
        """Multi-statement blocks even if first statement is safe."""
        with pytest.raises(SQLSafetyError, match="Multiple statements"):
            validate_sql("SELECT 1; DROP TABLE users")

    def test_multi_statement_still_validates_types(self):
        """Even when multi-statement is allowed, types are still checked."""
        with pytest.raises(SQLSafetyError, match="Drop.*not allowed"):
            validate_sql("SELECT 1; DROP TABLE users", allow_multiple_statements=True)
class TestValidateSQLEdgeCases:
    """Edge cases and error handling."""

    def test_empty_string_raises(self):
        with pytest.raises(SQLSafetyError, match="Empty SQL"):
            validate_sql("")

    def test_whitespace_only_raises(self):
        # Whitespace-only input is treated the same as empty input.
        with pytest.raises(SQLSafetyError, match="Empty SQL"):
            validate_sql(" \n\t ")

    def test_malformed_sql_raises(self):
        with pytest.raises(SQLSafetyError, match="SQL parse error"):
            validate_sql("NOT VALID SQL AT ALL }{][")

    def test_dialect_parameter(self):
        # A dialect hint must not change the outcome for plain SELECTs.
        result = validate_sql("SELECT 1", dialect="postgres")
        assert len(result) == 1

    def test_custom_allowed_types(self):
        """Allow INSERT when explicitly opted in."""
        result = validate_sql(
            "INSERT INTO users (name) VALUES ('test')",
            allowed_types=(exp.Insert,),
        )
        assert len(result) == 1

    def test_custom_allowed_types_still_blocks_others(self):
        """Custom types don't allow everything."""
        with pytest.raises(SQLSafetyError, match="Select.*not allowed"):
            validate_sql("SELECT 1", allowed_types=(exp.Insert,))

    def test_select_with_trailing_semicolon(self):
        """Trailing semicolon should not cause multi-statement error."""
        result = validate_sql("SELECT 1;")
        assert len(result) == 1
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/ai/tests/unit/common/ai/utils/test_sql_validation.py",
"license": "Apache License 2.0",
"lines": 124,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/common/ai/src/airflow/providers/common/ai/decorators/llm.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
TaskFlow decorator for general-purpose LLM calls.
The user writes a function that **returns the prompt string**. The decorator
handles hook creation, agent configuration, LLM call, and output serialization.
When ``output_type`` is a Pydantic ``BaseModel``, the result is serialized via
``model_dump()`` for XCom.
"""
from __future__ import annotations
from collections.abc import Callable, Collection, Mapping, Sequence
from typing import TYPE_CHECKING, Any, ClassVar
from airflow.providers.common.ai.operators.llm import LLMOperator
from airflow.providers.common.compat.sdk import (
DecoratedOperator,
TaskDecorator,
context_merge,
task_decorator_factory,
)
from airflow.sdk.definitions._internal.types import SET_DURING_EXECUTION
from airflow.utils.operator_helpers import determine_kwargs
if TYPE_CHECKING:
from airflow.sdk import Context
class _LLMDecoratedOperator(DecoratedOperator, LLMOperator):
    """
    Wraps a callable that returns a prompt for a general-purpose LLM call.

    The user function is called at execution time to produce the prompt string.
    All other parameters (``llm_conn_id``, ``model_id``, ``system_prompt``, etc.)
    are passed through to :class:`~airflow.providers.common.ai.operators.llm.LLMOperator`.

    :param python_callable: A reference to a callable that returns the prompt string.
    :param op_args: Positional arguments for the callable.
    :param op_kwargs: Keyword arguments for the callable.
    """

    # Combine the template fields of both parents so every LLMOperator
    # parameter remains templatable when used via the decorator.
    template_fields: Sequence[str] = (
        *DecoratedOperator.template_fields,
        *LLMOperator.template_fields,
    )
    template_fields_renderers: ClassVar[dict[str, str]] = {
        **DecoratedOperator.template_fields_renderers,
    }

    custom_operator_name: str = "@task.llm"

    def __init__(
        self,
        *,
        python_callable: Callable,
        op_args: Collection[Any] | None = None,
        op_kwargs: Mapping[str, Any] | None = None,
        **kwargs,
    ) -> None:
        # The prompt is unknown until the wrapped callable runs, so pass the
        # SET_DURING_EXECUTION sentinel to satisfy LLMOperator's required argument.
        super().__init__(
            python_callable=python_callable,
            op_args=op_args,
            op_kwargs=op_kwargs,
            prompt=SET_DURING_EXECUTION,
            **kwargs,
        )

    def execute(self, context: Context) -> Any:
        """Build the prompt from the user callable, then delegate to ``LLMOperator.execute``."""
        # Merge op_kwargs into the context so determine_kwargs can match them
        # against the callable's signature.
        context_merge(context, self.op_kwargs)
        kwargs = determine_kwargs(self.python_callable, self.op_args, context)
        self.prompt = self.python_callable(*self.op_args, **kwargs)
        if not isinstance(self.prompt, str) or not self.prompt.strip():
            raise TypeError("The returned value from the @task.llm callable must be a non-empty string.")
        # Re-render template fields now that self.prompt is set.
        self.render_template_fields(context)
        return LLMOperator.execute(self, context)
def llm_task(
    python_callable: Callable | None = None,
    **kwargs,
) -> TaskDecorator:
    """
    Turn a prompt-building function into an LLM-backed Airflow task.

    The decorated function's only job is to produce the prompt string — it may
    use its own arguments, the Airflow context, or XCom values to do so. The
    decorator takes care of hook creation, agent configuration, the LLM call,
    and serialization of the output for XCom.

    Plain string output::

        @task.llm(
            llm_conn_id="openai_default",
            system_prompt="Summarize concisely.",
        )
        def summarize(text: str):
            return f"Summarize this article: {text}"

    Structured output via a Pydantic model::

        @task.llm(
            llm_conn_id="openai_default",
            system_prompt="Extract named entities.",
            output_type=Entities,
        )
        def extract(text: str):
            return f"Extract entities from: {text}"

    :param python_callable: Function to decorate.
    """
    return task_decorator_factory(
        python_callable=python_callable,
        decorated_operator_class=_LLMDecoratedOperator,
        **kwargs,
    )
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/ai/src/airflow/providers/common/ai/decorators/llm.py",
"license": "Apache License 2.0",
"lines": 109,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/common/ai/src/airflow/providers/common/ai/example_dags/example_llm.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAGs demonstrating LLMOperator and @task.llm usage."""
from __future__ import annotations
from pydantic import BaseModel
from airflow.providers.common.ai.operators.llm import LLMOperator
from airflow.providers.common.compat.sdk import dag, task
# [START howto_operator_llm_basic]
@dag
def example_llm_operator():
    # Minimal usage: a prompt plus a connection; the task returns the LLM's reply.
    LLMOperator(
        task_id="summarize",
        prompt="Summarize the key findings from the Q4 earnings report.",
        llm_conn_id="pydantic_ai_default",
        system_prompt="You are a financial analyst. Be concise.",
    )
# [END howto_operator_llm_basic]


example_llm_operator()
# [START howto_operator_llm_structured]
@dag
def example_llm_operator_structured():
    # Structured output: the LLM response is parsed into this Pydantic model.
    class Entities(BaseModel):
        names: list[str]
        locations: list[str]

    LLMOperator(
        task_id="extract_entities",
        prompt="Extract all named entities from the article.",
        llm_conn_id="pydantic_ai_default",
        system_prompt="Extract named entities.",
        output_type=Entities,
    )
# [END howto_operator_llm_structured]


example_llm_operator_structured()
# [START howto_operator_llm_agent_params]
@dag
def example_llm_operator_agent_params():
    # agent_params are forwarded to the pydantic-ai Agent constructor.
    LLMOperator(
        task_id="creative_writing",
        prompt="Write a haiku about data pipelines.",
        llm_conn_id="pydantic_ai_default",
        system_prompt="You are a creative writer.",
        agent_params={"model_settings": {"temperature": 0.9}, "retries": 3},
    )
# [END howto_operator_llm_agent_params]


example_llm_operator_agent_params()
# [START howto_decorator_llm]
@dag
def example_llm_decorator():
    # The decorated function returns the prompt; the decorator performs the LLM call.
    @task.llm(llm_conn_id="pydantic_ai_default", system_prompt="Summarize concisely.")
    def summarize(text: str):
        return f"Summarize this article: {text}"

    summarize("Apache Airflow is a platform for programmatically authoring...")
# [END howto_decorator_llm]


example_llm_decorator()
# [START howto_decorator_llm_structured]
@dag
def example_llm_decorator_structured():
    # Structured output: the reply is parsed into this model and serialized
    # via model_dump() for XCom.
    class Entities(BaseModel):
        names: list[str]
        locations: list[str]

    @task.llm(
        llm_conn_id="pydantic_ai_default",
        system_prompt="Extract named entities.",
        output_type=Entities,
    )
    def extract(text: str):
        return f"Extract entities from: {text}"

    extract("Alice visited Paris and met Bob in London.")
# [END howto_decorator_llm_structured]


example_llm_decorator_structured()
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/ai/src/airflow/providers/common/ai/example_dags/example_llm.py",
"license": "Apache License 2.0",
"lines": 84,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/common/ai/src/airflow/providers/common/ai/example_dags/example_llm_analysis_pipeline.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG: triage support tickets with @task.llm, structured output, and dynamic task mapping."""
from __future__ import annotations
from pydantic import BaseModel
from airflow.providers.common.compat.sdk import dag, task
# [START howto_decorator_llm_pipeline]
@dag
def example_llm_analysis_pipeline():
    # Structured output schema: one analysis record per ticket.
    class TicketAnalysis(BaseModel):
        priority: str
        category: str
        summary: str
        suggested_action: str

    @task
    def get_support_tickets():
        """Fetch unprocessed support tickets."""
        return [
            (
                "Our nightly ETL pipeline has been failing for the past 3 days. "
                "The error shows a connection timeout to the Postgres source database. "
                "This is blocking our daily financial reports."
            ),
            (
                "We'd like to add a new connection type for our internal ML model registry. "
                "Is there documentation on creating custom hooks?"
            ),
            (
                "After upgrading to the latest version, the Grid view takes over "
                "30 seconds to load for DAGs with more than 500 tasks. "
                "Previously it loaded in under 5 seconds."
            ),
        ]

    @task.llm(
        llm_conn_id="pydantic_ai_default",
        system_prompt=(
            "Analyze the support ticket and extract: "
            "priority (critical/high/medium/low), "
            "category (bug/feature_request/question/performance), "
            "a one-sentence summary, and a suggested next action."
        ),
        output_type=TicketAnalysis,
    )
    def analyze_ticket(ticket: str):
        """Return the prompt for a single ticket; the decorator performs the LLM call."""
        return f"Analyze this support ticket:\n\n{ticket}"

    @task
    def store_results(analyses: list[dict]):
        """Store ticket analyses. In production, this would write to a database or ticketing system."""
        for analysis in analyses:
            print(f"[{analysis['priority'].upper()}] {analysis['category']}: {analysis['summary']}")

    tickets = get_support_tickets()
    # Dynamic task mapping: one analyze_ticket task instance per ticket.
    analyses = analyze_ticket.expand(ticket=tickets)
    store_results(analyses)
# [END howto_decorator_llm_pipeline]


example_llm_analysis_pipeline()
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/ai/src/airflow/providers/common/ai/example_dags/example_llm_analysis_pipeline.py",
"license": "Apache License 2.0",
"lines": 69,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/common/ai/src/airflow/providers/common/ai/example_dags/example_llm_classification.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG: classify pipeline incidents by severity using @task.llm with Literal output."""
from __future__ import annotations
from typing import Literal
from airflow.providers.common.compat.sdk import dag, task
# [START howto_decorator_llm_classification]
@dag
def example_llm_classification():
    @task.llm(
        llm_conn_id="pydantic_ai_default",
        system_prompt=(
            "Classify the severity of the given pipeline incident. "
            "Use 'critical' for data loss or complete pipeline failure, "
            "'high' for significant delays or partial failures, "
            "'medium' for degraded performance, "
            "'low' for cosmetic issues or minor warnings."
        ),
        # A Literal output type constrains the result to one of these labels.
        output_type=Literal["critical", "high", "medium", "low"],
    )
    def classify_incident(description: str):
        # Pre-process the description before sending to the LLM
        return f"Classify this incident:\n{description.strip()}"

    classify_incident(
        "Scheduler heartbeat lost for 15 minutes. "
        "Multiple DAG runs stuck in queued state. "
        "No new tasks being scheduled across all DAGs."
    )
# [END howto_decorator_llm_classification]


example_llm_classification()
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/ai/src/airflow/providers/common/ai/example_dags/example_llm_classification.py",
"license": "Apache License 2.0",
"lines": 44,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/common/ai/src/airflow/providers/common/ai/operators/llm.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Operator for general-purpose LLM calls."""
from __future__ import annotations
from collections.abc import Sequence
from functools import cached_property
from typing import TYPE_CHECKING, Any
from pydantic import BaseModel
from airflow.providers.common.ai.hooks.pydantic_ai import PydanticAIHook
from airflow.providers.common.compat.sdk import BaseOperator
if TYPE_CHECKING:
from pydantic_ai import Agent
from airflow.sdk import Context
class LLMOperator(BaseOperator):
    """
    Send a prompt to an LLM and push the model's answer as the task result.

    LLM access goes through a
    :class:`~airflow.providers.common.ai.hooks.pydantic_ai.PydanticAIHook`.
    By default the raw string answer is returned; passing a Pydantic
    ``BaseModel`` subclass as ``output_type`` requests structured output,
    which is converted with ``model_dump()`` so it is XCom-serializable.

    :param prompt: The prompt to send to the LLM.
    :param llm_conn_id: Connection ID for the LLM provider.
    :param model_id: Model identifier (e.g. ``"openai:gpt-5"``).
        Overrides the model stored in the connection's extra field.
    :param system_prompt: System-level instructions for the LLM agent.
    :param output_type: Expected output type. Default ``str``. Set to a Pydantic
        ``BaseModel`` subclass for structured output.
    :param agent_params: Additional keyword arguments passed to the pydantic-ai
        ``Agent`` constructor (e.g. ``retries``, ``model_settings``, ``tools``).
        See `pydantic-ai Agent docs <https://ai.pydantic.dev/api/agent/>`__
        for the full list.
    """

    template_fields: Sequence[str] = (
        "prompt",
        "llm_conn_id",
        "model_id",
        "system_prompt",
        "agent_params",
    )

    def __init__(
        self,
        *,
        prompt: str,
        llm_conn_id: str,
        model_id: str | None = None,
        system_prompt: str = "",
        output_type: type = str,
        agent_params: dict[str, Any] | None = None,
        **kwargs: Any,
    ) -> None:
        super().__init__(**kwargs)
        self.prompt = prompt
        self.llm_conn_id = llm_conn_id
        self.model_id = model_id
        self.system_prompt = system_prompt
        self.output_type = output_type
        self.agent_params = agent_params if agent_params else {}

    @cached_property
    def llm_hook(self) -> PydanticAIHook:
        """Return PydanticAIHook for the configured LLM connection."""
        return PydanticAIHook(llm_conn_id=self.llm_conn_id, model_id=self.model_id)

    def execute(self, context: Context) -> Any:
        # Build a pydantic-ai agent with the configured output type and any
        # extra constructor arguments supplied via agent_params.
        agent: Agent[None, Any] = self.llm_hook.create_agent(
            output_type=self.output_type, instructions=self.system_prompt, **self.agent_params
        )
        answer = agent.run_sync(self.prompt).output
        # Structured (BaseModel) results are dumped to a plain dict for XCom.
        if isinstance(answer, BaseModel):
            return answer.model_dump()
        return answer
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/ai/src/airflow/providers/common/ai/operators/llm.py",
"license": "Apache License 2.0",
"lines": 84,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/common/ai/tests/unit/common/ai/decorators/test_llm.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import MagicMock, patch
import pytest
from airflow.providers.common.ai.decorators.llm import _LLMDecoratedOperator
class TestLLMDecoratedOperator:
    @staticmethod
    def _agent_returning(output):
        """Build a mock pydantic-ai agent whose run_sync result yields *output*."""
        agent = MagicMock(spec=["run_sync"])
        run_result = MagicMock(spec=["output"])
        run_result.output = output
        agent.run_sync.return_value = run_result
        return agent

    def test_custom_operator_name(self):
        assert _LLMDecoratedOperator.custom_operator_name == "@task.llm"

    @patch("airflow.providers.common.ai.operators.llm.PydanticAIHook", autospec=True)
    def test_execute_calls_callable_and_returns_output(self, mock_hook_cls):
        """The callable's return value becomes the LLM prompt."""
        agent = self._agent_returning("This is a summary.")
        mock_hook_cls.return_value.create_agent.return_value = agent

        def my_prompt():
            return "Summarize this text"

        op = _LLMDecoratedOperator(task_id="test", python_callable=my_prompt, llm_conn_id="my_llm")
        assert op.execute(context={}) == "This is a summary."
        assert op.prompt == "Summarize this text"
        agent.run_sync.assert_called_once_with("Summarize this text")

    @pytest.mark.parametrize(
        "return_value",
        [42, "", " ", None],
        ids=["non-string", "empty", "whitespace", "none"],
    )
    def test_execute_raises_on_invalid_prompt(self, return_value):
        """TypeError when the callable returns a non-string or blank string."""
        op = _LLMDecoratedOperator(
            task_id="test",
            python_callable=lambda: return_value,
            llm_conn_id="my_llm",
        )
        with pytest.raises(TypeError, match="non-empty string"):
            op.execute(context={})

    @patch("airflow.providers.common.ai.operators.llm.PydanticAIHook", autospec=True)
    def test_execute_merges_op_kwargs_into_callable(self, mock_hook_cls):
        """op_kwargs are resolved by the callable to build the prompt."""
        agent = self._agent_returning("done")
        mock_hook_cls.return_value.create_agent.return_value = agent

        def my_prompt(topic):
            return f"Summarize {topic}"

        op = _LLMDecoratedOperator(
            task_id="test",
            python_callable=my_prompt,
            llm_conn_id="my_llm",
            op_kwargs={"topic": "quantum computing"},
        )
        op.execute(context={"task_instance": MagicMock()})
        assert op.prompt == "Summarize quantum computing"
        agent.run_sync.assert_called_once_with("Summarize quantum computing")
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/ai/tests/unit/common/ai/decorators/test_llm.py",
"license": "Apache License 2.0",
"lines": 71,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/common/ai/tests/unit/common/ai/operators/test_llm.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import MagicMock, patch
from pydantic import BaseModel
from airflow.providers.common.ai.operators.llm import LLMOperator
class TestLLMOperator:
    @staticmethod
    def _agent_returning(output):
        """Build a mock pydantic-ai agent whose run_sync result yields *output*."""
        agent = MagicMock(spec=["run_sync"])
        run_result = MagicMock(spec=["output"])
        run_result.output = output
        agent.run_sync.return_value = run_result
        return agent

    def test_template_fields(self):
        expected = {"prompt", "llm_conn_id", "model_id", "system_prompt", "agent_params"}
        assert set(LLMOperator.template_fields) == expected

    @patch("airflow.providers.common.ai.operators.llm.PydanticAIHook", autospec=True)
    def test_execute_returns_string_output(self, mock_hook_cls):
        """Default output_type=str returns the LLM string directly."""
        agent = self._agent_returning("Paris is the capital of France.")
        mock_hook_cls.return_value.create_agent.return_value = agent

        op = LLMOperator(task_id="test", prompt="What is the capital of France?", llm_conn_id="my_llm")
        assert op.execute(context=MagicMock()) == "Paris is the capital of France."
        agent.run_sync.assert_called_once_with("What is the capital of France?")
        mock_hook_cls.return_value.create_agent.assert_called_once_with(output_type=str, instructions="")
        mock_hook_cls.assert_called_once_with(llm_conn_id="my_llm", model_id=None)

    @patch("airflow.providers.common.ai.operators.llm.PydanticAIHook", autospec=True)
    def test_execute_structured_output_with_all_params(self, mock_hook_cls):
        """Structured output via model_dump(), with model_id, system_prompt, and agent_params."""

        class Entities(BaseModel):
            names: list[str]

        agent = self._agent_returning(Entities(names=["Alice", "Bob"]))
        mock_hook_cls.return_value.create_agent.return_value = agent

        op = LLMOperator(
            task_id="test",
            prompt="Extract entities",
            llm_conn_id="my_llm",
            model_id="openai:gpt-5",
            system_prompt="You are an extractor.",
            output_type=Entities,
            agent_params={"retries": 3, "model_settings": {"temperature": 0.9}},
        )
        assert op.execute(context=MagicMock()) == {"names": ["Alice", "Bob"]}
        mock_hook_cls.assert_called_once_with(llm_conn_id="my_llm", model_id="openai:gpt-5")
        mock_hook_cls.return_value.create_agent.assert_called_once_with(
            output_type=Entities,
            instructions="You are an extractor.",
            retries=3,
            model_settings={"temperature": 0.9},
        )
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/ai/tests/unit/common/ai/operators/test_llm.py",
"license": "Apache License 2.0",
"lines": 66,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/common/sql/src/airflow/providers/common/sql/config.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from dataclasses import dataclass, field
from enum import Enum
from typing import Any
@dataclass(frozen=True)
class ConnectionConfig:
    """Configuration for datafusion object store connections."""

    # Airflow connection ID the credentials were resolved from.
    conn_id: str
    # Provider-specific credential kwargs (e.g. access_key_id / secret_access_key for AWS).
    credentials: dict[str, Any] = field(default_factory=dict)
    # Non-secret settings from the connection's extras (e.g. region, endpoint).
    extra_config: dict[str, Any] = field(default_factory=dict)
class FormatType(str, Enum):
    """Supported data formats."""

    # Values are the lowercase format names accepted by get_format_handler().
    PARQUET = "parquet"
    CSV = "csv"
    AVRO = "avro"
class StorageType(str, Enum):
"""Storage types for Data Fusion."""
S3 = "s3"
LOCAL = "local"
@dataclass
class DataSourceConfig:
"""
Configuration for an input data source.
:param conn_id: The connection ID to use for accessing the data source.
:param uri: The URI of the data source (e.g., file path, S3 bucket, etc.).
:param format: The format of the data (e.g., 'parquet', 'csv').
:param table_name: The name of the table if applicable.
:param schema: A dictionary mapping column names to their types.
:param db_name: The database name if applicable.
:param storage_type: The type of storage (automatically inferred from URI).
:param options: Additional options for the data source. eg: you can set partition columns to any datasource
that will be set in while registering the data
"""
conn_id: str
uri: str
format: str | None = None
table_name: str | None = None
storage_type: StorageType | None = None
options: dict[str, Any] = field(default_factory=dict)
def __post_init__(self):
if self.storage_type is None:
self.storage_type = self._extract_storage_type
if self.storage_type is not None and self.table_name is None:
raise ValueError("Table name must be provided for storage type")
@property
def _extract_storage_type(self) -> StorageType | None:
"""Extract storage type."""
if self.uri.startswith("s3://"):
return StorageType.S3
if self.uri.startswith("file://"):
return StorageType.LOCAL
raise ValueError(f"Unsupported storage type for URI: {self.uri}")
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/sql/src/airflow/providers/common/sql/config.py",
"license": "Apache License 2.0",
"lines": 68,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/common/sql/src/airflow/providers/common/sql/datafusion/base.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Any
from airflow.utils.log.logging_mixin import LoggingMixin
if TYPE_CHECKING:
from datafusion import SessionContext
from airflow.providers.common.sql.config import ConnectionConfig, FormatType, StorageType
class ObjectStorageProvider(LoggingMixin, ABC):
    """Abstract base class for object storage providers."""

    @property
    def get_storage_type(self) -> StorageType:
        """Return storage type handled by this provider (e.g., 's3', 'gcs', 'local')."""
        raise NotImplementedError

    @abstractmethod
    def create_object_store(self, path: str, connection_config: ConnectionConfig | None = None) -> Any:
        """Create and return a DataFusion object store instance."""
        raise NotImplementedError

    @abstractmethod
    def get_scheme(self) -> str:
        """Return URL scheme for this storage type (e.g., 's3://', 'gs://')."""
        raise NotImplementedError

    def get_bucket(self, path: str) -> str | None:
        """Return the bucket name from *path*, or None if it lacks this provider's scheme."""
        scheme = self.get_scheme()
        if not path or not path.startswith(scheme):
            return None
        # The bucket is everything between the scheme and the first slash.
        remainder = path[len(scheme):]
        return remainder.split("/", 1)[0]
class FormatHandler(LoggingMixin, ABC):
    """Abstract base class for format handlers."""

    @property
    def get_format(self) -> FormatType:
        """Return file format type."""
        # Not marked @abstractmethod, so it does not block instantiation checks;
        # concrete handlers override it.
        raise NotImplementedError

    @abstractmethod
    def register_data_source_format(self, ctx: SessionContext, table_name: str, path: str) -> None:
        """Register data source format."""
        raise NotImplementedError
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/sql/src/airflow/providers/common/sql/datafusion/base.py",
"license": "Apache License 2.0",
"lines": 53,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/common/sql/src/airflow/providers/common/sql/datafusion/engine.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Any
from datafusion import SessionContext
from airflow.providers.common.compat.sdk import BaseHook, Connection
from airflow.providers.common.sql.config import ConnectionConfig, DataSourceConfig, StorageType
from airflow.providers.common.sql.datafusion.exceptions import (
ObjectStoreCreationException,
QueryExecutionException,
)
from airflow.providers.common.sql.datafusion.format_handlers import get_format_handler
from airflow.providers.common.sql.datafusion.object_storage_provider import get_object_storage_provider
from airflow.utils.log.logging_mixin import LoggingMixin
class DataFusionEngine(LoggingMixin):
"""Apache DataFusion engine."""
def __init__(self):
super().__init__()
# TODO: session context has additional parameters via SessionConfig see what's possible we can use Possible via DataFusionHook ?
self.df_ctx = SessionContext()
self.registered_tables: dict[str, str] = {}
@property
def session_context(self) -> SessionContext:
"""Return the session context."""
return self.df_ctx
def register_datasource(self, datasource_config: DataSourceConfig):
"""Register a datasource with the datafusion engine."""
if not isinstance(datasource_config, DataSourceConfig):
raise ValueError("datasource_config must be of type DataSourceConfig")
if datasource_config.storage_type == StorageType.LOCAL:
connection_config = None
else:
connection_config = self._get_connection_config(datasource_config.conn_id)
self._register_object_store(datasource_config, connection_config)
self._register_data_source_format(datasource_config)
def _register_object_store(
self, datasource_config: DataSourceConfig, connection_config: ConnectionConfig | None
):
"""Register object stores."""
if TYPE_CHECKING:
assert datasource_config.storage_type is not None
try:
storage_provider = get_object_storage_provider(datasource_config.storage_type)
object_store = storage_provider.create_object_store(
datasource_config.uri, connection_config=connection_config
)
schema = storage_provider.get_scheme()
self.session_context.register_object_store(schema=schema, store=object_store)
self.log.info("Registered object store for schema: %s", schema)
except Exception as e:
raise ObjectStoreCreationException(
f"Error while creating object store for {datasource_config.storage_type}: {e}"
)
def _register_data_source_format(self, datasource_config: DataSourceConfig):
"""Register data source format."""
if TYPE_CHECKING:
assert datasource_config.table_name is not None
assert datasource_config.format is not None
if datasource_config.table_name in self.registered_tables:
raise ValueError(
f"Table {datasource_config.table_name} already registered for {self.registered_tables[datasource_config.table_name]}, please choose different name"
)
format_cls = get_format_handler(datasource_config.format, datasource_config.options)
format_cls.register_data_source_format(
self.session_context, datasource_config.table_name, datasource_config.uri
)
self.registered_tables[datasource_config.table_name] = datasource_config.uri
self.log.info(
"Registered data source format %s for table: %s",
datasource_config.format,
datasource_config.table_name,
)
def execute_query(self, query: str) -> dict[str, list[Any]]:
"""Execute a query and return the result as a dictionary."""
try:
self.log.info("Executing query: %s", query)
df = self.session_context.sql(query)
return df.to_pydict()
except Exception as e:
raise QueryExecutionException(f"Error while executing query: {e}")
def _get_connection_config(self, conn_id: str) -> ConnectionConfig:
airflow_conn = BaseHook.get_connection(conn_id)
credentials, extra_config = self._get_credentials(airflow_conn)
return ConnectionConfig(
conn_id=airflow_conn.conn_id,
credentials=credentials,
extra_config=extra_config,
)
def _get_credentials(self, conn: Connection) -> tuple[dict[str, Any], dict[str, Any]]:
credentials = {}
extra_config = {}
def _fetch_extra_configs(keys: list[str]) -> dict[str, Any]:
conf = {}
extra_dejson = conn.extra_dejson
for key in keys:
if key in extra_dejson:
conf[key] = conn.extra_dejson[key]
return conf
match conn.conn_type:
case "aws":
try:
from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook
except ImportError:
from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
raise AirflowOptionalProviderFeatureException(
"Failed to import AwsGenericHook. To use the S3 storage functionality, please install the "
"apache-airflow-providers-amazon package."
)
aws_hook: AwsGenericHook = AwsGenericHook(aws_conn_id=conn.conn_id, client_type="s3")
creds = aws_hook.get_credentials()
credentials.update(
{
"access_key_id": conn.login or creds.access_key,
"secret_access_key": conn.password or creds.secret_key,
"session_token": creds.token if creds.token else None,
}
)
credentials = self._remove_none_values(credentials)
extra_config = _fetch_extra_configs(["region", "endpoint"])
case _:
raise ValueError(f"Unknown connection type {conn.conn_type}")
return credentials, extra_config
@staticmethod
def _remove_none_values(params: dict[str, Any]) -> dict[str, Any]:
"""Filter out None values from the dictionary."""
return {k: v for k, v in params.items() if v is not None}
def get_schema(self, table_name: str):
"""Get the schema of a table."""
schema = str(self.session_context.table(table_name).schema())
return schema
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/sql/src/airflow/providers/common/sql/datafusion/engine.py",
"license": "Apache License 2.0",
"lines": 144,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/common/sql/src/airflow/providers/common/sql/datafusion/exceptions.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.providers.common.compat.sdk import AirflowException
class ObjectStoreCreationException(AirflowException):
    """Error while creating a DataFusion object store."""


class FileFormatRegistrationException(AirflowException):
    """Error while registering file format."""


class QueryExecutionException(AirflowException):
    """Error while executing query."""
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/sql/src/airflow/providers/common/sql/datafusion/exceptions.py",
"license": "Apache License 2.0",
"lines": 24,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/common/sql/src/airflow/providers/common/sql/datafusion/format_handlers.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Any
from airflow.providers.common.sql.config import FormatType
from airflow.providers.common.sql.datafusion.base import FormatHandler
from airflow.providers.common.sql.datafusion.exceptions import FileFormatRegistrationException
if TYPE_CHECKING:
from datafusion import SessionContext
class ParquetFormatHandler(FormatHandler):
    """
    Parquet format handler.

    :param options: Additional options for the Parquet format.
        https://datafusion.apache.org/python/autoapi/datafusion/context/index.html#datafusion.context.SessionContext.register_parquet
    """

    def __init__(self, options: dict[str, Any] | None = None):
        # Always hold a dict so **self.options is valid in the register call.
        self.options = {} if not options else options

    @property
    def get_format(self) -> FormatType:
        """Return the format type."""
        return FormatType.PARQUET

    def register_data_source_format(self, ctx: SessionContext, table_name: str, path: str):
        """Register *path* as a Parquet table named *table_name* on *ctx*."""
        try:
            ctx.register_parquet(table_name, path, **self.options)
        except Exception as e:
            raise FileFormatRegistrationException(f"Failed to register Parquet data source: {e}")
class CsvFormatHandler(FormatHandler):
    """
    CSV format handler.

    :param options: Additional options for the CSV format.
        https://datafusion.apache.org/python/autoapi/datafusion/context/index.html#datafusion.context.SessionContext.register_csv
    """

    def __init__(self, options: dict[str, Any] | None = None):
        self.options = options or {}

    @property
    def get_format(self) -> FormatType:
        """Return the format type."""
        return FormatType.CSV

    def register_data_source_format(self, ctx: SessionContext, table_name: str, path: str):
        """Register *path* as a CSV table named *table_name* on *ctx*.

        :raises FileFormatRegistrationException: If DataFusion rejects the registration.
        """
        try:
            ctx.register_csv(table_name, path, **self.options)
        except Exception as e:
            # "CSV" capitalized for consistency with the Parquet/Avro handler messages;
            # chain the cause so the original error survives in tracebacks.
            raise FileFormatRegistrationException(f"Failed to register CSV data source: {e}") from e
class AvroFormatHandler(FormatHandler):
    """
    Avro format handler.

    :param options: Additional options for the Avro format.
        https://datafusion.apache.org/python/autoapi/datafusion/context/index.html#datafusion.context.SessionContext.register_avro
    """

    def __init__(self, options: dict[str, Any] | None = None):
        # Always hold a dict so **self.options is valid in the register call.
        self.options = options or {}

    @property
    def get_format(self) -> FormatType:
        """Return the format type."""
        return FormatType.AVRO

    def register_data_source_format(self, ctx: SessionContext, table_name: str, path: str) -> None:
        """Register a data source format."""
        try:
            ctx.register_avro(table_name, path, **self.options)
        except Exception as e:
            # Wrap any backend failure in the provider-specific exception type.
            raise FileFormatRegistrationException(f"Failed to register Avro data source: {e}")
def get_format_handler(format_type: str, options: dict[str, Any] | None = None) -> FormatHandler:
    """Get a format handler based on the format type."""
    # Case-insensitive dispatch table of supported formats.
    handlers: dict[str, type[FormatHandler]] = {
        "parquet": ParquetFormatHandler,
        "csv": CsvFormatHandler,
        "avro": AvroFormatHandler,
    }
    normalized = format_type.lower()
    if normalized in handlers:
        return handlers[normalized](options)
    raise ValueError(f"Unsupported format: {normalized}")
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/sql/src/airflow/providers/common/sql/datafusion/format_handlers.py",
"license": "Apache License 2.0",
"lines": 89,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/common/sql/src/airflow/providers/common/sql/datafusion/object_storage_provider.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datafusion.object_store import AmazonS3, LocalFileSystem
from airflow.providers.common.sql.config import ConnectionConfig, StorageType
from airflow.providers.common.sql.datafusion.base import ObjectStorageProvider
from airflow.providers.common.sql.datafusion.exceptions import ObjectStoreCreationException
class S3ObjectStorageProvider(ObjectStorageProvider):
    """S3 Object Storage Provider using DataFusion's AmazonS3."""

    @property
    def get_storage_type(self) -> StorageType:
        """Return the storage type."""
        return StorageType.S3

    def create_object_store(self, path: str, connection_config: ConnectionConfig | None = None):
        """Create an S3 object store using DataFusion's AmazonS3.

        :param path: S3 URI of the data (``s3://bucket/...``); the bucket is derived from it.
        :param connection_config: Resolved credentials and extra config; required for S3.
        :raises ValueError: If ``connection_config`` is missing.
        :raises ObjectStoreCreationException: If the AmazonS3 store cannot be built.
        """
        if connection_config is None:
            # Fix: the message was passed as a %s format string plus a separate
            # argument, so ValueError carried an unformatted tuple.
            raise ValueError(f"connection_config must be provided for {self.get_storage_type}")
        try:
            credentials = connection_config.credentials
            bucket = self.get_bucket(path)
            s3_store = AmazonS3(**credentials, **connection_config.extra_config, bucket_name=bucket)
            self.log.info("Created S3 object store for bucket %s", bucket)
            return s3_store
        except Exception as e:
            # Chain the cause so the underlying boto/object-store error survives.
            raise ObjectStoreCreationException(f"Failed to create S3 object store: {e}") from e

    def get_scheme(self) -> str:
        """Return the scheme for S3."""
        return "s3://"
class LocalObjectStorageProvider(ObjectStorageProvider):
    """Local Object Storage Provider using DataFusion's LocalFileSystem."""

    @property
    def get_storage_type(self) -> StorageType:
        """Return the storage type."""
        return StorageType.LOCAL

    def create_object_store(self, path: str, connection_config: ConnectionConfig | None = None):
        """Create a Local object store."""
        # Local filesystem access needs no credentials; connection_config is ignored.
        return LocalFileSystem()

    def get_scheme(self) -> str:
        """Return the scheme to a Local file system."""
        return "file://"
def get_object_storage_provider(storage_type: StorageType) -> ObjectStorageProvider:
    """Get an object storage provider based on the storage type."""
    # TODO: Add support for GCS, Azure, HTTP: https://datafusion.apache.org/python/autoapi/datafusion/object_store/index.html
    providers: dict[StorageType, type] = {
        StorageType.S3: S3ObjectStorageProvider,
        StorageType.LOCAL: LocalObjectStorageProvider,
    }
    provider_class = providers.get(storage_type)
    if provider_class is None:
        raise ValueError(
            f"Unsupported storage type: {storage_type}. Supported types: {list(providers.keys())}"
        )
    return provider_class()
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/sql/src/airflow/providers/common/sql/datafusion/object_storage_provider.py",
"license": "Apache License 2.0",
"lines": 67,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/common/sql/src/airflow/providers/common/sql/example_dags/example_analytics.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
from airflow.providers.common.sql.config import DataSourceConfig
from airflow.providers.common.sql.operators.analytics import AnalyticsOperator
from airflow.sdk import DAG, task
datasource_config_s3 = DataSourceConfig(
conn_id="aws_default", table_name="users_data", uri="s3://bucket/path/", format="parquet"
)
datasource_config_local = DataSourceConfig(
conn_id="", table_name="users_data", uri="file:///path/to/", format="parquet"
)
# Please replace uri with appropriate value
with DAG(
dag_id="example_analytics",
schedule=datetime.timedelta(hours=4),
start_date=datetime.datetime(2021, 1, 1),
catchup=False,
tags=["analytics", "common-sql"],
) as dag:
# [START howto_analytics_operator_with_s3]
analytics_with_s3 = AnalyticsOperator(
task_id="analytics_with_s3",
datasource_configs=[datasource_config_s3],
queries=["SELECT * FROM users_data", "SELECT count(*) FROM users_data"],
)
# [END howto_analytics_operator_with_s3]
# [START howto_analytics_operator_with_local]
analytics_with_local = AnalyticsOperator(
task_id="analytics_with_local",
datasource_configs=[datasource_config_local],
queries=["SELECT * FROM users_data", "SELECT count(*) FROM users_data"],
)
analytics_with_s3 >> analytics_with_local
# [END howto_analytics_operator_with_local]
# [START howto_analytics_decorator]
@task.analytics(datasource_configs=[datasource_config_s3])
def get_user_summary_queries():
return ["SELECT * FROM users_data LIMIT 10", "SELECT count(*) FROM users_data"]
# [END howto_analytics_decorator]
analytics_with_local >> get_user_summary_queries()
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/sql/src/airflow/providers/common/sql/example_dags/example_analytics.py",
"license": "Apache License 2.0",
"lines": 56,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/common/sql/src/airflow/providers/common/sql/operators/analytics.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
from collections.abc import Sequence
from functools import cached_property
from typing import TYPE_CHECKING, Any, Literal
from airflow.providers.common.compat.sdk import BaseOperator, Context
from airflow.providers.common.sql.datafusion.engine import DataFusionEngine
if TYPE_CHECKING:
from airflow.providers.common.sql.config import DataSourceConfig
class AnalyticsOperator(BaseOperator):
"""
Operator to run queries on various datasource's stored in object stores like S3, GCS, Azure, etc.
:param datasource_configs: List of datasource configurations to register.
:param queries: List of SQL queries to execute.
:param max_rows_check: Maximum number of rows allowed in query results. Queries exceeding this will be skipped.
:param engine: Optional DataFusion engine instance.
:param result_output_format: List of output formats for results. Supported: 'tabulate', 'json'. Default is 'tabulate'.
"""
template_fields: Sequence[str] = (
"datasource_configs",
"queries",
"max_rows_check",
"result_output_format",
)
def __init__(
self,
datasource_configs: list[DataSourceConfig],
queries: list[str],
max_rows_check: int = 100,
engine: DataFusionEngine | None = None,
result_output_format: Literal["tabulate", "json"] = "tabulate",
**kwargs,
) -> None:
super().__init__(**kwargs)
self.datasource_configs = datasource_configs
self.queries = queries
self.engine = engine
self.max_rows_check = max_rows_check
self.result_output_format = result_output_format
@cached_property
def _df_engine(self):
if self.engine is None:
return DataFusionEngine()
return self.engine
def execute(self, context: Context) -> str:
results = []
for datasource_config in self.datasource_configs:
self._df_engine.register_datasource(datasource_config)
# TODO make it parallel as there is no dependency between queries
for query in self.queries:
result_dict = self._df_engine.execute_query(query)
results.append({"query": query, "data": result_dict})
match self.result_output_format:
case "tabulate":
return self._build_tabulate_output(results)
case "json":
return self._build_json_output(results)
case _:
raise ValueError(f"Unsupported output format: {self.result_output_format}")
def _is_result_too_large(self, result_dict: dict[str, Any]) -> tuple[bool, int]:
"""Check if a result exceeds the max_rows_check limit."""
if not result_dict:
return False, 0
num_rows = len(next(iter(result_dict.values())))
max_rows_exceeded = num_rows > self.max_rows_check
if max_rows_exceeded:
self.log.warning(
"Query returned %s rows, exceeding max_rows_check (%s). Skipping result output as large datasets are unsuitable for return.",
num_rows,
self.max_rows_check,
)
return max_rows_exceeded, num_rows
def _build_tabulate_output(self, query_results: list[dict[str, Any]]) -> str:
from tabulate import tabulate
output_parts = []
for item in query_results:
query = item["query"]
result_dict = item["data"]
too_large, row_count = self._is_result_too_large(result_dict)
if too_large:
output_parts.append(
f"\n### Results: {query}\n\n"
f"**Skipped**: {row_count} rows exceed max_rows_check ({self.max_rows_check})\n\n"
f"{'-' * 40}\n"
)
continue
table_str = tabulate(
self._get_rows(result_dict, row_count),
headers="keys",
tablefmt="github",
showindex=True,
)
output_parts.append(f"\n### Results: {query}\n\n{table_str}\n\n{'-' * 40}\n")
return "".join(output_parts)
@staticmethod
def _get_rows(result_dict: dict[str, Any], row_count: int) -> list[dict[str, Any]]:
return [{key: result_dict[key][i] for key in result_dict} for i in range(row_count)]
def _build_json_output(self, query_results: list[dict[str, Any]]) -> str:
json_results = []
for item in query_results:
query = item["query"]
result_dict = item["data"]
max_rows_exceeded, row_count = self._is_result_too_large(result_dict)
if max_rows_exceeded:
json_results.append(
{
"query": query,
"status": "skipped_too_large",
"row_count": row_count,
"max_allowed": self.max_rows_check,
}
)
continue
json_results.append(
{
"query": query,
"data": self._get_rows(result_dict, row_count),
}
)
return json.dumps(json_results, default=str)
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/sql/src/airflow/providers/common/sql/operators/analytics.py",
"license": "Apache License 2.0",
"lines": 136,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/common/sql/tests/unit/common/sql/datafusion/test_engine.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
import tempfile
from unittest.mock import MagicMock, patch
import pytest
from datafusion import SessionContext
from airflow.models import Connection
from airflow.providers.common.sql.config import ConnectionConfig, DataSourceConfig
from airflow.providers.common.sql.datafusion.base import ObjectStorageProvider
from airflow.providers.common.sql.datafusion.engine import DataFusionEngine
from airflow.providers.common.sql.datafusion.exceptions import (
ObjectStoreCreationException,
QueryExecutionException,
)
TEST_CONNECTION_CONFIG = ConnectionConfig(
conn_id="aws_default",
credentials={
"access_key_id": "test",
"secret_access_key": "test",
"session_token": None,
},
extra_config={"region_name": "us-east-1"},
)
class TestDataFusionEngine:
@pytest.fixture(autouse=True)
def setup_connections(self, create_connection_without_db):
create_connection_without_db(
Connection(
conn_id="aws_default",
conn_type="aws",
login="fake_id",
password="fake_secret",
extra='{"region": "us-east-1"}',
)
)
def test_init(self):
engine = DataFusionEngine()
assert engine.df_ctx is not None
assert engine.registered_tables == {}
def test_session_context_property(self):
engine = DataFusionEngine()
assert isinstance(engine.session_context, SessionContext)
assert engine.session_context is engine.df_ctx
def test_register_datasource_invalid_config(self):
engine = DataFusionEngine()
with pytest.raises(ValueError, match="datasource_config must be of type DataSourceConfig"):
engine.register_datasource("invalid")
@pytest.mark.parametrize(
("storage_type", "format", "scheme"),
[("s3", "parquet", "s3"), ("s3", "csv", "s3"), ("s3", "avro", "s3")],
)
@patch("airflow.providers.common.sql.datafusion.engine.get_object_storage_provider", autospec=True)
@patch.object(DataFusionEngine, "_get_connection_config")
def test_register_datasource_success(self, mock_get_conn, mock_factory, storage_type, format, scheme):
mock_get_conn.return_value = TEST_CONNECTION_CONFIG
mock_provider = MagicMock(spec=ObjectStorageProvider)
mock_store = MagicMock()
mock_provider.create_object_store.return_value = mock_store
mock_provider.get_scheme.return_value = scheme
mock_factory.return_value = mock_provider
engine = DataFusionEngine()
datasource_config = DataSourceConfig(
conn_id="aws_default", table_name="test_table", uri=f"{scheme}://bucket/path", format=format
)
engine.df_ctx = MagicMock(spec=SessionContext)
engine.register_datasource(datasource_config)
mock_factory.assert_called_once()
mock_provider.create_object_store.assert_called_once_with(
f"{scheme}://bucket/path", connection_config=mock_get_conn.return_value
)
engine.df_ctx.register_object_store.assert_called_once_with(schema=scheme, store=mock_store)
if format == "parquet":
engine.df_ctx.register_parquet.assert_called_once_with("test_table", f"{scheme}://bucket/path")
elif format == "csv":
engine.df_ctx.register_csv.assert_called_once_with("test_table", f"{scheme}://bucket/path")
elif format == "avro":
engine.df_ctx.register_avro.assert_called_once_with("test_table", f"{scheme}://bucket/path")
assert engine.registered_tables == {"test_table": f"{scheme}://bucket/path"}
@patch("airflow.providers.common.sql.datafusion.engine.get_object_storage_provider", autospec=True)
@patch.object(DataFusionEngine, "_get_connection_config")
def test_register_datasource_object_store_exception(self, mock_get_conn, mock_factory):
mock_get_conn.return_value = TEST_CONNECTION_CONFIG
mock_factory.side_effect = Exception("Provider error")
engine = DataFusionEngine()
datasource_config = DataSourceConfig(
conn_id="aws_default", table_name="test_table", uri="s3://bucket/path", format="parquet"
)
with pytest.raises(ObjectStoreCreationException, match="Error while creating object store"):
engine.register_datasource(datasource_config)
@patch.object(DataFusionEngine, "_get_connection_config")
def test_register_datasource_duplicate_table(self, mock_get_conn):
mock_get_conn.return_value = TEST_CONNECTION_CONFIG
engine = DataFusionEngine()
engine.registered_tables["test_table"] = "s3://old/path"
datasource_config = DataSourceConfig(
conn_id="aws_default", table_name="test_table", uri="s3://new/path", format="parquet"
)
with patch.object(engine, "_register_object_store"):
with pytest.raises(ValueError, match="Table test_table already registered"):
engine.register_datasource(datasource_config)
def test_execute_query_success(self):
engine = DataFusionEngine()
engine.df_ctx = MagicMock(spec=SessionContext)
mock_df = MagicMock()
mock_df.to_pydict.return_value = {"col1": [1, 2]}
engine.df_ctx.sql.return_value = mock_df
result = engine.execute_query("SELECT * FROM test_table")
engine.df_ctx.sql.assert_called_once_with("SELECT * FROM test_table")
assert result == {"col1": [1, 2]}
def test_execute_query_failure(self):
engine = DataFusionEngine()
engine.df_ctx = MagicMock(spec=SessionContext)
engine.df_ctx.sql.side_effect = Exception("SQL Error")
with pytest.raises(QueryExecutionException, match="Error while executing query"):
engine.execute_query("SELECT * FROM test_table")
@patch.object(DataFusionEngine, "_get_connection_config")
def test_execute_query_with_local_csv(self, mock_get_conn):
mock_get_conn.return_value = None
with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f:
f.write("name,age\nAlice,30\nBob,25\n")
csv_path = f.name
try:
engine = DataFusionEngine()
datasource_config = DataSourceConfig(
table_name="test_csv",
uri=f"file://{csv_path}",
format="csv",
storage_type="local",
conn_id="",
)
engine.register_datasource(datasource_config)
result = engine.execute_query("SELECT * FROM test_csv ORDER BY name")
expected = {"name": ["Alice", "Bob"], "age": [30, 25]}
assert result == expected
finally:
os.unlink(csv_path)
@patch("airflow.providers.common.sql.datafusion.engine.get_object_storage_provider", autospec=True)
@patch.object(DataFusionEngine, "_get_connection_config")
def test_register_datasource_with_options(self, mock_get_conn, mock_factory):
mock_get_conn.return_value = TEST_CONNECTION_CONFIG
mock_provider = MagicMock(spec=ObjectStorageProvider)
mock_store = MagicMock()
mock_provider.create_object_store.return_value = mock_store
mock_provider.get_scheme.return_value = "s3"
mock_factory.return_value = mock_provider
engine = DataFusionEngine()
datasource_config = DataSourceConfig(
conn_id="aws_default",
table_name="test_table",
uri="s3://bucket/path/",
format="parquet",
options={"table_partition_cols": [("year", "integer"), ("month", "integer")]},
)
engine.df_ctx = MagicMock(spec=SessionContext)
engine.register_datasource(datasource_config)
mock_factory.assert_called_once()
mock_provider.create_object_store.assert_called_once_with(
"s3://bucket/path/", connection_config=mock_get_conn.return_value
)
engine.df_ctx.register_object_store.assert_called_once_with(schema="s3", store=mock_store)
engine.df_ctx.register_parquet.assert_called_once_with(
"test_table",
"s3://bucket/path/",
table_partition_cols=[("year", "integer"), ("month", "integer")],
)
assert engine.registered_tables == {"test_table": "s3://bucket/path/"}
def test_remove_none_values(self):
result = DataFusionEngine._remove_none_values({"a": 1, "b": None, "c": "test", "d": None})
assert result == {"a": 1, "c": "test"}
def test_get_connection_config(self):
engine = DataFusionEngine()
result = engine._get_connection_config("aws_default")
expected = ConnectionConfig(
conn_id="aws_default",
credentials={
"access_key_id": "fake_id",
"secret_access_key": "fake_secret",
},
extra_config={"region": "us-east-1"},
)
assert result.conn_id == expected.conn_id
assert result.credentials == expected.credentials
assert result.extra_config == expected.extra_config
def test_get_credentials_unknown_type(self):
mock_conn = MagicMock()
mock_conn.conn_type = "dummy"
engine = DataFusionEngine()
with pytest.raises(ValueError, match="Unknown connection type dummy"):
engine._get_credentials(mock_conn)
def test_get_schema_success(self):
engine = DataFusionEngine()
engine.df_ctx = MagicMock(spec=SessionContext)
mock_table = MagicMock()
mock_schema = MagicMock()
mock_schema.__str__ = lambda self: "id: int64, name: string"
mock_table.schema.return_value = mock_schema
engine.df_ctx.table.return_value = mock_table
result = engine.get_schema("test_table")
engine.df_ctx.table.assert_called_once_with("test_table")
mock_table.schema.assert_called_once()
assert result == "id: int64, name: string"
@patch.object(DataFusionEngine, "_get_connection_config")
def test_get_schema_with_local_csv(self, mock_get_conn):
mock_get_conn.return_value = None
with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f:
f.write("name,age\nAlice,30\nBob,25\n")
csv_path = f.name
try:
engine = DataFusionEngine()
datasource_config = DataSourceConfig(
table_name="test_csv",
uri=f"file://{csv_path}",
format="csv",
storage_type="local",
conn_id="",
)
engine.register_datasource(datasource_config)
result = engine.get_schema("test_csv")
assert "name: string" in result
assert "age: int64" in result
finally:
os.unlink(csv_path)
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/sql/tests/unit/common/sql/datafusion/test_engine.py",
"license": "Apache License 2.0",
"lines": 238,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/common/sql/tests/unit/common/sql/datafusion/test_format_handlers.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import MagicMock
import pytest
from airflow.providers.common.sql.config import FormatType
from airflow.providers.common.sql.datafusion.exceptions import FileFormatRegistrationException
from airflow.providers.common.sql.datafusion.format_handlers import (
AvroFormatHandler,
CsvFormatHandler,
ParquetFormatHandler,
get_format_handler,
)
class TestFormatHandlers:
@pytest.fixture
def session_context_mock(self):
return MagicMock()
def test_parquet_handler_success(self, session_context_mock):
handler = ParquetFormatHandler(options={"key": "value"})
handler.register_data_source_format(session_context_mock, "table_name", "path/to/file")
session_context_mock.register_parquet.assert_called_once_with(
"table_name", "path/to/file", key="value"
)
assert handler.get_format == FormatType.PARQUET
def test_parquet_handler_failure(self, session_context_mock):
session_context_mock.register_parquet.side_effect = Exception("Error")
handler = ParquetFormatHandler()
with pytest.raises(FileFormatRegistrationException, match="Failed to register Parquet data source"):
handler.register_data_source_format(session_context_mock, "table_name", "path/to/file")
def test_csv_handler_success(self, session_context_mock):
handler = CsvFormatHandler(options={"delimiter": ","})
handler.register_data_source_format(session_context_mock, "table_name", "path/to/file")
session_context_mock.register_csv.assert_called_once_with("table_name", "path/to/file", delimiter=",")
assert handler.get_format == FormatType.CSV
def test_csv_handler_failure(self, session_context_mock):
session_context_mock.register_csv.side_effect = Exception("Error")
handler = CsvFormatHandler()
with pytest.raises(FileFormatRegistrationException, match="Failed to register csv data source"):
handler.register_data_source_format(session_context_mock, "table_name", "path/to/file")
def test_avro_handler_success(self, session_context_mock):
handler = AvroFormatHandler(options={"key": "value"})
handler.register_data_source_format(session_context_mock, "table_name", "path/to/file")
session_context_mock.register_avro.assert_called_once_with("table_name", "path/to/file", key="value")
assert handler.get_format == FormatType.AVRO
def test_avro_handler_failure(self, session_context_mock):
session_context_mock.register_avro.side_effect = Exception("Error")
handler = AvroFormatHandler()
with pytest.raises(FileFormatRegistrationException, match="Failed to register Avro data source"):
handler.register_data_source_format(session_context_mock, "table_name", "path/to/file")
def test_get_format_handler(self):
assert isinstance(get_format_handler("parquet"), ParquetFormatHandler)
assert isinstance(get_format_handler("csv"), CsvFormatHandler)
assert isinstance(get_format_handler("avro"), AvroFormatHandler)
with pytest.raises(ValueError, match="Unsupported format"):
get_format_handler("invalid")
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/sql/tests/unit/common/sql/datafusion/test_format_handlers.py",
"license": "Apache License 2.0",
"lines": 69,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/common/sql/tests/unit/common/sql/datafusion/test_object_storage_provider.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import patch
import pytest
from airflow.providers.common.sql.config import ConnectionConfig, StorageType
from airflow.providers.common.sql.datafusion.exceptions import ObjectStoreCreationException
from airflow.providers.common.sql.datafusion.object_storage_provider import (
LocalObjectStorageProvider,
S3ObjectStorageProvider,
get_object_storage_provider,
)
class TestObjectStorageProvider:
@patch("airflow.providers.common.sql.datafusion.object_storage_provider.AmazonS3")
def test_s3_provider_success(self, mock_s3):
provider = S3ObjectStorageProvider()
connection_config = ConnectionConfig(
conn_id="aws_default",
credentials={"access_key_id": "fake_key", "secret_access_key": "fake_secret"},
)
store = provider.create_object_store("s3://demo-data/path", connection_config)
mock_s3.assert_called_once_with(
access_key_id="fake_key", secret_access_key="fake_secret", bucket_name="demo-data"
)
assert store == mock_s3.return_value
assert provider.get_storage_type == StorageType.S3
assert provider.get_scheme() == "s3://"
def test_s3_provider_failure(self):
provider = S3ObjectStorageProvider()
connection_config = ConnectionConfig(conn_id="aws_default")
with patch(
"airflow.providers.common.sql.datafusion.object_storage_provider.AmazonS3",
side_effect=Exception("Error"),
):
with pytest.raises(ObjectStoreCreationException, match="Failed to create S3 object store"):
provider.create_object_store("s3://demo-data/path", connection_config)
@patch("airflow.providers.common.sql.datafusion.object_storage_provider.LocalFileSystem")
def test_local_provider(self, mock_local):
provider = LocalObjectStorageProvider()
assert provider.get_storage_type == StorageType.LOCAL
assert provider.get_scheme() == "file://"
local_store = provider.create_object_store("file://path")
assert local_store == mock_local.return_value
def test_get_object_storage_provider(self):
assert isinstance(get_object_storage_provider(StorageType.S3), S3ObjectStorageProvider)
assert isinstance(get_object_storage_provider(StorageType.LOCAL), LocalObjectStorageProvider)
with pytest.raises(ValueError, match="Unsupported storage type"):
get_object_storage_provider("invalid")
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/sql/tests/unit/common/sql/datafusion/test_object_storage_provider.py",
"license": "Apache License 2.0",
"lines": 62,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/common/sql/tests/unit/common/sql/operators/test_analytics.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import os
import tempfile
from unittest.mock import MagicMock
import pytest
from airflow.providers.common.sql.config import DataSourceConfig, StorageType
from airflow.providers.common.sql.operators.analytics import AnalyticsOperator
class TestAnalyticsOperator:
@pytest.fixture
def mock_engine(self):
return MagicMock()
@pytest.fixture
def operator(self, mock_engine):
datasource_config = DataSourceConfig(
conn_id="aws_default", table_name="users_data", uri="s3://bucket/path", format="parquet"
)
return AnalyticsOperator(
task_id="test_analytics",
datasource_configs=[datasource_config],
queries=["SELECT * FROM users_data"],
engine=mock_engine,
)
def test_execute_success(self, operator, mock_engine):
mock_engine.execute_query.return_value = {
"col1": [1, 2, 3, 4, 5],
"col2": ["dave", "bob", "alice", "carol", "eve"],
}
result = operator.execute(context={})
mock_engine.register_datasource.assert_called_once()
mock_engine.execute_query.assert_called_once_with("SELECT * FROM users_data")
assert "col1" in result
assert "col2" in result
def test_execute_max_rows_exceeded(self, operator, mock_engine):
operator.max_rows_check = 3
mock_engine.execute_query.return_value = {"col1": [1, 2, 3, 4]}
result = operator.execute(context={})
assert "Skipped" in result
assert "4 rows exceed max_rows_check (3)" in result
def test_json_output_format(self, mock_engine):
datasource_config = DataSourceConfig(
conn_id="aws_default", table_name="users_data", uri="s3://bucket/path", format="parquet"
)
operator = AnalyticsOperator(
task_id="test_analytics",
datasource_configs=[datasource_config],
queries=["SELECT * FROM users_data"],
engine=mock_engine,
result_output_format="json",
)
mock_engine.execute_query.return_value = {
"id": [1, 2, 3],
"name": ["A", "B", "C"],
"value": [10.1, 20.2, 30.3],
}
result = operator.execute(context={})
json_result = json.loads(result)
assert len(json_result) == 1
assert json_result[0]["query"] == "SELECT * FROM users_data"
assert len(json_result[0]["data"]) == 3
assert json_result[0]["data"][0] == {"id": 1, "name": "A", "value": 10.1}
assert json_result[0]["data"][1] == {"id": 2, "name": "B", "value": 20.2}
assert json_result[0]["data"][2] == {"id": 3, "name": "C", "value": 30.3}
def test_tabulate_output_format(self, mock_engine):
datasource_config = DataSourceConfig(
conn_id="aws_default", table_name="users_data", uri="s3://bucket/path", format="parquet"
)
operator = AnalyticsOperator(
task_id="test_analytics",
datasource_configs=[datasource_config],
queries=["SELECT * FROM users_data"],
engine=mock_engine,
result_output_format="tabulate",
)
mock_engine.execute_query.return_value = {
"product": ["apple", "banana", "cherry"],
"quantity": [10, 20, 15],
}
result = operator.execute(context={})
assert "product" in result
assert "Results: SELECT * FROM users_data" in result
def test_unsupported_output_format(self, mock_engine):
datasource_config = DataSourceConfig(
conn_id="aws_default", table_name="users_data", uri="s3://bucket/path", format="parquet"
)
operator = AnalyticsOperator(
task_id="test_analytics",
datasource_configs=[datasource_config],
queries=["SELECT * FROM users_data"],
engine=mock_engine,
result_output_format=["invalid"], # type: ignore
)
with pytest.raises(ValueError, match="Unsupported output format"):
operator.execute(context={})
def test_execute_with_local_csv(self):
with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f:
f.write("name,age\nAlice,30\nBob,25\n")
csv_path = f.name
try:
datasource_config = DataSourceConfig(
conn_id="",
table_name="test_csv",
uri=f"file://{csv_path}",
format="csv",
storage_type=StorageType.LOCAL,
)
operator = AnalyticsOperator(
task_id="test_analytics",
datasource_configs=[datasource_config],
queries=["SELECT * FROM test_csv ORDER BY name"],
engine=None,
)
result = operator.execute(context={})
assert "Alice" in result
assert "Bob" in result
assert "Results: SELECT * FROM test_csv ORDER BY name" in result
finally:
os.unlink(csv_path)
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/sql/tests/unit/common/sql/operators/test_analytics.py",
"license": "Apache License 2.0",
"lines": 134,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/common/sql/tests/unit/common/sql/test_config.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow.providers.common.sql.config import ConnectionConfig, DataSourceConfig, StorageType
class TestDataSourceConfig:
    """Tests for DataSourceConfig creation, validation, and storage-type inference."""

    def test_successful_creation(self):
        config = DataSourceConfig(conn_id="test_conn", uri="s3://bucket/key", table_name="my_table")
        assert config.conn_id == "test_conn"
        assert config.uri == "s3://bucket/key"
        assert config.table_name == "my_table"
        assert config.storage_type == StorageType.S3

    @pytest.mark.parametrize(
        ("uri", "expected_type"),
        [
            ("s3://bucket/path", StorageType.S3),
            ("file:///path/to/file", StorageType.LOCAL),
        ],
    )
    def test_extract_storage_type(self, uri, expected_type):
        # Enum members are always truthy, so the former expression
        # `"a_table" if expected_type else None` could never yield None;
        # pass the table name unconditionally.
        config = DataSourceConfig(conn_id="test", uri=uri, table_name="a_table")
        assert config.storage_type == expected_type

    def test_invalid_storage_type_raises_error(self):
        with pytest.raises(ValueError, match="Unsupported storage type for URI"):
            DataSourceConfig(conn_id="test", uri="unknown://bucket/path", table_name="a_table")

    def test_missing_table_name_raises_error(self):
        with pytest.raises(ValueError, match="Table name must be provided for storage type"):
            DataSourceConfig(conn_id="test", uri="s3://bucket/path")

    def test_parquet_with_partition_cols(self):
        config = DataSourceConfig(
            conn_id="test_conn",
            uri="s3://bucket/path",
            table_name="my_table",
            format="parquet",
            options={"table_partition_cols": [("year", "integer"), ("month", "integer")]},
        )
        assert config.conn_id == "test_conn"
        assert config.uri == "s3://bucket/path"
        assert config.table_name == "my_table"
        assert config.format == "parquet"
        assert config.options == {"table_partition_cols": [("year", "integer"), ("month", "integer")]}
        assert config.storage_type == StorageType.S3
class TestConnectionConfig:
    """Tests for ConnectionConfig construction and default values."""

    def test_connection_config_creation(self):
        cfg = ConnectionConfig(
            conn_id="my_conn", credentials={"key": "value"}, extra_config={"timeout": 30}
        )
        assert cfg.extra_config == {"timeout": 30}
        assert cfg.credentials == {"key": "value"}
        assert cfg.conn_id == "my_conn"

    def test_connection_config_defaults(self):
        # Omitting both optional mappings must yield empty dicts, not None.
        cfg = ConnectionConfig(conn_id="my_conn")
        assert cfg.credentials == {}
        assert cfg.extra_config == {}
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/sql/tests/unit/common/sql/test_config.py",
"license": "Apache License 2.0",
"lines": 68,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:dev/breeze/src/airflow_breeze/commands/issues_commands.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import sys
import click
from rich.table import Table
from airflow_breeze.commands.common_options import (
option_answer,
option_dry_run,
option_github_repository,
option_github_token,
option_verbose,
)
from airflow_breeze.utils.click_utils import BreezeGroup
from airflow_breeze.utils.confirm import Answer, user_confirm
from airflow_breeze.utils.console import get_console
from airflow_breeze.utils.run_utils import run_command
from airflow_breeze.utils.shared_options import get_dry_run
@click.group(cls=BreezeGroup, name="issues", help="Tools for managing GitHub issues.")
def issues_group():
    """Entry point for the ``breeze issues`` command group (help text comes from the decorator)."""
def _resolve_github_token(github_token: str | None) -> str | None:
    """Return a GitHub token: the explicit option if given, else the ``gh`` CLI's token.

    Returns None when neither source yields a token.
    """
    if github_token:
        return github_token
    # Fall back to the locally authenticated gh CLI; never honour --dry-run here,
    # since resolving a token has no side effects.
    result = run_command(
        ["gh", "auth", "token"],
        capture_output=True,
        text=True,
        check=False,
        dry_run_override=False,
    )
    return result.stdout.strip() if result.returncode == 0 else None
def _get_collaborator_logins(repo) -> set[str]:
    """Return the set of collaborator login names for *repo*."""
    console = get_console()
    console.print("[info]Fetching repository collaborators...[/]")
    logins = {collaborator.login for collaborator in repo.get_collaborators()}
    console.print(f"[info]Found {len(logins)} collaborators.[/]")
    return logins
def _process_batch(
    batch: list[tuple],
    dry_run: bool,
) -> int:
    """Show the proposed unassignments for *batch*, confirm, and apply them.

    Returns the number of assignees actually removed (0 on dry run or skip).
    """
    if not batch:
        return 0
    summary = Table(title="Proposed unassignments")
    summary.add_column("Issue #", style="cyan", justify="right")
    summary.add_column("Title", style="white")
    summary.add_column("Non-collaborator assignees", style="red")
    for issue, logins in batch:
        summary.add_row(str(issue.number), issue.title[:80], ", ".join(sorted(logins)))
    get_console().print(summary)
    if dry_run:
        get_console().print("[warning]Dry run — skipping actual unassignment.[/]")
        return 0
    choice = user_confirm("Unassign the above non-collaborators?")
    if choice == Answer.QUIT:
        get_console().print("[warning]Quitting.[/]")
        sys.exit(0)
    if choice == Answer.NO:
        get_console().print("[info]Skipping this batch.[/]")
        return 0
    removed = 0
    for issue, logins in batch:
        for login in logins:
            get_console().print(f" Removing [red]{login}[/] from issue #{issue.number}")
            issue.remove_from_assignees(login)
            comment = (
                f"@{login} We are unassigning you from this issue as part of our "
                f"updated [assignment policy]"
                f"(https://github.com/apache/airflow/blob/main/"
                f"contributing-docs/04_how_to_contribute.rst"
                f"#contribute-code-changes).\n\n"
                f"This is not meant to discourage your contribution — quite the opposite! "
                f"You are still very welcome to work on this issue and submit a PR for it. "
                f"Simply comment that you are working on it and open a PR when ready.\n\n"
                f"We found that formal assignments were not working well, as they often "
                f"prevented others from contributing when the assignee was not actively "
                f"working on the issue."
            )
            get_console().print(f" Commenting on issue #{issue.number} about @{login}")
            issue.create_comment(comment)
            removed += 1
    return removed
@issues_group.command(name="unassign", help="Unassign non-collaborators from open issues.")
@option_github_token
@option_github_repository
@click.option(
    "--batch-size",
    type=int,
    default=100,
    show_default=True,
    help="Number of flagged issues to accumulate before prompting.",
)
@click.option(
    "--max-num",
    type=int,
    default=0,
    show_default=True,
    help="Maximum number of issues to flag for unassignment. 0 means no limit.",
)
@option_dry_run
@option_verbose
@option_answer
def unassign(
    github_token: str | None,
    github_repository: str,
    batch_size: int,
    max_num: int,
):
    """Scan open issues and remove assignees who are not repository collaborators.

    Flagged issues are accumulated into batches of --batch-size; each batch is
    shown as a table and confirmed interactively before any change is made.
    """
    # Imported lazily so the breeze CLI does not require PyGithub unless this
    # command actually runs.
    from github import Github
    token = _resolve_github_token(github_token)
    if not token:
        get_console().print(
            "[error]GitHub token not found. Provide --github-token, "
            "set GITHUB_TOKEN, or authenticate with `gh auth login`.[/]"
        )
        sys.exit(1)
    g = Github(token)
    repo = g.get_repo(github_repository)
    collaborators = _get_collaborator_logins(repo)
    dry_run = get_dry_run()
    batch: list[tuple] = []
    total_issues_scanned = 0
    total_flagged = 0
    total_unassigned = 0
    get_console().print(f"[info]Scanning open issues in {github_repository}...[/]")
    for issue in repo.get_issues(state="open"):
        total_issues_scanned += 1
        if not issue.assignees:
            continue
        # Assignees whose login is not in the collaborator set are flagged.
        non_collab = {a.login for a in issue.assignees if a.login not in collaborators}
        if not non_collab:
            continue
        batch.append((issue, non_collab))
        total_flagged += 1
        # Stop scanning once the flag limit is reached (0 disables the limit);
        # the issue just appended is still processed below.
        if max_num and total_flagged >= max_num:
            break
        # Flush a full batch as soon as it reaches batch_size.
        if len(batch) >= batch_size:
            total_unassigned += _process_batch(batch, dry_run)
            batch = []
    # Process remaining batch
    total_unassigned += _process_batch(batch, dry_run)
    get_console().print()
    get_console().print("[success]Done![/]")
    get_console().print(f" Issues scanned: {total_issues_scanned}")
    get_console().print(f" Issues flagged: {total_flagged}")
    get_console().print(f" Assignees removed: {total_unassigned}")
| {
"repo_id": "apache/airflow",
"file_path": "dev/breeze/src/airflow_breeze/commands/issues_commands.py",
"license": "Apache License 2.0",
"lines": 171,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:dev/breeze/src/airflow_breeze/commands/issues_commands_config.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
ISSUES_COMMANDS: dict[str, str | list[str]] = {
"name": "Issues commands",
"commands": ["unassign"],
}
ISSUES_PARAMETERS: dict[str, list[dict[str, str | list[str]]]] = {
"breeze issues unassign": [
{
"name": "GitHub parameters",
"options": ["--github-token", "--github-repository"],
},
{
"name": "Unassign options",
"options": ["--batch-size", "--max-num"],
},
],
}
| {
"repo_id": "apache/airflow",
"file_path": "dev/breeze/src/airflow_breeze/commands/issues_commands_config.py",
"license": "Apache License 2.0",
"lines": 33,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/common/ai/src/airflow/providers/common/ai/example_dags/example_pydantic_ai_hook.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating PydanticAIHook usage."""
from __future__ import annotations
from pydantic import BaseModel
from airflow.providers.common.ai.hooks.pydantic_ai import PydanticAIHook
from airflow.providers.common.compat.sdk import dag, task
# [START howto_hook_pydantic_ai_basic]
@dag(schedule=None)
def example_pydantic_ai_hook():
    @task
    def generate_summary(text: str) -> str:
        # Resolve the model from the Airflow connection and run a one-shot agent.
        hook = PydanticAIHook(llm_conn_id="pydantic_ai_default")
        agent = hook.create_agent(output_type=str, instructions="Summarize concisely.")
        result = agent.run_sync(text)
        return result.output
    generate_summary("Apache Airflow is a platform for programmatically authoring...")
# [END howto_hook_pydantic_ai_basic]
example_pydantic_ai_hook()
# [START howto_hook_pydantic_ai_structured_output]
@dag(schedule=None)
def example_pydantic_ai_structured_output():
    @task
    def generate_sql(prompt: str) -> dict:
        # Schema the agent's structured output must conform to.
        class SQLResult(BaseModel):
            query: str
            explanation: str
        hook = PydanticAIHook(llm_conn_id="pydantic_ai_default")
        agent = hook.create_agent(
            output_type=SQLResult,
            instructions="Generate a SQL query and explain it.",
        )
        result = agent.run_sync(prompt)
        # Serialize the model to a plain dict before returning it from the task
        # — presumably for XCom-friendliness; confirm against task serialization rules.
        return result.output.model_dump()
    generate_sql("Find the top 10 customers by revenue")
# [END howto_hook_pydantic_ai_structured_output]
example_pydantic_ai_structured_output()
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/ai/src/airflow/providers/common/ai/example_dags/example_pydantic_ai_hook.py",
"license": "Apache License 2.0",
"lines": 51,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/common/ai/src/airflow/providers/common/ai/hooks/pydantic_ai.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Any, TypeVar, overload
from pydantic_ai import Agent
from pydantic_ai.models import Model, infer_model
from pydantic_ai.providers import Provider, infer_provider, infer_provider_class
from airflow.providers.common.compat.sdk import BaseHook
OutputT = TypeVar("OutputT")
if TYPE_CHECKING:
from pydantic_ai.models import KnownModelName
class PydanticAIHook(BaseHook):
    """
    Hook for LLM access via pydantic-ai.
    Manages connection credentials and model creation. Uses pydantic-ai's
    model inference to support any provider (OpenAI, Anthropic, Google,
    Bedrock, Ollama, vLLM, etc.).
    Connection fields:
    - **password**: API key (OpenAI, Anthropic, Groq, Mistral, etc.)
    - **host**: Base URL (optional — for custom endpoints like Ollama, vLLM, Azure)
    - **extra** JSON: ``{"model": "openai:gpt-5.3"}``
    Cloud providers (Bedrock, Vertex) that use native auth chains should leave
    password empty and configure environment-based auth (``AWS_PROFILE``,
    ``GOOGLE_APPLICATION_CREDENTIALS``).
    :param llm_conn_id: Airflow connection ID for the LLM provider.
    :param model_id: Model identifier in ``provider:model`` format (e.g. ``"openai:gpt-5.3"``).
        Overrides the model stored in the connection's extra field.
    """
    conn_name_attr = "llm_conn_id"
    default_conn_name = "pydantic_ai_default"
    conn_type = "pydantic_ai"
    hook_name = "Pydantic AI"
    def __init__(
        self,
        llm_conn_id: str = default_conn_name,
        model_id: str | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.llm_conn_id = llm_conn_id
        self.model_id = model_id
        # Resolved pydantic-ai Model, lazily populated and cached by get_conn().
        self._model: Model | None = None
    @staticmethod
    def get_ui_field_behaviour() -> dict[str, Any]:
        """Return custom field behaviour for the Airflow connection form."""
        return {
            "hidden_fields": ["schema", "port", "login"],
            "relabeling": {"password": "API Key"},
            "placeholders": {
                "host": "https://api.openai.com/v1 (optional, for custom endpoints)",
                "extra": '{"model": "openai:gpt-5.3"}',
            },
        }
    def get_conn(self) -> Model:
        """
        Return a configured pydantic-ai Model.
        Reads API key from connection password, model from connection extra
        or ``model_id`` parameter, and base_url from connection host.
        The result is cached for the lifetime of this hook instance.

        :raises ValueError: if neither ``model_id`` nor the connection extra
            specifies a model.
        """
        # Cache hit — the model was already resolved by an earlier call.
        if self._model is not None:
            return self._model
        conn = self.get_connection(self.llm_conn_id)
        # Explicit model_id takes priority over the connection's extra JSON.
        model_name: str | KnownModelName = self.model_id or conn.extra_dejson.get("model", "")
        if not model_name:
            raise ValueError(
                "No model specified. Set model_id on the hook or 'model' in the connection's extra JSON."
            )
        api_key = conn.password
        base_url = conn.host or None
        if not api_key and not base_url:
            # No credentials to inject — use default provider resolution
            # (picks up env vars like OPENAI_API_KEY, AWS_PROFILE, etc.)
            self._model = infer_model(model_name)
            return self._model
        def _provider_factory(provider_name: str) -> Provider[Any]:
            """
            Create a provider with credentials from the Airflow connection.
            Falls back to default provider resolution if the provider's constructor
            doesn't accept api_key/base_url (e.g. Google Vertex, Bedrock).
            """
            provider_cls = infer_provider_class(provider_name)
            # Only pass the kwargs we actually have values for.
            kwargs: dict[str, Any] = {}
            if api_key:
                kwargs["api_key"] = api_key
            if base_url:
                kwargs["base_url"] = base_url
            try:
                return provider_cls(**kwargs)
            except TypeError:
                # Provider doesn't accept these kwargs (e.g. Google Vertex/GLA
                # use ADC, Bedrock uses boto session). Fall back to default
                # provider resolution which reads credentials from the environment.
                return infer_provider(provider_name)
        # Inject connection credentials via the factory for whichever provider
        # the model string names.
        self._model = infer_model(model_name, provider_factory=_provider_factory)
        return self._model
    @overload
    def create_agent(
        self, output_type: type[OutputT], *, instructions: str, **agent_kwargs
    ) -> Agent[None, OutputT]: ...
    @overload
    def create_agent(self, *, instructions: str, **agent_kwargs) -> Agent[None, str]: ...
    def create_agent(
        self, output_type: type[Any] = str, *, instructions: str, **agent_kwargs
    ) -> Agent[None, Any]:
        """
        Create a pydantic-ai Agent configured with this hook's model.
        :param output_type: The expected output type from the agent (default: ``str``).
        :param instructions: System-level instructions for the agent.
        :param agent_kwargs: Additional keyword arguments passed to the Agent constructor.
        """
        return Agent(self.get_conn(), output_type=output_type, instructions=instructions, **agent_kwargs)
    def test_connection(self) -> tuple[bool, str]:
        """
        Test connection by resolving the model.
        Validates that the model string is valid, the provider package is
        installed, and the provider class can be instantiated. Does NOT make an
        LLM API call — that would be expensive, flaky, and fail for reasons
        unrelated to connectivity (quotas, billing, rate limits).

        :return: ``(True, message)`` on success, ``(False, error text)`` otherwise.
        """
        try:
            self.get_conn()
            return True, "Model resolved successfully."
        except Exception as e:
            # Broad catch is intentional: any resolution failure is reported
            # as a connection-test failure string rather than raised.
            return False, str(e)
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/ai/src/airflow/providers/common/ai/hooks/pydantic_ai.py",
"license": "Apache License 2.0",
"lines": 139,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/common/ai/tests/unit/common/ai/hooks/test_pydantic_ai.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import MagicMock, patch
import pytest
from pydantic_ai.models import Model
from airflow.models.connection import Connection
from airflow.providers.common.ai.hooks.pydantic_ai import PydanticAIHook
class TestPydanticAIHookInit:
    """Constructor behaviour: connection-id default and model-id wiring."""

    def test_default_conn_id(self):
        h = PydanticAIHook()
        assert h.model_id is None
        assert h.llm_conn_id == "pydantic_ai_default"

    def test_custom_conn_id(self):
        h = PydanticAIHook(llm_conn_id="my_llm", model_id="openai:gpt-5.3")
        assert h.model_id == "openai:gpt-5.3"
        assert h.llm_conn_id == "my_llm"
class TestPydanticAIHookGetConn:
    """get_conn(): model resolution, credential injection, caching, and fallbacks.

    NOTE: @patch decorators apply bottom-up, so the bottom decorator maps to
    the first mock parameter of each test method.
    """

    @patch("airflow.providers.common.ai.hooks.pydantic_ai.infer_model", autospec=True)
    @patch("airflow.providers.common.ai.hooks.pydantic_ai.infer_provider_class", autospec=True)
    def test_get_conn_with_api_key_and_base_url(self, mock_infer_provider_class, mock_infer_model):
        """Credentials are injected via provider_factory, not as direct kwargs."""
        mock_model = MagicMock(spec=Model)
        mock_infer_model.return_value = mock_model
        mock_provider = MagicMock()
        mock_infer_provider_class.return_value = MagicMock(return_value=mock_provider)
        hook = PydanticAIHook(llm_conn_id="test_conn", model_id="openai:gpt-5.3")
        conn = Connection(
            conn_id="test_conn",
            conn_type="pydantic_ai",
            password="sk-test-key",
            host="https://api.openai.com/v1",
        )
        with patch.object(hook, "get_connection", return_value=conn):
            result = hook.get_conn()
        assert result is mock_model
        mock_infer_model.assert_called_once()
        call_args = mock_infer_model.call_args
        assert call_args[0][0] == "openai:gpt-5.3"
        # provider_factory should be passed as keyword arg
        assert "provider_factory" in call_args[1]
        # Call the factory to verify it creates the provider with credentials
        factory = call_args[1]["provider_factory"]
        factory("openai")
        mock_infer_provider_class.assert_called_with("openai")
        mock_infer_provider_class.return_value.assert_called_with(
            api_key="sk-test-key", base_url="https://api.openai.com/v1"
        )

    @patch("airflow.providers.common.ai.hooks.pydantic_ai.infer_model", autospec=True)
    @patch("airflow.providers.common.ai.hooks.pydantic_ai.infer_provider_class", autospec=True)
    def test_get_conn_with_model_from_extra(self, mock_infer_provider_class, mock_infer_model):
        # Model name comes from the connection's extra JSON when model_id is unset.
        mock_model = MagicMock(spec=Model)
        mock_infer_model.return_value = mock_model
        mock_infer_provider_class.return_value = MagicMock(return_value=MagicMock())
        hook = PydanticAIHook(llm_conn_id="test_conn")
        conn = Connection(
            conn_id="test_conn",
            conn_type="pydantic_ai",
            password="sk-test-key",
            extra='{"model": "anthropic:claude-opus-4-6"}',
        )
        with patch.object(hook, "get_connection", return_value=conn):
            result = hook.get_conn()
        assert result is mock_model
        assert mock_infer_model.call_args[0][0] == "anthropic:claude-opus-4-6"

    @patch("airflow.providers.common.ai.hooks.pydantic_ai.infer_model", autospec=True)
    @patch("airflow.providers.common.ai.hooks.pydantic_ai.infer_provider_class", autospec=True)
    def test_model_id_param_overrides_extra(self, mock_infer_provider_class, mock_infer_model):
        mock_infer_model.return_value = MagicMock(spec=Model)
        mock_infer_provider_class.return_value = MagicMock(return_value=MagicMock())
        hook = PydanticAIHook(llm_conn_id="test_conn", model_id="openai:gpt-5.3")
        conn = Connection(
            conn_id="test_conn",
            conn_type="pydantic_ai",
            password="sk-test-key",
            extra='{"model": "anthropic:claude-opus-4-6"}',
        )
        with patch.object(hook, "get_connection", return_value=conn):
            hook.get_conn()
        # model_id param takes priority over extra
        assert mock_infer_model.call_args[0][0] == "openai:gpt-5.3"

    def test_get_conn_raises_when_no_model(self):
        hook = PydanticAIHook(llm_conn_id="test_conn")
        conn = Connection(
            conn_id="test_conn",
            conn_type="pydantic_ai",
        )
        with patch.object(hook, "get_connection", return_value=conn):
            with pytest.raises(ValueError, match="No model specified"):
                hook.get_conn()

    @patch("airflow.providers.common.ai.hooks.pydantic_ai.infer_model", autospec=True)
    def test_get_conn_without_credentials_uses_default_provider(self, mock_infer_model):
        """No api_key or base_url means env-based auth (Bedrock, Vertex, etc.)."""
        mock_model = MagicMock(spec=Model)
        mock_infer_model.return_value = mock_model
        hook = PydanticAIHook(llm_conn_id="test_conn", model_id="bedrock:us.anthropic.claude-v2")
        conn = Connection(
            conn_id="test_conn",
            conn_type="pydantic_ai",
        )
        with patch.object(hook, "get_connection", return_value=conn):
            hook.get_conn()
        # No provider_factory — uses default infer_provider which reads env vars
        mock_infer_model.assert_called_once_with("bedrock:us.anthropic.claude-v2")

    @patch("airflow.providers.common.ai.hooks.pydantic_ai.infer_model", autospec=True)
    @patch("airflow.providers.common.ai.hooks.pydantic_ai.infer_provider_class", autospec=True)
    def test_get_conn_with_base_url_only(self, mock_infer_provider_class, mock_infer_model):
        """Ollama / vLLM: base_url but no API key."""
        mock_infer_model.return_value = MagicMock(spec=Model)
        mock_infer_provider_class.return_value = MagicMock(return_value=MagicMock())
        hook = PydanticAIHook(llm_conn_id="test_conn", model_id="openai:llama3")
        conn = Connection(
            conn_id="test_conn",
            conn_type="pydantic_ai",
            host="http://localhost:11434/v1",
        )
        with patch.object(hook, "get_connection", return_value=conn):
            hook.get_conn()
        # provider_factory should be used since base_url is set
        factory = mock_infer_model.call_args[1]["provider_factory"]
        factory("openai")
        mock_infer_provider_class.return_value.assert_called_with(base_url="http://localhost:11434/v1")

    @patch("airflow.providers.common.ai.hooks.pydantic_ai.infer_model", autospec=True)
    def test_get_conn_caches_model(self, mock_infer_model):
        """get_conn() should resolve the model once and cache it."""
        mock_model = MagicMock(spec=Model)
        mock_infer_model.return_value = mock_model
        hook = PydanticAIHook(llm_conn_id="test_conn", model_id="openai:gpt-5.3")
        conn = Connection(conn_id="test_conn", conn_type="pydantic_ai")
        with patch.object(hook, "get_connection", return_value=conn):
            first = hook.get_conn()
            second = hook.get_conn()
        assert first is second
        mock_infer_model.assert_called_once()

    @patch("airflow.providers.common.ai.hooks.pydantic_ai.infer_provider", autospec=True)
    @patch("airflow.providers.common.ai.hooks.pydantic_ai.infer_provider_class", autospec=True)
    @patch("airflow.providers.common.ai.hooks.pydantic_ai.infer_model", autospec=True)
    def test_provider_factory_falls_back_on_unsupported_kwargs(
        self, mock_infer_model, mock_infer_provider_class, mock_infer_provider
    ):
        """If a provider rejects api_key/base_url, fall back to default resolution."""
        mock_infer_model.return_value = MagicMock(spec=Model)
        mock_fallback_provider = MagicMock()
        mock_infer_provider.return_value = mock_fallback_provider
        # Simulate a provider that doesn't accept api_key/base_url
        mock_infer_provider_class.return_value = MagicMock(side_effect=TypeError("unexpected keyword"))
        hook = PydanticAIHook(llm_conn_id="test_conn", model_id="google:gemini-2.0-flash")
        conn = Connection(
            conn_id="test_conn",
            conn_type="pydantic_ai",
            password="some-key",
        )
        with patch.object(hook, "get_connection", return_value=conn):
            hook.get_conn()
        factory = mock_infer_model.call_args[1]["provider_factory"]
        result = factory("google-gla")
        # Should have tried provider_cls first, then fallen back to infer_provider
        mock_infer_provider_class.return_value.assert_called_once_with(api_key="some-key")
        mock_infer_provider.assert_called_with("google-gla")
        assert result is mock_fallback_provider
class TestPydanticAIHookCreateAgent:
    """create_agent(): wiring of resolved model, output_type, and extra kwargs."""

    @patch("airflow.providers.common.ai.hooks.pydantic_ai.infer_model", autospec=True)
    @patch("airflow.providers.common.ai.hooks.pydantic_ai.Agent", autospec=True)
    def test_create_agent_defaults(self, agent_cls, infer_model):
        model = MagicMock(spec=Model)
        infer_model.return_value = model
        hook = PydanticAIHook(llm_conn_id="test_conn", model_id="openai:gpt-5.3")
        connection = Connection(conn_id="test_conn", conn_type="pydantic_ai")
        with patch.object(hook, "get_connection", return_value=connection):
            hook.create_agent(instructions="You are a helpful assistant.")
        # Default output_type is str; no extra kwargs forwarded.
        agent_cls.assert_called_once_with(
            model,
            output_type=str,
            instructions="You are a helpful assistant.",
        )

    @patch("airflow.providers.common.ai.hooks.pydantic_ai.infer_model", autospec=True)
    @patch("airflow.providers.common.ai.hooks.pydantic_ai.Agent", autospec=True)
    def test_create_agent_with_params(self, agent_cls, infer_model):
        model = MagicMock(spec=Model)
        infer_model.return_value = model
        hook = PydanticAIHook(llm_conn_id="test_conn", model_id="openai:gpt-5.3")
        connection = Connection(conn_id="test_conn", conn_type="pydantic_ai")
        with patch.object(hook, "get_connection", return_value=connection):
            hook.create_agent(
                output_type=dict,
                instructions="Be helpful.",
                retries=3,
            )
        # Extra keyword args (retries) must be forwarded verbatim to Agent.
        agent_cls.assert_called_once_with(
            model,
            output_type=dict,
            instructions="Be helpful.",
            retries=3,
        )
class TestPydanticAIHookTestConnection:
    """test_connection(): success/failure reporting for model resolution."""

    @patch("airflow.providers.common.ai.hooks.pydantic_ai.infer_model", autospec=True)
    def test_successful_connection(self, infer_model):
        infer_model.return_value = MagicMock(spec=Model)
        hook = PydanticAIHook(llm_conn_id="test_conn", model_id="openai:gpt-5.3")
        connection = Connection(conn_id="test_conn", conn_type="pydantic_ai")
        with patch.object(hook, "get_connection", return_value=connection):
            ok, msg = hook.test_connection()
        assert ok is True
        assert msg == "Model resolved successfully."

    @patch("airflow.providers.common.ai.hooks.pydantic_ai.infer_model", autospec=True)
    def test_failed_connection(self, infer_model):
        # Resolution errors are reported, not raised.
        infer_model.side_effect = ValueError("Unknown provider 'badprovider'")
        hook = PydanticAIHook(llm_conn_id="test_conn", model_id="badprovider:model")
        connection = Connection(conn_id="test_conn", conn_type="pydantic_ai")
        with patch.object(hook, "get_connection", return_value=connection):
            ok, msg = hook.test_connection()
        assert ok is False
        assert "Unknown provider" in msg

    def test_failed_connection_no_model(self):
        hook = PydanticAIHook(llm_conn_id="test_conn")
        connection = Connection(conn_id="test_conn", conn_type="pydantic_ai")
        with patch.object(hook, "get_connection", return_value=connection):
            ok, msg = hook.test_connection()
        assert ok is False
        assert "No model specified" in msg
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/ai/tests/unit/common/ai/hooks/test_pydantic_ai.py",
"license": "Apache License 2.0",
"lines": 251,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/tests/unit/api_fastapi/execution_api/versions/v2026_03_31/test_task_instances.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow._shared.timezones import timezone
from airflow.utils.state import DagRunState, State
from tests_common.test_utils.db import clear_db_runs
pytestmark = pytest.mark.db_test
# Frozen reference time: used as the mocked wall clock and as start_date in tests.
TIMESTAMP_STR = "2024-09-30T12:00:00Z"
TIMESTAMP = timezone.parse(TIMESTAMP_STR)
# Request body shared by every PATCH .../run call below.
RUN_PATCH_BODY = {
    "state": "running",
    "hostname": "test-hostname",
    "unixname": "test-user",
    "pid": 12345,
    "start_date": TIMESTAMP_STR,
}
@pytest.fixture
def old_ver_client(client):
    """Return the test client pinned to the API version preceding the nullable start_date change."""
    client.headers["Airflow-API-Version"] = "2025-12-08"
    return client
class TestDagRunStartDateNullableBackwardCompat:
    """Test that older API versions get a non-null start_date fallback."""
    @pytest.fixture(autouse=True)
    def _freeze_time(self, time_machine):
        # Pin the clock so server-generated timestamps are deterministic.
        time_machine.move_to(TIMESTAMP_STR, tick=False)
    def setup_method(self):
        clear_db_runs()
    def teardown_method(self):
        clear_db_runs()
    def test_old_version_gets_run_after_when_start_date_is_null(
        self,
        old_ver_client,
        session,
        create_task_instance,
    ):
        ti = create_task_instance(
            task_id="test_start_date_nullable",
            state=State.QUEUED,
            dagrun_state=DagRunState.QUEUED,
            session=session,
            start_date=TIMESTAMP,
        )
        ti.dag_run.start_date = None  # DagRun has not started yet
        session.commit()
        response = old_ver_client.patch(f"/execution/task-instances/{ti.id}/run", json=RUN_PATCH_BODY)
        dag_run = response.json()["dag_run"]
        assert response.status_code == 200
        # Older API versions must never see a null start_date; run_after is
        # substituted as the fallback value.
        assert dag_run["start_date"] is not None
        assert dag_run["start_date"] == dag_run["run_after"]
    def test_head_version_allows_null_start_date(
        self,
        client,
        session,
        create_task_instance,
    ):
        ti = create_task_instance(
            task_id="test_start_date_null_head",
            state=State.QUEUED,
            dagrun_state=DagRunState.QUEUED,
            session=session,
            start_date=TIMESTAMP,
        )
        ti.dag_run.start_date = None  # DagRun has not started yet
        session.commit()
        response = client.patch(f"/execution/task-instances/{ti.id}/run", json=RUN_PATCH_BODY)
        dag_run = response.json()["dag_run"]
        assert response.status_code == 200
        # The current (head) API version serializes the null through unchanged.
        assert dag_run["start_date"] is None
    def test_old_version_preserves_real_start_date(
        self,
        old_ver_client,
        session,
        create_task_instance,
    ):
        ti = create_task_instance(
            task_id="test_start_date_preserved",
            state=State.QUEUED,
            dagrun_state=DagRunState.RUNNING,
            session=session,
            start_date=TIMESTAMP,
        )
        assert ti.dag_run.start_date == TIMESTAMP  # DagRun has already started
        session.commit()
        response = old_ver_client.patch(f"/execution/task-instances/{ti.id}/run", json=RUN_PATCH_BODY)
        dag_run = response.json()["dag_run"]
        assert response.status_code == 200
        assert dag_run["start_date"] is not None, "start_date should not be None when DagRun has started"
        # The fallback must not overwrite a genuine start_date.
        assert dag_run["start_date"] == TIMESTAMP.isoformat().replace("+00:00", "Z")
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/api_fastapi/execution_api/versions/v2026_03_31/test_task_instances.py",
"license": "Apache License 2.0",
"lines": 104,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/openlineage/src/airflow/providers/openlineage/utils/sql_hook_lineage.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Utilities for processing hook-level lineage into OpenLineage events."""
from __future__ import annotations
import datetime as dt
import logging
from openlineage.client.event_v2 import Job, Run, RunEvent, RunState
from openlineage.client.facet_v2 import external_query_run, job_type_job, sql_job
from openlineage.client.uuid import generate_new_uuid
from airflow.providers.common.compat.sdk import timezone
from airflow.providers.common.sql.hooks.lineage import SqlJobHookLineageExtra
from airflow.providers.openlineage.extractors.base import OperatorLineage
from airflow.providers.openlineage.plugins.listener import get_openlineage_listener
from airflow.providers.openlineage.plugins.macros import (
_get_logical_date,
lineage_job_name,
lineage_job_namespace,
lineage_root_job_name,
lineage_root_job_namespace,
lineage_root_run_id,
lineage_run_id,
)
from airflow.providers.openlineage.sqlparser import SQLParser, get_openlineage_facets_with_sql
from airflow.providers.openlineage.utils.utils import _get_parent_run_facet
log = logging.getLogger(__name__)
def emit_lineage_from_sql_extras(task_instance, sql_extras: list, is_successful: bool = True) -> None:
    """
    Process ``sql_job`` extras and emit per-query OpenLineage events.

    For each extra that contains sql text or job id:

    * Parse SQL via :func:`get_openlineage_facets_with_sql` to obtain inputs,
      outputs and facets (schema enrichment, column lineage, etc.).
    * Emit a separate START + COMPLETE/FAIL event pair (child job of the task).

    :param task_instance: Task instance the per-query child jobs are attached to.
    :param sql_extras: Hook lineage extras; each carries a ``value`` mapping and
        the originating hook as ``context``.
    :param is_successful: Whether the terminal event of each pair is COMPLETE or FAIL.
    """
    if not sql_extras:
        return None
    log.info("OpenLineage will process %s SQL hook lineage extra(s).", len(sql_extras))
    # Job-type facet shared by every per-query child job emitted below.
    common_job_facets: dict = {
        "jobType": job_type_job.JobTypeJobFacet(
            jobType="QUERY",
            integration="AIRFLOW",
            processingType="BATCH",
        )
    }
    events: list[RunEvent] = []
    query_count = 0
    for extra_info in sql_extras:
        value = extra_info.value
        sql_text = value.get(SqlJobHookLineageExtra.VALUE__SQL_STATEMENT.value, "")
        job_id = value.get(SqlJobHookLineageExtra.VALUE__JOB_ID.value)
        if not sql_text and not job_id:
            log.debug("SQL extra has no SQL text and no job ID, skipping.")
            continue
        # Only processable extras are counted, so skipped ones leave no gaps
        # in the per-query job numbering.
        query_count += 1
        hook = extra_info.context
        conn_id = _get_hook_conn_id(hook)
        namespace = _resolve_namespace(hook, conn_id)
        # Parse SQL to obtain lineage (inputs, outputs, facets)
        query_lineage: OperatorLineage | None = None
        if sql_text and conn_id:
            try:
                query_lineage = get_openlineage_facets_with_sql(
                    hook=hook,
                    sql=sql_text,
                    conn_id=conn_id,
                    database=value.get(SqlJobHookLineageExtra.VALUE__DEFAULT_DB.value),
                    use_connection=False,  # Temporary solution before we figure out timeouts for queries
                )
            except Exception as e:
                log.debug("Failed to parse SQL for query %s: %s", query_count, e)
        # If parsing SQL failed, just attach SQL text as a facet
        if query_lineage is None:
            job_facets: dict = {}
            if sql_text:
                job_facets["sql"] = sql_job.SQLJobFacet(query=SQLParser.normalize_sql(sql_text))
            query_lineage = OperatorLineage(job_facets=job_facets)
        # Enrich run facets with external query info when available.
        # setdefault keeps any parser-produced externalQuery facet intact.
        if job_id and namespace:
            query_lineage.run_facets.setdefault(
                "externalQuery",
                external_query_run.ExternalQueryRunFacet(
                    externalQueryId=str(job_id),
                    source=namespace,
                ),
            )
        events.extend(
            _create_ol_event_pair(
                task_instance=task_instance,
                job_name=f"{task_instance.dag_id}.{task_instance.task_id}.query.{query_count}",
                is_successful=is_successful,
                inputs=query_lineage.inputs,
                outputs=query_lineage.outputs,
                run_facets=query_lineage.run_facets,
                job_facets={**common_job_facets, **query_lineage.job_facets},
            )
        )
    if events:
        log.debug("Emitting %s OpenLineage event(s) for SQL hook lineage.", len(events))
        try:
            adapter = get_openlineage_listener().adapter
            for event in events:
                adapter.emit(event)
        except Exception as e:
            # Lineage emission is best-effort: never fail the task because of it.
            log.warning("Failed to emit OpenLineage events for SQL hook lineage: %s", e)
            log.debug("Emission failure details:", exc_info=True)
    return None
def _resolve_namespace(hook, conn_id: str | None) -> str | None:
"""
Resolve the OpenLineage namespace from a hook.
Tries ``hook.get_openlineage_database_info`` to build the namespace.
Returns ``None`` when the hook does not expose this method.
"""
if conn_id:
try:
connection = hook.get_connection(conn_id)
database_info = hook.get_openlineage_database_info(connection)
except Exception as e:
log.debug("Failed to get OpenLineage database info: %s", e)
database_info = None
if database_info is not None:
return SQLParser.create_namespace(database_info)
return None
def _get_hook_conn_id(hook) -> str | None:
"""
Try to extract the connection ID from a hook instance.
Checks for ``get_conn_id()`` first, then falls back to the attribute
named by ``hook.conn_name_attr``.
"""
if callable(getattr(hook, "get_conn_id", None)):
return hook.get_conn_id()
conn_name_attr = getattr(hook, "conn_name_attr", None)
if conn_name_attr:
return getattr(hook, conn_name_attr, None)
return None
def _create_ol_event_pair(
    task_instance,
    job_name: str,
    is_successful: bool,
    inputs: list | None = None,
    outputs: list | None = None,
    run_facets: dict | None = None,
    job_facets: dict | None = None,
    event_time: dt.datetime | None = None,
) -> tuple[RunEvent, RunEvent]:
    """
    Create a START + COMPLETE/FAIL child event pair linked to a task instance.

    Parent-run facet generation, run-ID creation and event timestamps are
    handled here, so callers only supply the query-specific facets and
    datasets.
    """
    parent_facets = _get_parent_run_facet(
        parent_run_id=lineage_run_id(task_instance),
        parent_job_name=lineage_job_name(task_instance),
        parent_job_namespace=lineage_job_namespace(),
        root_parent_run_id=lineage_root_run_id(task_instance),
        root_parent_job_name=lineage_root_job_name(task_instance),
        root_parent_job_namespace=lineage_root_job_namespace(task_instance),
    )
    child_run = Run(
        runId=str(generate_new_uuid(instant=_get_logical_date(task_instance))),
        facets={**parent_facets, **(run_facets or {})},
    )
    child_job = Job(namespace=lineage_job_namespace(), name=job_name, facets=job_facets or {})
    timestamp = (event_time or timezone.utcnow()).isoformat()

    def _build_event(state):
        # Both events share run/job identity and datasets; only the state differs.
        return RunEvent(
            eventType=state,
            eventTime=timestamp,
            run=child_run,
            job=child_job,
            inputs=inputs or [],
            outputs=outputs or [],
        )

    terminal_state = RunState.COMPLETE if is_successful else RunState.FAIL
    return _build_event(RunState.START), _build_event(terminal_state)
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/src/airflow/providers/openlineage/utils/sql_hook_lineage.py",
"license": "Apache License 2.0",
"lines": 193,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/openlineage/tests/unit/openlineage/utils/test_sql_hook_lineage.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime as dt
import logging
from unittest import mock
import pytest
from openlineage.client.event_v2 import Dataset as OpenLineageDataset, Job, Run, RunEvent, RunState
from openlineage.client.facet_v2 import external_query_run, job_type_job, sql_job
from airflow.providers.common.sql.hooks.lineage import SqlJobHookLineageExtra
from airflow.providers.openlineage.extractors.base import OperatorLineage
from airflow.providers.openlineage.sqlparser import SQLParser
from airflow.providers.openlineage.utils.sql_hook_lineage import (
_create_ol_event_pair,
_get_hook_conn_id,
_resolve_namespace,
emit_lineage_from_sql_extras,
)
from airflow.providers.openlineage.utils.utils import _get_parent_run_facet
# Fixed UUID string returned by the patched lineage macros / uuid generator.
_VALID_UUID = "01941f29-7c00-7087-8906-40e512c257bd"
# Import path of the module under test; used as the mock.patch target prefix.
_MODULE = "airflow.providers.openlineage.utils.sql_hook_lineage"
# Job-type facet the module attaches to every per-query child job.
_JOB_TYPE_FACET = job_type_job.JobTypeJobFacet(jobType="QUERY", integration="AIRFLOW", processingType="BATCH")
def _make_extra(sql="", job_id=None, hook=None, default_db=None):
    """Helper to create a mock ExtraLineageInfo with the expected structure."""
    # (key, candidate value, include?) — only populated entries land in `value`.
    entries = (
        (SqlJobHookLineageExtra.VALUE__SQL_STATEMENT.value, sql, bool(sql)),
        (SqlJobHookLineageExtra.VALUE__JOB_ID.value, job_id, job_id is not None),
        (SqlJobHookLineageExtra.VALUE__DEFAULT_DB.value, default_db, default_db is not None),
    )
    extra = mock.MagicMock()
    extra.value = {key: item for key, item, include in entries if include}
    extra.context = hook or mock.MagicMock()
    return extra
class TestGetHookConnId:
    """Lookup order of ``_get_hook_conn_id``: method first, then attribute."""

    def test_get_conn_id_from_method(self):
        # A callable get_conn_id wins over any attribute fallback.
        hook = mock.MagicMock()
        hook.configure_mock(**{"get_conn_id.return_value": "my_conn"})
        assert _get_hook_conn_id(hook) == "my_conn"

    def test_get_conn_id_from_attribute(self):
        # spec=[] removes get_conn_id; the conn_name_attr indirection is used.
        hook = mock.MagicMock(spec=[])
        hook.conn_name_attr = "my_conn_attr"
        hook.my_conn_attr = "fallback_conn"
        assert _get_hook_conn_id(hook) == "fallback_conn"

    def test_returns_none_when_nothing_available(self):
        # Neither get_conn_id nor conn_name_attr exists on a bare spec.
        assert _get_hook_conn_id(mock.MagicMock(spec=[])) is None
class TestResolveNamespace:
    """Tests for ``_resolve_namespace`` namespace construction and fallbacks."""

    def test_from_ol_database_info(self):
        hook = mock.MagicMock()
        connection = mock.MagicMock()
        hook.get_connection.return_value = connection
        database_info = mock.MagicMock()
        hook.get_openlineage_database_info.return_value = database_info
        # The namespace is built by SQLParser.create_namespace over the db info.
        with mock.patch(
            "airflow.providers.openlineage.utils.sql_hook_lineage.SQLParser.create_namespace",
            return_value="postgres://host:5432/mydb",
        ) as mock_create_ns:
            result = _resolve_namespace(hook, "my_conn")
        hook.get_connection.assert_called_once_with("my_conn")
        hook.get_openlineage_database_info.assert_called_once_with(connection)
        mock_create_ns.assert_called_once_with(database_info)
        assert result == "postgres://host:5432/mydb"

    def test_returns_none_when_no_namespace_available(self):
        hook = mock.MagicMock()
        hook.__class__.__name__ = "SomeUnknownHook"
        # get_connection failing means no database info, hence no namespace.
        hook.get_connection.side_effect = Exception("no method")
        with mock.patch.dict("sys.modules"):
            result = _resolve_namespace(hook, "my_conn")
        assert result is None

    def test_returns_none_when_no_conn_id(self):
        # Without a connection id the hook is never queried.
        hook = mock.MagicMock()
        hook.__class__.__name__ = "SomeUnknownHook"
        with mock.patch.dict("sys.modules"):
            result = _resolve_namespace(hook, None)
        assert result is None
class TestCreateOlEventPair:
    """Tests for ``_create_ol_event_pair`` START/terminal event construction."""

    @pytest.fixture(autouse=True)
    def _mock_ol_macros(self):
        # Pin all lineage macros so the parent facets are deterministic.
        with (
            mock.patch(f"{_MODULE}.lineage_run_id", return_value=_VALID_UUID),
            mock.patch(f"{_MODULE}.lineage_job_name", return_value="dag.task"),
            mock.patch(f"{_MODULE}.lineage_job_namespace", return_value="default"),
            mock.patch(f"{_MODULE}.lineage_root_run_id", return_value=_VALID_UUID),
            mock.patch(f"{_MODULE}.lineage_root_job_name", return_value="dag"),
            mock.patch(f"{_MODULE}.lineage_root_job_namespace", return_value="default"),
            mock.patch(f"{_MODULE}._get_logical_date", return_value=None),
        ):
            yield

    @mock.patch(f"{_MODULE}.generate_new_uuid")
    def test_creates_start_and_complete_events(self, mock_uuid):
        fake_uuid = "01941f29-7c00-7087-8906-40e512c257bd"
        mock_uuid.return_value = fake_uuid
        mock_ti = mock.MagicMock(
            dag_id="dag_id",
            task_id="task_id",
            map_index=-1,
            try_number=1,
        )
        mock_ti.dag_run = mock.MagicMock(
            logical_date=mock.MagicMock(isoformat=lambda: "2025-01-01T00:00:00+00:00"),
            clear_number=0,
        )
        event_time = dt.datetime(2025, 1, 1, tzinfo=dt.timezone.utc)
        start, end = _create_ol_event_pair(
            task_instance=mock_ti,
            job_name="dag_id.task_id.query.1",
            is_successful=True,
            run_facets={"custom_run": "value"},
            job_facets={"custom_job": "value"},
            event_time=event_time,
        )
        # Expected events share run/job identity; caller facets are merged in.
        expected_parent = _get_parent_run_facet(
            parent_run_id=_VALID_UUID,
            parent_job_name="dag.task",
            parent_job_namespace="default",
            root_parent_run_id=_VALID_UUID,
            root_parent_job_name="dag",
            root_parent_job_namespace="default",
        )
        expected_run = Run(
            runId=fake_uuid,
            facets={**expected_parent, "custom_run": "value"},
        )
        expected_job = Job(namespace="default", name="dag_id.task_id.query.1", facets={"custom_job": "value"})
        expected_start = RunEvent(
            eventType=RunState.START,
            eventTime=event_time.isoformat(),
            run=expected_run,
            job=expected_job,
            inputs=[],
            outputs=[],
        )
        expected_end = RunEvent(
            eventType=RunState.COMPLETE,
            eventTime=event_time.isoformat(),
            run=expected_run,
            job=expected_job,
            inputs=[],
            outputs=[],
        )
        assert start == expected_start
        assert end == expected_end

    @mock.patch(f"{_MODULE}.generate_new_uuid")
    def test_creates_fail_event_when_not_successful(self, mock_uuid):
        mock_uuid.return_value = _VALID_UUID
        mock_ti = mock.MagicMock(
            dag_id="dag_id",
            task_id="task_id",
            map_index=-1,
            try_number=1,
        )
        mock_ti.dag_run = mock.MagicMock(
            logical_date=mock.MagicMock(isoformat=lambda: "2025-01-01T00:00:00+00:00"),
            clear_number=0,
        )
        event_time = dt.datetime(2025, 1, 1, tzinfo=dt.timezone.utc)
        start, end = _create_ol_event_pair(
            task_instance=mock_ti,
            job_name="dag_id.task_id.query.1",
            is_successful=False,
            event_time=event_time,
        )
        expected_parent = _get_parent_run_facet(
            parent_run_id=_VALID_UUID,
            parent_job_name="dag.task",
            parent_job_namespace="default",
            root_parent_run_id=_VALID_UUID,
            root_parent_job_name="dag",
            root_parent_job_namespace="default",
        )
        expected_run = Run(runId=_VALID_UUID, facets=expected_parent)
        expected_job = Job(namespace="default", name="dag_id.task_id.query.1", facets={})
        expected_start = RunEvent(
            eventType=RunState.START,
            eventTime=event_time.isoformat(),
            run=expected_run,
            job=expected_job,
            inputs=[],
            outputs=[],
        )
        # is_successful=False turns the terminal event into FAIL.
        expected_end = RunEvent(
            eventType=RunState.FAIL,
            eventTime=event_time.isoformat(),
            run=expected_run,
            job=expected_job,
            inputs=[],
            outputs=[],
        )
        assert start == expected_start
        assert end == expected_end

    @mock.patch(f"{_MODULE}.generate_new_uuid")
    def test_includes_inputs_and_outputs(self, mock_uuid):
        mock_uuid.return_value = _VALID_UUID
        mock_ti = mock.MagicMock(
            dag_id="dag_id",
            task_id="task_id",
            map_index=-1,
            try_number=1,
        )
        mock_ti.dag_run = mock.MagicMock(
            logical_date=mock.MagicMock(isoformat=lambda: "2025-01-01T00:00:00+00:00"),
            clear_number=0,
        )
        inputs = [OpenLineageDataset(namespace="ns", name="input_table")]
        outputs = [OpenLineageDataset(namespace="ns", name="output_table")]
        start, end = _create_ol_event_pair(
            task_instance=mock_ti,
            job_name="dag_id.task_id.query.1",
            is_successful=True,
            inputs=inputs,
            outputs=outputs,
        )
        # Both events carry the same dataset lists.
        assert start.inputs == inputs
        assert start.outputs == outputs
        assert end.inputs == inputs
        assert end.outputs == outputs
class TestEmitLineageFromSqlExtras:
    """End-to-end tests for ``emit_lineage_from_sql_extras`` orchestration."""

    @pytest.fixture(autouse=True)
    def _mock_ol_macros(self):
        # Pin all lineage macros so parent facets are deterministic.
        with (
            mock.patch(f"{_MODULE}.lineage_run_id", return_value=_VALID_UUID),
            mock.patch(f"{_MODULE}.lineage_job_name", return_value="dag.task"),
            mock.patch(f"{_MODULE}.lineage_job_namespace", return_value="default"),
            mock.patch(f"{_MODULE}.lineage_root_run_id", return_value=_VALID_UUID),
            mock.patch(f"{_MODULE}.lineage_root_job_name", return_value="dag"),
            mock.patch(f"{_MODULE}.lineage_root_job_namespace", return_value="default"),
            mock.patch(f"{_MODULE}._get_logical_date", return_value=None),
        ):
            yield

    @pytest.fixture(autouse=True)
    def _patch_sql_extras_deps(self):
        # Patch every collaborator of the function under test and expose the
        # mocks on self for per-test configuration and assertions.
        with (
            mock.patch(f"{_MODULE}.generate_new_uuid", return_value=_VALID_UUID) as mock_uuid,
            mock.patch(f"{_MODULE}._get_hook_conn_id", return_value="my_conn") as mock_conn_id,
            mock.patch(f"{_MODULE}._resolve_namespace") as mock_ns,
            mock.patch(f"{_MODULE}.get_openlineage_facets_with_sql") as mock_facets_fn,
            mock.patch(f"{_MODULE}.get_openlineage_listener") as mock_listener,
            mock.patch(f"{_MODULE}._create_ol_event_pair") as mock_event_pair,
        ):
            self.mock_uuid = mock_uuid
            self.mock_conn_id = mock_conn_id
            self.mock_ns = mock_ns
            self.mock_facets_fn = mock_facets_fn
            self.mock_listener = mock_listener
            self.mock_event_pair = mock_event_pair
            mock_event_pair.return_value = (mock.sentinel.start_event, mock.sentinel.end_event)
            yield

    @pytest.mark.parametrize(
        "sql_extras",
        [
            pytest.param([], id="empty_list"),
            pytest.param([_make_extra(sql="", job_id=None)], id="single_empty_extra"),
            pytest.param(
                [_make_extra(sql=None, job_id=None), _make_extra(sql="", job_id=None), _make_extra(sql="")],
                id="multiple_empty_extras",
            ),
        ],
    )
    def test_no_processable_extras(self, sql_extras):
        # Extras without SQL text or job id must be skipped entirely.
        result = emit_lineage_from_sql_extras(
            task_instance=mock.MagicMock(),
            sql_extras=sql_extras,
        )
        assert result is None
        self.mock_conn_id.assert_not_called()
        self.mock_ns.assert_not_called()
        self.mock_facets_fn.assert_not_called()
        self.mock_event_pair.assert_not_called()
        self.mock_listener.assert_not_called()

    def test_single_query_emits_events(self):
        self.mock_ns.return_value = "postgres://host/db"
        mock_ti = mock.MagicMock(dag_id="dag_id", task_id="task_id")
        expected_sql_facet = sql_job.SQLJobFacet(query="SELECT 1")
        self.mock_facets_fn.return_value = OperatorLineage(
            inputs=[OpenLineageDataset(namespace="ns", name="in_table")],
            outputs=[OpenLineageDataset(namespace="ns", name="out_table")],
            job_facets={"sql": expected_sql_facet},
        )
        extra = _make_extra(sql="SELECT 1", job_id="qid-1")
        result = emit_lineage_from_sql_extras(
            task_instance=mock_ti,
            sql_extras=[extra],
            is_successful=True,
        )
        assert result is None
        expected_ext_query = external_query_run.ExternalQueryRunFacet(
            externalQueryId="qid-1", source="postgres://host/db"
        )
        self.mock_event_pair.assert_called_once_with(
            task_instance=mock_ti,
            job_name="dag_id.task_id.query.1",
            is_successful=True,
            inputs=[OpenLineageDataset(namespace="ns", name="in_table")],
            outputs=[OpenLineageDataset(namespace="ns", name="out_table")],
            run_facets={"externalQuery": expected_ext_query},
            job_facets={**{"jobType": _JOB_TYPE_FACET}, "sql": expected_sql_facet},
        )
        # Both events of the pair are emitted, START first.
        start, end = self.mock_event_pair.return_value
        adapter = self.mock_listener.return_value.adapter
        assert adapter.emit.call_args_list == [mock.call(start), mock.call(end)]

    def test_multiple_queries_emits_events(self):
        self.mock_ns.return_value = "postgres://host/db"
        mock_ti = mock.MagicMock(dag_id="dag_id", task_id="task_id")
        self.mock_facets_fn.side_effect = lambda **kw: OperatorLineage(
            job_facets={"sql": sql_job.SQLJobFacet(query=kw.get("sql", ""))},
        )
        pair1 = (mock.MagicMock(), mock.MagicMock())
        pair2 = (mock.MagicMock(), mock.MagicMock())
        self.mock_event_pair.side_effect = [pair1, pair2]
        extras = [
            _make_extra(sql="SELECT 1", job_id="qid-1"),
            _make_extra(sql="SELECT 2", job_id="qid-2"),
        ]
        result = emit_lineage_from_sql_extras(
            task_instance=mock_ti,
            sql_extras=extras,
        )
        assert result is None
        assert self.mock_event_pair.call_count == 2
        # Queries are numbered sequentially in the child job names.
        call1, call2 = self.mock_event_pair.call_args_list
        assert call1.kwargs["job_name"] == "dag_id.task_id.query.1"
        assert call2.kwargs["job_name"] == "dag_id.task_id.query.2"
        adapter = self.mock_listener.return_value.adapter
        assert adapter.emit.call_args_list == [
            mock.call(pair1[0]),
            mock.call(pair1[1]),
            mock.call(pair2[0]),
            mock.call(pair2[1]),
        ]

    def test_sql_parsing_failure_falls_back_to_sql_facet(self):
        self.mock_ns.return_value = "ns"
        self.mock_facets_fn.side_effect = Exception("parse error")
        mock_ti = mock.MagicMock(dag_id="dag_id", task_id="task_id")
        extra = _make_extra(sql="SELECT broken(", job_id="qid-1")
        result = emit_lineage_from_sql_extras(
            task_instance=mock_ti,
            sql_extras=[extra],
        )
        assert result is None
        # When the parser fails, only the raw SQL text is attached as a facet.
        expected_sql_facet = sql_job.SQLJobFacet(query=SQLParser.normalize_sql("SELECT broken("))
        expected_ext_query = external_query_run.ExternalQueryRunFacet(externalQueryId="qid-1", source="ns")
        self.mock_event_pair.assert_called_once_with(
            task_instance=mock_ti,
            job_name="dag_id.task_id.query.1",
            is_successful=True,
            inputs=[],
            outputs=[],
            run_facets={"externalQuery": expected_ext_query},
            job_facets={**{"jobType": _JOB_TYPE_FACET}, "sql": expected_sql_facet},
        )
        start, end = self.mock_event_pair.return_value
        adapter = self.mock_listener.return_value.adapter
        assert adapter.emit.call_args_list == [mock.call(start), mock.call(end)]

    def test_no_external_query_facet_when_no_namespace(self):
        self.mock_ns.return_value = None
        self.mock_facets_fn.return_value = None
        mock_ti = mock.MagicMock(dag_id="dag_id", task_id="task_id")
        extra = _make_extra(sql="SELECT 1", job_id="qid-1")
        result = emit_lineage_from_sql_extras(
            task_instance=mock_ti,
            sql_extras=[extra],
        )
        assert result is None
        expected_sql_facet = sql_job.SQLJobFacet(query=SQLParser.normalize_sql("SELECT 1"))
        self.mock_event_pair.assert_called_once()
        call_kwargs = self.mock_event_pair.call_args.kwargs
        # No namespace -> no externalQuery run facet even with a job id.
        assert "externalQuery" not in call_kwargs["run_facets"]
        assert call_kwargs["job_facets"]["sql"] == expected_sql_facet

    def test_failed_state_emits_fail_events(self):
        self.mock_ns.return_value = "postgres://host/db"
        mock_ti = mock.MagicMock(dag_id="dag_id", task_id="task_id")
        expected_sql_facet = sql_job.SQLJobFacet(query="SELECT 1")
        self.mock_facets_fn.return_value = OperatorLineage(
            job_facets={"sql": expected_sql_facet},
        )
        extra = _make_extra(sql="SELECT 1", job_id="qid-1")
        result = emit_lineage_from_sql_extras(
            task_instance=mock_ti,
            sql_extras=[extra],
            is_successful=False,
        )
        assert result is None
        expected_ext_query = external_query_run.ExternalQueryRunFacet(
            externalQueryId="qid-1", source="postgres://host/db"
        )
        # is_successful is forwarded unchanged to the event-pair builder.
        self.mock_event_pair.assert_called_once_with(
            task_instance=mock_ti,
            job_name="dag_id.task_id.query.1",
            is_successful=False,
            inputs=[],
            outputs=[],
            run_facets={"externalQuery": expected_ext_query},
            job_facets={**{"jobType": _JOB_TYPE_FACET}, "sql": expected_sql_facet},
        )
        start, end = self.mock_event_pair.return_value
        adapter = self.mock_listener.return_value.adapter
        assert adapter.emit.call_args_list == [mock.call(start), mock.call(end)]

    def test_job_name_uses_query_count_skipping_empty_extras(self):
        """Skipped extras don't create gaps in job numbering."""
        self.mock_ns.return_value = "ns"
        self.mock_facets_fn.return_value = OperatorLineage()
        mock_ti = mock.MagicMock(dag_id="dag_id", task_id="task_id")
        extras = [
            _make_extra(sql="", job_id=None),  # skipped
            _make_extra(sql="SELECT 1"),
        ]
        result = emit_lineage_from_sql_extras(
            task_instance=mock_ti,
            sql_extras=extras,
        )
        assert result is None
        self.mock_event_pair.assert_called_once()
        assert self.mock_event_pair.call_args.kwargs["job_name"] == "dag_id.task_id.query.1"

    def test_emission_failure_does_not_raise(self, caplog):
        """Failure to emit events should be caught and not propagate."""
        self.mock_ns.return_value = None
        self.mock_facets_fn.return_value = OperatorLineage()
        self.mock_listener.side_effect = Exception("listener unavailable")
        mock_ti = mock.MagicMock(dag_id="dag_id", task_id="task_id")
        extra = _make_extra(sql="SELECT 1")
        with caplog.at_level(logging.WARNING, logger=_MODULE):
            result = emit_lineage_from_sql_extras(
                task_instance=mock_ti,
                sql_extras=[extra],
            )
        assert result is None
        assert "Failed to emit OpenLineage events for SQL hook lineage" in caplog.text

    def test_job_id_only_extra_emits_events(self):
        """An extra with only job_id (no SQL text) should still produce events."""
        self.mock_conn_id.return_value = None
        self.mock_ns.return_value = "ns"
        self.mock_facets_fn.return_value = None
        mock_ti = mock.MagicMock(dag_id="dag_id", task_id="task_id")
        extra = _make_extra(sql="", job_id="external-123")
        result = emit_lineage_from_sql_extras(
            task_instance=mock_ti,
            sql_extras=[extra],
        )
        assert result is None
        expected_ext_query = external_query_run.ExternalQueryRunFacet(
            externalQueryId="external-123", source="ns"
        )
        self.mock_event_pair.assert_called_once_with(
            task_instance=mock_ti,
            job_name="dag_id.task_id.query.1",
            is_successful=True,
            inputs=[],
            outputs=[],
            run_facets={"externalQuery": expected_ext_query},
            job_facets={"jobType": _JOB_TYPE_FACET},
        )
        start, end = self.mock_event_pair.return_value
        adapter = self.mock_listener.return_value.adapter
        assert adapter.emit.call_args_list == [mock.call(start), mock.call(end)]

    def test_events_include_inputs_and_outputs(self):
        self.mock_ns.return_value = "pg://h/db"
        self.mock_conn_id.return_value = "conn"
        mock_ti = mock.MagicMock(dag_id="dag_id", task_id="task_id")
        parsed_inputs = [OpenLineageDataset(namespace="ns", name="in")]
        parsed_outputs = [OpenLineageDataset(namespace="ns", name="out")]
        self.mock_facets_fn.return_value = OperatorLineage(
            inputs=parsed_inputs,
            outputs=parsed_outputs,
        )
        extra = _make_extra(sql="INSERT INTO out SELECT * FROM in")
        emit_lineage_from_sql_extras(
            task_instance=mock_ti,
            sql_extras=[extra],
        )
        self.mock_event_pair.assert_called_once()
        call_kwargs = self.mock_event_pair.call_args.kwargs
        assert call_kwargs["inputs"] == parsed_inputs
        assert call_kwargs["outputs"] == parsed_outputs

    def test_existing_run_facets_not_overwritten(self):
        """Parser-produced run facets take priority over external-query facet via setdefault."""
        self.mock_ns.return_value = "ns"
        self.mock_conn_id.return_value = "conn"
        mock_ti = mock.MagicMock(dag_id="dag_id", task_id="task_id")
        original_ext_query = external_query_run.ExternalQueryRunFacet(
            externalQueryId="parser-produced-id", source="parser-source"
        )
        self.mock_facets_fn.return_value = OperatorLineage(
            run_facets={"externalQuery": original_ext_query},
        )
        extra = _make_extra(sql="SELECT 1", job_id="qid-1")
        result = emit_lineage_from_sql_extras(
            task_instance=mock_ti,
            sql_extras=[extra],
        )
        assert result is None
        call_kwargs = self.mock_event_pair.call_args.kwargs
        # Identity check: the parser's facet object itself must be preserved.
        assert call_kwargs["run_facets"]["externalQuery"] is original_ext_query
| {
"repo_id": "apache/airflow",
"file_path": "providers/openlineage/tests/unit/openlineage/utils/test_sql_hook_lineage.py",
"license": "Apache License 2.0",
"lines": 508,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/google/src/airflow/providers/google/cloud/utils/lineage.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from google.cloud.bigquery import CopyJob, ExtractJob, LoadJob, QueryJob
from airflow.providers.common.compat.lineage.hook import get_hook_lineage_collector
from airflow.providers.common.sql.hooks.lineage import send_sql_hook_lineage
log = logging.getLogger(__name__)
def _add_bq_table_to_lineage(collector, context, table_ref, *, is_input: bool):
method = collector.add_input_asset if is_input else collector.add_output_asset
method(
context=context,
scheme="bigquery",
asset_kwargs={
"project_id": table_ref.project,
"dataset_id": table_ref.dataset_id,
"table_id": table_ref.table_id,
},
)
def _add_gcs_uris_to_lineage(collector, context, uris, *, is_input: bool):
method = collector.add_input_asset if is_input else collector.add_output_asset
for uri in uris or []:
method(context=context, uri=uri)
def send_hook_lineage_for_bq_job(context, job):
    """
    Send hook-level lineage for a BigQuery job to the lineage collector.

    Handles all four BigQuery job types:

    - QUERY: delegates to send_sql_hook_lineage for SQL parsing
    - LOAD: source URIs (GCS) as inputs, destination table as output
    - COPY: source tables as inputs, destination table as output
    - EXTRACT: source table as input, destination URIs (GCS) as outputs

    :param context: The hook instance used as lineage context.
    :param job: A BigQuery job object (QueryJob, LoadJob, CopyJob, or ExtractJob).
    """
    collector = get_hook_lineage_collector()
    if isinstance(job, QueryJob):
        log.debug("Sending Hook Level Lineage for Query job.")
        # Query jobs go through the SQL-parsing path instead of the collector.
        # NOTE(review): this call is outside the try below — presumably
        # send_sql_hook_lineage handles its own errors; confirm.
        send_sql_hook_lineage(
            context=context,
            sql=job.query,
            job_id=job.job_id,
            default_db=job.default_dataset.project if job.default_dataset else None,
            default_schema=job.default_dataset.dataset_id if job.default_dataset else None,
        )
        return
    # Best-effort: collector registration must never fail the surrounding task.
    try:
        if isinstance(job, LoadJob):
            log.debug("Sending Hook Level Lineage for Load job.")
            _add_gcs_uris_to_lineage(collector, context, job.source_uris, is_input=True)
            if job.destination:
                _add_bq_table_to_lineage(collector, context, job.destination, is_input=False)
        elif isinstance(job, CopyJob):
            log.debug("Sending Hook Level Lineage for Copy job.")
            for source_table in job.sources or []:
                _add_bq_table_to_lineage(collector, context, source_table, is_input=True)
            if job.destination:
                _add_bq_table_to_lineage(collector, context, job.destination, is_input=False)
        elif isinstance(job, ExtractJob):
            log.debug("Sending Hook Level Lineage for Extract job.")
            if job.source:
                _add_bq_table_to_lineage(collector, context, job.source, is_input=True)
            _add_gcs_uris_to_lineage(collector, context, job.destination_uris, is_input=False)
    except Exception as e:
        log.warning("Sending BQ job hook level lineage failed: %s", f"{e.__class__.__name__}: {str(e)}")
        log.debug("Exception details:", exc_info=True)
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/src/airflow/providers/google/cloud/utils/lineage.py",
"license": "Apache License 2.0",
"lines": 80,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/google/tests/unit/google/cloud/utils/test_lineage.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
from google.cloud.bigquery import CopyJob, DatasetReference, ExtractJob, LoadJob, QueryJob, TableReference
from airflow.providers.common.compat.assets import Asset
from airflow.providers.google.cloud.utils.lineage import (
_add_bq_table_to_lineage,
_add_gcs_uris_to_lineage,
send_hook_lineage_for_bq_job,
)
# Shared fixture values: one representative project/dataset/table and job id
# reused by every test in this module.
PROJECT_ID = "test-project"
DATASET_ID = "test_dataset"
TABLE_ID = "test_table"
JOB_ID = "test-job-123"
# Fully-qualified reference to the canonical test table above.
TABLE_REFERENCE = TableReference(DatasetReference(PROJECT_ID, DATASET_ID), TABLE_ID)
def _make_table_ref(project, dataset, table):
    """Build a fully-qualified BigQuery ``TableReference`` for the given ids."""
    dataset_ref = DatasetReference(project, dataset)
    return TableReference(dataset_ref, table)
class TestAddBqTableToLineage:
    """A BQ table reference must be registered on exactly one side of the lineage."""

    # Keyword payload the collector is expected to receive for TABLE_REFERENCE.
    EXPECTED_ASSET_KWARGS = {
        "project_id": PROJECT_ID,
        "dataset_id": DATASET_ID,
        "table_id": TABLE_ID,
    }

    def test_add_as_input(self):
        lineage_collector = mock.MagicMock()
        ctx = mock.sentinel.context

        _add_bq_table_to_lineage(lineage_collector, ctx, TABLE_REFERENCE, is_input=True)

        lineage_collector.add_input_asset.assert_called_once_with(
            context=ctx,
            scheme="bigquery",
            asset_kwargs=self.EXPECTED_ASSET_KWARGS,
        )
        lineage_collector.add_output_asset.assert_not_called()

    def test_add_as_output(self):
        lineage_collector = mock.MagicMock()
        ctx = mock.sentinel.context

        _add_bq_table_to_lineage(lineage_collector, ctx, TABLE_REFERENCE, is_input=False)

        lineage_collector.add_output_asset.assert_called_once_with(
            context=ctx,
            scheme="bigquery",
            asset_kwargs=self.EXPECTED_ASSET_KWARGS,
        )
        lineage_collector.add_input_asset.assert_not_called()
class TestAddGcsUrisToLineage:
    """GCS URIs must be registered one asset per URI, on the requested side only."""

    def test_add_uris_as_input(self):
        lineage_collector = mock.MagicMock()
        ctx = mock.sentinel.context
        input_uris = ["gs://bucket1/path/file.csv", "gs://bucket2/other.json"]

        _add_gcs_uris_to_lineage(lineage_collector, ctx, input_uris, is_input=True)

        assert lineage_collector.add_input_asset.call_count == 2
        # Every URI must have produced its own input asset.
        for uri in input_uris:
            lineage_collector.add_input_asset.assert_any_call(context=ctx, uri=uri)
        lineage_collector.add_output_asset.assert_not_called()

    def test_add_uris_as_output(self):
        lineage_collector = mock.MagicMock()
        ctx = mock.sentinel.context

        _add_gcs_uris_to_lineage(lineage_collector, ctx, ["gs://bucket/export/data.csv"], is_input=False)

        lineage_collector.add_output_asset.assert_called_once_with(
            context=ctx, uri="gs://bucket/export/data.csv"
        )
        lineage_collector.add_input_asset.assert_not_called()

    def test_empty_uris(self):
        lineage_collector = mock.MagicMock()
        _add_gcs_uris_to_lineage(lineage_collector, mock.sentinel.context, [], is_input=True)
        lineage_collector.add_input_asset.assert_not_called()

    def test_none_uris(self):
        lineage_collector = mock.MagicMock()
        _add_gcs_uris_to_lineage(lineage_collector, mock.sentinel.context, None, is_input=True)
        lineage_collector.add_input_asset.assert_not_called()
class TestSendHookLineageForBqJob:
    """Dispatch tests for ``send_hook_lineage_for_bq_job`` across the BQ job types.

    Query jobs delegate to ``send_sql_hook_lineage`` (patched here); Load/Copy/Extract
    jobs emit assets into ``hook_lineage_collector`` — presumably a conftest fixture
    from tests_common; verify against the shared test setup.
    """
    @mock.patch("airflow.providers.google.cloud.utils.lineage.send_sql_hook_lineage")
    def test_query_job(self, mock_send_sql):
        """A QueryJob forwards sql/job_id plus default db/schema from default_dataset."""
        job = mock.MagicMock(spec=QueryJob)
        job.query = "SELECT * FROM dataset.table"
        job.job_id = JOB_ID
        job.default_dataset = DatasetReference(PROJECT_ID, DATASET_ID)
        context = mock.sentinel.context
        send_hook_lineage_for_bq_job(context=context, job=job)
        mock_send_sql.assert_called_once_with(
            context=context,
            sql="SELECT * FROM dataset.table",
            job_id=JOB_ID,
            default_db=PROJECT_ID,
            default_schema=DATASET_ID,
        )
    @mock.patch("airflow.providers.google.cloud.utils.lineage.send_sql_hook_lineage")
    def test_query_job_no_default_dataset(self, mock_send_sql):
        """Without a default dataset, default_db/default_schema must be None."""
        job = mock.MagicMock(spec=QueryJob)
        job.query = "SELECT 1"
        job.job_id = JOB_ID
        job.default_dataset = None
        context = mock.sentinel.context
        send_hook_lineage_for_bq_job(context=context, job=job)
        mock_send_sql.assert_called_once_with(
            context=context,
            sql="SELECT 1",
            job_id=JOB_ID,
            default_db=None,
            default_schema=None,
        )
    def test_load_job(self, hook_lineage_collector):
        """LoadJob: each source URI becomes an input, the destination table an output."""
        job = mock.MagicMock(spec=LoadJob)
        job.source_uris = ["gs://bucket/data.csv", "gs://bucket/data2.csv"]
        job.destination = TABLE_REFERENCE
        context = mock.sentinel.context
        send_hook_lineage_for_bq_job(context=context, job=job)
        assert len(hook_lineage_collector.collected_assets.inputs) == 2
        assert len(hook_lineage_collector.collected_assets.outputs) == 1
        assert hook_lineage_collector.collected_assets.outputs[0].asset == Asset(
            uri=f"bigquery://{PROJECT_ID}/{DATASET_ID}/{TABLE_ID}"
        )
    def test_load_job_no_destination(self, hook_lineage_collector):
        """LoadJob with no destination still records inputs but no output asset."""
        job = mock.MagicMock(spec=LoadJob)
        job.source_uris = ["gs://bucket/data.csv"]
        job.destination = None
        context = mock.sentinel.context
        send_hook_lineage_for_bq_job(context=context, job=job)
        assert len(hook_lineage_collector.collected_assets.inputs) == 1
        assert len(hook_lineage_collector.collected_assets.outputs) == 0
    def test_copy_job(self, hook_lineage_collector):
        """CopyJob: every source table is an input, the destination table an output."""
        source1 = _make_table_ref(PROJECT_ID, DATASET_ID, "source1")
        source2 = _make_table_ref(PROJECT_ID, DATASET_ID, "source2")
        dest = _make_table_ref(PROJECT_ID, DATASET_ID, "dest")
        job = mock.MagicMock(spec=CopyJob)
        job.sources = [source1, source2]
        job.destination = dest
        context = mock.sentinel.context
        send_hook_lineage_for_bq_job(context=context, job=job)
        assert len(hook_lineage_collector.collected_assets.inputs) == 2
        assert len(hook_lineage_collector.collected_assets.outputs) == 1
        assert hook_lineage_collector.collected_assets.inputs[0].asset == Asset(
            uri=f"bigquery://{PROJECT_ID}/{DATASET_ID}/source1"
        )
        assert hook_lineage_collector.collected_assets.inputs[1].asset == Asset(
            uri=f"bigquery://{PROJECT_ID}/{DATASET_ID}/source2"
        )
        assert hook_lineage_collector.collected_assets.outputs[0].asset == Asset(
            uri=f"bigquery://{PROJECT_ID}/{DATASET_ID}/dest"
        )
    def test_extract_job(self, hook_lineage_collector):
        """ExtractJob: the source table is an input, each destination URI an output."""
        job = mock.MagicMock(spec=ExtractJob)
        job.source = TABLE_REFERENCE
        job.destination_uris = ["gs://bucket/export/file1.csv", "gs://bucket/export/file2.csv"]
        context = mock.sentinel.context
        send_hook_lineage_for_bq_job(context=context, job=job)
        assert len(hook_lineage_collector.collected_assets.inputs) == 1
        assert len(hook_lineage_collector.collected_assets.outputs) == 2
        assert hook_lineage_collector.collected_assets.inputs[0].asset == Asset(
            uri=f"bigquery://{PROJECT_ID}/{DATASET_ID}/{TABLE_ID}"
        )
    def test_extract_job_no_source(self, hook_lineage_collector):
        """ExtractJob with no source still records the destination URI as output."""
        job = mock.MagicMock(spec=ExtractJob)
        job.source = None
        job.destination_uris = ["gs://bucket/export/file.csv"]
        context = mock.sentinel.context
        send_hook_lineage_for_bq_job(context=context, job=job)
        assert len(hook_lineage_collector.collected_assets.inputs) == 0
        assert len(hook_lineage_collector.collected_assets.outputs) == 1
    @mock.patch("airflow.providers.google.cloud.utils.lineage.send_sql_hook_lineage")
    def test_unknown_job_type_does_not_raise(self, mock_send_sql, hook_lineage_collector):
        """A job of no recognized type must be a silent no-op."""
        job = mock.MagicMock()
        send_hook_lineage_for_bq_job(context=mock.sentinel.context, job=job)
        mock_send_sql.assert_not_called()
        assert len(hook_lineage_collector.collected_assets.inputs) == 0
        assert len(hook_lineage_collector.collected_assets.outputs) == 0
    def test_exception_in_non_query_job_is_caught(self, hook_lineage_collector):
        """Errors raised while reading job attributes are swallowed, not propagated."""
        job = mock.MagicMock(spec=LoadJob)
        # PropertyMock on the type so attribute *access* raises inside the helper.
        type(job).source_uris = mock.PropertyMock(side_effect=RuntimeError("boom"))
        context = mock.sentinel.context
        send_hook_lineage_for_bq_job(context=context, job=job)
| {
"repo_id": "apache/airflow",
"file_path": "providers/google/tests/unit/google/cloud/utils/test_lineage.py",
"license": "Apache License 2.0",
"lines": 188,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/partitioned_dag_runs.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.api_fastapi.core_api.base import BaseModel
class PartitionedDagRunResponse(BaseModel):
    """Single partitioned Dag run item."""
    # Primary key of the underlying AssetPartitionDagRun row.
    id: int
    partition_key: str
    # ISO-8601 string (serialized by the route), not a datetime.
    created_at: str | None = None
    # Distinct required assets already received for this partition.
    total_received: int
    # Number of active assets the target Dag requires.
    total_required: int
    dag_id: str | None = None
    # DagRun state, or "pending" when no DagRun has been created yet.
    state: str | None = None
    # run_id of the created DagRun, if any.
    created_dag_run_id: str | None = None
class PartitionedDagRunCollectionResponse(BaseModel):
    """Collection of partitioned Dag runs."""
    partitioned_dag_runs: list[PartitionedDagRunResponse]
    total: int
    # Per-dag asset expressions, only populated when listing across all dags.
    asset_expressions: dict[str, dict | None] | None = None
class PartitionedDagRunAssetResponse(BaseModel):
    """Asset info within a partitioned Dag run detail."""
    asset_id: int
    asset_name: str
    asset_uri: str
    # Whether an event for this asset has been logged for the partition.
    received: bool
class PartitionedDagRunDetailResponse(BaseModel):
    """Detail of a single partitioned Dag run."""
    id: int
    dag_id: str
    partition_key: str
    # ISO-8601 strings (serialized by the route), not datetimes.
    created_at: str | None = None
    updated_at: str | None = None
    created_dag_run_id: str | None = None
    # One entry per required asset, with its received flag.
    assets: list[PartitionedDagRunAssetResponse]
    total_required: int
    total_received: int
    # The Dag's asset expression, if any assets are attached.
    asset_expression: dict | None = None
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/partitioned_dag_runs.py",
"license": "Apache License 2.0",
"lines": 51,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/src/airflow/api_fastapi/core_api/routes/ui/partitioned_dag_runs.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from fastapi import Depends, HTTPException, status
from sqlalchemy import exists, func, select
from airflow.api_fastapi.common.db.common import SessionDep, apply_filters_to_select
from airflow.api_fastapi.common.parameters import (
QueryPartitionedDagRunDagIdFilter,
QueryPartitionedDagRunHasCreatedDagRunIdFilter,
)
from airflow.api_fastapi.common.router import AirflowRouter
from airflow.api_fastapi.core_api.datamodels.ui.partitioned_dag_runs import (
PartitionedDagRunAssetResponse,
PartitionedDagRunCollectionResponse,
PartitionedDagRunDetailResponse,
PartitionedDagRunResponse,
)
from airflow.api_fastapi.core_api.security import requires_access_asset
from airflow.models import DagModel
from airflow.models.asset import (
AssetModel,
AssetPartitionDagRun,
DagScheduleAssetReference,
PartitionedAssetKeyLog,
)
from airflow.models.dagrun import DagRun
# Router for the partitioned-Dag-run UI endpoints defined below.
partitioned_dag_runs_router = AirflowRouter(tags=["PartitionedDagRun"])
def _build_response(row, required_count: int) -> PartitionedDagRunResponse:
    """Map one joined AssetPartitionDagRun/DagRun result row onto the API model.

    :param row: result row carrying id, target_dag_id, partition_key, created_at,
        created_dag_run_id, dag_run_id, dag_run_state and total_received columns
    :param required_count: number of active assets the target Dag requires
    """
    # A partition that has not materialized a DagRun yet is surfaced as "pending".
    run_state = row.dag_run_state if row.created_dag_run_id else "pending"
    created = row.created_at.isoformat() if row.created_at else None
    return PartitionedDagRunResponse(
        id=row.id,
        dag_id=row.target_dag_id,
        partition_key=row.partition_key,
        created_at=created,
        total_received=row.total_received or 0,
        total_required=required_count,
        state=run_state,
        created_dag_run_id=row.dag_run_id,
    )
@partitioned_dag_runs_router.get(
    "/partitioned_dag_runs",
    dependencies=[Depends(requires_access_asset(method="GET"))],
)
def get_partitioned_dag_runs(
    session: SessionDep,
    dag_id: QueryPartitionedDagRunDagIdFilter,
    has_created_dag_run_id: QueryPartitionedDagRunHasCreatedDagRunIdFilter,
) -> PartitionedDagRunCollectionResponse:
    """Return PartitionedDagRuns. Filter by dag_id and/or has_created_dag_run_id.

    With a dag_id filter: 404s on an unknown Dag, returns an empty collection for a
    non-partitioned Dag, and attaches no asset_expressions. Without a dag_id filter:
    required counts and asset expressions are resolved per dag in a second query.
    """
    if dag_id.value is not None:
        # Single query: validate Dag + get required count
        dag_info = session.execute(
            select(
                DagModel.timetable_summary,
                func.count(DagScheduleAssetReference.asset_id).label("required_count"),
            )
            .outerjoin(
                DagScheduleAssetReference,
                # Only count references to *active* assets.
                (DagScheduleAssetReference.dag_id == DagModel.dag_id)
                & DagScheduleAssetReference.asset_id.in_(
                    select(AssetModel.id).where(AssetModel.active.has())
                ),
            )
            .where(DagModel.dag_id == dag_id.value)
            .group_by(DagModel.dag_id)
        ).one_or_none()
        if dag_info is None:
            raise HTTPException(status.HTTP_404_NOT_FOUND, f"Dag with id {dag_id.value} was not found")
        # Non-partitioned Dags never have partitioned runs; short-circuit with empty.
        if dag_info.timetable_summary != "Partitioned Asset":
            return PartitionedDagRunCollectionResponse(partitioned_dag_runs=[], total=0)
        required_count = dag_info.required_count
    # Subquery for received count per partition (only count required assets)
    required_assets_subq = (
        select(DagScheduleAssetReference.asset_id)
        .join(AssetModel, AssetModel.id == DagScheduleAssetReference.asset_id)
        .where(
            DagScheduleAssetReference.dag_id == AssetPartitionDagRun.target_dag_id,
            AssetModel.active.has(),
        )
    )
    # Correlated scalar subquery: distinct received assets per AssetPartitionDagRun row.
    received_subq = (
        select(func.count(func.distinct(PartitionedAssetKeyLog.asset_id)))
        .where(
            PartitionedAssetKeyLog.asset_partition_dag_run_id == AssetPartitionDagRun.id,
            PartitionedAssetKeyLog.asset_id.in_(required_assets_subq),
        )
        .correlate(AssetPartitionDagRun)
        .scalar_subquery()
    )
    query = select(
        AssetPartitionDagRun.id,
        AssetPartitionDagRun.target_dag_id,
        AssetPartitionDagRun.partition_key,
        AssetPartitionDagRun.created_at,
        AssetPartitionDagRun.created_dag_run_id,
        DagRun.run_id.label("dag_run_id"),
        DagRun.state.label("dag_run_state"),
        received_subq.label("total_received"),
    ).outerjoin(DagRun, AssetPartitionDagRun.created_dag_run_id == DagRun.id)
    # dag_id / has_created_dag_run_id filters are applied generically here.
    query = apply_filters_to_select(statement=query, filters=[dag_id, has_created_dag_run_id])
    query = query.order_by(AssetPartitionDagRun.created_at.desc())
    if not (rows := session.execute(query).all()):
        return PartitionedDagRunCollectionResponse(partitioned_dag_runs=[], total=0)
    if dag_id.value is not None:
        # required_count was computed above in the dag_id branch.
        results = [_build_response(row, required_count) for row in rows]
        return PartitionedDagRunCollectionResponse(partitioned_dag_runs=results, total=len(results))
    # No dag_id: need to get required counts and expressions per dag
    dag_ids = list({row.target_dag_id for row in rows})
    dag_rows = session.execute(
        select(
            DagModel.dag_id,
            DagModel.asset_expression,
            func.count(DagScheduleAssetReference.asset_id).label("required_count"),
        )
        .outerjoin(
            DagScheduleAssetReference,
            (DagScheduleAssetReference.dag_id == DagModel.dag_id)
            & DagScheduleAssetReference.asset_id.in_(select(AssetModel.id).where(AssetModel.active.has())),
        )
        .where(DagModel.dag_id.in_(dag_ids))
        .group_by(DagModel.dag_id)
    ).all()
    required_counts = {r.dag_id: r.required_count for r in dag_rows}
    asset_expressions = {r.dag_id: r.asset_expression for r in dag_rows}
    results = [_build_response(row, required_counts.get(row.target_dag_id, 0)) for row in rows]
    return PartitionedDagRunCollectionResponse(
        partitioned_dag_runs=results,
        total=len(results),
        asset_expressions=asset_expressions,
    )
@partitioned_dag_runs_router.get(
    "/pending_partitioned_dag_run/{dag_id}/{partition_key}",
    dependencies=[Depends(requires_access_asset(method="GET"))],
)
def get_pending_partitioned_dag_run(
    dag_id: str,
    partition_key: str,
    session: SessionDep,
) -> PartitionedDagRunDetailResponse:
    """Return full details for pending PartitionedDagRun.

    404s when no AssetPartitionDagRun exists for this dag/partition with a NULL
    created_dag_run_id (i.e. fulfilled runs are deliberately excluded).
    """
    partitioned_dag_run = session.execute(
        select(
            AssetPartitionDagRun.id,
            AssetPartitionDagRun.target_dag_id,
            AssetPartitionDagRun.partition_key,
            AssetPartitionDagRun.created_at,
            AssetPartitionDagRun.updated_at,
            DagRun.run_id.label("created_dag_run_id"),
        )
        .outerjoin(DagRun, AssetPartitionDagRun.created_dag_run_id == DagRun.id)
        .where(
            AssetPartitionDagRun.target_dag_id == dag_id,
            AssetPartitionDagRun.partition_key == partition_key,
            # Pending only: a populated created_dag_run_id means already fulfilled.
            AssetPartitionDagRun.created_dag_run_id.is_(None),
        )
    ).one_or_none()
    if partitioned_dag_run is None:
        raise HTTPException(
            status.HTTP_404_NOT_FOUND,
            f"No PartitionedDagRun for dag={dag_id} partition={partition_key}",
        )
    # EXISTS over the key log, correlated against AssetModel in the query below,
    # flags each required asset as received/not-received.
    received_subq = (
        select(PartitionedAssetKeyLog.asset_id).where(
            PartitionedAssetKeyLog.asset_partition_dag_run_id == partitioned_dag_run.id
        )
    ).correlate(AssetModel)
    received_expr = exists(received_subq.where(PartitionedAssetKeyLog.asset_id == AssetModel.id))
    # Scalar subquery so the Dag's asset_expression rides along on every asset row.
    asset_expression_subq = (
        select(DagModel.asset_expression).where(DagModel.dag_id == dag_id).scalar_subquery()
    )
    asset_rows = session.execute(
        select(
            AssetModel.id,
            AssetModel.uri,
            AssetModel.name,
            received_expr.label("received"),
            asset_expression_subq.label("asset_expression"),
        )
        .join(DagScheduleAssetReference, DagScheduleAssetReference.asset_id == AssetModel.id)
        .where(DagScheduleAssetReference.dag_id == dag_id, AssetModel.active.has())
        # Not-yet-received assets first, then alphabetical by URI.
        .order_by(received_expr.asc(), AssetModel.uri)
    ).all()
    assets = [
        PartitionedDagRunAssetResponse(
            asset_id=row.id, asset_name=row.name, asset_uri=row.uri, received=row.received
        )
        for row in asset_rows
    ]
    total_received = sum(1 for a in assets if a.received)
    # Every row carries the same expression; take it from the first if any.
    asset_expression = asset_rows[0].asset_expression if asset_rows else None
    return PartitionedDagRunDetailResponse(
        id=partitioned_dag_run.id,
        dag_id=dag_id,
        partition_key=partition_key,
        created_at=partitioned_dag_run.created_at.isoformat() if partitioned_dag_run.created_at else None,
        updated_at=partitioned_dag_run.updated_at.isoformat() if partitioned_dag_run.updated_at else None,
        created_dag_run_id=partitioned_dag_run.created_dag_run_id,
        assets=assets,
        total_required=len(assets),
        total_received=total_received,
        asset_expression=asset_expression,
    )
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/core_api/routes/ui/partitioned_dag_runs.py",
"license": "Apache License 2.0",
"lines": 215,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/tests/unit/api_fastapi/core_api/routes/ui/test_partitioned_dag_runs.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pendulum
import pytest
from sqlalchemy import select
from airflow.models.asset import AssetEvent, AssetModel, AssetPartitionDagRun, PartitionedAssetKeyLog
from airflow.providers.standard.operators.empty import EmptyOperator
from airflow.sdk.definitions.asset import Asset
from airflow.sdk.definitions.timetables.assets import PartitionedAssetTimetable
from tests_common.test_utils.asserts import assert_queries_count
from tests_common.test_utils.db import clear_db_apdr, clear_db_dags, clear_db_pakl, clear_db_serialized_dags
# Every test in this module needs a database.
pytestmark = pytest.mark.db_test
@pytest.fixture(autouse=True)
def cleanup():
    """Wipe Dag, serialized-Dag and partition tables before every test here."""
    for clear in (clear_db_dags, clear_db_serialized_dags, clear_db_apdr, clear_db_pakl):
        clear()
class TestGetPartitionedDagRuns:
    """End-to-end tests for GET /partitioned_dag_runs (auth, 404, filters, counts)."""
    def test_should_response_401(self, unauthenticated_test_client):
        response = unauthenticated_test_client.get("/partitioned_dag_runs?dag_id=any")
        assert response.status_code == 401
    def test_should_response_403(self, unauthorized_test_client):
        response = unauthorized_test_client.get("/partitioned_dag_runs?dag_id=any")
        assert response.status_code == 403
    def test_should_response_404(self, test_client):
        assert test_client.get("/partitioned_dag_runs?dag_id=no_such_dag").status_code == 404
    def test_should_response_200_non_partitioned_dag_returns_empty(self, test_client, dag_maker):
        """A Dag without a PartitionedAssetTimetable returns an empty collection."""
        with dag_maker(dag_id="normal", schedule=[Asset(uri="s3://bucket/a", name="a")], serialized=True):
            EmptyOperator(task_id="t")
        dag_maker.create_dagrun()
        dag_maker.sync_dagbag_to_db()
        # Query budget: endpoint must not regress past 2 statements for this path.
        with assert_queries_count(2):
            resp = test_client.get("/partitioned_dag_runs?dag_id=normal&has_created_dag_run_id=false")
        assert resp.status_code == 200
        assert resp.json() == {"partitioned_dag_runs": [], "total": 0, "asset_expressions": None}
    @pytest.mark.parametrize(
        (
            "num_assets",
            "received_count",
            "fulfilled",
            "has_created_dag_run_id",
            "expected_total",
            "expected_state",
        ),
        [
            (1, 1, False, False, 1, "pending"),
            (1, 1, True, False, 0, None),
            (1, 1, True, True, 1, "running"),
            (3, 0, False, False, 1, "pending"),
            (3, 1, False, False, 1, "pending"),
            (3, 2, False, False, 1, "pending"),
            (3, 3, False, False, 1, "pending"),
        ],
        ids=[
            "filter-pending-included",
            "filter-fulfilled-excluded",
            "filter-fulfilled-included",
            "received-0/3",
            "received-1/3",
            "received-2/3",
            "received-3/3",
        ],
    )
    def test_should_response_200(
        self,
        test_client,
        dag_maker,
        session,
        num_assets,
        received_count,
        fulfilled,
        has_created_dag_run_id,
        expected_total,
        expected_state,
    ):
        """Listing honors the fulfilled-filter and reports received/required counts."""
        uris = [f"s3://bucket/lr{i}" for i in range(num_assets)]
        asset_defs = [Asset(uri=uri, name=f"lr{i}") for i, uri in enumerate(uris)]
        # Fold the asset list into a single AND-expression for the timetable.
        schedule = asset_defs[0]
        for a in asset_defs[1:]:
            schedule = schedule & a
        with dag_maker(
            dag_id="list_dag",
            schedule=PartitionedAssetTimetable(assets=schedule),
            serialized=True,
        ):
            EmptyOperator(task_id="t")
        dr = dag_maker.create_dagrun()
        dag_maker.sync_dagbag_to_db()
        assets = {a.uri: a for a in session.scalars(select(AssetModel).where(AssetModel.uri.in_(uris)))}
        # Fulfilled runs point at a real DagRun; pending ones have no created_dag_run_id.
        pdr = AssetPartitionDagRun(
            target_dag_id="list_dag",
            partition_key="2024-06-01",
            created_dag_run_id=dr.id if fulfilled else None,
        )
        session.add(pdr)
        session.flush()
        for uri in uris[:received_count]:
            event = AssetEvent(asset_id=assets[uri].id, timestamp=pendulum.now())
            session.add(event)
            session.flush()
            session.add(
                PartitionedAssetKeyLog(
                    asset_id=assets[uri].id,
                    asset_event_id=event.id,
                    asset_partition_dag_run_id=pdr.id,
                    source_partition_key="2024-06-01",
                    target_dag_id="list_dag",
                    target_partition_key="2024-06-01",
                )
            )
        session.commit()
        with assert_queries_count(2):
            resp = test_client.get(
                f"/partitioned_dag_runs?dag_id=list_dag"
                f"&has_created_dag_run_id={str(has_created_dag_run_id).lower()}"
            )
        assert resp.status_code == 200
        body = resp.json()
        assert body["total"] == expected_total
        if expected_total > 0:
            pdr_resp = body["partitioned_dag_runs"][0]
            assert pdr_resp["state"] == expected_state
            assert pdr_resp["total_received"] == received_count
            assert pdr_resp["total_required"] == num_assets
class TestGetPendingPartitionedDagRun:
    """End-to-end tests for GET /pending_partitioned_dag_run/{dag_id}/{partition_key}."""
    def test_should_response_401(self, unauthenticated_test_client):
        response = unauthenticated_test_client.get("/pending_partitioned_dag_run/any_dag/any_key")
        assert response.status_code == 401
    def test_should_response_403(self, unauthorized_test_client):
        response = unauthorized_test_client.get("/pending_partitioned_dag_run/any_dag/any_key")
        assert response.status_code == 403
    @pytest.mark.parametrize(
        ("dag_id", "partition_key", "fulfilled"),
        [
            ("no_dag", "no_key", False),
            ("fulfilled_dag", "2024-07-01", True),
        ],
        ids=[
            "not-found",
            "fulfilled-excluded",
        ],
    )
    def test_should_response_404(self, test_client, dag_maker, session, dag_id, partition_key, fulfilled):
        """Unknown runs AND already-fulfilled runs both yield 404 on this endpoint."""
        if fulfilled:
            with dag_maker(
                dag_id="fulfilled_dag",
                schedule=PartitionedAssetTimetable(assets=Asset(uri="s3://bucket/ful0", name="ful0")),
                serialized=True,
            ):
                EmptyOperator(task_id="t")
            dr = dag_maker.create_dagrun()
            dag_maker.sync_dagbag_to_db()
            session.add(
                AssetPartitionDagRun(
                    target_dag_id="fulfilled_dag",
                    partition_key="2024-07-01",
                    created_dag_run_id=dr.id,
                )
            )
            session.commit()
        resp = test_client.get(f"/pending_partitioned_dag_run/{dag_id}/{partition_key}")
        assert resp.status_code == 404
    @pytest.mark.parametrize(
        ("num_assets", "received_count"),
        [
            (1, 1),
            (1, 0),
            (2, 1),
            (2, 2),
            (2, 0),
        ],
        ids=[
            "1-asset-received-pending",
            "1-asset-none-received-pending",
            "2-assets-partial-pending",
            "2-assets-all-received-pending",
            "2-assets-none-received-pending",
        ],
    )
    def test_should_response_200(self, test_client, dag_maker, session, num_assets, received_count):
        """Detail reports per-asset received flags plus totals for a pending run."""
        uris = [f"s3://bucket/dt{i}" for i in range(num_assets)]
        asset_defs = [Asset(uri=uri, name=f"dt{i}") for i, uri in enumerate(uris)]
        schedule = asset_defs[0] if num_assets == 1 else asset_defs[0] & asset_defs[1]
        with dag_maker(
            dag_id="detail_dag",
            schedule=PartitionedAssetTimetable(assets=schedule),
            serialized=True,
        ):
            EmptyOperator(task_id="t")
        dag_maker.create_dagrun()
        dag_maker.sync_dagbag_to_db()
        assets = {a.uri: a for a in session.scalars(select(AssetModel).where(AssetModel.uri.in_(uris)))}
        # Pending run: no created_dag_run_id.
        pdr = AssetPartitionDagRun(
            target_dag_id="detail_dag",
            partition_key="2024-07-01",
            created_dag_run_id=None,
        )
        session.add(pdr)
        session.flush()
        for uri in uris[:received_count]:
            event = AssetEvent(asset_id=assets[uri].id, timestamp=pendulum.now())
            session.add(event)
            session.flush()
            session.add(
                PartitionedAssetKeyLog(
                    asset_id=assets[uri].id,
                    asset_event_id=event.id,
                    asset_partition_dag_run_id=pdr.id,
                    source_partition_key="2024-07-01",
                    target_dag_id="detail_dag",
                    target_partition_key="2024-07-01",
                )
            )
        session.commit()
        resp = test_client.get("/pending_partitioned_dag_run/detail_dag/2024-07-01")
        assert resp.status_code == 200
        body = resp.json()
        assert body["dag_id"] == "detail_dag"
        assert body["partition_key"] == "2024-07-01"
        assert body["total_required"] == num_assets
        assert body["total_received"] == received_count
        assert len(body["assets"]) == num_assets
        assert body["asset_expression"] is not None
        assert body["created_dag_run_id"] is None
        received_uris = {a["asset_uri"] for a in body["assets"] if a["received"]}
        assert received_uris == set(uris[:received_count])
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/api_fastapi/core_api/routes/ui/test_partitioned_dag_runs.py",
"license": "Apache License 2.0",
"lines": 242,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/secrets/kubernetes_secrets_backend.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Objects relating to sourcing connections, variables, and configs from Kubernetes Secrets."""
from __future__ import annotations
import base64
from functools import cached_property
from pathlib import Path
from kubernetes.client import ApiClient, CoreV1Api
from kubernetes.config import load_incluster_config
from airflow.exceptions import AirflowException
from airflow.secrets import BaseSecretsBackend
from airflow.utils.log.logging_mixin import LoggingMixin
class KubernetesSecretsBackend(BaseSecretsBackend, LoggingMixin):
"""
Retrieve connections, variables, and configs from Kubernetes Secrets using labels.
This backend discovers secrets by querying Kubernetes labels, enabling integration
with External Secrets Operator (ESO), Sealed Secrets, or any tool that creates
Kubernetes secrets — regardless of the secret's name.
Configurable via ``airflow.cfg``:
.. code-block:: ini
[secrets]
backend = airflow.providers.cncf.kubernetes.secrets.kubernetes_secrets_backend.KubernetesSecretsBackend
backend_kwargs = {"namespace": "airflow", "connections_label": "airflow.apache.org/connection-id"}
The secret must have a label whose key matches the configured label and whose value
matches the requested identifier (conn_id, variable key, or config key). The actual
secret value is read from the ``value`` key in the secret's data.
Example Kubernetes secret for a connection named ``my_db``:
.. code-block:: yaml
apiVersion: v1
kind: Secret
metadata:
name: anything
labels:
airflow.apache.org/connection-id: my_db
data:
value: <base64-encoded-connection-uri>
**Authentication:** Uses ``kubernetes.config.load_incluster_config()`` directly
for in-cluster authentication. Does not use KubernetesHook or any Airflow connection,
avoiding circular dependencies since this IS the secrets backend.
The namespace can be set explicitly via ``backend_kwargs``. If not set, it is
auto-detected from the pod's service account metadata at
``/var/run/secrets/kubernetes.io/serviceaccount/namespace``. If auto-detection
fails (e.g. ``automountServiceAccountToken`` is disabled), an error is raised.
**Performance:** Queries use ``resource_version="0"`` so the Kubernetes API server
serves results from its in-memory watch cache, making lookups very fast without
requiring Airflow-side caching.
:param namespace: Kubernetes namespace to query for secrets. If not set, the
namespace is auto-detected from the pod's service account metadata. If
auto-detection fails, an ``AirflowException`` is raised.
:param connections_label: Label key used to discover connection secrets.
If set to None, requests for connections will not be sent to Kubernetes.
:param variables_label: Label key used to discover variable secrets.
If set to None, requests for variables will not be sent to Kubernetes.
:param config_label: Label key used to discover config secrets.
If set to None, requests for configurations will not be sent to Kubernetes.
:param connections_data_key: The data key in the Kubernetes secret that holds the
connection value. Default: ``"value"``
:param variables_data_key: The data key in the Kubernetes secret that holds the
variable value. Default: ``"value"``
:param config_data_key: The data key in the Kubernetes secret that holds the
config value. Default: ``"value"``
"""
DEFAULT_CONNECTIONS_LABEL = "airflow.apache.org/connection-id"
DEFAULT_VARIABLES_LABEL = "airflow.apache.org/variable-key"
DEFAULT_CONFIG_LABEL = "airflow.apache.org/config-key"
SERVICE_ACCOUNT_NAMESPACE_PATH = "/var/run/secrets/kubernetes.io/serviceaccount/namespace"
    def __init__(
        self,
        namespace: str | None = None,
        connections_label: str = DEFAULT_CONNECTIONS_LABEL,
        variables_label: str = DEFAULT_VARIABLES_LABEL,
        config_label: str = DEFAULT_CONFIG_LABEL,
        connections_data_key: str = "value",
        variables_data_key: str = "value",
        config_data_key: str = "value",
        **kwargs,
    ) -> None:
        """Store backend configuration; all Kubernetes access is deferred to first use."""
        super().__init__(**kwargs)
        # Explicit namespace wins; None means lazy auto-detection in `namespace`.
        self._namespace = namespace
        # Label keys used to locate secrets for each kind of Airflow object.
        self.connections_label = connections_label
        self.variables_label = variables_label
        self.config_label = config_label
        # Data keys inside the secret whose value holds the actual payload.
        self.connections_data_key = connections_data_key
        self.variables_data_key = variables_data_key
        self.config_data_key = config_data_key
@cached_property
def namespace(self) -> str:
    """Return the configured namespace, or auto-detect from service account metadata."""
    # An explicitly configured namespace always wins.
    if self._namespace:
        return self._namespace
    try:
        detected = Path(self.SERVICE_ACCOUNT_NAMESPACE_PATH).read_text()
    except FileNotFoundError:
        raise AirflowException(
            f"Could not auto-detect Kubernetes namespace from "
            f"{self.SERVICE_ACCOUNT_NAMESPACE_PATH}. "
            f"Is automountServiceAccountToken disabled for this pod? "
            f"Set the 'namespace' parameter explicitly in backend_kwargs."
        )
    return detected.strip()
@cached_property
def client(self) -> CoreV1Api:
    """Lazy-init Kubernetes CoreV1Api client using in-cluster config directly."""
    # In-cluster config is loaded once; cached_property memoizes the client.
    load_incluster_config()
    api_client = ApiClient()
    return CoreV1Api(api_client)
def get_conn_value(self, conn_id: str, team_name: str | None = None) -> str | None:
    """
    Get serialized representation of Connection from a Kubernetes secret.

    Multi-team isolation is not currently supported; ``team_name`` is accepted
    for API compatibility but ignored.

    :param conn_id: connection id
    :param team_name: Team name (unused — multi-team is not currently supported)
    """
    return self._get_secret(
        label_key=self.connections_label,
        label_value=conn_id,
        data_key=self.connections_data_key,
    )
def get_variable(self, key: str, team_name: str | None = None) -> str | None:
    """
    Get Airflow Variable from a Kubernetes secret.

    Multi-team isolation is not currently supported; ``team_name`` is accepted
    for API compatibility but ignored.

    :param key: Variable Key
    :param team_name: Team name (unused — multi-team is not currently supported)
    :return: Variable Value
    """
    return self._get_secret(
        label_key=self.variables_label,
        label_value=key,
        data_key=self.variables_data_key,
    )
def get_config(self, key: str) -> str | None:
    """
    Get Airflow Configuration from a Kubernetes secret.

    :param key: Configuration Option Key
    :return: Configuration Option Value
    """
    return self._get_secret(
        label_key=self.config_label,
        label_value=key,
        data_key=self.config_data_key,
    )
def _get_secret(self, label_key: str | None, label_value: str, data_key: str) -> str | None:
    """
    Get secret value from Kubernetes by label selector.

    Queries for secrets with a label ``{label_key}={label_value}`` using
    ``resource_version="0"`` for fast cached reads from the API server.

    :param label_key: The label key to search for. If None, returns None immediately
        (used to skip lookups when a label is not configured).
    :param label_value: The label value to match (e.g. conn_id or variable key)
    :param data_key: The key within the secret's data dict to read
    :return: Secret value or None if not found
    """
    # A disabled lookup kind (label set to None) short-circuits without an API call.
    if label_key is None:
        return None
    selector = f"{label_key}={label_value}"
    response = self.client.list_namespaced_secret(
        self.namespace,
        label_selector=selector,
        resource_version="0",
    )
    matches = response.items
    if not matches:
        self.log.warning(
            "No secret found with label %s in namespace %s.",
            selector,
            self.namespace,
        )
        return None
    if len(matches) > 1:
        self.log.warning(
            "Multiple secrets found with label %s in namespace %s. Using the first one.",
            selector,
            self.namespace,
        )
    chosen = matches[0]
    # Secret data may be absent entirely (data is None) or lack the expected key.
    data = chosen.data or {}
    if data_key not in data:
        self.log.warning(
            "Secret '%s' does not have data key '%s'.",
            chosen.metadata.name,
            data_key,
        )
        return None
    # Kubernetes stores secret data base64-encoded; decode to a UTF-8 string.
    return base64.b64decode(data[data_key]).decode("utf-8")
| {
"repo_id": "apache/airflow",
"file_path": "providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/secrets/kubernetes_secrets_backend.py",
"license": "Apache License 2.0",
"lines": 186,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
apache/airflow:providers/cncf/kubernetes/tests/unit/cncf/kubernetes/secrets/test_kubernetes_secrets_backend.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import base64
import json
from unittest import mock
import pytest
from kubernetes.client.exceptions import ApiException
from airflow.exceptions import AirflowException
from airflow.providers.cncf.kubernetes.secrets.kubernetes_secrets_backend import (
KubernetesSecretsBackend,
)
MODULE_PATH = "airflow.providers.cncf.kubernetes.secrets.kubernetes_secrets_backend.KubernetesSecretsBackend"
def _make_secret(data: dict[str, str], name: str = "some-secret"):
"""Create a mock V1Secret with base64-encoded data."""
encoded = {k: base64.b64encode(v.encode("utf-8")).decode("utf-8") for k, v in data.items()}
secret = mock.MagicMock()
secret.data = encoded
secret.metadata.name = name
return secret
def _make_secret_list(secrets: list):
"""Create a mock V1SecretList with the given items."""
secret_list = mock.MagicMock()
secret_list.items = secrets
return secret_list
class TestKubernetesSecretsBackendConnections:
    @mock.patch(f"{MODULE_PATH}.namespace", new_callable=mock.PropertyMock, return_value="default")
    @mock.patch(f"{MODULE_PATH}.client", new_callable=mock.PropertyMock)
    def test_get_conn_value_uri(self, mock_client, mock_namespace):
        """A URI-style connection stored in a labelled secret is returned verbatim."""
        uri = "postgresql://user:pass@host:5432/db"
        api = mock_client.return_value
        api.list_namespaced_secret.return_value = _make_secret_list([_make_secret({"value": uri})])
        assert KubernetesSecretsBackend().get_conn_value("my_db") == uri
        api.list_namespaced_secret.assert_called_once_with(
            "default",
            label_selector="airflow.apache.org/connection-id=my_db",
            resource_version="0",
        )

    @mock.patch(f"{MODULE_PATH}.namespace", new_callable=mock.PropertyMock, return_value="default")
    @mock.patch(f"{MODULE_PATH}.client", new_callable=mock.PropertyMock)
    def test_get_conn_value_json(self, mock_client, mock_namespace):
        """A JSON-formatted connection is returned raw and parses back correctly."""
        conn_dict = {
            "conn_type": "postgres",
            "login": "user",
            "password": "pass",
            "host": "host",
            "port": 5432,
            "schema": "db",
        }
        conn_json = json.dumps(conn_dict)
        api = mock_client.return_value
        api.list_namespaced_secret.return_value = _make_secret_list(
            [_make_secret({"value": conn_json})]
        )
        result = KubernetesSecretsBackend().get_conn_value("my_db")
        assert result == conn_json
        assert json.loads(result)["conn_type"] == "postgres"

    @mock.patch(f"{MODULE_PATH}.namespace", new_callable=mock.PropertyMock, return_value="default")
    @mock.patch(f"{MODULE_PATH}.client", new_callable=mock.PropertyMock)
    def test_get_conn_value_not_found(self, mock_client, mock_namespace):
        """No matching secret means the connection resolves to None."""
        mock_client.return_value.list_namespaced_secret.return_value = _make_secret_list([])
        assert KubernetesSecretsBackend().get_conn_value("nonexistent") is None
class TestKubernetesSecretsBackendVariables:
    @mock.patch(f"{MODULE_PATH}.namespace", new_callable=mock.PropertyMock, return_value="default")
    @mock.patch(f"{MODULE_PATH}.client", new_callable=mock.PropertyMock)
    def test_get_variable(self, mock_client, mock_namespace):
        """A variable stored in a labelled secret is returned as plain text."""
        api = mock_client.return_value
        api.list_namespaced_secret.return_value = _make_secret_list(
            [_make_secret({"value": "my-value"})]
        )
        assert KubernetesSecretsBackend().get_variable("api_key") == "my-value"
        api.list_namespaced_secret.assert_called_once_with(
            "default",
            label_selector="airflow.apache.org/variable-key=api_key",
            resource_version="0",
        )

    @mock.patch(f"{MODULE_PATH}.namespace", new_callable=mock.PropertyMock, return_value="default")
    @mock.patch(f"{MODULE_PATH}.client", new_callable=mock.PropertyMock)
    def test_get_variable_not_found(self, mock_client, mock_namespace):
        """No matching secret means the variable resolves to None."""
        mock_client.return_value.list_namespaced_secret.return_value = _make_secret_list([])
        assert KubernetesSecretsBackend().get_variable("nonexistent") is None
class TestKubernetesSecretsBackendConfig:
    @mock.patch(f"{MODULE_PATH}.namespace", new_callable=mock.PropertyMock, return_value="default")
    @mock.patch(f"{MODULE_PATH}.client", new_callable=mock.PropertyMock)
    def test_get_config(self, mock_client, mock_namespace):
        """A config option stored in a labelled secret is returned as plain text."""
        api = mock_client.return_value
        api.list_namespaced_secret.return_value = _make_secret_list(
            [_make_secret({"value": "sqlite:///airflow.db"})]
        )
        assert KubernetesSecretsBackend().get_config("sql_alchemy_conn") == "sqlite:///airflow.db"
        api.list_namespaced_secret.assert_called_once_with(
            "default",
            label_selector="airflow.apache.org/config-key=sql_alchemy_conn",
            resource_version="0",
        )

    @mock.patch(f"{MODULE_PATH}.namespace", new_callable=mock.PropertyMock, return_value="default")
    @mock.patch(f"{MODULE_PATH}.client", new_callable=mock.PropertyMock)
    def test_get_config_not_found(self, mock_client, mock_namespace):
        """No matching secret means the config option resolves to None."""
        mock_client.return_value.list_namespaced_secret.return_value = _make_secret_list([])
        assert KubernetesSecretsBackend().get_config("nonexistent") is None
class TestKubernetesSecretsBackendCustomConfig:
    @mock.patch(f"{MODULE_PATH}.namespace", new_callable=mock.PropertyMock, return_value="default")
    @mock.patch(f"{MODULE_PATH}.client", new_callable=mock.PropertyMock)
    def test_custom_label(self, mock_client, mock_namespace):
        """A user-supplied connections label key is used in the label selector."""
        api = mock_client.return_value
        api.list_namespaced_secret.return_value = _make_secret_list(
            [_make_secret({"value": "postgresql://localhost/db"})]
        )
        backend = KubernetesSecretsBackend(connections_label="my-org/conn")
        assert backend.get_conn_value("my_db") == "postgresql://localhost/db"
        api.list_namespaced_secret.assert_called_once_with(
            "default",
            label_selector="my-org/conn=my_db",
            resource_version="0",
        )

    @mock.patch(f"{MODULE_PATH}.namespace", new_callable=mock.PropertyMock, return_value="default")
    @mock.patch(f"{MODULE_PATH}.client", new_callable=mock.PropertyMock)
    def test_custom_data_key(self, mock_client, mock_namespace):
        """A user-supplied data key is read instead of the default 'value'."""
        api = mock_client.return_value
        api.list_namespaced_secret.return_value = _make_secret_list(
            [_make_secret({"conn_uri": "postgresql://localhost/db"})]
        )
        backend = KubernetesSecretsBackend(connections_data_key="conn_uri")
        assert backend.get_conn_value("my_db") == "postgresql://localhost/db"

    @mock.patch(f"{MODULE_PATH}.namespace", new_callable=mock.PropertyMock, return_value="default")
    @mock.patch(f"{MODULE_PATH}.client", new_callable=mock.PropertyMock)
    def test_missing_value_data_key_returns_none(self, mock_client, mock_namespace):
        """A secret lacking the expected 'value' data key yields None."""
        mock_client.return_value.list_namespaced_secret.return_value = _make_secret_list(
            [_make_secret({"wrong_key": "some-value"})]
        )
        assert KubernetesSecretsBackend().get_conn_value("my_db") is None

    @mock.patch(f"{MODULE_PATH}.namespace", new_callable=mock.PropertyMock, return_value="default")
    @mock.patch(f"{MODULE_PATH}.client", new_callable=mock.PropertyMock)
    def test_secret_with_none_data_returns_none(self, mock_client, mock_namespace):
        """A secret whose data attribute is None yields None."""
        bare_secret = mock.MagicMock()
        bare_secret.data = None
        bare_secret.metadata.name = "some-secret"
        mock_client.return_value.list_namespaced_secret.return_value = _make_secret_list([bare_secret])
        assert KubernetesSecretsBackend().get_conn_value("my_db") is None
class TestKubernetesSecretsBackendLabelNone:
    @mock.patch(f"{MODULE_PATH}.client", new_callable=mock.PropertyMock)
    def test_connections_label_none(self, mock_client):
        """With connections_label=None, connection lookups short-circuit to None."""
        backend = KubernetesSecretsBackend(connections_label=None)
        assert backend.get_conn_value("my_db") is None
        mock_client.return_value.list_namespaced_secret.assert_not_called()

    @mock.patch(f"{MODULE_PATH}.client", new_callable=mock.PropertyMock)
    def test_variables_label_none(self, mock_client):
        """With variables_label=None, variable lookups short-circuit to None."""
        backend = KubernetesSecretsBackend(variables_label=None)
        assert backend.get_variable("my_var") is None
        mock_client.return_value.list_namespaced_secret.assert_not_called()

    @mock.patch(f"{MODULE_PATH}.client", new_callable=mock.PropertyMock)
    def test_config_label_none(self, mock_client):
        """With config_label=None, config lookups short-circuit to None."""
        backend = KubernetesSecretsBackend(config_label=None)
        assert backend.get_config("my_config") is None
        mock_client.return_value.list_namespaced_secret.assert_not_called()
class TestKubernetesSecretsBackendMultipleMatches:
    @mock.patch(f"{MODULE_PATH}.namespace", new_callable=mock.PropertyMock, return_value="default")
    @mock.patch(f"{MODULE_PATH}.client", new_callable=mock.PropertyMock)
    def test_multiple_secrets_uses_first_and_warns(self, mock_client, mock_namespace, caplog):
        """Test that multiple matching secrets uses the first and logs a warning."""
        # Function-scoped import kept, but hoisted to the top of the body instead
        # of being buried between the arrange and act steps.
        import logging

        mock_client.return_value.list_namespaced_secret.return_value = _make_secret_list(
            [
                _make_secret({"value": "first-value"}, name="secret-1"),
                _make_secret({"value": "second-value"}, name="secret-2"),
            ]
        )
        backend = KubernetesSecretsBackend()
        with caplog.at_level(logging.WARNING):
            result = backend.get_conn_value("my_db")
        # The first secret wins and the ambiguity is surfaced as a warning.
        assert result == "first-value"
        assert "Multiple secrets found" in caplog.text
class TestKubernetesSecretsBackendClientInit:
    @mock.patch("airflow.providers.cncf.kubernetes.secrets.kubernetes_secrets_backend.CoreV1Api")
    @mock.patch("airflow.providers.cncf.kubernetes.secrets.kubernetes_secrets_backend.ApiClient")
    @mock.patch("airflow.providers.cncf.kubernetes.secrets.kubernetes_secrets_backend.load_incluster_config")
    def test_client_uses_incluster_config(self, mock_load_incluster, mock_api_client, mock_core_v1):
        """The lazy client property loads in-cluster config and wraps an ApiClient."""
        client = KubernetesSecretsBackend().client
        mock_load_incluster.assert_called_once()
        mock_api_client.assert_called_once()
        mock_core_v1.assert_called_once_with(mock_api_client.return_value)
        assert client is mock_core_v1.return_value
class TestKubernetesSecretsBackendNamespace:
    @mock.patch("airflow.providers.cncf.kubernetes.secrets.kubernetes_secrets_backend.Path")
    def test_namespace_auto_detected(self, mock_path_cls):
        """The namespace is read (and stripped) from the service-account metadata file."""
        mock_path_cls.return_value.read_text.return_value = "airflow\n"
        assert KubernetesSecretsBackend().namespace == "airflow"
        mock_path_cls.assert_called_once_with(KubernetesSecretsBackend.SERVICE_ACCOUNT_NAMESPACE_PATH)

    @mock.patch("airflow.providers.cncf.kubernetes.secrets.kubernetes_secrets_backend.Path")
    def test_namespace_raises_when_not_found(self, mock_path_cls):
        """A missing metadata file surfaces as an AirflowException with guidance."""
        mock_path_cls.return_value.read_text.side_effect = FileNotFoundError
        backend = KubernetesSecretsBackend()
        expected = "Could not auto-detect Kubernetes namespace.*automountServiceAccountToken"
        with pytest.raises(AirflowException, match=expected):
            _ = backend.namespace

    def test_namespace_explicit(self):
        """An explicitly passed namespace wins without touching the filesystem."""
        assert KubernetesSecretsBackend(namespace="my-ns").namespace == "my-ns"

    @mock.patch(f"{MODULE_PATH}.namespace", new_callable=mock.PropertyMock, return_value="airflow")
    @mock.patch(f"{MODULE_PATH}.client", new_callable=mock.PropertyMock)
    def test_namespace_used_in_api_calls(self, mock_client, mock_namespace):
        """The resolved namespace is what gets passed to the secrets API."""
        api = mock_client.return_value
        api.list_namespaced_secret.return_value = _make_secret_list(
            [_make_secret({"value": "postgresql://localhost/db"})]
        )
        KubernetesSecretsBackend().get_conn_value("my_db")
        api.list_namespaced_secret.assert_called_once_with(
            "airflow",
            label_selector="airflow.apache.org/connection-id=my_db",
            resource_version="0",
        )
class TestKubernetesSecretsBackendApiErrors:
    @mock.patch(f"{MODULE_PATH}.namespace", new_callable=mock.PropertyMock, return_value="default")
    @mock.patch(f"{MODULE_PATH}.client", new_callable=mock.PropertyMock)
    def test_api_exception_is_raised(self, mock_client, mock_namespace):
        """Kubernetes API errors propagate to the caller unchanged."""
        mock_client.return_value.list_namespaced_secret.side_effect = ApiException(status=403)
        with pytest.raises(ApiException) as exc_info:
            KubernetesSecretsBackend().get_conn_value("my_db")
        assert exc_info.value.status == 403
| {
"repo_id": "apache/airflow",
"file_path": "providers/cncf/kubernetes/tests/unit/cncf/kubernetes/secrets/test_kubernetes_secrets_backend.py",
"license": "Apache License 2.0",
"lines": 277,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/edge3/src/airflow/providers/edge3/models/edge_base.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from sqlalchemy import MetaData
from sqlalchemy.orm import registry
from airflow.models.base import _get_schema, naming_convention
# Isolated metadata for Edge3 provider tables.
# By using a dedicated MetaData + registry + Base, Edge3 tables are never
# registered in Airflow core's Base.metadata, avoiding validation conflicts
# without needing the post-hoc Base.metadata.remove() hack.
# Schema and naming convention are reused from core so generated DDL and
# constraint names stay consistent with the rest of the Airflow database.
edge_metadata = MetaData(schema=_get_schema(), naming_convention=naming_convention)
_edge_mapper_registry = registry(metadata=edge_metadata)
# Declarative base for all Edge3 ORM models; providers should subclass this
# instead of Airflow core's Base.
Base = _edge_mapper_registry.generate_base()
Base.__allow_unmapped__ = True  # match core Base workaround for unmapped v1.4 models
| {
"repo_id": "apache/airflow",
"file_path": "providers/edge3/src/airflow/providers/edge3/models/edge_base.py",
"license": "Apache License 2.0",
"lines": 28,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/edge3/tests/unit/edge3/models/test_edge_base.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.models.base import naming_convention
from airflow.providers.edge3.models.edge_base import Base, edge_metadata
class TestEdgeBase:
    def test_edge_metadata_is_isolated_from_core(self):
        """edge_metadata must not be the same object as Airflow core's Base.metadata."""
        from airflow.models.base import Base as CoreBase

        assert CoreBase.metadata is not edge_metadata

    def test_edge_tables_not_in_core_metadata(self):
        """Edge3 tables must never appear in Airflow core's Base.metadata."""
        from airflow.models.base import Base as CoreBase

        for table_name in ("edge_worker", "edge_job", "edge_logs"):
            assert table_name not in CoreBase.metadata.tables

    def test_edge_metadata_contains_edge_tables(self):
        """edge_metadata must contain all three Edge3 tables."""
        # Importing the model modules registers their tables on edge_metadata.
        import airflow.providers.edge3.models.edge_job
        import airflow.providers.edge3.models.edge_logs
        import airflow.providers.edge3.models.edge_worker  # noqa: F401

        for table_name in ("edge_worker", "edge_job", "edge_logs"):
            assert table_name in edge_metadata.tables

    def test_edge_metadata_uses_same_naming_convention_as_core(self):
        """edge_metadata should use the same naming convention as Airflow core."""
        assert naming_convention == edge_metadata.naming_convention

    def test_base_allow_unmapped(self):
        """Base must have __allow_unmapped__ set to match core Base workaround."""
        assert Base.__allow_unmapped__ is True
| {
"repo_id": "apache/airflow",
"file_path": "providers/edge3/tests/unit/edge3/models/test_edge_base.py",
"license": "Apache License 2.0",
"lines": 45,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:scripts/ci/docs/store_stable_versions.py | #!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# /// script
# requires-python = ">=3.8"
# dependencies = [
# "pyyaml>=6.0",
# ]
# ///
"""
This script retrieves versions from versioned doc packages built and stores them in stable.txt files.
It should be run after building docs but before saving/uploading the build artifacts.
"""
from __future__ import annotations
import os
import re
import shutil
import sys
from pathlib import Path
import yaml
def get_airflow_version(airflow_root: Path) -> str | None:
"""Get Airflow version from airflow/__init__.py."""
# Try Airflow 3.x location first
init_file = airflow_root / "airflow-core" / "src" / "airflow" / "__init__.py"
if not init_file.exists():
# Fallback to Airflow 2.x location
init_file = airflow_root / "airflow" / "__init__.py"
if not init_file.exists():
return None
content = init_file.read_text()
match = re.search(r'^__version__\s*=\s*["\']([^"\']+)["\']', content, re.MULTILINE)
if match:
return match.group(1)
return None
def get_version_from_provider_yaml(provider_yaml_path: Path) -> str | None:
"""Get version from provider.yaml file (first version in the versions list)."""
if not provider_yaml_path.exists():
return None
try:
with open(provider_yaml_path) as f:
data = yaml.safe_load(f)
if "versions" in data and len(data["versions"]) > 0:
# versions is a list of version strings, get the first one
return str(data["versions"][0])
except Exception:
pass
return None
def get_version_from_pyproject_toml(pyproject_path: Path) -> str | None:
"""Get version from pyproject.toml file."""
if not pyproject_path.exists():
return None
content = pyproject_path.read_text()
match = re.search(r'^version\s*=\s*["\']([^"\']+)["\']', content, re.MULTILINE)
if match:
return match.group(1)
return None
def get_helm_chart_version(chart_yaml_path: Path) -> str | None:
"""Get version from Chart.yaml file."""
if not chart_yaml_path.exists():
return None
content = chart_yaml_path.read_text()
match = re.search(r"^version:\s*(.+)$", content, re.MULTILINE)
if match:
return match.group(1).strip()
return None
def get_package_version(package_name: str, airflow_root: Path) -> str | None:
"""Get version for a package based on its type and metadata location."""
if package_name == "apache-airflow":
return get_airflow_version(airflow_root)
if package_name == "apache-airflow-ctl":
# Try provider.yaml first
provider_yaml = airflow_root / "airflow-ctl" / "src" / "airflow_ctl" / "provider.yaml"
version = get_version_from_provider_yaml(provider_yaml)
if version:
return version
# Fallback to pyproject.toml
pyproject = airflow_root / "airflow-ctl" / "pyproject.toml"
return get_version_from_pyproject_toml(pyproject)
if package_name == "task-sdk":
# Try provider.yaml first
provider_yaml = airflow_root / "task-sdk" / "src" / "task_sdk" / "provider.yaml"
version = get_version_from_provider_yaml(provider_yaml)
if version:
return version
# Fallback to pyproject.toml
pyproject = airflow_root / "task-sdk" / "pyproject.toml"
return get_version_from_pyproject_toml(pyproject)
if package_name == "helm-chart":
chart_yaml = airflow_root / "chart" / "Chart.yaml"
return get_helm_chart_version(chart_yaml)
if package_name.startswith("apache-airflow-providers-"):
# Get provider version from provider.yaml
provider_short_name = package_name.replace("apache-airflow-providers-", "").replace("-", "/")
# Try Airflow 3.x location first (providers/{provider}/provider.yaml)
provider_yaml = airflow_root / "providers" / provider_short_name / "provider.yaml"
version = get_version_from_provider_yaml(provider_yaml)
if version:
return version
# Fallback to Airflow 2.x location (airflow/providers/{provider}/provider.yaml)
provider_yaml = airflow_root / "airflow" / "providers" / provider_short_name / "provider.yaml"
return get_version_from_provider_yaml(provider_yaml)
print(f"Unknown package type: {package_name}")
return None
def main() -> int:
    """Main function to process all documentation packages.

    Walks every package directory under the docs build dir, writes a
    ``stable.txt`` containing the current version for each versioned package,
    and copies the ``stable`` docs into a version-named directory.

    :return: process exit code — 0 on success, 1 if no docs build dir exists.
    """
    # Get configuration from environment or defaults
    docs_build_dir = Path(os.environ.get("DOCS_BUILD_DIR", "generated/_build/docs"))
    airflow_root = Path(os.environ.get("AIRFLOW_ROOT", os.getcwd()))
    # Change to airflow root directory
    # NOTE: process-wide side effect; relative paths below resolve from here.
    os.chdir(airflow_root)
    print("=" * 42)
    print("Storing stable versions for built docs")
    print("=" * 42)
    # Check if docs build directory exists
    if not docs_build_dir.exists():
        print(f"Error: Docs build directory not found at {docs_build_dir}")
        # Try alternate location for Airflow 2 compatibility
        alt_docs_dir = Path("docs/_build/docs")
        if alt_docs_dir.exists():
            docs_build_dir = alt_docs_dir
            print(f"Found alternate location at {docs_build_dir}")
        else:
            print("No docs build directory found, exiting")
            return 1
    # Non-versioned packages to skip
    non_versioned_packages = {"apache-airflow-providers", "docker-stack"}
    stable_files_created = []
    # Process each package in the docs build directory
    for package_dir in sorted(docs_build_dir.iterdir()):
        if not package_dir.is_dir():
            continue
        package_name = package_dir.name
        # Skip non-versioned packages
        if package_name in non_versioned_packages:
            print(f"Skipping non-versioned package: {package_name}")
            continue
        # Check if this package has a stable directory (indicating it's versioned)
        stable_dir = package_dir / "stable"
        if not stable_dir.exists() or not stable_dir.is_dir():
            print(f"Skipping non-versioned package (no stable dir): {package_name}")
            continue
        print(f"Processing versioned package: {package_name}")
        # Get the version for this package
        version = get_package_version(package_name, airflow_root)
        if not version:
            print(f" Warning: Could not determine version for {package_name}, skipping")
            continue
        print(f" Version: {version}")
        # Create stable.txt file
        stable_file = package_dir / "stable.txt"
        stable_file.write_text(version + "\n")
        print(f" Created: {stable_file}")
        stable_files_created.append((package_name, version))
        # Also create a version-specific copy of the stable docs
        # so version-pinned URLs keep working after the next release.
        version_dir = package_dir / version
        if not version_dir.exists():
            print(f" Copying stable docs to versioned directory: {version_dir}")
            shutil.copytree(stable_dir, version_dir)
        else:
            print(f" Version directory already exists: {version_dir}")
    print()
    print("=" * 42)
    print("Stable version files created successfully")
    print("=" * 42)
    print()
    if stable_files_created:
        print("Summary of stable.txt files:")
        for package_name, version in stable_files_created:
            print(f" {package_name}: {version}")
    else:
        print("No stable.txt files created")
    print()
    print("Done!")
    return 0
# Script entry point: exit with main()'s return code.
if __name__ == "__main__":
    sys.exit(main())
| {
"repo_id": "apache/airflow",
"file_path": "scripts/ci/docs/store_stable_versions.py",
"license": "Apache License 2.0",
"lines": 190,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/deadline.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from collections.abc import Iterable
from datetime import datetime
from uuid import UUID
from pydantic import AliasPath, Field
from airflow.api_fastapi.core_api.base import BaseModel
class DeadlineResponse(BaseModel):
    """Deadline serializer for responses."""

    id: UUID
    deadline_time: datetime
    missed: bool
    created_at: datetime
    # Flattened from the related deadline_alert object via AliasPath;
    # both stay None when the deadline has no alert attached.
    alert_name: str | None = Field(validation_alias=AliasPath("deadline_alert", "name"), default=None)
    alert_description: str | None = Field(
        validation_alias=AliasPath("deadline_alert", "description"), default=None
    )
class DeadlineCollectionResponse(BaseModel):
    """Deadline Collection serializer for responses."""

    # Items for the current page; total_entries is the unpaginated count.
    deadlines: Iterable[DeadlineResponse]
    total_entries: int
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/deadline.py",
"license": "Apache License 2.0",
"lines": 36,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/src/airflow/api_fastapi/core_api/routes/ui/deadlines.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Annotated
from fastapi import Depends, HTTPException, status
from sqlalchemy import select
from sqlalchemy.orm import joinedload
from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity
from airflow.api_fastapi.common.db.common import SessionDep, paginated_select
from airflow.api_fastapi.common.parameters import QueryLimit, QueryOffset, SortParam
from airflow.api_fastapi.common.router import AirflowRouter
from airflow.api_fastapi.core_api.datamodels.ui.deadline import DeadlineCollectionResponse
from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc
from airflow.api_fastapi.core_api.security import requires_access_dag
from airflow.models.dagrun import DagRun
from airflow.models.deadline import Deadline
from airflow.models.deadline_alert import DeadlineAlert
deadlines_router = AirflowRouter(prefix="/dags/{dag_id}/dagRuns/{dag_run_id}/deadlines", tags=["Deadlines"])
@deadlines_router.get(
    "",
    responses=create_openapi_http_exception_doc(
        [
            status.HTTP_404_NOT_FOUND,
        ]
    ),
    dependencies=[
        Depends(
            requires_access_dag(
                method="GET",
                access_entity=DagAccessEntity.RUN,
            )
        ),
    ],
)
def get_dag_run_deadlines(
    dag_id: str,
    dag_run_id: str,
    session: SessionDep,
    limit: QueryLimit,
    offset: QueryOffset,
    order_by: Annotated[
        SortParam,
        Depends(
            SortParam(
                ["id", "deadline_time", "created_at"],
                Deadline,
                to_replace={
                    # "alert_name" is not a Deadline column; map it to the
                    # joined DeadlineAlert.name so it is sortable too.
                    "alert_name": DeadlineAlert.name,
                },
            ).dynamic_depends(default="deadline_time")
        ),
    ],
) -> DeadlineCollectionResponse:
    """Get all deadlines for a specific DAG run."""
    # Resolve the run first so a bad dag_id/dag_run_id pair yields a 404
    # instead of silently returning an empty collection.
    dag_run = session.scalar(select(DagRun).where(DagRun.dag_id == dag_id, DagRun.run_id == dag_run_id))
    if not dag_run:
        raise HTTPException(
            status.HTTP_404_NOT_FOUND,
            f"No DAG run found for dag_id={dag_id} dag_run_id={dag_run_id}",
        )
    # outerjoin keeps deadlines that have no alert while still exposing
    # DeadlineAlert.name for the "alert_name" sort mapping above;
    # joinedload eagerly loads the alert so serialization does not lazy-load.
    query = (
        select(Deadline)
        .join(Deadline.dagrun)
        .outerjoin(Deadline.deadline_alert)
        .where(Deadline.dagrun_id == dag_run.id)
        .where(DagRun.dag_id == dag_id)
        .options(joinedload(Deadline.deadline_alert))
    )
    # paginated_select applies ordering, offset and limit, and also
    # computes the pre-pagination total for the response envelope.
    deadlines_select, total_entries = paginated_select(
        statement=query,
        filters=None,
        order_by=order_by,
        offset=offset,
        limit=limit,
        session=session,
    )
    deadlines = session.scalars(deadlines_select)
    return DeadlineCollectionResponse(deadlines=deadlines, total_entries=total_entries)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/core_api/routes/ui/deadlines.py",
"license": "Apache License 2.0",
"lines": 92,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/tests/unit/api_fastapi/core_api/routes/ui/test_deadlines.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from sqlalchemy import select
from airflow._shared.timezones import timezone
from airflow.models.deadline import Deadline
from airflow.models.deadline_alert import DeadlineAlert
from airflow.models.serialized_dag import SerializedDagModel
from airflow.providers.standard.operators.empty import EmptyOperator
from airflow.sdk.definitions.callback import AsyncCallback
from airflow.sdk.definitions.deadline import DeadlineReference
from airflow.utils.state import DagRunState
from airflow.utils.types import DagRunTriggeredByType, DagRunType
from tests_common.test_utils.asserts import assert_queries_count
from tests_common.test_utils.db import (
clear_db_dags,
clear_db_deadline,
clear_db_deadline_alert,
clear_db_runs,
clear_db_serialized_dags,
)
# Every test in this module touches the metadata database.
pytestmark = pytest.mark.db_test
DAG_ID = "test_deadlines_dag"
# Each run represents a different deadline scenario tested below.
RUN_EMPTY = "run_empty"  # no deadlines
RUN_SINGLE = "run_single"  # 1 deadline, not missed, no alert
RUN_MISSED = "run_missed"  # 1 deadline, missed=True
RUN_ALERT = "run_alert"  # 1 deadline linked to a DeadlineAlert
RUN_MULTI = "run_multi"  # 3 deadlines added out-of-order (ordering test)
RUN_OTHER = "run_other"  # has 1 deadline; used to verify per-run isolation
ALERT_NAME = "SLA Breach Alert"
ALERT_DESCRIPTION = "Fires when SLA is breached"
# Dotted path to the no-op callback below; stored on Deadline/DeadlineAlert rows.
_CALLBACK_PATH = "tests.unit.api_fastapi.core_api.routes.ui.test_deadlines._noop_callback"
async def _noop_callback(**kwargs):
    """Do nothing; stand-in callback so Deadline rows can be created in tests."""
    return None
def _cb() -> AsyncCallback:
    """Build a fresh AsyncCallback pointing at this module's no-op callback."""
    callback = AsyncCallback(_CALLBACK_PATH)
    return callback
@pytest.fixture(autouse=True)
def setup(dag_maker, session):
    """Provision one DAG, six runs, and the deadline rows each scenario needs."""
    # Start from a clean slate so leftovers from other modules cannot leak in.
    clear_db_deadline()
    clear_db_deadline_alert()
    clear_db_runs()
    clear_db_dags()
    clear_db_serialized_dags()
    with dag_maker(DAG_ID, serialized=True, session=session):
        EmptyOperator(task_id="task")
    # ---- create runs -------------------------------------------------------
    dag_maker.create_dagrun(
        run_id=RUN_EMPTY,
        state=DagRunState.SUCCESS,
        run_type=DagRunType.SCHEDULED,
        logical_date=timezone.datetime(2024, 11, 1),
        triggered_by=DagRunTriggeredByType.TEST,
    )
    run_single = dag_maker.create_dagrun(
        run_id=RUN_SINGLE,
        state=DagRunState.SUCCESS,
        run_type=DagRunType.SCHEDULED,
        logical_date=timezone.datetime(2024, 11, 2),
        triggered_by=DagRunTriggeredByType.TEST,
    )
    run_missed = dag_maker.create_dagrun(
        run_id=RUN_MISSED,
        state=DagRunState.SUCCESS,
        run_type=DagRunType.SCHEDULED,
        logical_date=timezone.datetime(2024, 11, 3),
        triggered_by=DagRunTriggeredByType.TEST,
    )
    run_alert = dag_maker.create_dagrun(
        run_id=RUN_ALERT,
        state=DagRunState.SUCCESS,
        run_type=DagRunType.SCHEDULED,
        logical_date=timezone.datetime(2024, 11, 4),
        triggered_by=DagRunTriggeredByType.TEST,
    )
    run_multi = dag_maker.create_dagrun(
        run_id=RUN_MULTI,
        state=DagRunState.SUCCESS,
        run_type=DagRunType.SCHEDULED,
        logical_date=timezone.datetime(2024, 11, 5),
        triggered_by=DagRunTriggeredByType.TEST,
    )
    run_other = dag_maker.create_dagrun(
        run_id=RUN_OTHER,
        state=DagRunState.SUCCESS,
        run_type=DagRunType.SCHEDULED,
        logical_date=timezone.datetime(2024, 11, 6),
        triggered_by=DagRunTriggeredByType.TEST,
    )
    # ---- deadlines ---------------------------------------------------------
    # run_empty: intentionally no deadlines
    # run_single: one active, non-missed deadline with no alert
    session.add(
        Deadline(
            deadline_time=timezone.datetime(2025, 1, 1, 12, 0, 0),
            callback=_cb(),
            dagrun_id=run_single.id,
            deadline_alert_id=None,
        )
    )
    # run_missed: one missed deadline
    # (set after construction; presumably "missed" is not a constructor arg)
    missed_dl = Deadline(
        deadline_time=timezone.datetime(2024, 12, 1),
        callback=_cb(),
        dagrun_id=run_missed.id,
        deadline_alert_id=None,
    )
    missed_dl.missed = True
    session.add(missed_dl)
    # run_alert: one deadline linked to a DeadlineAlert
    serialized_dag = session.scalar(select(SerializedDagModel).where(SerializedDagModel.dag_id == DAG_ID))
    alert = DeadlineAlert(
        serialized_dag_id=serialized_dag.id,
        name=ALERT_NAME,
        description=ALERT_DESCRIPTION,
        reference=DeadlineReference.DAGRUN_QUEUED_AT.serialize_reference(),
        interval=3600.0,
        callback_def={"path": _CALLBACK_PATH},
    )
    session.add(alert)
    # Flush so alert.id is populated before it is referenced below.
    session.flush()
    session.add(
        Deadline(
            deadline_time=timezone.datetime(2025, 1, 1, 12, 0, 0),
            callback=_cb(),
            dagrun_id=run_alert.id,
            deadline_alert_id=alert.id,
        )
    )
    # run_multi: three deadlines intentionally added in non-chronological order
    for dl_time in [
        timezone.datetime(2025, 3, 1),
        timezone.datetime(2025, 1, 1),
        timezone.datetime(2025, 2, 1),
    ]:
        session.add(
            Deadline(
                deadline_time=dl_time,
                callback=_cb(),
                dagrun_id=run_multi.id,
                deadline_alert_id=None,
            )
        )
    # run_other: one deadline (for isolation verification)
    session.add(
        Deadline(
            deadline_time=timezone.datetime(2025, 6, 1),
            callback=_cb(),
            dagrun_id=run_other.id,
            deadline_alert_id=None,
        )
    )
    dag_maker.sync_dagbag_to_db()
    session.commit()
    yield
    # Teardown mirrors setup so this module leaves no rows behind.
    clear_db_deadline()
    clear_db_deadline_alert()
    clear_db_runs()
    clear_db_dags()
    clear_db_serialized_dags()
class TestGetDagRunDeadlines:
    """Tests for GET /dags/{dag_id}/dagRuns/{dag_run_id}/deadlines."""
    # ------------------------------------------------------------------
    # 200 – happy paths
    # ------------------------------------------------------------------
    def test_no_deadlines_returns_empty_list(self, test_client):
        response = test_client.get(f"/dags/{DAG_ID}/dagRuns/{RUN_EMPTY}/deadlines")
        assert response.status_code == 200
        assert response.json() == {"deadlines": [], "total_entries": 0}
    def test_single_deadline_without_alert(self, test_client):
        response = test_client.get(f"/dags/{DAG_ID}/dagRuns/{RUN_SINGLE}/deadlines")
        assert response.status_code == 200
        data = response.json()
        assert data["total_entries"] == 1
        deadline1 = data["deadlines"][0]
        assert deadline1["deadline_time"] == "2025-01-01T12:00:00Z"
        assert deadline1["missed"] is False
        # No alert is linked, so the flattened alert fields must be null.
        assert deadline1["alert_name"] is None
        assert deadline1["alert_description"] is None
        # id/created_at are server-generated; only presence is checked.
        assert "id" in deadline1
        assert "created_at" in deadline1
    def test_missed_deadline_is_reflected(self, test_client):
        response = test_client.get(f"/dags/{DAG_ID}/dagRuns/{RUN_MISSED}/deadlines")
        assert response.status_code == 200
        data = response.json()
        assert data["total_entries"] == 1
        assert data["deadlines"][0]["missed"] is True
    def test_deadline_with_alert_name_and_description(self, test_client):
        # Pin the SQL round-trip count to guard against lazy-load regressions
        # (e.g. an N+1 on the alert relationship).
        with assert_queries_count(4):
            response = test_client.get(f"/dags/{DAG_ID}/dagRuns/{RUN_ALERT}/deadlines")
        assert response.status_code == 200
        data = response.json()
        assert data["total_entries"] == 1
        assert data["deadlines"][0]["alert_name"] == ALERT_NAME
        assert data["deadlines"][0]["alert_description"] == ALERT_DESCRIPTION
    def test_deadlines_ordered_by_deadline_time_ascending(self, test_client):
        with assert_queries_count(4):
            response = test_client.get(f"/dags/{DAG_ID}/dagRuns/{RUN_MULTI}/deadlines")
        assert response.status_code == 200
        data = response.json()
        assert data["total_entries"] == 3
        # Default order is deadline_time ascending even though rows were
        # inserted out of chronological order in the fixture.
        returned_times = [d["deadline_time"] for d in data["deadlines"]]
        assert returned_times == sorted(returned_times)
    @pytest.mark.parametrize(
        "order_by",
        ["deadline_time", "id", "created_at", "alert_name"],
        ids=["deadline_time", "id", "created_at", "alert_name"],
    )
    def test_should_response_200_order_by(self, test_client, order_by):
        url = f"/dags/{DAG_ID}/dagRuns/{RUN_MULTI}/deadlines"
        # Two requests, hence double the per-request query budget.
        with assert_queries_count(8):
            response_asc = test_client.get(url, params={"order_by": order_by})
            response_desc = test_client.get(url, params={"order_by": f"-{order_by}"})
        assert response_asc.status_code == 200
        assert response_desc.status_code == 200
        # Descending must be the exact reverse of ascending for every sort key.
        ids_asc = [d["id"] for d in response_asc.json()["deadlines"]]
        ids_desc = [d["id"] for d in response_desc.json()["deadlines"]]
        assert ids_desc == list(reversed(ids_asc))
    def test_only_returns_deadlines_for_requested_run(self, test_client):
        """Deadlines belonging to a different run must not appear in the response."""
        # RUN_EMPTY has no deadlines; RUN_OTHER has one — querying RUN_EMPTY must return [].
        response = test_client.get(f"/dags/{DAG_ID}/dagRuns/{RUN_EMPTY}/deadlines")
        assert response.status_code == 200
        assert response.json() == {"deadlines": [], "total_entries": 0}
        # And querying RUN_OTHER returns only its own deadline.
        response = test_client.get(f"/dags/{DAG_ID}/dagRuns/{RUN_OTHER}/deadlines")
        assert response.status_code == 200
        assert response.json()["total_entries"] == 1
    # ------------------------------------------------------------------
    # 404
    # ------------------------------------------------------------------
    @pytest.mark.parametrize(
        ("dag_id", "run_id"),
        [
            pytest.param("nonexistent_dag", RUN_EMPTY, id="wrong_dag_id"),
            pytest.param(DAG_ID, "nonexistent_run", id="wrong_run_id"),
            pytest.param("nonexistent_dag", "nonexistent_run", id="both_wrong"),
        ],
    )
    def test_should_response_404(self, test_client, dag_id, run_id):
        response = test_client.get(f"/dags/{dag_id}/dagRuns/{run_id}/deadlines")
        assert response.status_code == 404
    # ------------------------------------------------------------------
    # 401 / 403
    # ------------------------------------------------------------------
    def test_should_response_401(self, unauthenticated_test_client):
        response = unauthenticated_test_client.get(f"/dags/{DAG_ID}/dagRuns/{RUN_EMPTY}/deadlines")
        assert response.status_code == 401
    def test_should_response_403(self, unauthorized_test_client):
        response = unauthorized_test_client.get(f"/dags/{DAG_ID}/dagRuns/{RUN_EMPTY}/deadlines")
        assert response.status_code == 403
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/api_fastapi/core_api/routes/ui/test_deadlines.py",
"license": "Apache License 2.0",
"lines": 266,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/routes/router.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from enum import Enum
from airflow.api_fastapi.common.router import AirflowRouter
# Shared OpenAPI tag applied to every FAB auth-manager route.
FAB_AUTH_TAGS: list[str | Enum] = ["FabAuthManager"]
# URL prefix under which the FAB v1 API is mounted.
FAB_AUTH_PREFIX = "/fab/v1"
# Router for auth endpoints mounted at the application root (no prefix).
auth_router = AirflowRouter(tags=FAB_AUTH_TAGS)
# Router for endpoints served under the /fab/v1 prefix.
fab_router = AirflowRouter(prefix=FAB_AUTH_PREFIX, tags=FAB_AUTH_TAGS)
def register_routes() -> None:
    """Register FastAPI routes by importing modules for side effects."""
    import importlib

    base = "airflow.providers.fab.auth_manager.api_fastapi.routes"
    # Importing each module attaches its endpoints to the shared routers.
    for submodule in ("login", "roles", "users"):
        importlib.import_module(f"{base}.{submodule}")
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/routes/router.py",
"license": "Apache License 2.0",
"lines": 29,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/fab/tests/unit/fab/auth_manager/api_fastapi/routes/test_router.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.providers.fab.auth_manager.api_fastapi.routes.router import (
FAB_AUTH_PREFIX,
auth_router,
fab_router,
)
def test_root_routers_share_tags() -> None:
    """Both root routers must expose the same OpenAPI tags."""
    assert fab_router.tags == auth_router.tags
def test_fab_router_prefix() -> None:
    """fab_router must be mounted under the canonical FAB prefix."""
    expected_prefix = FAB_AUTH_PREFIX
    assert fab_router.prefix == expected_prefix
def test_auth_router_prefix() -> None:
    """auth_router is mounted at the application root, i.e. empty prefix."""
    assert "" == auth_router.prefix
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/tests/unit/fab/auth_manager/api_fastapi/routes/test_router.py",
"license": "Apache License 2.0",
"lines": 28,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.