sample_id stringlengths 21 196 | text stringlengths 105 936k | metadata dict | category stringclasses 6
values |
|---|---|---|---|
apache/airflow:scripts/ci/prek/check_notice_files.py | #!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# /// script
# requires-python = ">=3.10, <3.11"
# dependencies = []
# ///
"""
Check that NOTICE files contain the current year and Apache Software Foundation reference.
This script validates NOTICE files to ensure they:
- Include the current year in copyright statements
- Reference the Apache Software Foundation
Usage: check_notice_files.py <notice_file_paths...>
"""
from __future__ import annotations

import sys
from datetime import datetime
from pathlib import Path

CURRENT_YEAR = str(datetime.now().year)

# The exact copyright line every NOTICE file is expected to carry.
# Loop-invariant, so it is built once up front.
EXPECTED_LINE = f"Copyright 2016-{CURRENT_YEAR} The Apache Software Foundation"

failure_count = 0
for path_arg in sys.argv[1:]:
    text = Path(path_arg).read_text()
    # Only files that contain a copyright statement at all are validated;
    # a file without "Copyright" passes unchanged.
    if "Copyright" in text and EXPECTED_LINE not in text:
        print(f"❌ {path_arg}: Missing expected string: {EXPECTED_LINE!r}")
        failure_count += 1

# Non-zero exit signals pre-commit that at least one file failed.
sys.exit(1 if failure_count else 0)
| {
"repo_id": "apache/airflow",
"file_path": "scripts/ci/prek/check_notice_files.py",
"license": "Apache License 2.0",
"lines": 41,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/datamodels/users.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime
from pydantic import Field, SecretStr
from airflow.api_fastapi.core_api.base import BaseModel, StrictBaseModel
from airflow.providers.fab.auth_manager.api_fastapi.datamodels.roles import Role
class UserBody(StrictBaseModel):
    """Incoming payload for creating a user."""

    # Identity fields are required and must be non-empty strings.
    username: str = Field(min_length=1)
    email: str = Field(min_length=1)
    first_name: str = Field(min_length=1)
    last_name: str = Field(min_length=1)
    # Optional role references; None means "no roles supplied".
    roles: list[Role] | None = None
    # SecretStr keeps the password masked in repr() and serialized output.
    password: SecretStr
class UserPatchBody(StrictBaseModel):
    """Incoming payload for updating a user (all fields optional)."""

    # When a string field IS provided it must still be non-empty;
    # None means "leave this field unchanged".
    username: str | None = Field(default=None, min_length=1)
    email: str | None = Field(default=None, min_length=1)
    first_name: str | None = Field(default=None, min_length=1)
    last_name: str | None = Field(default=None, min_length=1)
    roles: list[Role] | None = None
    password: SecretStr | None = None
class UserResponse(BaseModel):
    """Outgoing representation of a user (no password)."""

    username: str
    email: str
    first_name: str
    last_name: str
    roles: list[Role] | None = None
    # Account/audit fields are optional — they may be unset on the backing row.
    active: bool | None = None
    last_login: datetime | None = None
    login_count: int | None = None
    fail_login_count: int | None = None
    created_on: datetime | None = None
    changed_on: datetime | None = None
class UserCollectionResponse(BaseModel):
    """Response model for a collection of users."""

    # The (possibly paginated) page of users being returned.
    users: list[UserResponse]
    # Total number of users overall, not just the entries in this page.
    total_entries: int
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/datamodels/users.py",
"license": "Apache License 2.0",
"lines": 54,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/routes/users.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from fastapi import Depends, Path, Query, status
from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc
from airflow.providers.fab.auth_manager.api_fastapi.datamodels.users import (
UserBody,
UserCollectionResponse,
UserPatchBody,
UserResponse,
)
from airflow.providers.fab.auth_manager.api_fastapi.parameters import get_effective_limit
from airflow.providers.fab.auth_manager.api_fastapi.routes.router import fab_router
from airflow.providers.fab.auth_manager.api_fastapi.security import requires_fab_custom_view
from airflow.providers.fab.auth_manager.api_fastapi.services.users import FABAuthManagerUsers
from airflow.providers.fab.auth_manager.cli_commands.utils import get_application_builder
from airflow.providers.fab.www.security import permissions
@fab_router.post(
    "/users",
    responses=create_openapi_http_exception_doc(
        [
            status.HTTP_400_BAD_REQUEST,
            status.HTTP_401_UNAUTHORIZED,
            status.HTTP_403_FORBIDDEN,
            status.HTTP_409_CONFLICT,
            status.HTTP_500_INTERNAL_SERVER_ERROR,
        ]
    ),
    dependencies=[Depends(requires_fab_custom_view("POST", permissions.RESOURCE_USER))],
)
def create_user(body: UserBody) -> UserResponse:
    """Create a new user."""
    # The application-builder context supplies the Flask app the FAB
    # security manager needs to operate.
    with get_application_builder():
        return FABAuthManagerUsers.create_user(body=body)
@fab_router.get(
    "/users",
    response_model=UserCollectionResponse,
    responses=create_openapi_http_exception_doc(
        [
            status.HTTP_400_BAD_REQUEST,
            status.HTTP_401_UNAUTHORIZED,
            status.HTTP_403_FORBIDDEN,
        ]
    ),
    dependencies=[Depends(requires_fab_custom_view("GET", permissions.RESOURCE_USER))],
)
def get_users(
    order_by: str = Query("id", description="Field to order by. Prefix with '-' for descending."),
    # The effective limit is resolved (and clamped) by get_effective_limit.
    limit: int = Depends(get_effective_limit()),
    offset: int = Query(0, ge=0, description="Number of items to skip before starting to collect results."),
) -> UserCollectionResponse:
    """List users with pagination and ordering."""
    with get_application_builder():
        return FABAuthManagerUsers.get_users(order_by=order_by, limit=limit, offset=offset)
@fab_router.get(
    "/users/{username}",
    responses=create_openapi_http_exception_doc(
        [
            status.HTTP_401_UNAUTHORIZED,
            status.HTTP_403_FORBIDDEN,
            status.HTTP_404_NOT_FOUND,
        ]
    ),
    dependencies=[Depends(requires_fab_custom_view("GET", permissions.RESOURCE_USER))],
)
def get_user(username: str = Path(..., min_length=1)) -> UserResponse:
    """Get a user by username."""
    with get_application_builder():
        return FABAuthManagerUsers.get_user(username=username)
@fab_router.patch(
    "/users/{username}",
    responses=create_openapi_http_exception_doc(
        [
            status.HTTP_400_BAD_REQUEST,
            status.HTTP_401_UNAUTHORIZED,
            status.HTTP_403_FORBIDDEN,
            status.HTTP_404_NOT_FOUND,
            status.HTTP_409_CONFLICT,
        ]
    ),
    # NOTE(review): the permission action is "PUT" although the HTTP route is
    # PATCH — presumably FAB models "edit" as the PUT action; confirm before changing.
    dependencies=[Depends(requires_fab_custom_view("PUT", permissions.RESOURCE_USER))],
)
def update_user(
    body: UserPatchBody,
    username: str = Path(..., min_length=1),
    update_mask: str | None = Query(None, description="Comma-separated list of fields to update"),
) -> UserResponse:
    """Update an existing user."""
    with get_application_builder():
        return FABAuthManagerUsers.update_user(username=username, body=body, update_mask=update_mask)
@fab_router.delete(
    "/users/{username}",
    # Successful deletion returns an empty 204 response.
    status_code=status.HTTP_204_NO_CONTENT,
    responses=create_openapi_http_exception_doc(
        [
            status.HTTP_401_UNAUTHORIZED,
            status.HTTP_403_FORBIDDEN,
            status.HTTP_404_NOT_FOUND,
        ]
    ),
    dependencies=[Depends(requires_fab_custom_view("DELETE", permissions.RESOURCE_USER))],
)
def delete_user(username: str = Path(..., min_length=1)):
    """Delete a user by username."""
    with get_application_builder():
        FABAuthManagerUsers.delete_user(username=username)
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/routes/users.py",
"license": "Apache License 2.0",
"lines": 119,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/services/users.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from fastapi import HTTPException, status
from sqlalchemy import func, select
from werkzeug.security import generate_password_hash
from airflow.providers.fab.auth_manager.api_fastapi.datamodels.roles import Role
from airflow.providers.fab.auth_manager.api_fastapi.datamodels.users import (
UserBody,
UserCollectionResponse,
UserPatchBody,
UserResponse,
)
from airflow.providers.fab.auth_manager.api_fastapi.sorting import build_ordering
from airflow.providers.fab.auth_manager.models import User
from airflow.providers.fab.auth_manager.security_manager.override import FabAirflowSecurityManagerOverride
from airflow.providers.fab.www.utils import get_fab_auth_manager
class FABAuthManagerUsers:
    """Service layer for FAB Auth Manager user operations."""

    @staticmethod
    def _resolve_roles(
        sm: FabAirflowSecurityManagerOverride, role_refs: list[Role] | None
    ) -> tuple[list, list[str]]:
        """Resolve role references against the security manager.

        Returns ``(found_roles, missing_names)``. Duplicate names in
        *role_refs* are looked up only once; ``None`` is treated as empty.
        """
        seen = set()
        roles: list = []
        missing: list[str] = []
        for r in role_refs or []:
            if r.name in seen:
                continue
            seen.add(r.name)
            role = sm.find_role(r.name)
            # Route the lookup result: the role object goes into ``roles``
            # when found, otherwise the raw name goes into ``missing``.
            (roles if role else missing).append(role or r.name)
        return roles, missing

    @classmethod
    def get_user(cls, username: str) -> UserResponse:
        """Get a user by username.

        Raises an HTTP 404 when no user with that username exists.
        """
        security_manager = get_fab_auth_manager().security_manager
        user = security_manager.find_user(username=username)
        if not user:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"The User with username `{username}` was not found",
            )
        return UserResponse.model_validate(user)

    @classmethod
    def get_users(cls, *, order_by: str, limit: int, offset: int) -> UserCollectionResponse:
        """Get users with pagination and ordering.

        ``total_entries`` is the unpaginated count of all users; only the
        requested page is materialized into the response.
        """
        security_manager = get_fab_auth_manager().security_manager
        session = security_manager.session
        total_entries = session.scalars(select(func.count(User.id))).one()
        # Whitelist of sortable columns — presumably build_ordering rejects
        # anything outside this mapping (confirm in its implementation).
        ordering = build_ordering(
            order_by,
            allowed={
                "id": User.id,
                "user_id": User.id,
                "first_name": User.first_name,
                "last_name": User.last_name,
                "username": User.username,
                "email": User.email,
                "active": User.active,
            },
        )
        stmt = select(User).order_by(ordering).offset(offset).limit(limit)
        users = session.scalars(stmt).unique().all()
        return UserCollectionResponse(
            users=[UserResponse.model_validate(u) for u in users],
            total_entries=total_entries,
        )

    @classmethod
    def create_user(cls, body: UserBody) -> UserResponse:
        """Create a new user.

        Raises HTTP 409 on duplicate username or email, HTTP 400 on unknown
        roles, and HTTP 500 when the default registration role is missing or
        the security manager fails to persist the user.
        """
        security_manager = get_fab_auth_manager().security_manager
        existing_username = security_manager.find_user(username=body.username)
        if existing_username:
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail=f"Username `{body.username}` already exists. Use PATCH to update.",
            )
        existing_email = security_manager.find_user(email=body.email)
        if existing_email:
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail=f"The email `{body.email}` is already taken.",
            )
        roles_to_add, missing_role_names = cls._resolve_roles(security_manager, body.roles)
        if missing_role_names:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Unknown roles: {', '.join(repr(n) for n in missing_role_names)}",
            )
        if not roles_to_add:
            # No roles supplied (or resolved): fall back to the configured
            # self-registration role.
            default_role = security_manager.find_role(security_manager.auth_user_registration_role)
            if default_role is None:
                raise HTTPException(
                    status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                    detail="Default registration role is not configured or not found.",
                )
            roles_to_add.append(default_role)
        created = security_manager.add_user(
            username=body.username,
            email=body.email,
            first_name=body.first_name,
            last_name=body.last_name,
            role=roles_to_add,
            password=body.password.get_secret_value(),
        )
        # A falsy return from add_user is treated as a persistence failure.
        if not created:
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail=f"Failed to add user `{body.username}`",
            )
        return UserResponse.model_validate(created)

    @classmethod
    def update_user(cls, username: str, body: UserPatchBody, update_mask: str | None = None) -> UserResponse:
        """Update an existing user.

        ``update_mask`` (comma-separated field names) restricts which fields
        are written; when None, every provided field is applied. Raises HTTP
        404 for unknown users, 409 on username/email conflicts, and 400 for
        unknown mask entries or roles.
        """
        security_manager = get_fab_auth_manager().security_manager
        user = security_manager.find_user(username=username)
        if user is None:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"The User with username `{username}` was not found",
            )
        # NOTE(review): the uniqueness checks below run even when update_mask
        # excludes the field — confirm this is intended.
        if body.username is not None and body.username != username:
            if security_manager.find_user(username=body.username):
                raise HTTPException(
                    status_code=status.HTTP_409_CONFLICT,
                    detail=f"The username `{body.username}` already exists",
                )
        if body.email is not None and body.email != user.email:
            if security_manager.find_user(email=body.email):
                raise HTTPException(
                    status_code=status.HTTP_409_CONFLICT,
                    detail=f"The email `{body.email}` already exists",
                )
        all_fields = {"username", "email", "first_name", "last_name", "roles", "password"}
        if update_mask is not None:
            # Normalize the mask: trim whitespace and drop empty entries.
            fields_to_update = {f.strip() for f in update_mask.split(",") if f.strip()}
            invalid_fields = fields_to_update - all_fields
            if invalid_fields:
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail=f"Unknown update masks: {', '.join(repr(f) for f in invalid_fields)}",
                )
        else:
            fields_to_update = all_fields
        # A field is only written when it is both in the mask AND non-None in
        # the body, so omitted fields are always left untouched.
        if "roles" in fields_to_update and body.roles is not None:
            roles_to_update, missing_role_names = cls._resolve_roles(security_manager, body.roles)
            if missing_role_names:
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail=f"Unknown roles: {', '.join(repr(n) for n in missing_role_names)}",
                )
            user.roles = roles_to_update
        if "password" in fields_to_update and body.password is not None:
            # Passwords are stored hashed, never in plain text.
            user.password = generate_password_hash(body.password.get_secret_value())
        if "username" in fields_to_update and body.username is not None:
            user.username = body.username
        if "email" in fields_to_update and body.email is not None:
            user.email = body.email
        if "first_name" in fields_to_update and body.first_name is not None:
            user.first_name = body.first_name
        if "last_name" in fields_to_update and body.last_name is not None:
            user.last_name = body.last_name
        security_manager.update_user(user)
        return UserResponse.model_validate(user)

    @classmethod
    def delete_user(cls, username: str) -> None:
        """Delete a user by username.

        Raises an HTTP 404 when no user with that username exists.
        """
        security_manager = get_fab_auth_manager().security_manager
        user = security_manager.find_user(username=username)
        if user is None:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"The User with username `{username}` was not found",
            )
        # Detach roles first — presumably so the association rows are cleared
        # before the user row itself is deleted; confirm against the ORM mapping.
        user.roles = []
        security_manager.session.delete(user)
        security_manager.session.commit()
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/src/airflow/providers/fab/auth_manager/api_fastapi/services/users.py",
"license": "Apache License 2.0",
"lines": 191,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/fab/tests/unit/fab/auth_manager/api_fastapi/datamodels/test_users.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import types
from datetime import datetime, timedelta, timezone
import pytest
from pydantic import SecretStr, ValidationError
from airflow.providers.fab.auth_manager.api_fastapi.datamodels.roles import Role
from airflow.providers.fab.auth_manager.api_fastapi.datamodels.users import (
UserBody,
UserCollectionResponse,
UserPatchBody,
UserResponse,
)
class TestUserModels:
    """Unit tests for the FAB user API pydantic datamodels."""

    def test_userbody_accepts_role_dicts_and_Roles_and_secretstr(self):
        """UserBody coerces the password into SecretStr and accepts mixed role inputs."""
        data = {
            "username": "alice",
            "email": "alice@example.com",
            "first_name": "Alice",
            "last_name": "Liddell",
            "password": "s3cr3t",  # should coerce into SecretStr
            "roles": [{"name": "Admin"}, Role(name="User")],
        }
        body = UserBody.model_validate(data)
        assert body.username == "alice"
        assert body.email == "alice@example.com"
        assert isinstance(body.password, SecretStr)
        assert body.password.get_secret_value() == "s3cr3t"
        assert body.roles is not None
        assert [r.name for r in body.roles] == ["Admin", "User"]
        # SecretStr should be masked on JSON serialization by default
        dumped_json = body.model_dump_json()
        payload = json.loads(dumped_json)
        assert payload["password"] == "**********"

    def test_userbody_roles_default_none_when_omitted(self):
        """Roles default to None when the payload omits them."""
        body = UserBody.model_validate(
            {
                "username": "bob",
                "email": "bob@example.com",
                "first_name": "Bob",
                "last_name": "Builder",
                "password": "pw",
            }
        )
        assert body.roles is None

    @pytest.mark.parametrize(
        "patch",
        [
            {"username": ""},  # min_length=1
            {"email": ""},  # min_length=1
            {"first_name": ""},  # min_length=1
            {"last_name": ""},  # min_length=1
        ],
    )
    def test_userbody_min_length_enforced(self, patch):
        """Each required string field rejects the empty string."""
        base = {
            "username": "ok",
            "email": "ok@example.com",
            "first_name": "OK",
            "last_name": "User",
            "password": "pw",
        }
        base.update(patch)
        with pytest.raises(ValidationError):
            UserBody.model_validate(base)

    def test_userbody_password_is_required(self):
        """Omitting the password entirely is a validation error."""
        with pytest.raises(ValidationError):
            UserBody.model_validate(
                {
                    "username": "no-pass",
                    "email": "np@example.com",
                    "first_name": "No",
                    "last_name": "Pass",
                }
            )

    def test_userresponse_accepts_naive_datetimes(self):
        """Naive datetimes pass through unchanged (no tz coercion)."""
        naive_created = datetime(2025, 1, 2, 3, 4, 5)
        resp = UserResponse.model_validate(
            {
                "username": "alice",
                "email": "alice@example.com",
                "first_name": "Alice",
                "last_name": "Liddell",
                "created_on": naive_created,
            }
        )
        assert resp.created_on is not None
        assert resp.created_on.tzinfo is None
        assert resp.created_on == naive_created

    def test_userresponse_accepts_aware_datetimes(self):
        """Timezone-aware datetimes are preserved with their offset."""
        aware = datetime(2024, 12, 1, 9, 30, tzinfo=timezone(timedelta(hours=9)))
        resp = UserResponse.model_validate(
            {
                "username": "bob",
                "email": "bob@example.com",
                "first_name": "Bob",
                "last_name": "Builder",
                "changed_on": aware,
            }
        )
        assert resp.changed_on == aware

    def test_userresponse_model_validate_from_simple_namespace(self):
        """Attribute-based validation works on arbitrary objects (ORM-style access)."""
        obj = types.SimpleNamespace(
            username="eve",
            email="eve@example.com",
            first_name="Eve",
            last_name="Adams",
            roles=[types.SimpleNamespace(name="Viewer")],
            active=True,
            login_count=10,
        )
        resp = UserResponse.model_validate(obj)
        assert resp.username == "eve"
        assert resp.roles is not None
        assert resp.roles[0].name == "Viewer"
        assert resp.active is True
        assert resp.login_count == 10

    def test_userpatchbody_all_fields_optional(self):
        """UserPatchBody should accept an empty payload (all fields optional)."""
        body = UserPatchBody.model_validate({})
        assert body.username is None
        assert body.email is None
        assert body.first_name is None
        assert body.last_name is None
        assert body.roles is None
        assert body.password is None

    def test_userpatchbody_partial_update(self):
        """UserPatchBody should accept partial updates."""
        body = UserPatchBody.model_validate({"last_name": "Updated"})
        assert body.last_name == "Updated"
        assert body.username is None
        assert body.email is None

    def test_userpatchbody_password_coerces_to_secretstr(self):
        """Password field should coerce to SecretStr when provided."""
        body = UserPatchBody.model_validate({"password": "newpass"})
        assert isinstance(body.password, SecretStr)
        assert body.password.get_secret_value() == "newpass"

    def test_userpatchbody_roles_accepts_dicts_and_role_objects(self):
        """Roles field should accept both dicts and Role objects."""
        body = UserPatchBody.model_validate({"roles": [{"name": "Admin"}, Role(name="User")]})
        assert body.roles is not None
        assert [r.name for r in body.roles] == ["Admin", "User"]

    @pytest.mark.parametrize(
        ("field", "value"),
        [
            ("username", ""),
            ("email", ""),
            ("first_name", ""),
            ("last_name", ""),
        ],
    )
    def test_userpatchbody_min_length_enforced(self, field, value):
        """Non-null string fields should enforce min_length=1."""
        with pytest.raises(ValidationError):
            UserPatchBody.model_validate({field: value})

    def test_usercollectionresponse_structure(self):
        """UserCollectionResponse should contain users list and total_entries."""
        resp = UserCollectionResponse.model_validate(
            {
                "users": [
                    {
                        "username": "alice",
                        "email": "alice@example.com",
                        "first_name": "Alice",
                        "last_name": "Liddell",
                    }
                ],
                "total_entries": 1,
            }
        )
        assert resp.total_entries == 1
        assert len(resp.users) == 1
        assert resp.users[0].username == "alice"

    def test_usercollectionresponse_empty_users(self):
        """UserCollectionResponse should handle empty users list."""
        resp = UserCollectionResponse.model_validate(
            {
                "users": [],
                "total_entries": 0,
            }
        )
        assert resp.total_entries == 0
        assert resp.users == []

    def test_usercollectionresponse_multiple_users(self):
        """UserCollectionResponse should handle multiple users."""
        resp = UserCollectionResponse.model_validate(
            {
                "users": [
                    {"username": "alice", "email": "a@b.com", "first_name": "A", "last_name": "L"},
                    {"username": "bob", "email": "b@b.com", "first_name": "B", "last_name": "B"},
                ],
                "total_entries": 100,
            }
        )
        assert resp.total_entries == 100
        assert len(resp.users) == 2
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/tests/unit/fab/auth_manager/api_fastapi/datamodels/test_users.py",
"license": "Apache License 2.0",
"lines": 212,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/fab/tests/unit/fab/auth_manager/api_fastapi/routes/test_users.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from contextlib import nullcontext as _noop_cm
from unittest.mock import ANY, MagicMock, patch
import pytest
from fastapi import HTTPException, status
from airflow.providers.fab.auth_manager.api_fastapi.datamodels.users import (
UserCollectionResponse,
UserResponse,
)
@pytest.mark.db_test
class TestUsers:
    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_create_user_ok(
        self, mock_get_application_builder, mock_get_auth_manager, mock_users, test_client, as_user
    ):
        """POST /fab/v1/users returns 200 and the serialized user when authorized."""
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        dummy_out = UserResponse(
            username="alice",
            email="alice@example.com",
            first_name="Alice",
            last_name="Liddell",
            roles=None,
            active=True,
            login_count=0,
            fail_login_count=0,
        )
        mock_users.create_user.return_value = dummy_out
        with as_user():
            payload = {
                "username": "alice",
                "email": "alice@example.com",
                "first_name": "Alice",
                "last_name": "Liddell",
                "password": "s3cr3t",
                "roles": [{"name": "Viewer"}],
            }
            resp = test_client.post("/fab/v1/users", json=payload)
        assert resp.status_code == 200
        assert resp.json() == dummy_out.model_dump(by_alias=True)
        mock_users.create_user.assert_called_once()
    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_create_user_forbidden(
        self, mock_get_application_builder, mock_get_auth_manager, mock_users, test_client, as_user
    ):
        """POST /fab/v1/users returns 403 (and never reaches the service) when unauthorized."""
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = False
        mock_get_auth_manager.return_value = mgr
        with as_user():
            resp = test_client.post(
                "/fab/v1/users",
                json={
                    "username": "bob",
                    "email": "bob@example.com",
                    "first_name": "Bob",
                    "last_name": "Builder",
                    "password": "pw",
                },
            )
        assert resp.status_code == 403
        mock_users.create_user.assert_not_called()
    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_create_user_validation_422_empty_username(
        self, mock_get_application_builder, mock_get_auth_manager, mock_users, test_client, as_user
    ):
        """An empty username fails body validation (min_length=1) with 422."""
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        with as_user():
            resp = test_client.post(
                "/fab/v1/users",
                json={
                    "username": "",
                    "email": "e@example.com",
                    "first_name": "E",
                    "last_name": "Mpty",
                    "password": "pw",
                },
            )
        assert resp.status_code == 422
        mock_users.create_user.assert_not_called()
    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_create_user_validation_422_missing_username(
        self, mock_get_application_builder, mock_get_auth_manager, mock_users, test_client, as_user
    ):
        """Omitting the required username field yields 422."""
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        with as_user():
            resp = test_client.post(
                "/fab/v1/users",
                json={
                    "email": "nouser@example.com",
                    "first_name": "No",
                    "last_name": "User",
                    "password": "pw",
                },
            )
        assert resp.status_code == 422
        mock_users.create_user.assert_not_called()
    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_create_user_validation_422_missing_password(
        self, mock_get_application_builder, mock_get_auth_manager, mock_users, test_client, as_user
    ):
        """Omitting the required password field yields 422."""
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        with as_user():
            resp = test_client.post(
                "/fab/v1/users",
                json={
                    "username": "no-pass",
                    "email": "np@example.com",
                    "first_name": "No",
                    "last_name": "Pass",
                    # password missing
                },
            )
        assert resp.status_code == 422
        mock_users.create_user.assert_not_called()
    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
        return_value=_noop_cm(),
    )
    @patch("airflow.providers.fab.auth_manager.api_fastapi.parameters.conf")
    def test_get_users_success_defaults(
        self,
        conf_mock,
        mock_get_application_builder,
        mock_get_auth_manager,
        mock_users,
        test_client,
        as_user,
    ):
        """GET /fab/v1/users with no query params uses order_by=id, limit=100, offset=0."""
        # Stub the config lookups used by get_effective_limit.
        conf_mock.getint.side_effect = lambda section, option: {
            "maximum_page_limit": 500,
            "fallback_page_limit": 25,
        }[option]
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        dummy = UserCollectionResponse(
            users=[
                UserResponse(
                    username="alice",
                    email="alice@example.com",
                    first_name="Alice",
                    last_name="Liddell",
                )
            ],
            total_entries=1,
        )
        mock_users.get_users.return_value = dummy
        with as_user():
            resp = test_client.get("/fab/v1/users")
        assert resp.status_code == 200
        assert resp.json() == dummy.model_dump(by_alias=True)
        # NOTE(review): expected limit is 100 here (not the mocked fallback
        # of 25) — presumably the route's declared default; confirm.
        mock_users.get_users.assert_called_once_with(order_by="id", limit=100, offset=0)
    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
        return_value=_noop_cm(),
    )
    @patch("airflow.providers.fab.auth_manager.api_fastapi.parameters.conf")
    def test_get_users_with_params(
        self,
        conf_mock,
        mock_get_application_builder,
        mock_get_auth_manager,
        mock_users,
        test_client,
        as_user,
    ):
        """Query params are forwarded; a limit above the maximum is clamped to it."""
        conf_mock.getint.side_effect = lambda section, option: {
            "maximum_page_limit": 50,
            "fallback_page_limit": 20,
        }[option]
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = True
        mock_get_auth_manager.return_value = mgr
        dummy = UserCollectionResponse(users=[], total_entries=0)
        mock_users.get_users.return_value = dummy
        with as_user():
            resp = test_client.get(
                "/fab/v1/users", params={"order_by": "-username", "limit": 1000, "offset": 5}
            )
        assert resp.status_code == 200
        # limit=1000 was clamped to the configured maximum_page_limit of 50.
        mock_users.get_users.assert_called_once_with(order_by="-username", limit=50, offset=5)
    @patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
    @patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
    @patch(
        "airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
        return_value=_noop_cm(),
    )
    def test_get_users_forbidden(
        self, mock_get_application_builder, mock_get_auth_manager, mock_users, test_client, as_user
    ):
        """GET /fab/v1/users returns 403 (service untouched) when unauthorized."""
        mgr = MagicMock()
        mgr.is_authorized_custom_view.return_value = False
        mock_get_auth_manager.return_value = mgr
        with as_user():
            resp = test_client.get("/fab/v1/users")
        assert resp.status_code == 403
        mock_users.get_users.assert_not_called()
@patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
@patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
@patch(
"airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
return_value=_noop_cm(),
)
def test_get_users_validation_422_negative_offset(
self, mock_get_application_builder, mock_get_auth_manager, mock_users, test_client, as_user
):
mgr = MagicMock()
mgr.is_authorized_custom_view.return_value = True
mock_get_auth_manager.return_value = mgr
with as_user():
resp = test_client.get("/fab/v1/users", params={"offset": -1})
assert resp.status_code == 422
mock_users.get_users.assert_not_called()
@patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
@patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
@patch(
"airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
return_value=_noop_cm(),
)
def test_get_user_success(
self, mock_get_application_builder, mock_get_auth_manager, mock_users, test_client, as_user
):
mgr = MagicMock()
mgr.is_authorized_custom_view.return_value = True
mock_get_auth_manager.return_value = mgr
dummy_out = UserResponse(
username="alice",
email="alice@example.com",
first_name="Alice",
last_name="Liddell",
)
mock_users.get_user.return_value = dummy_out
with as_user():
resp = test_client.get("/fab/v1/users/alice")
assert resp.status_code == 200
assert resp.json() == dummy_out.model_dump(by_alias=True)
mock_users.get_user.assert_called_once_with(username="alice")
@patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
@patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
@patch(
"airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
return_value=_noop_cm(),
)
def test_get_user_forbidden(
self, mock_get_application_builder, mock_get_auth_manager, mock_users, test_client, as_user
):
mgr = MagicMock()
mgr.is_authorized_custom_view.return_value = False
mock_get_auth_manager.return_value = mgr
with as_user():
resp = test_client.get("/fab/v1/users/alice")
assert resp.status_code == 403
mock_users.get_user.assert_not_called()
@patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
@patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
@patch(
"airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
return_value=_noop_cm(),
)
def test_get_user_not_found(
self, mock_get_application_builder, mock_get_auth_manager, mock_users, test_client, as_user
):
mgr = MagicMock()
mgr.is_authorized_custom_view.return_value = True
mock_get_auth_manager.return_value = mgr
mock_users.get_user.side_effect = HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="The User with username `nonexistent` was not found",
)
with as_user():
resp = test_client.get("/fab/v1/users/nonexistent")
assert resp.status_code == 404
mock_users.get_user.assert_called_once_with(username="nonexistent")
@patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
@patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
@patch(
"airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
return_value=_noop_cm(),
)
def test_get_user_empty_username_404(
self, mock_get_application_builder, mock_get_auth_manager, mock_users, test_client, as_user
):
mgr = MagicMock()
mgr.is_authorized_custom_view.return_value = True
mock_get_auth_manager.return_value = mgr
with as_user():
resp = test_client.get("/fab/v1/users/")
assert resp.status_code == 404
mock_users.get_user.assert_not_called()
@patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
@patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
@patch(
"airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
return_value=_noop_cm(),
)
def test_update_user_success(
self, mock_get_application_builder, mock_get_auth_manager, mock_users, test_client, as_user
):
mgr = MagicMock()
mgr.is_authorized_custom_view.return_value = True
mock_get_auth_manager.return_value = mgr
dummy_out = UserResponse(
username="alice",
email="alice_updated@example.com",
first_name="Alice",
last_name="Updated",
)
mock_users.update_user.return_value = dummy_out
with as_user():
resp = test_client.patch(
"/fab/v1/users/alice",
json={"email": "alice_updated@example.com", "last_name": "Updated"},
)
assert resp.status_code == 200
assert resp.json() == dummy_out.model_dump(by_alias=True)
mock_users.update_user.assert_called_once_with(username="alice", body=ANY, update_mask=None)
@patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
@patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
@patch(
"airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
return_value=_noop_cm(),
)
def test_update_user_with_update_mask(
self, mock_get_application_builder, mock_get_auth_manager, mock_users, test_client, as_user
):
mgr = MagicMock()
mgr.is_authorized_custom_view.return_value = True
mock_get_auth_manager.return_value = mgr
dummy_out = UserResponse(
username="alice",
email="alice@example.com",
first_name="Alice",
last_name="Updated",
)
mock_users.update_user.return_value = dummy_out
with as_user():
resp = test_client.patch(
"/fab/v1/users/alice",
json={"last_name": "Updated"},
params={"update_mask": "last_name"},
)
assert resp.status_code == 200
mock_users.update_user.assert_called_once_with(
username="alice", body=ANY, update_mask="last_name"
)
@patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
@patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
@patch(
"airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
return_value=_noop_cm(),
)
def test_update_user_forbidden(
self, mock_get_application_builder, mock_get_auth_manager, mock_users, test_client, as_user
):
mgr = MagicMock()
mgr.is_authorized_custom_view.return_value = False
mock_get_auth_manager.return_value = mgr
with as_user():
resp = test_client.patch("/fab/v1/users/alice", json={"last_name": "Updated"})
assert resp.status_code == 403
mock_users.update_user.assert_not_called()
@patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
@patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
@patch(
"airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
return_value=_noop_cm(),
)
def test_update_user_not_found(
self, mock_get_application_builder, mock_get_auth_manager, mock_users, test_client, as_user
):
mgr = MagicMock()
mgr.is_authorized_custom_view.return_value = True
mock_get_auth_manager.return_value = mgr
mock_users.update_user.side_effect = HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="The User with username `nonexistent` was not found",
)
with as_user():
resp = test_client.patch("/fab/v1/users/nonexistent", json={"last_name": "Updated"})
assert resp.status_code == 404
mock_users.update_user.assert_called_once()
@patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
@patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
@patch(
"airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
return_value=_noop_cm(),
)
def test_update_user_unknown_update_mask(
self, mock_get_application_builder, mock_get_auth_manager, mock_users, test_client, as_user
):
mgr = MagicMock()
mgr.is_authorized_custom_view.return_value = True
mock_get_auth_manager.return_value = mgr
mock_users.update_user.side_effect = HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Unknown update masks: 'invalid_field'",
)
with as_user():
resp = test_client.patch(
"/fab/v1/users/alice",
json={"last_name": "Updated"},
params={"update_mask": "invalid_field"},
)
assert resp.status_code == 400
mock_users.update_user.assert_called_once()
@patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
@patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
@patch(
"airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
return_value=_noop_cm(),
)
def test_delete_user_success(
self, mock_get_application_builder, mock_get_auth_manager, mock_users, test_client, as_user
):
mgr = MagicMock()
mgr.is_authorized_custom_view.return_value = True
mock_users.delete_user.return_value = None
mock_get_auth_manager.return_value = mgr
with as_user():
resp = test_client.delete("/fab/v1/users/alice")
assert resp.status_code == 204
mock_users.delete_user.assert_called_once_with(username="alice")
@patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
@patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
@patch(
"airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
return_value=_noop_cm(),
)
def test_delete_user_forbidden(
self, mock_get_application_builder, mock_get_auth_manager, mock_users, test_client, as_user
):
mgr = MagicMock()
mgr.is_authorized_custom_view.return_value = False
mock_get_auth_manager.return_value = mgr
with as_user():
resp = test_client.delete("/fab/v1/users/alice")
assert resp.status_code == 403
mock_users.delete_user.assert_not_called()
@patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
@patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
@patch(
"airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
return_value=_noop_cm(),
)
def test_delete_user_not_found(
self, mock_get_application_builder, mock_get_auth_manager, mock_users, test_client, as_user
):
mgr = MagicMock()
mgr.is_authorized_custom_view.return_value = True
mock_get_auth_manager.return_value = mgr
mock_users.delete_user.side_effect = HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="The User with username `nonexistent` was not found",
)
with as_user():
resp = test_client.delete("/fab/v1/users/nonexistent")
assert resp.status_code == 404
mock_users.delete_user.assert_called_once_with(username="nonexistent")
@patch("airflow.providers.fab.auth_manager.api_fastapi.routes.users.FABAuthManagerUsers")
@patch("airflow.providers.fab.auth_manager.api_fastapi.security.get_auth_manager")
@patch(
"airflow.providers.fab.auth_manager.api_fastapi.routes.users.get_application_builder",
return_value=_noop_cm(),
)
def test_delete_user_empty_username_404(
self, mock_get_application_builder, mock_get_auth_manager, mock_users, test_client, as_user
):
mgr = MagicMock()
mgr.is_authorized_custom_view.return_value = True
mock_get_auth_manager.return_value = mgr
with as_user():
resp = test_client.delete("/fab/v1/users/")
assert resp.status_code == 404
mock_users.delete_user.assert_not_called()
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/tests/unit/fab/auth_manager/api_fastapi/routes/test_users.py",
"license": "Apache License 2.0",
"lines": 526,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/fab/tests/unit/fab/auth_manager/api_fastapi/services/test_users.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import types
from unittest.mock import MagicMock, patch
import pytest
from fastapi import HTTPException
from airflow.providers.fab.auth_manager.api_fastapi.datamodels.roles import Role
from airflow.providers.fab.auth_manager.api_fastapi.services.users import FABAuthManagerUsers
@pytest.fixture
def fab_auth_manager():
    """A bare auth-manager double; tests attach a ``security_manager`` to it."""
    manager = MagicMock()
    return manager
@pytest.fixture
def security_manager():
    """Security-manager double that knows the roles 'Admin' and 'User'.

    ``find_role`` resolves known names to an object with a ``name`` attribute
    and returns ``None`` for anything else; the default registration role is
    'User'.
    """
    sm = MagicMock()
    known_roles = {"Admin", "User"}
    sm.find_role.side_effect = (
        lambda name: types.SimpleNamespace(name=name) if name in known_roles else None
    )
    sm.auth_user_registration_role = "User"
    return sm
def _make_user_obj(
*,
username: str,
email: str,
first_name: str,
last_name: str,
roles: list[str] | None = None,
active: bool = True,
):
role_objs = [types.SimpleNamespace(name=r) for r in (roles or [])]
return types.SimpleNamespace(
username=username,
email=email,
first_name=first_name,
last_name=last_name,
roles=role_objs or None,
active=active,
login_count=0,
fail_login_count=0,
last_login=None,
created_on=None,
changed_on=None,
)
@patch("airflow.providers.fab.auth_manager.api_fastapi.services.users.get_fab_auth_manager")
class TestUsersService:
def setup_method(self):
self.password_mock = MagicMock()
self.password_mock.get_secret_value.return_value = "pw"
self.body_base = types.SimpleNamespace(
username="alice",
email="alice@example.com",
first_name="Alice",
last_name="Liddell",
password=self.password_mock,
roles=None,
)
self.body_with_roles_admin_dupe = types.SimpleNamespace(
username="bob",
email="bob@example.com",
first_name="Bob",
last_name="Builder",
password=MagicMock(get_secret_value=MagicMock(return_value="pw2")),
roles=[types.SimpleNamespace(name="Admin"), types.SimpleNamespace(name="Admin")],
)
self.body_with_missing_role = types.SimpleNamespace(
username="eve",
email="eve@example.com",
first_name="Eve",
last_name="Adams",
password=MagicMock(get_secret_value=MagicMock(return_value="pw3")),
roles=[types.SimpleNamespace(name="NOPE")],
)
def test_create_user_success_with_default_role(
self, get_fab_auth_manager, fab_auth_manager, security_manager
):
security_manager.find_user.side_effect = [None, None]
security_manager.add_user.return_value = _make_user_obj(
username=self.body_base.username,
email=self.body_base.email,
first_name=self.body_base.first_name,
last_name=self.body_base.last_name,
roles=["User"],
)
fab_auth_manager.security_manager = security_manager
get_fab_auth_manager.return_value = fab_auth_manager
out = FABAuthManagerUsers.create_user(self.body_base)
assert out.username == "alice"
assert out.email == "alice@example.com"
called_roles = security_manager.add_user.call_args.kwargs["role"]
assert len(called_roles) == 1
assert called_roles[0].name == "User"
self.password_mock.get_secret_value.assert_called_once()
def test_create_user_success_with_explicit_roles_and_dedup(
self, get_fab_auth_manager, fab_auth_manager, security_manager
):
security_manager.find_user.side_effect = [None, None]
security_manager.add_user.return_value = _make_user_obj(
username=self.body_with_roles_admin_dupe.username,
email=self.body_with_roles_admin_dupe.email,
first_name=self.body_with_roles_admin_dupe.first_name,
last_name=self.body_with_roles_admin_dupe.last_name,
roles=["Admin"],
)
fab_auth_manager.security_manager = security_manager
get_fab_auth_manager.return_value = fab_auth_manager
out = FABAuthManagerUsers.create_user(self.body_with_roles_admin_dupe)
assert out.username == "bob"
roles_arg = security_manager.add_user.call_args.kwargs["role"]
assert len(roles_arg) == 1
assert roles_arg[0].name == "Admin"
def test_create_user_conflict_username(self, get_fab_auth_manager, fab_auth_manager, security_manager):
security_manager.find_user.side_effect = [object()]
fab_auth_manager.security_manager = security_manager
get_fab_auth_manager.return_value = fab_auth_manager
with pytest.raises(HTTPException) as ex:
FABAuthManagerUsers.create_user(self.body_base)
assert ex.value.status_code == 409
assert "Username" in ex.value.detail
def test_create_user_conflict_email(self, get_fab_auth_manager, fab_auth_manager, security_manager):
security_manager.find_user.side_effect = [None, object()]
fab_auth_manager.security_manager = security_manager
get_fab_auth_manager.return_value = fab_auth_manager
with pytest.raises(HTTPException) as ex:
FABAuthManagerUsers.create_user(self.body_base)
assert ex.value.status_code == 409
assert "email" in ex.value.detail
def test_create_user_unknown_roles(self, get_fab_auth_manager, fab_auth_manager, security_manager):
security_manager.find_user.side_effect = [None, None]
fab_auth_manager.security_manager = security_manager
get_fab_auth_manager.return_value = fab_auth_manager
with pytest.raises(HTTPException) as ex:
FABAuthManagerUsers.create_user(self.body_with_missing_role)
assert ex.value.status_code == 400
assert "Unknown roles" in ex.value.detail
def test_create_user_default_role_missing(self, get_fab_auth_manager, fab_auth_manager, security_manager):
security_manager.find_user.side_effect = [None, None]
security_manager.auth_user_registration_role = "MissingDefault"
security_manager.find_role.side_effect = lambda n: None if n == "MissingDefault" else None
fab_auth_manager.security_manager = security_manager
get_fab_auth_manager.return_value = fab_auth_manager
with pytest.raises(HTTPException) as ex:
FABAuthManagerUsers.create_user(self.body_base)
assert ex.value.status_code == 500
assert "Default registration role" in ex.value.detail
def test_create_user_add_user_failed(self, get_fab_auth_manager, fab_auth_manager, security_manager):
security_manager.find_user.side_effect = [None, None]
security_manager.add_user.return_value = None
fab_auth_manager.security_manager = security_manager
get_fab_auth_manager.return_value = fab_auth_manager
with pytest.raises(HTTPException) as ex:
FABAuthManagerUsers.create_user(self.body_base)
assert ex.value.status_code == 500
assert "Failed to add user" in ex.value.detail
def test_resolve_roles_returns_found_and_missing(self, get_fab_auth_manager, security_manager):
found, missing = FABAuthManagerUsers._resolve_roles(
security_manager,
[Role(name="Admin"), Role(name="NOPE"), Role(name="Admin")],
)
assert [r.name for r in found] == ["Admin"]
assert missing == ["NOPE"]
def test_get_user_success(self, get_fab_auth_manager, fab_auth_manager, security_manager):
user_obj = _make_user_obj(
username="alice",
email="alice@example.com",
first_name="Alice",
last_name="Liddell",
roles=["User"],
)
security_manager.find_user.return_value = user_obj
fab_auth_manager.security_manager = security_manager
get_fab_auth_manager.return_value = fab_auth_manager
out = FABAuthManagerUsers.get_user("alice")
assert out.username == "alice"
assert out.email == "alice@example.com"
security_manager.find_user.assert_called_once_with(username="alice")
def test_get_user_not_found(self, get_fab_auth_manager, fab_auth_manager, security_manager):
security_manager.find_user.return_value = None
fab_auth_manager.security_manager = security_manager
get_fab_auth_manager.return_value = fab_auth_manager
with pytest.raises(HTTPException) as ex:
FABAuthManagerUsers.get_user("nonexistent")
assert ex.value.status_code == 404
assert "nonexistent" in ex.value.detail
@patch("airflow.providers.fab.auth_manager.api_fastapi.services.users.build_ordering")
@patch("airflow.providers.fab.auth_manager.api_fastapi.services.users.select")
def test_get_users_success(
self, mock_select, mock_build_ordering, get_fab_auth_manager, fab_auth_manager, security_manager
):
user1 = _make_user_obj(
username="alice", email="alice@example.com", first_name="Alice", last_name="Liddell"
)
user2 = _make_user_obj(username="bob", email="bob@example.com", first_name="Bob", last_name="Builder")
mock_session = MagicMock()
mock_session.scalars.return_value.one.return_value = 2
mock_session.scalars.return_value.unique.return_value.all.return_value = [user1, user2]
security_manager.session = mock_session
fab_auth_manager.security_manager = security_manager
get_fab_auth_manager.return_value = fab_auth_manager
mock_build_ordering.return_value = "ordering"
out = FABAuthManagerUsers.get_users(order_by="username", limit=10, offset=0)
assert out.total_entries == 2
assert len(out.users) == 2
assert out.users[0].username == "alice"
assert out.users[1].username == "bob"
@patch("airflow.providers.fab.auth_manager.api_fastapi.services.users.build_ordering")
def test_get_users_invalid_order_by(
self, mock_build_ordering, get_fab_auth_manager, fab_auth_manager, security_manager
):
mock_build_ordering.side_effect = HTTPException(
status_code=400,
detail="Ordering with 'invalid' is disallowed or the attribute does not exist on the model",
)
fab_auth_manager.security_manager = security_manager
get_fab_auth_manager.return_value = fab_auth_manager
with pytest.raises(HTTPException) as ex:
FABAuthManagerUsers.get_users(order_by="invalid", limit=10, offset=0)
assert ex.value.status_code == 400
def test_update_user_success(self, get_fab_auth_manager, fab_auth_manager, security_manager):
user_obj = _make_user_obj(
username="alice",
email="alice@example.com",
first_name="Alice",
last_name="Liddell",
roles=["User"],
)
security_manager.find_user.return_value = user_obj
fab_auth_manager.security_manager = security_manager
get_fab_auth_manager.return_value = fab_auth_manager
patch_body = types.SimpleNamespace(
username=None,
email=None,
first_name=None,
last_name="Updated",
roles=None,
password=None,
)
out = FABAuthManagerUsers.update_user("alice", patch_body)
assert out.last_name == "Updated"
security_manager.update_user.assert_called_once()
def test_update_user_not_found(self, get_fab_auth_manager, fab_auth_manager, security_manager):
security_manager.find_user.return_value = None
fab_auth_manager.security_manager = security_manager
get_fab_auth_manager.return_value = fab_auth_manager
patch_body = types.SimpleNamespace(
username=None,
email=None,
first_name=None,
last_name="Updated",
roles=None,
password=None,
)
with pytest.raises(HTTPException) as ex:
FABAuthManagerUsers.update_user("nonexistent", patch_body)
assert ex.value.status_code == 404
def test_update_user_conflict_username(self, get_fab_auth_manager, fab_auth_manager, security_manager):
user_obj = _make_user_obj(
username="alice",
email="alice@example.com",
first_name="Alice",
last_name="Liddell",
)
security_manager.find_user.side_effect = [user_obj, object()]
fab_auth_manager.security_manager = security_manager
get_fab_auth_manager.return_value = fab_auth_manager
patch_body = types.SimpleNamespace(
username="bob",
email=None,
first_name=None,
last_name=None,
roles=None,
password=None,
)
with pytest.raises(HTTPException) as ex:
FABAuthManagerUsers.update_user("alice", patch_body)
assert ex.value.status_code == 409
assert "username" in ex.value.detail
def test_update_user_conflict_email(self, get_fab_auth_manager, fab_auth_manager, security_manager):
user_obj = _make_user_obj(
username="alice",
email="alice@example.com",
first_name="Alice",
last_name="Liddell",
)
security_manager.find_user.side_effect = [user_obj, object()]
fab_auth_manager.security_manager = security_manager
get_fab_auth_manager.return_value = fab_auth_manager
patch_body = types.SimpleNamespace(
username=None,
email="taken@example.com",
first_name=None,
last_name=None,
roles=None,
password=None,
)
with pytest.raises(HTTPException) as ex:
FABAuthManagerUsers.update_user("alice", patch_body)
assert ex.value.status_code == 409
assert "email" in ex.value.detail
def test_update_user_unknown_update_mask(self, get_fab_auth_manager, fab_auth_manager, security_manager):
user_obj = _make_user_obj(
username="alice",
email="alice@example.com",
first_name="Alice",
last_name="Liddell",
)
security_manager.find_user.return_value = user_obj
fab_auth_manager.security_manager = security_manager
get_fab_auth_manager.return_value = fab_auth_manager
patch_body = types.SimpleNamespace(
username=None,
email=None,
first_name=None,
last_name="Updated",
roles=None,
password=None,
)
with pytest.raises(HTTPException) as ex:
FABAuthManagerUsers.update_user("alice", patch_body, update_mask="invalid_field")
assert ex.value.status_code == 400
assert "Unknown update masks" in ex.value.detail
def test_update_user_with_password(self, get_fab_auth_manager, fab_auth_manager, security_manager):
user_obj = _make_user_obj(
username="alice",
email="alice@example.com",
first_name="Alice",
last_name="Liddell",
)
security_manager.find_user.return_value = user_obj
fab_auth_manager.security_manager = security_manager
get_fab_auth_manager.return_value = fab_auth_manager
password_mock = MagicMock()
password_mock.get_secret_value.return_value = "newpassword"
patch_body = types.SimpleNamespace(
username=None,
email=None,
first_name=None,
last_name=None,
roles=None,
password=password_mock,
)
with patch(
"airflow.providers.fab.auth_manager.api_fastapi.services.users.generate_password_hash"
) as mock_hash:
mock_hash.return_value = "hashed_password"
FABAuthManagerUsers.update_user("alice", patch_body, update_mask="password")
password_mock.get_secret_value.assert_called_once()
mock_hash.assert_called_once_with("newpassword")
# delete_user tests
def test_delete_user_success(self, get_fab_auth_manager, fab_auth_manager, security_manager):
user_obj = _make_user_obj(
username="alice",
email="alice@example.com",
first_name="Alice",
last_name="Liddell",
roles=["User"],
)
security_manager.find_user.return_value = user_obj
fab_auth_manager.security_manager = security_manager
get_fab_auth_manager.return_value = fab_auth_manager
FABAuthManagerUsers.delete_user("alice")
assert user_obj.roles == []
security_manager.session.delete.assert_called_once_with(user_obj)
security_manager.session.commit.assert_called_once()
def test_delete_user_not_found(self, get_fab_auth_manager, fab_auth_manager, security_manager):
security_manager.find_user.return_value = None
fab_auth_manager.security_manager = security_manager
get_fab_auth_manager.return_value = fab_auth_manager
with pytest.raises(HTTPException) as ex:
FABAuthManagerUsers.delete_user("nonexistent")
assert ex.value.status_code == 404
assert "nonexistent" in ex.value.detail
| {
"repo_id": "apache/airflow",
"file_path": "providers/fab/tests/unit/fab/auth_manager/api_fastapi/services/test_users.py",
"license": "Apache License 2.0",
"lines": 387,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/tests/unit/core/test_dual_stats_manager.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any
import pytest
from airflow._shared.observability.metrics import dual_stats_manager
class TestDualStatsManager:
@pytest.mark.parametrize(
("value", "expected"),
[
pytest.param(
1,
True,
id="number",
),
pytest.param(
False,
True,
id="boolean",
),
pytest.param(
None,
False,
id="None",
),
pytest.param(
{},
False,
id="empty_dict",
),
],
)
def test_value_is_provided(self, value: Any, expected: bool):
result = dual_stats_manager._value_is_provided(value)
assert result == expected
@pytest.mark.parametrize(
("count", "rate", "delta", "tags", "expected_args_dict"),
[
pytest.param(
1,
1,
False,
{},
{"count": 1, "rate": 1, "delta": False},
id="all_params_empty_tags",
),
pytest.param(
1,
1,
False,
{"test": True},
{"count": 1, "rate": 1, "delta": False, "tags": {"test": True}},
id="provide_tags",
),
pytest.param(
None,
1,
False,
{},
{"rate": 1, "delta": False},
id="no_count",
),
pytest.param(
1,
None,
False,
{},
{"count": 1, "delta": False},
id="no_rate",
),
pytest.param(
1,
1,
None,
{},
{"count": 1, "rate": 1},
id="no_delta",
),
pytest.param(
1,
1,
False,
None,
{"count": 1, "rate": 1, "delta": False},
id="no_tags",
),
],
)
def test_get_dict_with_defined_args(
self,
count: int | None,
rate: int | None,
delta: bool | None,
tags: dict[str, Any] | None,
expected_args_dict: dict[str, Any],
):
args_dict = dual_stats_manager._get_dict_with_defined_args(count, rate, delta, tags)
assert sorted(args_dict) == sorted(expected_args_dict)
@pytest.mark.parametrize(
("args_dict", "tags", "extra_tags", "expected_args_dict"),
[
pytest.param(
{"count": 1},
{"test": True},
{},
{"count": 1, "tags": {"test": True}},
id="no_extra_tags",
),
pytest.param(
{},
{"test": True},
{},
{"tags": {"test": True}},
id="no_args_no_extra_but_tags",
),
pytest.param(
{},
{"test": True},
{"test_extra": True},
{"tags": {"test": True, "test_extra": True}},
id="no_args_but_tags_and_extra",
),
pytest.param(
{"count": 1},
{"test": True},
{},
{"count": 1, "tags": {"test": True}},
id="no_args_no_tags_but_extra_tags",
),
pytest.param(
{"count": 1},
{"test": True},
{"test_extra": True},
{"count": 1, "tags": {"test": True, "test_extra": True}},
id="all_params_provided",
),
pytest.param(
{"count": 1, "rate": 3},
{"test1": True, "test2": False},
{"test_extra1": True, "test_extra2": False, "test_extra3": True},
{
"count": 1,
"rate": 3,
"tags": {
"test1": True,
"test2": False,
"test_extra1": True,
"test_extra2": False,
"test_extra3": True,
},
},
id="multiple_params",
),
],
)
def test_get_args_dict_with_extra_tags_if_set(
self,
args_dict: dict[str, Any] | None,
tags: dict[str, Any] | None,
extra_tags: dict[str, Any] | None,
expected_args_dict: dict[str, Any],
):
dict_full = dual_stats_manager._get_args_dict_with_extra_tags_if_set(args_dict, tags, extra_tags)
assert sorted(dict_full) == sorted(expected_args_dict)
@pytest.mark.parametrize(
("tags", "extra_tags", "expected_tags_dict"),
[
pytest.param(
{"test": True},
{"test_extra": True},
{"test": True, "test_extra": True},
id="all_params_provided",
),
pytest.param(
{},
{},
{},
id="no_params_provided",
),
pytest.param(
{"test": True},
{},
{"test": True},
id="only_tags",
),
pytest.param(
{},
{"test_extra": True},
{"test_extra": True},
id="only_extra",
),
pytest.param(
{"test1": True, "test2": False},
{"test_extra1": True, "test_extra2": False, "test_extra3": True},
{
"test1": True,
"test2": False,
"test_extra1": True,
"test_extra2": False,
"test_extra3": True,
},
id="multiple_params",
),
],
)
def test_get_tags_with_extra(
self,
tags: dict[str, Any] | None,
extra_tags: dict[str, Any] | None,
expected_tags_dict: dict[str, Any],
):
tags_full = dual_stats_manager._get_tags_with_extra(tags, extra_tags)
assert sorted(tags_full) == sorted(expected_tags_dict)
@pytest.mark.parametrize(
    ("stat", "variables", "expected_legacy_stat", "raises_value_error", "expected_error_msg"),
    [
        pytest.param(
            "operator_failures",
            {"operator_name": "exec1"},
            "operator_failures_exec1",
            False,
            "",
            id="no_errors",
        ),
        pytest.param(
            "operator_failures",
            {},
            "operator_failures_exec1",
            True,
            "Missing required variables for metric",
            id="missing_params",
        ),
        pytest.param(
            "missing_metric",
            {},
            "",
            True,
            "Add the metric to the YAML file before using it.",
            id="missing_metric",
        ),
    ],
)
def test_get_legacy_stat_from_registry(
    self,
    stat: str,
    variables: dict[str, Any],
    expected_legacy_stat: str,
    raises_value_error: bool,
    expected_error_msg: str,
):
    """Legacy names resolve from the registry; missing metrics or variables raise ValueError."""
    from airflow._shared.observability.metrics.dual_stats_manager import DualStatsManager

    manager = DualStatsManager()
    if not raises_value_error:
        # Happy path: the legacy name is formatted from the provided variables.
        assert manager.get_legacy_stat(stat, variables) == expected_legacy_stat
        return
    with pytest.raises(ValueError, match=expected_error_msg):
        manager.get_legacy_stat(stat, variables)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/core/test_dual_stats_manager.py",
"license": "Apache License 2.0",
"lines": 275,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:shared/observability/src/airflow_shared/observability/metrics/dual_stats_manager.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from contextlib import AbstractContextManager, ExitStack, nullcontext
from typing import TYPE_CHECKING, Any, ClassVar, cast
from .metrics_registry import MetricsRegistry
from .stats import Stats
if TYPE_CHECKING:
from .protocols import DeltaType
def _value_is_provided(value: Any):
"""Return true if the value is not None and, if it has length > 0."""
if value is None:
return False
try:
# False for empty dicts and strings.
return len(value) > 0
except TypeError:
# Numbers and bools that don't have `len`.
return True
def _get_dict_with_defined_args(
    prov_count: int | None = None,
    prov_rate: int | float | None = None,
    prov_delta: bool | None = None,
    prov_tags: dict[str, Any] | None = None,
) -> dict[str, Any]:
    """Build a kwargs dict containing only the arguments that were actually provided."""
    candidates = (
        ("count", prov_count),
        ("rate", prov_rate),
        ("delta", prov_delta),
        ("tags", prov_tags),
    )
    return {key: value for key, value in candidates if _value_is_provided(value)}
def _get_args_dict_with_extra_tags_if_set(
    args_dict: dict[str, Any] | None = None,
    prov_tags: dict[str, Any] | None = None,
    prov_tags_extra: dict[str, Any] | None = None,
) -> dict[str, Any]:
    """
    Return a copy of *args_dict* whose ``tags`` entry is the merge of base and extra tags.

    When the merge yields no tags at all, any inherited ``tags`` key is dropped;
    with no extra tags the result is effectively *args_dict* unchanged.
    """
    merged_args: dict[str, Any] = dict(args_dict or {})
    combined_tags = _get_tags_with_extra(prov_tags, prov_tags_extra)
    if combined_tags:
        merged_args["tags"] = combined_tags
    else:
        merged_args.pop("tags", None)
    return merged_args
def _get_tags_with_extra(
prov_tags: dict[str, Any] | None = None,
prov_tags_extra: dict[str, Any] | None = None,
) -> dict[str, Any]:
"""Return a new dict with all tags if extra have been provided."""
# If there are no extra tags then return the original tags.
tags_full: dict[str, Any] = {}
if prov_tags:
tags_full.update(prov_tags)
# If there are `extra_tags`, then add them to the dict.
if prov_tags_extra is not None:
tags_full.update(prov_tags_extra)
return tags_full
class DualStatsManager:
    """
    Helper class to abstract enabling/disabling the export of metrics with legacy names.

    When ``export_legacy_names`` is enabled and ``extra_tags`` are supplied, each
    emission also publishes the metric under its legacy (pre-tag) name, resolved
    from the YAML-backed metrics registry.
    """

    metrics_dict = MetricsRegistry()

    # 'True' is also the default on the config.
    export_legacy_names: ClassVar[bool] = True

    @classmethod
    def initialize(cls, export_legacy_names: bool):
        """Configure whether metrics are also emitted under their legacy names."""
        cls.export_legacy_names = export_legacy_names

    @classmethod
    def get_legacy_stat(cls, stat: str, variables: dict[str, Any]) -> str | None:
        """
        Resolve the legacy name for *stat* from the registry.

        Returns ``None`` when the metric is registered without a legacy name ("-").

        :raises ValueError: if the metric is not in the registry, or any variable
            required by the legacy name template is missing from *variables*.
        """
        stat_from_registry = cls.metrics_dict.get(name=stat)
        # Validation 1: The metric must exist in the registry.
        if not stat_from_registry:
            raise ValueError(
                f"Metric '{stat}' not found in the registry. Add the metric to the YAML file before using it."
            )
        legacy_name = stat_from_registry.get("legacy_name", "-")
        if legacy_name == "-":
            return None
        # Get the required variables for the legacy name.
        required_vars = stat_from_registry.get("name_variables", [])
        # Validation 2: There must be a value for all required variables.
        missing_vars = set(required_vars) - set(variables.keys())
        if missing_vars:
            raise ValueError(
                f"Missing required variables for metric '{stat}': {sorted(missing_vars)}. "
                f"Required variables found in the registry: {required_vars}. "
                f"Provided variables: {sorted(variables.keys())}. "
                f"Provide all required variables."
            )
        # Extract only the variables needed for the legacy name.
        legacy_vars = {k: variables[k] for k in required_vars if k in variables}
        # Format and return the legacy name.
        return legacy_name.format(**legacy_vars)

    @classmethod
    def _require_legacy_stat(cls, stat: str, extra_tags: dict[str, Any]) -> str:
        """Return the legacy name for *stat*, raising ValueError when none is registered."""
        legacy_stat = cls.get_legacy_stat(stat=stat, variables=extra_tags)
        if legacy_stat is None:
            raise ValueError(f"Stat '{stat}' doesn't have a legacy name registered in the YAML file.")
        return legacy_stat

    @classmethod
    def incr(
        cls,
        stat: str,
        count: int | None = None,
        rate: int | float | None = None,
        *,
        tags: dict[str, Any] | None = None,
        extra_tags: dict[str, Any] | None = None,
    ) -> None:
        """Increment *stat*, also emitting the legacy-named metric when enabled."""
        kw = _get_dict_with_defined_args(count, rate, None, tags)
        if cls.export_legacy_names and extra_tags is not None:
            # The legacy metric is emitted with the base tags only (no extra tags).
            Stats.incr(cls._require_legacy_stat(stat, extra_tags), **kw)
        kw_with_extra_tags_if_set = _get_args_dict_with_extra_tags_if_set(kw, tags, extra_tags)
        Stats.incr(stat, **kw_with_extra_tags_if_set)

    @classmethod
    def decr(
        cls,
        stat: str,
        count: int | None = None,
        rate: int | float | None = None,
        *,
        tags: dict[str, Any] | None = None,
        extra_tags: dict[str, Any] | None = None,
    ) -> None:
        """Decrement *stat*, also emitting the legacy-named metric when enabled."""
        kw = _get_dict_with_defined_args(count, rate, None, tags)
        if cls.export_legacy_names and extra_tags is not None:
            # The legacy metric is emitted with the base tags only (no extra tags).
            Stats.decr(cls._require_legacy_stat(stat, extra_tags), **kw)
        kw_with_extra_tags_if_set = _get_args_dict_with_extra_tags_if_set(kw, tags, extra_tags)
        Stats.decr(stat, **kw_with_extra_tags_if_set)

    @classmethod
    def gauge(
        cls,
        stat: str,
        value: float,
        rate: int | float | None = None,
        delta: bool | None = None,
        *,
        tags: dict[str, Any] | None = None,
        extra_tags: dict[str, Any] | None = None,
    ) -> None:
        """Set gauge *stat* to *value*, also emitting the legacy-named metric when enabled."""
        kw = _get_dict_with_defined_args(None, rate, delta, tags)
        if cls.export_legacy_names and extra_tags is not None:
            # The legacy metric is emitted with the base tags only (no extra tags).
            Stats.gauge(cls._require_legacy_stat(stat, extra_tags), value, **kw)
        kw_with_extra_tags_if_set = _get_args_dict_with_extra_tags_if_set(kw, tags, extra_tags)
        Stats.gauge(stat, value, **kw_with_extra_tags_if_set)

    @classmethod
    def timing(
        cls,
        stat: str,
        dt: DeltaType,
        *,
        tags: dict[str, Any] | None = None,
        extra_tags: dict[str, Any] | None = None,
    ) -> None:
        """Record timing *dt* for *stat*, also emitting the legacy-named metric when enabled."""
        if cls.export_legacy_names and extra_tags is not None:
            legacy_stat = cls._require_legacy_stat(stat, extra_tags)
            if tags:
                Stats.timing(legacy_stat, dt, tags=tags)
            else:
                Stats.timing(legacy_stat, dt)
        tags_with_extra = _get_tags_with_extra(tags, extra_tags)
        if tags_with_extra:
            Stats.timing(stat, dt, tags=tags_with_extra)
        else:
            Stats.timing(stat, dt)

    @classmethod
    def timer(
        cls,
        stat: str,
        tags: dict[str, Any] | None = None,
        extra_tags: dict[str, Any] | None = None,
        **kwargs,
    ):
        """
        Return a context manager timing *stat* (and its legacy name when enabled).

        BUGFIX: previously a metric registered without a legacy name ("-") was timed
        under the literal name ``None``; now this raises ValueError, consistent with
        ``incr``/``decr``/``gauge``/``timing``.
        """
        kw = dict(kwargs)
        if tags is not None:
            kw["tags"] = tags
        # Used with a context manager.
        stack = ExitStack()
        if cls.export_legacy_names and extra_tags is not None:
            ctx_mg1: AbstractContextManager[Any] = cast(
                "AbstractContextManager[Any]", Stats.timer(cls._require_legacy_stat(stat, extra_tags), **kw)
            )
        else:
            ctx_mg1 = nullcontext()
        stack.enter_context(ctx_mg1)
        kw_with_extra_tags_if_set = _get_args_dict_with_extra_tags_if_set(kw, tags, extra_tags)
        stack.enter_context(Stats.timer(stat, **kw_with_extra_tags_if_set))
        return stack
| {
"repo_id": "apache/airflow",
"file_path": "shared/observability/src/airflow_shared/observability/metrics/dual_stats_manager.py",
"license": "Apache License 2.0",
"lines": 226,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:shared/observability/src/airflow_shared/observability/metrics/metrics_registry.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from pathlib import Path
# Path to the bundled YAML metrics registry, resolved relative to this module.
YAML_REGISTRY_PATH = Path(__file__).parent / "metrics_template.yaml"
# Destination (relative to the repository root) of the auto-generated RST metric tables.
RST_TABLE_OUTPUT_PATH = "airflow-core/docs/administration-and-deployment/logging-monitoring/metric_tables.rst"
def read_metrics_yaml(yaml_path: str) -> list:
    """
    Read the metrics from a YAML registry file.

    :param yaml_path: path of the YAML registry file to read.
    Returns:
        List of metric dictionaries (the ``metrics`` key of the document)
    :raises FileNotFoundError: if *yaml_path* does not exist.
    """
    # Imported lazily so importing this module does not require PyYAML.
    import yaml

    # Path is already imported at module level; the previous function-local
    # re-import shadowed it needlessly.
    yaml_file = Path(yaml_path)
    if not yaml_file.exists():
        raise FileNotFoundError(f"The provided YAML file doesn't exist: '{yaml_path}'")
    with open(yaml_file) as f:
        data = yaml.safe_load(f)
    return data["metrics"]
def convert_to_rst_tables(metrics: list) -> str:
    """
    Convert a metrics list to RST tables, separated by type.

    Returns:
        RST tables as a string, separated by type (counters, gauges, timers)
    """
    from tabulate import tabulate

    # Group table rows by metric type; metrics with an unknown type are skipped.
    rows_by_type: dict[str, list] = {"counter": [], "gauge": [], "timer": []}
    for metric in metrics:
        metric_type = metric.get("type", "").lower()
        if metric_type not in rows_by_type:
            continue
        rows_by_type[metric_type].append(
            [
                f"``{metric['name']}``",
                f"``{metric.get('legacy_name', '-')}``",
                metric["description"],
            ]
        )
    headers = ["Name", "Legacy Name", "Description"]
    # Emit one RST section per metric type, in a fixed order; the section
    # underline matches the title length, as RST requires.
    output = []
    sections = (
        ("Counters", rows_by_type["counter"]),
        ("Gauges", rows_by_type["gauge"]),
        ("Timers", rows_by_type["timer"]),
    )
    for title, rows in sections:
        if not rows:
            continue
        output.append(title)
        output.append("-" * len(title))
        output.append("")
        output.append(tabulate(rows, headers=headers, tablefmt="rst"))
        output.append("")
    return "\n".join(output)
def write_metric_tables_file(rst_tables_str: str, output_path: str, yaml_source: str):
    """
    Write the RST tables string to a file, prefixed with the ASF license and a
    do-not-edit warning.

    :param rst_tables_str: rendered RST tables to write.
    :param output_path: destination file path.
    :param yaml_source: path of the YAML registry the tables were generated from
        (referenced in the warning banner).
    """
    license_local_var = """ .. Licensed to the Apache Software Foundation (ASF) under one
    or more contributor license agreements.  See the NOTICE file
    distributed with this work for additional information
    regarding copyright ownership.  The ASF licenses this file
    to you under the Apache License, Version 2.0 (the
    "License"); you may not use this file except in compliance
    with the License.  You may obtain a copy of the License at

 .. http://www.apache.org/licenses/LICENSE-2.0

 .. Unless required by applicable law or agreed to in writing,
    software distributed under the License is distributed on an
    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
    KIND, either express or implied.  See the License for the
    specific language governing permissions and limitations
    under the License.
"""
    warning = f""".. WARNING: This file is auto-generated from '{yaml_source}'.
   Do not edit this file manually. Any changes will be overwritten during the next build.
   To modify metrics, edit '{yaml_source}' and rebuild.
"""
    # Path is already imported at module level; the previous function-local
    # re-import shadowed it needlessly.
    output_file = Path(output_path)
    with open(output_file, "w") as f:
        f.write(license_local_var)
        f.write("\n")
        f.write(warning)
        f.write("\n")
        f.write(rst_tables_str)
def generate_metrics_rst_from_registry():
    """Regenerate the RST metric-tables document from the YAML registry."""
    registry_metrics = read_metrics_yaml(yaml_path=str(YAML_REGISTRY_PATH))
    rendered_tables = convert_to_rst_tables(metrics=registry_metrics)
    write_metric_tables_file(
        rst_tables_str=rendered_tables,
        output_path=RST_TABLE_OUTPUT_PATH,
        yaml_source=str(YAML_REGISTRY_PATH),
    )
class MetricsRegistry:
    """In-memory lookup of metric definitions loaded from the YAML registry."""

    def __init__(self):
        # Index the registry by metric name so lookups are O(1).
        entries = read_metrics_yaml(yaml_path=str(YAML_REGISTRY_PATH))
        self._metrics = {entry["name"]: entry for entry in entries}

    def get(self, name: str) -> dict | None:
        """Return the metric definition for *name*, or None when unknown."""
        return self._metrics.get(name)
| {
"repo_id": "apache/airflow",
"file_path": "shared/observability/src/airflow_shared/observability/metrics/metrics_registry.py",
"license": "Apache License 2.0",
"lines": 127,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:shared/observability/tests/observability/metrics/test_dual_stats_manager.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any
import pytest
from airflow_shared.observability.metrics import dual_stats_manager
class TestDualStatsManager:
    """Tests for the module-level helpers and registry lookups of ``dual_stats_manager``."""

    @pytest.mark.parametrize(
        ("value", "expected"),
        [
            pytest.param(1, True, id="number"),
            pytest.param(False, True, id="boolean"),
            pytest.param(None, False, id="None"),
            pytest.param({}, False, id="empty_dict"),
        ],
    )
    def test_value_is_provided(self, value: Any, expected: bool):
        """None and empty containers count as not provided; scalars always count."""
        result = dual_stats_manager._value_is_provided(value)
        assert result == expected

    @pytest.mark.parametrize(
        ("count", "rate", "delta", "tags", "expected_args_dict"),
        [
            pytest.param(1, 1, False, {}, {"count": 1, "rate": 1, "delta": False}, id="all_params_empty_tags"),
            pytest.param(
                1,
                1,
                False,
                {"test": True},
                {"count": 1, "rate": 1, "delta": False, "tags": {"test": True}},
                id="provide_tags",
            ),
            pytest.param(None, 1, False, {}, {"rate": 1, "delta": False}, id="no_count"),
            pytest.param(1, None, False, {}, {"count": 1, "delta": False}, id="no_rate"),
            pytest.param(1, 1, None, {}, {"count": 1, "rate": 1}, id="no_delta"),
            pytest.param(1, 1, False, None, {"count": 1, "rate": 1, "delta": False}, id="no_tags"),
        ],
    )
    def test_get_dict_with_defined_args(
        self,
        count: int | None,
        rate: int | None,
        delta: bool | None,
        tags: dict[str, Any] | None,
        expected_args_dict: dict[str, Any],
    ):
        """Only arguments that were actually provided end up in the kwargs dict."""
        args_dict = dual_stats_manager._get_dict_with_defined_args(count, rate, delta, tags)
        # Compare whole dicts: sorted() on a dict yields keys only, so the previous
        # key-only comparison could not detect wrong values.
        assert args_dict == expected_args_dict

    @pytest.mark.parametrize(
        ("args_dict", "tags", "extra_tags", "expected_args_dict"),
        [
            pytest.param(
                {"count": 1},
                {"test": True},
                {},
                {"count": 1, "tags": {"test": True}},
                id="no_extra_tags",
            ),
            pytest.param(
                {},
                {"test": True},
                {},
                {"tags": {"test": True}},
                id="no_args_no_extra_but_tags",
            ),
            pytest.param(
                {},
                {"test": True},
                {"test_extra": True},
                {"tags": {"test": True, "test_extra": True}},
                id="no_args_but_tags_and_extra",
            ),
            pytest.param(
                {"count": 1},
                {"test": True},
                {},
                {"count": 1, "tags": {"test": True}},
                id="no_args_no_tags_but_extra_tags",
            ),
            pytest.param(
                {"count": 1},
                {"test": True},
                {"test_extra": True},
                {"count": 1, "tags": {"test": True, "test_extra": True}},
                id="all_params_provided",
            ),
            pytest.param(
                {"count": 1, "rate": 3},
                {"test1": True, "test2": False},
                {"test_extra1": True, "test_extra2": False, "test_extra3": True},
                {
                    "count": 1,
                    "rate": 3,
                    "tags": {
                        "test1": True,
                        "test2": False,
                        "test_extra1": True,
                        "test_extra2": False,
                        "test_extra3": True,
                    },
                },
                id="multiple_params",
            ),
        ],
    )
    def test_get_args_dict_with_extra_tags_if_set(
        self,
        args_dict: dict[str, Any] | None,
        tags: dict[str, Any] | None,
        extra_tags: dict[str, Any] | None,
        expected_args_dict: dict[str, Any],
    ):
        """The merged args dict must equal the expected dict exactly (keys and values)."""
        dict_full = dual_stats_manager._get_args_dict_with_extra_tags_if_set(args_dict, tags, extra_tags)
        # Compare whole dicts: sorted() on a dict yields keys only, so the previous
        # key-only comparison could not detect wrong values.
        assert dict_full == expected_args_dict

    @pytest.mark.parametrize(
        ("tags", "extra_tags", "expected_tags_dict"),
        [
            pytest.param(
                {"test": True},
                {"test_extra": True},
                {"test": True, "test_extra": True},
                id="all_params_provided",
            ),
            pytest.param({}, {}, {}, id="no_params_provided"),
            pytest.param({"test": True}, {}, {"test": True}, id="only_tags"),
            pytest.param({}, {"test_extra": True}, {"test_extra": True}, id="only_extra"),
            pytest.param(
                {"test1": True, "test2": False},
                {"test_extra1": True, "test_extra2": False, "test_extra3": True},
                {
                    "test1": True,
                    "test2": False,
                    "test_extra1": True,
                    "test_extra2": False,
                    "test_extra3": True,
                },
                id="multiple_params",
            ),
        ],
    )
    def test_get_tags_with_extra(
        self,
        tags: dict[str, Any] | None,
        extra_tags: dict[str, Any] | None,
        expected_tags_dict: dict[str, Any],
    ):
        """Merged tags must equal the expected dict exactly (keys and values)."""
        tags_full = dual_stats_manager._get_tags_with_extra(tags, extra_tags)
        # Compare whole dicts: sorted() on a dict yields keys only, so the previous
        # key-only comparison could not detect wrong values.
        assert tags_full == expected_tags_dict

    @pytest.mark.parametrize(
        ("stat", "variables", "expected_legacy_stat", "raises_value_error", "expected_error_msg"),
        [
            pytest.param(
                "operator_failures",
                {"operator_name": "exec1"},
                "operator_failures_exec1",
                False,
                "",
                id="no_errors",
            ),
            pytest.param(
                "operator_failures",
                {},
                "operator_failures_exec1",
                True,
                "Missing required variables for metric",
                id="missing_params",
            ),
            pytest.param(
                "missing_metric",
                {},
                "",
                True,
                "Add the metric to the YAML file before using it.",
                id="missing_metric",
            ),
        ],
    )
    def test_get_legacy_stat_from_registry(
        self,
        stat: str,
        variables: dict[str, Any],
        expected_legacy_stat: str,
        raises_value_error: bool,
        expected_error_msg: str,
    ):
        """Legacy names resolve from the registry; missing metrics or variables raise ValueError."""
        from airflow_shared.observability.metrics.dual_stats_manager import DualStatsManager

        manager = DualStatsManager()
        if raises_value_error:
            with pytest.raises(
                ValueError,
                match=expected_error_msg,
            ):
                manager.get_legacy_stat(stat, variables)
        else:
            legacy_stat = manager.get_legacy_stat(stat, variables)
            assert legacy_stat == expected_legacy_stat
| {
"repo_id": "apache/airflow",
"file_path": "shared/observability/tests/observability/metrics/test_dual_stats_manager.py",
"license": "Apache License 2.0",
"lines": 275,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:dev/prune_old_dirs.py | #!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#
# Usage:
# uv run prune_old_svn_versions.py [--path PATH] [--execute]
#
# Defaults to current directory. Without --execute it only prints the svn remove commands.
#
# /// script
# requires-python = ">=3.10"
# dependencies = [
# "rich>=13.6.0",
# "packaging>=23.0",
# ]
# ///
from __future__ import annotations
import argparse
import os
import subprocess
import sys
from packaging.version import InvalidVersion, Version
from rich.console import Console
# Rich console used for all user-facing output.
console = Console()
# command-line args
parser = argparse.ArgumentParser()
parser.add_argument("--path", "-p", default=".")  # directory holding the version dirs (default: cwd)
parser.add_argument("--execute", "-x", action="store_true", help="Execute svn remove (otherwise dry-run)")
args = parser.parse_args()
def parse_version(v):
    """Parse *v* into a packaging ``Version``; return None for non-version strings."""
    try:
        parsed = Version(v)
    except InvalidVersion:
        return None
    return parsed
# Collect sub-directories of the target path whose names parse as versions.
scan_dir = os.path.abspath(args.path)
version_dirs = []
for entry in os.listdir(scan_dir):
    if not os.path.isdir(os.path.join(scan_dir, entry)):
        continue
    version = parse_version(entry)
    if version is not None:
        version_dirs.append((version, entry))
if not version_dirs:
    console.print(f"[bold yellow]No version-like directories found in {scan_dir}[/]")
    sys.exit(0)
# Stable sort by parsed version; the newest directory is kept.
version_dirs.sort(key=lambda pair: pair[0])
kept = version_dirs[-1][1]
stale_dirs = [dir_name for _, dir_name in version_dirs[:-1]]
console.print(f"Keeping: {kept}")
if not stale_dirs:
    console.print("No older versions to remove.")
    sys.exit(0)
for dir_name in stale_dirs:
    target = os.path.join(scan_dir, dir_name)
    cmd = ["svn", "rm", target]
    if not args.execute:
        console.print(f"[cyan]Dry-run: would run: {' '.join(cmd)}[/]")
        continue
    console.print(f"[cyan]Executing: {' '.join(cmd)}[/]")
    result = subprocess.run(cmd, capture_output=True, text=True, check=False)
    if result.returncode != 0:
        # Abort on the first failed removal, propagating svn's exit code.
        console.print(f"[bold red]svn rm failed for {target}: {result.stderr.strip()}[/]")
        sys.exit(result.returncode)
    console.print(f"[bold green]Removed: {target}[/]")
| {
"repo_id": "apache/airflow",
"file_path": "dev/prune_old_dirs.py",
"license": "Apache License 2.0",
"lines": 89,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/src/airflow/dag_processing/importers/base.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Abstract base class for DAG importers."""
from __future__ import annotations
import logging
import os
import threading
from abc import ABC, abstractmethod
from collections.abc import Iterator
from dataclasses import dataclass, field
from pathlib import Path
from typing import TYPE_CHECKING
from airflow._shared.module_loading.file_discovery import find_path_from_directory
from airflow.configuration import conf
from airflow.utils.file import might_contain_dag
if TYPE_CHECKING:
from airflow.sdk import DAG
# Module-level logger for importer diagnostics.
log = logging.getLogger(__name__)
@dataclass
class DagImportError:
    """Structured error information for DAG import failures."""

    file_path: str
    message: str
    error_type: str = "import"
    line_number: int | None = None
    column_number: int | None = None
    context: str | None = None
    suggestion: str | None = None
    stacktrace: str | None = None

    def format_message(self) -> str:
        """Render this error as a multi-line, human-readable string."""
        lines = [f"Error in {self.file_path}"]
        # Location is only reported when a line number is known; the column
        # is appended only alongside a line number.
        if self.line_number is not None:
            location = f"line {self.line_number}"
            if self.column_number is not None:
                location = f"{location}, column {self.column_number}"
            lines.append(f"Location: {location}")
        lines.append(f"Error ({self.error_type}): {self.message}")
        if self.context:
            lines.append(f"Context:\n{self.context}")
        if self.suggestion:
            lines.append(f"Suggestion: {self.suggestion}")
        return "\n".join(lines)
@dataclass
class DagImportWarning:
    """Warning information for non-fatal issues during DAG import."""

    # Path of the file the warning was raised for.
    file_path: str
    # Human-readable description of the issue.
    message: str
    # Category of the warning; free-form string, defaults to "general".
    warning_type: str = "general"
    # Line number in the file, when known.
    line_number: int | None = None
@dataclass
class DagImportResult:
    """Result of importing DAGs from a file."""

    file_path: str
    dags: list[DAG] = field(default_factory=list)
    errors: list[DagImportError] = field(default_factory=list)
    skipped_files: list[str] = field(default_factory=list)
    warnings: list[DagImportWarning] = field(default_factory=list)

    @property
    def success(self) -> bool:
        """True when the import produced no fatal errors (warnings do not count)."""
        return not self.errors
class AbstractDagImporter(ABC):
    """Abstract base class for DAG importers."""

    @classmethod
    @abstractmethod
    def supported_extensions(cls) -> list[str]:
        """Return file extensions this importer handles (e.g., ['.py', '.zip'])."""

    @abstractmethod
    def import_file(
        self,
        file_path: str | Path,
        *,
        bundle_path: Path | None = None,
        bundle_name: str | None = None,
        safe_mode: bool = True,
    ) -> DagImportResult:
        """Import DAGs from a file."""

    def can_handle(self, file_path: str | Path) -> bool:
        """Return True when this importer supports the file's extension (case-insensitive)."""
        path = Path(file_path) if isinstance(file_path, str) else file_path
        suffix = path.suffix.lower()
        return suffix in self.supported_extensions()

    def get_relative_path(self, file_path: str | Path, bundle_path: Path | None) -> str:
        """Return *file_path* relative to the bundle root when possible, else as given."""
        if bundle_path is None:
            return str(file_path)
        try:
            relative = Path(file_path).relative_to(bundle_path)
        except ValueError:
            # The file lives outside the bundle root; fall back to the raw path.
            return str(file_path)
        return str(relative)

    def list_dag_files(
        self,
        directory: str | os.PathLike[str],
        safe_mode: bool = True,
    ) -> Iterator[str]:
        """
        List DAG files in a directory that this importer can handle.

        Override this method to customize file discovery for your importer.
        The default implementation finds files matching supported_extensions()
        and respects .airflowignore files.

        :param directory: Directory to search for DAG files
        :param safe_mode: Whether to use heuristics to filter non-DAG files
        :return: Iterator of file paths
        """
        ignore_file_syntax = conf.get_mandatory_value("core", "DAG_IGNORE_FILE_SYNTAX", fallback="glob")
        handled_exts = {ext.lower() for ext in self.supported_extensions()}
        for candidate in find_path_from_directory(directory, ".airflowignore", ignore_file_syntax):
            candidate_path = Path(candidate)
            # Skip non-files and extensions this importer does not handle.
            if not candidate_path.is_file() or candidate_path.suffix.lower() not in handled_exts:
                continue
            # Apply the safe-mode content heuristic when enabled.
            if safe_mode and not might_contain_dag(candidate, safe_mode):
                continue
            yield candidate
class DagImporterRegistry:
    """
    Registry for DAG importers. Singleton that manages importers by file extension.

    Each file extension can only be handled by one importer at a time. If multiple
    importers claim the same extension, the last registered one wins and a warning
    is logged. The built-in PythonDagImporter handles .py and .zip extensions.
    """

    _instance: DagImporterRegistry | None = None
    _importers: dict[str, AbstractDagImporter]
    # Guards singleton construction across threads.
    _lock = threading.Lock()

    def __new__(cls) -> DagImporterRegistry:
        with cls._lock:
            if cls._instance is None:
                cls._instance = super().__new__(cls)
                cls._instance._importers = {}
                cls._instance._register_default_importers()
        return cls._instance

    def _register_default_importers(self) -> None:
        # Imported lazily to avoid a circular import at module load time.
        from airflow.dag_processing.importers.python_importer import PythonDagImporter

        self.register(PythonDagImporter())

    def register(self, importer: AbstractDagImporter) -> None:
        """
        Register an importer for its supported extensions.

        Each extension can only have one importer. If an extension is already registered,
        the new importer will override it and a warning will be logged.
        """
        for ext in importer.supported_extensions():
            ext_lower = ext.lower()
            if ext_lower in self._importers:
                existing = self._importers[ext_lower]
                log.warning(
                    "Extension '%s' already registered by %s, overriding with %s",
                    ext,
                    type(existing).__name__,
                    type(importer).__name__,
                )
            self._importers[ext_lower] = importer

    def get_importer(self, file_path: str | Path) -> AbstractDagImporter | None:
        """Get the appropriate importer for a file, or None if unsupported."""
        path = Path(file_path) if isinstance(file_path, str) else file_path
        return self._importers.get(path.suffix.lower())

    def can_handle(self, file_path: str | Path) -> bool:
        """Check if any registered importer can handle this file."""
        return self.get_importer(file_path) is not None

    def supported_extensions(self) -> list[str]:
        """Return all registered file extensions."""
        return list(self._importers.keys())

    def list_dag_files(
        self,
        directory: str | os.PathLike[str],
        safe_mode: bool = True,
    ) -> list[str]:
        """
        List all DAG files in a directory using all registered importers.

        If directory is actually a file, returns that file if any importer can handle it.

        :param directory: Directory (or file) to search for DAG files
        :param safe_mode: Whether to use heuristics to filter non-DAG files
        :return: List of file paths (deduplicated)
        """
        path = Path(directory)
        # If it's a file, just return it if we can handle it
        if path.is_file():
            if self.can_handle(path):
                return [str(path)]
            return []
        if not path.is_dir():
            return []
        seen_files: set[str] = set()
        file_paths: list[str] = []
        # BUGFIX: iterate a deduplicated, order-preserving view of the importers.
        # The previous set() iteration order is nondeterministic under hash
        # randomization, which made file-discovery order unstable across runs.
        for importer in dict.fromkeys(self._importers.values()):
            for file_path in importer.list_dag_files(directory, safe_mode):
                if file_path not in seen_files:
                    seen_files.add(file_path)
                    file_paths.append(file_path)
        return file_paths

    @classmethod
    def reset(cls) -> None:
        """Reset the singleton (for testing)."""
        cls._instance = None
def get_importer_registry() -> DagImporterRegistry:
    """Return the process-wide importer registry (the class is a singleton)."""
    registry = DagImporterRegistry()
    return registry
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/dag_processing/importers/base.py",
"license": "Apache License 2.0",
"lines": 213,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/src/airflow/dag_processing/importers/python_importer.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Python DAG importer - imports DAGs from Python files."""
from __future__ import annotations
import contextlib
import importlib
import importlib.machinery
import importlib.util
import logging
import os
import signal
import sys
import traceback
import warnings
import zipfile
from collections.abc import Iterator
from pathlib import Path
from typing import TYPE_CHECKING, Any
from airflow import settings
from airflow._shared.module_loading.file_discovery import find_path_from_directory
from airflow.configuration import conf
from airflow.dag_processing.importers.base import (
AbstractDagImporter,
DagImportError,
DagImportResult,
DagImportWarning,
)
from airflow.utils.docs import get_docs_url
from airflow.utils.file import get_unique_dag_module_name, might_contain_dag
if TYPE_CHECKING:
from types import ModuleType
from airflow.sdk import DAG
log = logging.getLogger(__name__)
@contextlib.contextmanager
def _timeout(seconds: float = 1, error_message: str = "Timeout"):
    """
    Context manager that raises AirflowTaskTimeout when the body exceeds *seconds*.

    Implemented with SIGALRM / setitimer, so it only takes effect in the main
    thread; elsewhere the timeout is silently disabled and a warning is logged.

    :param seconds: Wall-clock budget for the wrapped block.
    :param error_message: Base message for the raised AirflowTaskTimeout.
    """
    # Include the PID so timeouts from parallel parsing processes are distinguishable.
    error_message = error_message + ", PID: " + str(os.getpid())

    def handle_timeout(signum, frame):
        log.error("Process timed out, PID: %s", str(os.getpid()))
        # Imported lazily so the module loads without the SDK exception module.
        from airflow.sdk.exceptions import AirflowTaskTimeout

        raise AirflowTaskTimeout(error_message)

    try:
        try:
            signal.signal(signal.SIGALRM, handle_timeout)
            signal.setitimer(signal.ITIMER_REAL, seconds)
        except ValueError:
            # signal.signal raises ValueError outside the main thread.
            log.warning("timeout can't be used in the current context", exc_info=True)
        yield
    finally:
        # Always cancel the timer; suppress the same non-main-thread ValueError.
        with contextlib.suppress(ValueError):
            signal.setitimer(signal.ITIMER_REAL, 0)
class PythonDagImporter(AbstractDagImporter):
    """
    Importer for Python DAG files and zip archives containing Python DAGs.

    This is the default importer registered with the DagImporterRegistry. It handles:

    - .py files: Standard Python DAG files
    - .zip files: ZIP archives containing Python DAG files

    Note: The .zip extension is exclusively owned by this importer. If you need to
    support other file formats inside ZIP archives (e.g., YAML), you would need to
    either extend this importer or create a composite importer that delegates based
    on the contents of the archive.
    """

    @classmethod
    def supported_extensions(cls) -> list[str]:
        """Return file extensions handled by this importer (.py and .zip)."""
        return [".py", ".zip"]

    def list_dag_files(
        self,
        directory: str | os.PathLike[str],
        safe_mode: bool = True,
    ) -> Iterator[str]:
        """
        List Python DAG files in a directory.

        Handles both .py files and .zip archives containing Python DAGs.
        Respects .airflowignore files in the directory tree.
        """
        ignore_file_syntax = conf.get_mandatory_value("core", "DAG_IGNORE_FILE_SYNTAX", fallback="glob")
        for file_path in find_path_from_directory(directory, ".airflowignore", ignore_file_syntax):
            path = Path(file_path)
            try:
                # Accept .py files by suffix; anything else must be a real zip archive.
                if path.is_file() and (path.suffix.lower() == ".py" or zipfile.is_zipfile(path)):
                    if might_contain_dag(file_path, safe_mode):
                        yield file_path
            except Exception:
                # Never let one unreadable file abort the directory scan.
                log.exception("Error while examining %s", file_path)

    def import_file(
        self,
        file_path: str | Path,
        *,
        bundle_path: Path | None = None,
        bundle_name: str | None = None,
        safe_mode: bool = True,
    ) -> DagImportResult:
        """
        Import DAGs from a Python file or zip archive.

        :param file_path: Path to the Python file to import.
        :param bundle_path: Path to the bundle root.
        :param bundle_name: Name of the bundle.
        :param safe_mode: If True, skip files that don't appear to contain DAGs.
        :return: DagImportResult with imported DAGs and any errors.
        """
        from airflow.sdk.definitions._internal.contextmanager import DagContext

        filepath = str(file_path)
        relative_path = self.get_relative_path(filepath, bundle_path)
        result = DagImportResult(file_path=relative_path)
        if not os.path.isfile(filepath):
            result.errors.append(
                DagImportError(
                    file_path=relative_path,
                    message=f"File not found: {filepath}",
                    error_type="file_not_found",
                )
            )
            return result
        # Clear any autoregistered dags from previous imports
        DagContext.autoregistered_dags.clear()
        # Capture warnings during import
        captured_warnings: list[warnings.WarningMessage] = []
        try:
            with warnings.catch_warnings(record=True) as captured_warnings:
                # A .py suffix (or anything that is not a zip) goes through the
                # single-file loader; real zip archives go through the zip loader.
                if filepath.endswith(".py") or not zipfile.is_zipfile(filepath):
                    modules = self._load_modules_from_file(filepath, safe_mode, result)
                else:
                    modules = self._load_modules_from_zip(filepath, safe_mode, result)
        except TypeError:
            # Configuration errors (e.g., invalid timeout type) should propagate
            raise
        except Exception as e:
            result.errors.append(
                DagImportError(
                    file_path=relative_path,
                    message=str(e),
                    error_type="import",
                    stacktrace=traceback.format_exc(),
                )
            )
            return result
        # Convert captured warnings to DagImportWarning
        for warn_msg in captured_warnings:
            category = warn_msg.category.__name__
            # Qualify non-builtin warning categories with their module.
            if (module := warn_msg.category.__module__) != "builtins":
                category = f"{module}.{category}"
            result.warnings.append(
                DagImportWarning(
                    file_path=warn_msg.filename,
                    message=str(warn_msg.message),
                    warning_type=category,
                    line_number=warn_msg.lineno,
                )
            )
        # Process imported modules to extract DAGs
        self._process_modules(filepath, modules, bundle_name, bundle_path, result)
        return result

    def _load_modules_from_file(
        self, filepath: str, safe_mode: bool, result: DagImportResult
    ) -> list[ModuleType]:
        """
        Execute a single .py file as a module and return it in a one-element list.

        Import failures are recorded on *result* instead of being raised, and the
        actual parse is wrapped in the configured dagbag import timeout when set.
        """
        from airflow.sdk.definitions._internal.contextmanager import DagContext

        def sigsegv_handler(signum, frame):
            msg = f"Received SIGSEGV signal while processing {filepath}."
            log.error(msg)
            result.errors.append(
                DagImportError(
                    file_path=result.file_path,
                    message=msg,
                    error_type="segfault",
                )
            )

        try:
            signal.signal(signal.SIGSEGV, sigsegv_handler)
        except ValueError:
            log.warning("SIGSEGV signal handler registration failed. Not in the main thread")
        if not might_contain_dag(filepath, safe_mode):
            log.debug("File %s assumed to contain no DAGs. Skipping.", filepath)
            result.skipped_files.append(filepath)
            return []
        log.debug("Importing %s", filepath)
        mod_name = get_unique_dag_module_name(filepath)
        # Force a fresh execution even if the module was imported before.
        if mod_name in sys.modules:
            del sys.modules[mod_name]
        DagContext.current_autoregister_module_name = mod_name

        def parse(mod_name: str, filepath: str) -> list[ModuleType]:
            # Load and execute the file; on failure, record an import error on
            # *result* and return an empty list instead of raising.
            try:
                loader = importlib.machinery.SourceFileLoader(mod_name, filepath)
                spec = importlib.util.spec_from_loader(mod_name, loader)
                new_module = importlib.util.module_from_spec(spec)  # type: ignore[arg-type]
                sys.modules[spec.name] = new_module  # type: ignore[union-attr]
                loader.exec_module(new_module)
                return [new_module]
            except KeyboardInterrupt:
                raise
            except BaseException as e:
                DagContext.autoregistered_dags.clear()
                log.exception("Failed to import: %s", filepath)
                if conf.getboolean("core", "dagbag_import_error_tracebacks"):
                    stacktrace = traceback.format_exc(
                        limit=-conf.getint("core", "dagbag_import_error_traceback_depth")
                    )
                else:
                    stacktrace = None
                result.errors.append(
                    DagImportError(
                        file_path=result.file_path,
                        message=str(e),
                        error_type="import",
                        stacktrace=stacktrace,
                    )
                )
                return []

        dagbag_import_timeout = settings.get_dagbag_import_timeout(filepath)
        if not isinstance(dagbag_import_timeout, (int, float)):
            raise TypeError(
                f"Value ({dagbag_import_timeout}) from get_dagbag_import_timeout must be int or float"
            )
        # A non-positive timeout disables the alarm entirely.
        if dagbag_import_timeout <= 0:
            return parse(mod_name, filepath)
        timeout_msg = (
            f"DagBag import timeout for {filepath} after {dagbag_import_timeout}s.\n"
            "Please take a look at these docs to improve your DAG import time:\n"
            f"* {get_docs_url('best-practices.html#top-level-python-code')}\n"
            f"* {get_docs_url('best-practices.html#reducing-dag-complexity')}"
        )
        with _timeout(dagbag_import_timeout, error_message=timeout_msg):
            return parse(mod_name, filepath)

    def _load_modules_from_zip(
        self, filepath: str, safe_mode: bool, result: DagImportResult
    ) -> list[ModuleType]:
        """Load Python modules from a zip archive."""
        from airflow.sdk.definitions._internal.contextmanager import DagContext

        mods: list[ModuleType] = []
        with zipfile.ZipFile(filepath) as current_zip_file:
            for zip_info in current_zip_file.infolist():
                zip_path = Path(zip_info.filename)
                # Only top-level .py/.pyc members of the archive are considered.
                if zip_path.suffix not in [".py", ".pyc"] or len(zip_path.parts) > 1:
                    continue
                if zip_path.stem == "__init__":
                    log.warning("Found %s at root of %s", zip_path.name, filepath)
                log.debug("Reading %s from %s", zip_info.filename, filepath)
                if not might_contain_dag(zip_info.filename, safe_mode, current_zip_file):
                    result.skipped_files.append(f"{filepath}:{zip_info.filename}")
                    continue
                mod_name = zip_path.stem
                if mod_name in sys.modules:
                    del sys.modules[mod_name]
                DagContext.current_autoregister_module_name = mod_name
                try:
                    # Temporarily put the archive itself on sys.path so
                    # importlib's zipimport machinery can resolve the module.
                    sys.path.insert(0, filepath)
                    current_module = importlib.import_module(mod_name)
                    mods.append(current_module)
                except Exception as e:
                    DagContext.autoregistered_dags.clear()
                    fileloc = os.path.join(filepath, zip_info.filename)
                    log.exception("Failed to import: %s", fileloc)
                    if conf.getboolean("core", "dagbag_import_error_tracebacks"):
                        stacktrace = traceback.format_exc(
                            limit=-conf.getint("core", "dagbag_import_error_traceback_depth")
                        )
                    else:
                        stacktrace = None
                    result.errors.append(
                        DagImportError(
                            file_path=fileloc,  # Use the file path inside the ZIP
                            message=str(e),
                            error_type="import",
                            stacktrace=stacktrace,
                        )
                    )
                finally:
                    # Undo the sys.path insertion made for this archive member.
                    if sys.path[0] == filepath:
                        del sys.path[0]
        return mods

    def _process_modules(
        self,
        filepath: str,
        mods: list[Any],
        bundle_name: str | None,
        bundle_path: Path | None,
        result: DagImportResult,
    ) -> None:
        """Extract DAG objects from modules. Validation happens in bag_dag()."""
        # NOTE(review): bundle_name is accepted but not referenced in this method.
        from airflow.sdk import DAG
        from airflow.sdk.definitions._internal.contextmanager import DagContext

        # Collect (dag, module) pairs: module-level DAG objects plus any DAGs
        # autoregistered via the DagContext during import.
        top_level_dags: set[tuple[DAG, Any]] = {
            (o, m) for m in mods for o in m.__dict__.values() if isinstance(o, DAG)
        }
        top_level_dags.update(DagContext.autoregistered_dags)
        DagContext.current_autoregister_module_name = None
        DagContext.autoregistered_dags.clear()
        for dag, mod in top_level_dags:
            dag.fileloc = mod.__file__
            relative_fileloc = self.get_relative_path(dag.fileloc, bundle_path)
            dag.relative_fileloc = relative_fileloc
            result.dags.append(dag)
            log.debug("Found DAG %s", dag.dag_id)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/dag_processing/importers/python_importer.py",
"license": "Apache License 2.0",
"lines": 307,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/tests/unit/dag_processing/importers/test_registry.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Tests for the DagImporterRegistry."""
from __future__ import annotations
from pathlib import Path
from airflow.dag_processing.importers import (
DagImporterRegistry,
PythonDagImporter,
get_importer_registry,
)
class TestDagImporterRegistry:
    """Exercise the DagImporterRegistry singleton and its lookup behaviour."""

    def setup_method(self):
        """Start every test from a clean registry."""
        DagImporterRegistry.reset()

    def teardown_method(self):
        """Do not leak the singleton into other test modules."""
        DagImporterRegistry.reset()

    def test_singleton_pattern(self):
        """Repeated lookups must yield one shared instance."""
        first = get_importer_registry()
        second = get_importer_registry()
        assert first is second

    def test_default_importers_registered(self):
        """A fresh registry handles Python files out of the box."""
        extensions = get_importer_registry().supported_extensions()
        assert ".py" in extensions

    def test_get_importer_for_python(self):
        """Looking up a .py file yields the PythonDagImporter."""
        found = get_importer_registry().get_importer("test.py")
        assert found is not None
        assert isinstance(found, PythonDagImporter)

    def test_get_importer_for_unknown(self):
        """Unknown file types resolve to None."""
        assert get_importer_registry().get_importer("test.txt") is None

    def test_can_handle_supported_files(self):
        """can_handle accepts both str and Path for supported types."""
        registry = get_importer_registry()
        assert registry.can_handle("dag.py")
        assert registry.can_handle(Path("subdir/dag.py"))

    def test_can_handle_unsupported_files(self):
        """can_handle rejects file types with no registered importer."""
        registry = get_importer_registry()
        for name in ("readme.txt", "config.json", "script.sh"):
            assert not registry.can_handle(name)

    def test_case_insensitive_extension_matching(self):
        """Extension matching ignores case."""
        registry = get_importer_registry()
        assert registry.can_handle("dag.PY")
        assert registry.can_handle("dag.Py")

    def test_reset_clears_singleton(self):
        """After reset() the next lookup builds a brand-new instance."""
        before = get_importer_registry()
        DagImporterRegistry.reset()
        after = get_importer_registry()
        assert before is not after
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/dag_processing/importers/test_registry.py",
"license": "Apache License 2.0",
"lines": 77,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/apache/spark/src/airflow/providers/apache/spark/operators/spark_pyspark.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import inspect
from collections.abc import Callable, Sequence
from airflow.providers.apache.spark.hooks.spark_connect import SparkConnectHook
from airflow.providers.common.compat.sdk import BaseHook
from airflow.providers.common.compat.standard.operators import PythonOperator
SPARK_CONTEXT_KEYS = ["spark", "sc"]
class PySparkOperator(PythonOperator):
    """Submit the run of a pyspark job to an external spark-connect service or directly run the pyspark job in a standalone mode."""

    template_fields: Sequence[str] = ("conn_id", "config_kwargs", *PythonOperator.template_fields)

    def __init__(
        self,
        python_callable: Callable,
        conn_id: str | None = None,
        config_kwargs: dict | None = None,
        **kwargs,
    ):
        """
        :param python_callable: The pyspark job to run. Parameters named ``spark`` or ``sc``
            are rewritten to default to ``None`` so the operator can supply them itself.
        :param conn_id: Airflow connection to a Spark Connect service or a Spark master.
            When omitted, the job runs locally on ``local[*]``.
        :param config_kwargs: Extra Spark configuration; overrides values coming from
            the connection.
        """
        self.conn_id = conn_id
        self.config_kwargs = config_kwargs or {}
        # Give the Spark context/session parameters a None default so the base
        # PythonOperator does not require callers to pass them explicitly.
        signature = inspect.signature(python_callable)
        parameters = [
            param.replace(default=None) if param.name in SPARK_CONTEXT_KEYS else param
            for param in signature.parameters.values()
        ]
        # mypy does not understand __signature__ attribute
        # see https://github.com/python/mypy/issues/12472
        python_callable.__signature__ = signature.replace(parameters=parameters)  # type: ignore[attr-defined]
        super().__init__(
            python_callable=python_callable,
            **kwargs,
        )

    def execute_callable(self):
        """Build a SparkSession from the connection/config, inject it as ``spark``, run the callable."""
        from pyspark import SparkConf
        from pyspark.sql import SparkSession

        conf = SparkConf()
        conf.set("spark.app.name", f"{self.dag_id}-{self.task_id}")
        url = "local[*]"
        if self.conn_id:
            # we handle both spark connect and spark standalone
            conn = BaseHook.get_connection(self.conn_id)
            if conn.conn_type == SparkConnectHook.conn_type:
                url = SparkConnectHook(self.conn_id).get_connection_url()
            elif conn.port:
                url = f"{conn.host}:{conn.port}"
            elif conn.host:
                url = conn.host
            for key, value in conn.extra_dejson.items():
                conf.set(key, value)
        # you cannot have both remote and master
        if url.startswith("sc://"):
            conf.set("spark.remote", url)
        # task can override connection config
        for key, value in self.config_kwargs.items():
            conf.set(key, value)
        if not conf.get("spark.remote") and not conf.get("spark.master"):
            conf.set("spark.master", url)
        spark_session = SparkSession.builder.config(conf=conf).getOrCreate()
        try:
            # Inject the session under the reserved "spark" kwarg for the callable.
            self.op_kwargs = {**self.op_kwargs, "spark": spark_session}
            return super().execute_callable()
        finally:
            # Always release the session, even if the callable raised.
            spark_session.stop()
| {
"repo_id": "apache/airflow",
"file_path": "providers/apache/spark/src/airflow/providers/apache/spark/operators/spark_pyspark.py",
"license": "Apache License 2.0",
"lines": 79,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/apache/spark/tests/unit/apache/spark/operators/test_spark_pyspark.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.models.dag import DAG
from airflow.providers.apache.spark.operators.spark_pyspark import PySparkOperator
from airflow.utils import timezone
DEFAULT_DATE = timezone.datetime(2024, 2, 1, tzinfo=timezone.utc)
class TestSparkPySparkOperator:
    """Construction-level tests for PySparkOperator."""

    _config = {
        "conn_id": "spark_special_conn_id",
    }

    def setup_method(self):
        """Create a throwaway DAG for the operator under test."""
        default_args = {"owner": "airflow", "start_date": DEFAULT_DATE}
        self.dag = DAG("test_dag_id", schedule=None, default_args=default_args)

    def test_execute(self):
        """The conn_id kwarg is stored unchanged on the operator."""

        def my_spark_fn(spark):
            pass

        operator = PySparkOperator(
            task_id="spark_pyspark_job",
            python_callable=my_spark_fn,
            dag=self.dag,
            **self._config,
        )
        assert operator.conn_id == self._config["conn_id"]
| {
"repo_id": "apache/airflow",
"file_path": "providers/apache/spark/tests/unit/apache/spark/operators/test_spark_pyspark.py",
"license": "Apache License 2.0",
"lines": 36,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/ssh/src/airflow/providers/ssh/operators/ssh_remote_job.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""SSH Remote Job Operator for deferrable remote command execution."""
from __future__ import annotations
import warnings
from collections.abc import Container, Sequence
from datetime import timedelta
from functools import cached_property
from typing import TYPE_CHECKING, Any, Literal
from airflow.providers.common.compat.sdk import AirflowException, AirflowSkipException, BaseOperator
from airflow.providers.ssh.hooks.ssh import SSHHook
from airflow.providers.ssh.triggers.ssh_remote_job import SSHRemoteJobTrigger
from airflow.providers.ssh.utils.remote_job import (
RemoteJobPaths,
build_posix_cleanup_command,
build_posix_kill_command,
build_posix_os_detection_command,
build_posix_wrapper_command,
build_windows_cleanup_command,
build_windows_kill_command,
build_windows_os_detection_command,
build_windows_wrapper_command,
generate_job_id,
)
if TYPE_CHECKING:
from airflow.providers.common.compat.sdk import Context
class SSHRemoteJobOperator(BaseOperator):
r"""
Execute a command on a remote host via SSH with deferrable monitoring.
This operator submits a job to run detached on the remote host, then
uses a trigger to asynchronously monitor the job status and stream logs.
This approach is resilient to network interruptions as the remote job
continues running independently of the SSH connection.
The remote job is wrapped to:
- Run detached from the SSH session (via nohup on POSIX, Start-Process on Windows)
- Redirect stdout/stderr to a log file
- Write the exit code to a file on completion
:param ssh_conn_id: SSH connection ID from Airflow Connections
:param command: Command to execute on the remote host (templated)
:param remote_host: Override the host from the connection (templated)
:param environment: Environment variables to set for the command (templated)
:param remote_base_dir: Base directory for job artifacts (templated).
Defaults to /tmp/airflow-ssh-jobs on POSIX, C:\\Windows\\Temp\\airflow-ssh-jobs on Windows
:param poll_interval: Seconds between status polls (default: 5)
:param log_chunk_size: Max bytes to read per poll (default: 65536)
:param timeout: Hard timeout in seconds for the entire operation
:param cleanup: When to clean up remote job directory:
'never', 'on_success', or 'always' (default: 'never')
:param remote_os: Remote operating system: 'auto', 'posix', or 'windows' (default: 'auto')
:param skip_on_exit_code: Exit codes that should skip the task instead of failing
:param conn_timeout: SSH connection timeout in seconds
:param banner_timeout: Timeout waiting for SSH banner in seconds
"""
template_fields: Sequence[str] = ("command", "environment", "remote_host", "remote_base_dir")
template_ext: Sequence[str] = (
".sh",
".bash",
".ps1",
)
template_fields_renderers = {
"command": "bash",
"environment": "python",
}
ui_color = "#e4f0e8"
def __init__(
self,
*,
ssh_conn_id: str,
command: str,
remote_host: str | None = None,
environment: dict[str, str] | None = None,
remote_base_dir: str | None = None,
poll_interval: int = 5,
log_chunk_size: int = 65536,
timeout: int | None = None,
cleanup: Literal["never", "on_success", "always"] = "never",
remote_os: Literal["auto", "posix", "windows"] = "auto",
skip_on_exit_code: int | Container[int] | None = None,
conn_timeout: int | None = None,
banner_timeout: float = 30.0,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.ssh_conn_id = ssh_conn_id
self.command = command
self.remote_host = remote_host
self.environment = environment
if remote_base_dir is not None:
self._validate_base_dir(remote_base_dir)
self.remote_base_dir = remote_base_dir
self.poll_interval = poll_interval
self.log_chunk_size = log_chunk_size
self.timeout = timeout
self.cleanup = cleanup
self.remote_os = remote_os
self.conn_timeout = conn_timeout
self.banner_timeout = banner_timeout
self.skip_on_exit_code = (
skip_on_exit_code
if isinstance(skip_on_exit_code, Container)
else [skip_on_exit_code]
if skip_on_exit_code is not None
else []
)
self._job_id: str | None = None
self._paths: RemoteJobPaths | None = None
self._detected_os: Literal["posix", "windows"] | None = None
@staticmethod
def _validate_base_dir(path: str) -> None:
"""
Validate the remote base directory path for security.
:param path: Path to validate
:raises ValueError: If path contains dangerous patterns
"""
if not path:
raise ValueError("remote_base_dir cannot be empty")
if ".." in path:
raise ValueError(f"remote_base_dir cannot contain '..' (path traversal not allowed). Got: {path}")
if "\x00" in path:
raise ValueError("remote_base_dir cannot contain null bytes")
dangerous_patterns = ["/etc", "/bin", "/sbin", "/boot", "C:\\Windows", "C:\\Program Files"]
for pattern in dangerous_patterns:
if pattern in path:
warnings.warn(
f"remote_base_dir '{path}' contains potentially sensitive path '{pattern}'. "
"Ensure you have appropriate permissions.",
UserWarning,
stacklevel=3,
)
@cached_property
def ssh_hook(self) -> SSHHook:
"""Create the SSH hook for command submission."""
return SSHHook(
ssh_conn_id=self.ssh_conn_id,
remote_host=self.remote_host or "",
conn_timeout=self.conn_timeout,
banner_timeout=self.banner_timeout,
)
def _detect_remote_os(self) -> Literal["posix", "windows"]:
"""
Detect the remote operating system.
Uses a two-stage detection:
1. Try POSIX detection via `uname` (works on Linux, macOS, BSD, Solaris, AIX, etc.)
2. Try Windows detection via PowerShell
3. Raise error if both fail
"""
if self.remote_os != "auto":
return self.remote_os
self.log.info("Auto-detecting remote operating system...")
with self.ssh_hook.get_conn() as ssh_client:
try:
exit_status, stdout, _ = self.ssh_hook.exec_ssh_client_command(
ssh_client,
build_posix_os_detection_command(),
get_pty=False,
environment=None,
timeout=10,
)
if exit_status == 0 and stdout:
output = stdout.decode("utf-8", errors="replace").strip().lower()
posix_systems = [
"linux",
"darwin",
"freebsd",
"openbsd",
"netbsd",
"sunos",
"aix",
"hp-ux",
]
if any(system in output for system in posix_systems):
self.log.info("Detected POSIX system: %s", output)
return "posix"
except Exception as e:
self.log.debug("POSIX detection failed: %s", e)
try:
exit_status, stdout, _ = self.ssh_hook.exec_ssh_client_command(
ssh_client,
build_windows_os_detection_command(),
get_pty=False,
environment=None,
timeout=10,
)
if exit_status == 0 and stdout:
output = stdout.decode("utf-8", errors="replace").strip()
if "WINDOWS" in output.upper():
self.log.info("Detected Windows system")
return "windows"
except Exception as e:
self.log.debug("Windows detection failed: %s", e)
raise AirflowException(
"Could not auto-detect remote OS. Please explicitly set remote_os='posix' or 'windows'"
)
def execute(self, context: Context) -> None:
"""
Submit the remote job and defer to the trigger for monitoring.
:param context: Airflow task context
"""
if not self.command:
raise AirflowException("SSH operator error: command not specified.")
self._detected_os = self._detect_remote_os()
self.log.info("Remote OS: %s", self._detected_os)
ti = context["ti"]
self._job_id = generate_job_id(
dag_id=ti.dag_id,
task_id=ti.task_id,
run_id=ti.run_id,
try_number=ti.try_number,
)
self.log.info("Generated job ID: %s", self._job_id)
self._paths = RemoteJobPaths(
job_id=self._job_id,
remote_os=self._detected_os,
base_dir=self.remote_base_dir,
)
if self._detected_os == "posix":
wrapper_cmd = build_posix_wrapper_command(
command=self.command,
paths=self._paths,
environment=self.environment,
)
else:
wrapper_cmd = build_windows_wrapper_command(
command=self.command,
paths=self._paths,
environment=self.environment,
)
self.log.info("Submitting remote job to %s", self.ssh_hook.remote_host)
with self.ssh_hook.get_conn() as ssh_client:
exit_status, stdout, stderr = self.ssh_hook.exec_ssh_client_command(
ssh_client,
wrapper_cmd,
get_pty=False,
environment=None,
timeout=60,
)
if exit_status != 0:
stderr_str = stderr.decode("utf-8", errors="replace") if stderr else ""
raise AirflowException(
f"Failed to submit remote job. Exit code: {exit_status}. Error: {stderr_str}"
)
returned_job_id = stdout.decode("utf-8", errors="replace").strip() if stdout else ""
if returned_job_id != self._job_id:
self.log.warning("Job ID mismatch. Expected: %s, Got: %s", self._job_id, returned_job_id)
self.log.info("Remote job submitted successfully. Job ID: %s", self._job_id)
self.log.info("Job directory: %s", self._paths.job_dir)
if self.do_xcom_push:
ti.xcom_push(
key="ssh_remote_job",
value={
"job_id": self._job_id,
"job_dir": self._paths.job_dir,
"log_file": self._paths.log_file,
"exit_code_file": self._paths.exit_code_file,
"pid_file": self._paths.pid_file,
"remote_os": self._detected_os,
},
)
self.defer(
trigger=SSHRemoteJobTrigger(
ssh_conn_id=self.ssh_conn_id,
remote_host=self.remote_host,
job_id=self._job_id,
job_dir=self._paths.job_dir,
log_file=self._paths.log_file,
exit_code_file=self._paths.exit_code_file,
remote_os=self._detected_os,
poll_interval=self.poll_interval,
log_chunk_size=self.log_chunk_size,
log_offset=0,
),
method_name="execute_complete",
timeout=timedelta(seconds=self.timeout) if self.timeout else None,
)
def execute_complete(self, context: Context, event: dict[str, Any]) -> None:
"""
Handle trigger events and re-defer if job is still running.
:param context: Airflow task context
:param event: Event data from the trigger
"""
if not event:
raise AirflowException("Received null event from trigger")
required_keys = ["job_id", "job_dir", "log_file", "exit_code_file", "remote_os", "done"]
missing_keys = [key for key in required_keys if key not in event]
if missing_keys:
raise AirflowException(
f"Invalid trigger event: missing required keys {missing_keys}. Event: {event}"
)
log_chunk = event.get("log_chunk", "")
if log_chunk:
for line in log_chunk.splitlines():
self.log.info("[remote] %s", line)
if not event.get("done", False):
self.log.debug("Job still running, continuing to monitor...")
self.defer(
trigger=SSHRemoteJobTrigger(
ssh_conn_id=self.ssh_conn_id,
remote_host=self.remote_host,
job_id=event["job_id"],
job_dir=event["job_dir"],
log_file=event["log_file"],
exit_code_file=event["exit_code_file"],
remote_os=event["remote_os"],
poll_interval=self.poll_interval,
log_chunk_size=self.log_chunk_size,
log_offset=event.get("log_offset", 0),
),
method_name="execute_complete",
timeout=timedelta(seconds=self.timeout) if self.timeout else None,
)
return
exit_code = event.get("exit_code")
job_dir = event.get("job_dir", "")
remote_os = event.get("remote_os", "posix")
self.log.info("Remote job completed with exit code: %s", exit_code)
should_cleanup = self.cleanup == "always" or (self.cleanup == "on_success" and exit_code == 0)
if should_cleanup and job_dir:
self._cleanup_remote_job(job_dir, remote_os)
if exit_code is None:
raise AirflowException(f"Remote job failed: {event.get('message', 'Unknown error')}")
if exit_code in self.skip_on_exit_code:
raise AirflowSkipException(f"Remote job returned skip exit code: {exit_code}")
if exit_code != 0:
raise AirflowException(f"Remote job failed with exit code: {exit_code}")
self.log.info("Remote job completed successfully")
def _cleanup_remote_job(self, job_dir: str, remote_os: str) -> None:
    """Clean up the remote job directory (best effort; never raises)."""
    self.log.info("Cleaning up remote job directory: %s", job_dir)
    try:
        # Select the OS-appropriate removal command for the job directory.
        build_cleanup = (
            build_posix_cleanup_command if remote_os == "posix" else build_windows_cleanup_command
        )
        with self.ssh_hook.get_conn() as ssh_client:
            self.ssh_hook.exec_ssh_client_command(
                ssh_client,
                build_cleanup(job_dir),
                get_pty=False,
                environment=None,
                timeout=30,
            )
        self.log.info("Remote cleanup completed")
    except Exception as e:
        # Cleanup must not fail the task; record the problem and move on.
        self.log.warning("Failed to clean up remote job directory: %s", e)
def on_kill(self) -> None:
    """
    Attempt to kill the remote process when the task is killed.

    Since the operator is recreated after deferral, instance variables may not
    be set. We retrieve job information from XCom if needed.
    """
    job_id = self._job_id
    pid_file = self._paths.pid_file if self._paths else None
    remote_os = self._detected_os
    if not job_id or not pid_file or not remote_os:
        # Operator was rehydrated after deferral: fall back to the job info
        # stored in XCom under key "ssh_remote_job".
        try:
            if hasattr(self, "task_instance") and self.task_instance:
                job_info = self.task_instance.xcom_pull(key="ssh_remote_job")
                if job_info:
                    job_id = job_info.get("job_id")
                    pid_file = job_info.get("pid_file")
                    remote_os = job_info.get("remote_os")
        except Exception as e:
            # XCom lookup is opportunistic; failure just means we cannot kill.
            self.log.debug("Could not retrieve job info from XCom: %s", e)
    if not job_id or not pid_file or not remote_os:
        # Nothing to kill: either the job never started or its info is gone.
        self.log.info("No active job information available for kill")
        return
    self.log.info("Attempting to kill remote job: %s", job_id)
    try:
        if remote_os == "posix":
            kill_cmd = build_posix_kill_command(pid_file)
        else:
            kill_cmd = build_windows_kill_command(pid_file)
        with self.ssh_hook.get_conn() as ssh_client:
            self.ssh_hook.exec_ssh_client_command(
                ssh_client,
                kill_cmd,
                get_pty=False,
                environment=None,
                timeout=30,
            )
        self.log.info("Kill command sent to remote process")
    except Exception as e:
        # Best effort: the task is being torn down anyway, so only warn.
        self.log.warning("Failed to kill remote process: %s", e)
| {
"repo_id": "apache/airflow",
"file_path": "providers/ssh/src/airflow/providers/ssh/operators/ssh_remote_job.py",
"license": "Apache License 2.0",
"lines": 398,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/ssh/src/airflow/providers/ssh/triggers/ssh_remote_job.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""SSH Remote Job Trigger for deferrable execution."""
from __future__ import annotations
import asyncio
from collections.abc import AsyncIterator
from typing import Any, Literal
import tenacity
from airflow.providers.ssh.hooks.ssh import SSHHookAsync
from airflow.providers.ssh.utils.remote_job import (
build_posix_completion_check_command,
build_posix_file_size_command,
build_posix_log_tail_command,
build_windows_completion_check_command,
build_windows_file_size_command,
build_windows_log_tail_command,
)
from airflow.triggers.base import BaseTrigger, TriggerEvent
class SSHRemoteJobTrigger(BaseTrigger):
    """
    Trigger that monitors a remote SSH job and streams logs.

    This trigger polls the remote host to check job completion status
    and reads log output incrementally.

    :param ssh_conn_id: SSH connection ID from Airflow Connections
    :param remote_host: Optional override for the remote host
    :param job_id: Unique identifier for the remote job
    :param job_dir: Remote directory containing job artifacts
    :param log_file: Path to the log file on the remote host
    :param exit_code_file: Path to the exit code file on the remote host
    :param remote_os: Operating system of the remote host ('posix' or 'windows')
    :param poll_interval: Seconds between polling attempts
    :param log_chunk_size: Maximum bytes to read per poll
    :param log_offset: Current byte offset in the log file
    :param command_timeout: Timeout in seconds applied to each remote polling command
    """

    def __init__(
        self,
        ssh_conn_id: str,
        remote_host: str | None,
        job_id: str,
        job_dir: str,
        log_file: str,
        exit_code_file: str,
        remote_os: Literal["posix", "windows"],
        poll_interval: int = 5,
        log_chunk_size: int = 65536,
        log_offset: int = 0,
        command_timeout: float = 30.0,
    ) -> None:
        super().__init__()
        self.ssh_conn_id = ssh_conn_id
        self.remote_host = remote_host
        self.job_id = job_id
        self.job_dir = job_dir
        self.log_file = log_file
        self.exit_code_file = exit_code_file
        self.remote_os = remote_os
        self.poll_interval = poll_interval
        self.log_chunk_size = log_chunk_size
        self.log_offset = log_offset
        self.command_timeout = command_timeout

    def serialize(self) -> tuple[str, dict[str, Any]]:
        """Serialize the trigger for storage (classpath + constructor kwargs)."""
        return (
            "airflow.providers.ssh.triggers.ssh_remote_job.SSHRemoteJobTrigger",
            {
                "ssh_conn_id": self.ssh_conn_id,
                "remote_host": self.remote_host,
                "job_id": self.job_id,
                "job_dir": self.job_dir,
                "log_file": self.log_file,
                "exit_code_file": self.exit_code_file,
                "remote_os": self.remote_os,
                "poll_interval": self.poll_interval,
                "log_chunk_size": self.log_chunk_size,
                "log_offset": self.log_offset,
                "command_timeout": self.command_timeout,
            },
        )

    def _get_hook(self) -> SSHHookAsync:
        """Create the async SSH hook."""
        return SSHHookAsync(
            ssh_conn_id=self.ssh_conn_id,
            host=self.remote_host,
        )

    @tenacity.retry(
        stop=tenacity.stop_after_attempt(3),
        wait=tenacity.wait_exponential(multiplier=1, min=1, max=10),
        retry=tenacity.retry_if_exception_type((OSError, TimeoutError, ConnectionError)),
        reraise=True,
    )
    async def _check_completion(self, hook: SSHHookAsync) -> int | None:
        """
        Check if the remote job has completed.

        Retries transient network errors up to 3 times with exponential backoff.

        :param hook: Async SSH hook used to run the remote check command
        :return: Exit code if completed, None if still running
        """
        if self.remote_os == "posix":
            cmd = build_posix_completion_check_command(self.exit_code_file)
        else:
            cmd = build_windows_completion_check_command(self.exit_code_file)
        try:
            _, stdout, _ = await hook.run_command(cmd, timeout=self.command_timeout)
            stdout = stdout.strip()
            # NOTE(review): isdigit() rejects negative values, so this assumes
            # the wrapper writes a non-negative exit code — confirm for Windows.
            if stdout and stdout.isdigit():
                return int(stdout)
        except (OSError, TimeoutError, ConnectionError) as e:
            # Transient network failures: re-raise so tenacity retries.
            self.log.warning("Transient error checking completion (will retry): %s", e)
            raise
        except Exception as e:
            # Any other failure is treated as "still running" below.
            self.log.warning("Error checking completion status: %s", e)
        return None

    @tenacity.retry(
        stop=tenacity.stop_after_attempt(3),
        wait=tenacity.wait_exponential(multiplier=1, min=1, max=10),
        retry=tenacity.retry_if_exception_type((OSError, TimeoutError, ConnectionError)),
        reraise=True,
    )
    async def _get_log_size(self, hook: SSHHookAsync) -> int:
        """
        Get the current size of the log file in bytes.

        Retries transient network errors up to 3 times with exponential backoff.
        Returns 0 when the size cannot be determined (e.g. file missing).
        """
        if self.remote_os == "posix":
            cmd = build_posix_file_size_command(self.log_file)
        else:
            cmd = build_windows_file_size_command(self.log_file)
        try:
            _, stdout, _ = await hook.run_command(cmd, timeout=self.command_timeout)
            stdout = stdout.strip()
            if stdout and stdout.isdigit():
                return int(stdout)
        except (OSError, TimeoutError, ConnectionError) as e:
            # Transient network failures: re-raise so tenacity retries.
            self.log.warning("Transient error getting log size (will retry): %s", e)
            raise
        except Exception as e:
            self.log.warning("Error getting log file size: %s", e)
        return 0

    @tenacity.retry(
        stop=tenacity.stop_after_attempt(3),
        wait=tenacity.wait_exponential(multiplier=1, min=1, max=10),
        retry=tenacity.retry_if_exception_type((OSError, TimeoutError, ConnectionError)),
        reraise=True,
    )
    async def _read_log_chunk(self, hook: SSHHookAsync) -> tuple[str, int]:
        """
        Read a chunk of logs from the current offset.

        Retries transient network errors up to 3 times with exponential backoff.

        :param hook: Async SSH hook used to run the remote tail command
        :return: Tuple of (log_chunk, new_offset)
        """
        file_size = await self._get_log_size(hook)
        if file_size <= self.log_offset:
            # Nothing new to read yet.
            return "", self.log_offset
        bytes_available = file_size - self.log_offset
        bytes_to_read = min(bytes_available, self.log_chunk_size)
        if self.remote_os == "posix":
            cmd = build_posix_log_tail_command(self.log_file, self.log_offset, bytes_to_read)
        else:
            cmd = build_windows_log_tail_command(self.log_file, self.log_offset, bytes_to_read)
        try:
            # NOTE(review): the tail command's exit status is ignored here.
            exit_code, stdout, _ = await hook.run_command(cmd, timeout=self.command_timeout)
            # Advance offset by bytes requested, not decoded string length
            new_offset = self.log_offset + bytes_to_read if stdout else self.log_offset
            return stdout, new_offset
        except (OSError, TimeoutError, ConnectionError) as e:
            # Transient network failures: re-raise so tenacity retries.
            self.log.warning("Transient error reading logs (will retry): %s", e)
            raise
        except Exception as e:
            self.log.warning("Error reading log chunk: %s", e)
            return "", self.log_offset

    async def run(self) -> AsyncIterator[TriggerEvent]:
        """
        Poll the remote job status and yield events with log chunks.

        This method runs in a loop, checking the job status and reading
        logs at each poll interval. While the job is still running no event
        is yielded: log chunks are written to the trigger's own log and the
        offset is advanced. Exactly one terminal TriggerEvent is yielded,
        either on completion or on an unexpected error.
        """
        hook = self._get_hook()
        while True:
            try:
                exit_code = await self._check_completion(hook)
                log_chunk, new_offset = await self._read_log_chunk(hook)
                # Fields common to every event so the operator can re-defer.
                base_event = {
                    "job_id": self.job_id,
                    "job_dir": self.job_dir,
                    "log_file": self.log_file,
                    "exit_code_file": self.exit_code_file,
                    "remote_os": self.remote_os,
                }
                if exit_code is not None:
                    # Job finished: emit the terminal event and stop.
                    yield TriggerEvent(
                        {
                            **base_event,
                            "status": "success" if exit_code == 0 else "failed",
                            "done": True,
                            "exit_code": exit_code,
                            "log_chunk": log_chunk,
                            "log_offset": new_offset,
                            "message": f"Job completed with exit code {exit_code}",
                        }
                    )
                    return
                # Still running: remember how far we have read, surface any
                # new output in the trigger log, and wait before re-polling.
                self.log_offset = new_offset
                if log_chunk:
                    self.log.info("%s", log_chunk.rstrip())
                await asyncio.sleep(self.poll_interval)
            except Exception as e:
                # Unexpected failure (after retries): emit a terminal error
                # event with exit_code=None so the operator raises.
                self.log.exception("Error in SSH remote job trigger")
                yield TriggerEvent(
                    {
                        "job_id": self.job_id,
                        "job_dir": self.job_dir,
                        "log_file": self.log_file,
                        "exit_code_file": self.exit_code_file,
                        "remote_os": self.remote_os,
                        "status": "error",
                        "done": True,
                        "exit_code": None,
                        "log_chunk": "",
                        "log_offset": self.log_offset,
                        "message": f"Trigger error: {e}",
                    }
                )
                return
| {
"repo_id": "apache/airflow",
"file_path": "providers/ssh/src/airflow/providers/ssh/triggers/ssh_remote_job.py",
"license": "Apache License 2.0",
"lines": 238,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/ssh/tests/unit/ssh/hooks/test_ssh_async.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
import pytest
from airflow.providers.ssh.hooks.ssh import SSHHookAsync
class TestSSHHookAsync:
    """Unit tests for SSHHookAsync construction, extras parsing, and commands."""

    def test_init_with_conn_id(self):
        """Test initialization with connection ID."""
        hook = SSHHookAsync(ssh_conn_id="test_ssh_conn")
        assert hook.ssh_conn_id == "test_ssh_conn"
        assert hook.host is None
        assert hook.port is None

    def test_init_with_overrides(self):
        """Test initialization with parameter overrides."""
        hook = SSHHookAsync(
            ssh_conn_id="test_ssh_conn",
            host="custom.host.com",
            port=2222,
            username="testuser",
            password="testpass",
        )
        assert hook.host == "custom.host.com"
        assert hook.port == 2222
        assert hook.username == "testuser"
        assert hook.password == "testpass"

    def test_init_default_known_hosts(self):
        """Test default known_hosts path."""
        hook = SSHHookAsync(ssh_conn_id="test_conn")
        assert "known_hosts" in str(hook.known_hosts)

    def test_parse_extras_key_file(self):
        """Test parsing key_file from connection extras."""
        hook = SSHHookAsync(ssh_conn_id="test_conn")
        mock_conn = mock.MagicMock()
        mock_conn.extra_dejson = {"key_file": "/path/to/key"}
        mock_conn.host = "test.host"
        hook._parse_extras(mock_conn)
        assert hook.key_file == "/path/to/key"

    def test_parse_extras_no_host_key_check(self):
        """Test parsing no_host_key_check from connection extras."""
        hook = SSHHookAsync(ssh_conn_id="test_conn")
        mock_conn = mock.MagicMock()
        mock_conn.extra_dejson = {"no_host_key_check": "true"}
        mock_conn.host = "test.host"
        hook._parse_extras(mock_conn)
        # Disabling host key checking maps to asyncssh's known_hosts="none".
        assert hook.known_hosts == "none"

    def test_parse_extras_host_key(self):
        """Test parsing host_key from connection extras."""
        hook = SSHHookAsync(ssh_conn_id="test_conn")
        mock_conn = mock.MagicMock()
        mock_conn.extra_dejson = {"host_key": "ssh-rsa AAAAB3...", "no_host_key_check": "false"}
        mock_conn.host = "test.host"
        hook._parse_extras(mock_conn)
        # The explicit host key is turned into a known_hosts entry for the host.
        assert hook.known_hosts == b"test.host ssh-rsa AAAAB3..."

    def test_parse_extras_host_key_with_no_check_raises(self):
        """Test that host_key with no_host_key_check raises error."""
        hook = SSHHookAsync(ssh_conn_id="test_conn")
        mock_conn = mock.MagicMock()
        mock_conn.extra_dejson = {
            "host_key": "ssh-rsa AAAAB3...",
            "no_host_key_check": "true",
        }
        mock_conn.host = "test.host"
        # Supplying a host key while skipping verification is contradictory.
        with pytest.raises(ValueError, match="Host key check was skipped"):
            hook._parse_extras(mock_conn)

    def test_parse_extras_private_key(self):
        """Test parsing private_key from connection extras."""
        hook = SSHHookAsync(ssh_conn_id="test_conn")
        mock_conn = mock.MagicMock()
        test_key = "test-private-key-content"
        mock_conn.extra_dejson = {"private_key": test_key}
        mock_conn.host = "test.host"
        hook._parse_extras(mock_conn)
        assert hook.private_key == test_key

    @pytest.mark.asyncio
    async def test_get_conn_builds_config(self):
        """Test that _get_conn builds correct connection config."""
        hook = SSHHookAsync(
            ssh_conn_id="test_conn",
            host="test.host.com",
            port=22,
            username="testuser",
        )
        # Airflow Connection object with no values, so the hook-level
        # overrides above must win.
        mock_conn_obj = mock.MagicMock()
        mock_conn_obj.extra_dejson = {"no_host_key_check": "true"}
        mock_conn_obj.host = None
        mock_conn_obj.port = None
        mock_conn_obj.login = None
        mock_conn_obj.password = None
        mock_conn_obj.extra = "{}"
        mock_ssh_client = mock.AsyncMock()
        with mock.patch("asgiref.sync.sync_to_async") as mock_sync:
            mock_sync.return_value = mock.AsyncMock(return_value=mock_conn_obj)
            with mock.patch("asyncssh.connect", new_callable=mock.AsyncMock) as mock_connect:
                mock_connect.return_value = mock_ssh_client
                result = await hook._get_conn()
                mock_connect.assert_called_once()
                # Inspect the keyword arguments passed to asyncssh.connect.
                call_kwargs = mock_connect.call_args[1]
                assert call_kwargs["host"] == "test.host.com"
                assert call_kwargs["port"] == 22
                assert call_kwargs["username"] == "testuser"
                assert result == mock_ssh_client

    @pytest.mark.asyncio
    async def test_run_command(self):
        """Test running a command."""
        hook = SSHHookAsync(ssh_conn_id="test_conn")
        mock_result = mock.MagicMock()
        mock_result.exit_status = 0
        mock_result.stdout = "output"
        mock_result.stderr = ""
        # The connection is used as an async context manager by run_command.
        mock_conn = mock.AsyncMock()
        mock_conn.run = mock.AsyncMock(return_value=mock_result)
        mock_conn.__aenter__ = mock.AsyncMock(return_value=mock_conn)
        mock_conn.__aexit__ = mock.AsyncMock(return_value=None)
        with mock.patch.object(hook, "_get_conn", return_value=mock_conn):
            exit_code, stdout, stderr = await hook.run_command("echo test")
            assert exit_code == 0
            assert stdout == "output"
            assert stderr == ""
            mock_conn.run.assert_called_once_with("echo test", timeout=None, check=False)

    @pytest.mark.asyncio
    async def test_run_command_output(self):
        """Test running a command and getting output."""
        hook = SSHHookAsync(ssh_conn_id="test_conn")
        with mock.patch.object(hook, "run_command", return_value=(0, "test output", "")):
            output = await hook.run_command_output("echo test")
            assert output == "test output"
| {
"repo_id": "apache/airflow",
"file_path": "providers/ssh/tests/unit/ssh/hooks/test_ssh_async.py",
"license": "Apache License 2.0",
"lines": 142,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/ssh/tests/unit/ssh/operators/test_ssh_remote_job.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
import pytest
from airflow.exceptions import TaskDeferred
from airflow.providers.common.compat.sdk import AirflowException, AirflowSkipException
from airflow.providers.ssh.hooks.ssh import SSHHook
from airflow.providers.ssh.operators.ssh_remote_job import SSHRemoteJobOperator
from airflow.providers.ssh.triggers.ssh_remote_job import SSHRemoteJobTrigger
class TestSSHRemoteJobOperator:
    """Unit tests for SSHRemoteJobOperator with the SSHHook fully mocked."""

    @pytest.fixture(autouse=True)
    def mock_ssh_hook(self):
        """Mock the SSHHook to avoid connection lookup."""
        with mock.patch.object(
            SSHRemoteJobOperator, "ssh_hook", new_callable=mock.PropertyMock
        ) as mock_hook_prop:
            mock_hook = mock.create_autospec(SSHHook, instance=True)
            mock_hook.remote_host = "test.host.com"
            # get_conn() is used as a context manager by the operator.
            mock_ssh_client = mock.MagicMock()
            mock_hook.get_conn.return_value.__enter__.return_value = mock_ssh_client
            mock_hook.get_conn.return_value.__exit__.return_value = None
            mock_hook.exec_ssh_client_command.return_value = (0, b"", b"")
            mock_hook_prop.return_value = mock_hook
            # Expose the mocks to the tests via the instance.
            self.mock_hook = mock_hook
            self.mock_hook_prop = mock_hook_prop
            yield

    def test_init_default_values(self):
        """Test operator initialization with default values."""
        op = SSHRemoteJobOperator(
            task_id="test_task",
            ssh_conn_id="test_conn",
            command="/path/to/script.sh",
        )
        assert op.ssh_conn_id == "test_conn"
        assert op.command == "/path/to/script.sh"
        assert op.poll_interval == 5
        assert op.log_chunk_size == 65536
        assert op.cleanup == "never"
        assert op.remote_os == "auto"

    def test_init_custom_values(self):
        """Test operator initialization with custom values."""
        op = SSHRemoteJobOperator(
            task_id="test_task",
            ssh_conn_id="test_conn",
            command="/path/to/script.sh",
            remote_host="custom.host.com",
            poll_interval=10,
            log_chunk_size=32768,
            timeout=3600,
            cleanup="on_success",
            remote_os="posix",
            skip_on_exit_code=[42, 43],
        )
        assert op.remote_host == "custom.host.com"
        assert op.poll_interval == 10
        assert op.log_chunk_size == 32768
        assert op.timeout == 3600
        assert op.cleanup == "on_success"
        assert op.remote_os == "posix"
        assert 42 in op.skip_on_exit_code
        assert 43 in op.skip_on_exit_code

    def test_template_fields(self):
        """Test that template fields are defined correctly."""
        op = SSHRemoteJobOperator(
            task_id="test_task",
            ssh_conn_id="test_conn",
            command="/path/to/script.sh",
        )
        assert "command" in op.template_fields
        assert "environment" in op.template_fields
        assert "remote_host" in op.template_fields
        assert "remote_base_dir" in op.template_fields

    def test_execute_defers_to_trigger(self):
        """Test that execute submits job and defers to trigger."""
        # The submit command echoes back the generated job id.
        self.mock_hook.exec_ssh_client_command.return_value = (
            0,
            b"af_test_dag_test_task_run1_try1_abc123",
            b"",
        )
        op = SSHRemoteJobOperator(
            task_id="test_task",
            ssh_conn_id="test_conn",
            command="/path/to/script.sh",
            remote_os="posix",
        )
        mock_ti = mock.MagicMock()
        mock_ti.dag_id = "test_dag"
        mock_ti.task_id = "test_task"
        mock_ti.run_id = "run1"
        mock_ti.try_number = 1
        context = {"ti": mock_ti}
        # Deferral surfaces as a TaskDeferred exception from execute().
        with pytest.raises(TaskDeferred) as exc_info:
            op.execute(context)
        assert isinstance(exc_info.value.trigger, SSHRemoteJobTrigger)
        assert exc_info.value.method_name == "execute_complete"

    def test_execute_raises_if_no_command(self):
        """Test that execute raises if command is not specified."""
        op = SSHRemoteJobOperator(
            task_id="test_task",
            ssh_conn_id="test_conn",
            command="",
        )
        # Set command to empty after init
        op.command = ""
        with pytest.raises(AirflowException, match="command not specified"):
            op.execute({})

    @mock.patch.object(SSHRemoteJobOperator, "defer")
    def test_execute_complete_re_defers_if_not_done(self, mock_defer):
        """Test that execute_complete re-defers if job is not done."""
        op = SSHRemoteJobOperator(
            task_id="test_task",
            ssh_conn_id="test_conn",
            command="/path/to/script.sh",
        )
        event = {
            "done": False,
            "status": "running",
            "job_id": "test_job_123",
            "job_dir": "/tmp/airflow-ssh-jobs/test_job_123",
            "log_file": "/tmp/airflow-ssh-jobs/test_job_123/stdout.log",
            "exit_code_file": "/tmp/airflow-ssh-jobs/test_job_123/exit_code",
            "remote_os": "posix",
            "log_chunk": "Some output\n",
            "log_offset": 100,
            "exit_code": None,
        }
        op.execute_complete({}, event)
        mock_defer.assert_called_once()
        # The new trigger must resume from the reported log offset.
        call_kwargs = mock_defer.call_args[1]
        assert isinstance(call_kwargs["trigger"], SSHRemoteJobTrigger)
        assert call_kwargs["trigger"].log_offset == 100

    def test_execute_complete_success(self):
        """Test execute_complete with successful completion."""
        op = SSHRemoteJobOperator(
            task_id="test_task",
            ssh_conn_id="test_conn",
            command="/path/to/script.sh",
        )
        event = {
            "done": True,
            "status": "success",
            "exit_code": 0,
            "job_id": "test_job_123",
            "job_dir": "/tmp/airflow-ssh-jobs/test_job_123",
            "log_file": "/tmp/airflow-ssh-jobs/test_job_123/stdout.log",
            "exit_code_file": "/tmp/airflow-ssh-jobs/test_job_123/exit_code",
            "log_chunk": "Final output\n",
            "log_offset": 200,
            "remote_os": "posix",
        }
        # Should complete without exception
        op.execute_complete({}, event)

    def test_execute_complete_failure(self):
        """Test execute_complete with non-zero exit code."""
        op = SSHRemoteJobOperator(
            task_id="test_task",
            ssh_conn_id="test_conn",
            command="/path/to/script.sh",
        )
        event = {
            "done": True,
            "status": "failed",
            "exit_code": 1,
            "job_id": "test_job_123",
            "job_dir": "/tmp/airflow-ssh-jobs/test_job_123",
            "log_file": "/tmp/airflow-ssh-jobs/test_job_123/stdout.log",
            "exit_code_file": "/tmp/airflow-ssh-jobs/test_job_123/exit_code",
            "log_chunk": "Error output\n",
            "log_offset": 200,
            "remote_os": "posix",
        }
        with pytest.raises(AirflowException, match="exit code: 1"):
            op.execute_complete({}, event)

    def test_execute_complete_skip_on_exit_code(self):
        """Test execute_complete skips on configured exit code."""
        op = SSHRemoteJobOperator(
            task_id="test_task",
            ssh_conn_id="test_conn",
            command="/path/to/script.sh",
            skip_on_exit_code=42,
        )
        event = {
            "done": True,
            "status": "failed",
            "exit_code": 42,
            "job_id": "test_job_123",
            "job_dir": "/tmp/airflow-ssh-jobs/test_job_123",
            "log_file": "/tmp/airflow-ssh-jobs/test_job_123/stdout.log",
            "exit_code_file": "/tmp/airflow-ssh-jobs/test_job_123/exit_code",
            "log_chunk": "",
            "log_offset": 0,
            "remote_os": "posix",
        }
        with pytest.raises(AirflowSkipException):
            op.execute_complete({}, event)

    def test_execute_complete_with_cleanup(self):
        """Test execute_complete performs cleanup when configured."""
        op = SSHRemoteJobOperator(
            task_id="test_task",
            ssh_conn_id="test_conn",
            command="/path/to/script.sh",
            cleanup="on_success",
        )
        event = {
            "done": True,
            "status": "success",
            "exit_code": 0,
            "job_id": "test_job_123",
            "job_dir": "/tmp/airflow-ssh-jobs/test_job_123",
            "log_file": "/tmp/airflow-ssh-jobs/test_job_123/stdout.log",
            "exit_code_file": "/tmp/airflow-ssh-jobs/test_job_123/exit_code",
            "log_chunk": "",
            "log_offset": 0,
            "remote_os": "posix",
        }
        op.execute_complete({}, event)
        # Verify cleanup command was executed
        self.mock_hook.exec_ssh_client_command.assert_called_once()
        call_args = self.mock_hook.exec_ssh_client_command.call_args
        assert "rm -rf" in call_args[0][1]

    def test_on_kill(self):
        """Test on_kill attempts to kill remote process."""
        op = SSHRemoteJobOperator(
            task_id="test_task",
            ssh_conn_id="test_conn",
            command="/path/to/script.sh",
        )
        # Simulate that execute was called
        op._job_id = "test_job_123"
        op._detected_os = "posix"
        from airflow.providers.ssh.utils.remote_job import RemoteJobPaths

        op._paths = RemoteJobPaths(job_id="test_job_123", remote_os="posix")
        op.on_kill()
        # Verify kill command was executed
        self.mock_hook.exec_ssh_client_command.assert_called_once()
        call_args = self.mock_hook.exec_ssh_client_command.call_args
        assert "kill" in call_args[0][1]

    def test_on_kill_after_rehydration(self):
        """Test on_kill retrieves job info from XCom after operator rehydration."""
        op = SSHRemoteJobOperator(
            task_id="test_task",
            ssh_conn_id="test_conn",
            command="/path/to/script.sh",
        )
        # Instance variables are None (simulating rehydration)
        # Don't set _job_id, _paths, _detected_os
        # Mock task_instance with XCom data
        mock_ti = mock.MagicMock()
        mock_ti.xcom_pull.return_value = {
            "job_id": "test_job_123",
            "pid_file": "/tmp/airflow-ssh-jobs/test_job_123/pid",
            "remote_os": "posix",
        }
        op.task_instance = mock_ti
        op.on_kill()
        # Verify XCom was called to get job info
        mock_ti.xcom_pull.assert_called_once_with(key="ssh_remote_job")
        # Verify kill command was executed
        self.mock_hook.exec_ssh_client_command.assert_called_once()
        call_args = self.mock_hook.exec_ssh_client_command.call_args
        assert "kill" in call_args[0][1]

    def test_on_kill_no_active_job(self):
        """Test on_kill does nothing if no active job."""
        op = SSHRemoteJobOperator(
            task_id="test_task",
            ssh_conn_id="test_conn",
            command="/path/to/script.sh",
        )
        # Should not raise even without active job
        op.on_kill()
| {
"repo_id": "apache/airflow",
"file_path": "providers/ssh/tests/unit/ssh/operators/test_ssh_remote_job.py",
"license": "Apache License 2.0",
"lines": 286,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/ssh/tests/unit/ssh/triggers/test_ssh_remote_job.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
import pytest
from airflow.providers.ssh.triggers.ssh_remote_job import SSHRemoteJobTrigger
class TestSSHRemoteJobTrigger:
    """Unit tests for SSHRemoteJobTrigger serialization and the run() loop."""

    def test_serialization(self):
        """Test that the trigger can be serialized correctly."""
        trigger = SSHRemoteJobTrigger(
            ssh_conn_id="test_conn",
            remote_host="test.example.com",
            job_id="test_job_123",
            job_dir="/tmp/airflow-ssh-jobs/test_job_123",
            log_file="/tmp/airflow-ssh-jobs/test_job_123/stdout.log",
            exit_code_file="/tmp/airflow-ssh-jobs/test_job_123/exit_code",
            remote_os="posix",
            poll_interval=10,
            log_chunk_size=32768,
            log_offset=1000,
        )
        classpath, kwargs = trigger.serialize()
        assert classpath == "airflow.providers.ssh.triggers.ssh_remote_job.SSHRemoteJobTrigger"
        assert kwargs["ssh_conn_id"] == "test_conn"
        assert kwargs["remote_host"] == "test.example.com"
        assert kwargs["job_id"] == "test_job_123"
        assert kwargs["job_dir"] == "/tmp/airflow-ssh-jobs/test_job_123"
        assert kwargs["log_file"] == "/tmp/airflow-ssh-jobs/test_job_123/stdout.log"
        assert kwargs["exit_code_file"] == "/tmp/airflow-ssh-jobs/test_job_123/exit_code"
        assert kwargs["remote_os"] == "posix"
        assert kwargs["poll_interval"] == 10
        assert kwargs["log_chunk_size"] == 32768
        assert kwargs["log_offset"] == 1000

    def test_default_values(self):
        """Test default parameter values."""
        trigger = SSHRemoteJobTrigger(
            ssh_conn_id="test_conn",
            remote_host=None,
            job_id="test_job",
            job_dir="/tmp/job",
            log_file="/tmp/job/stdout.log",
            exit_code_file="/tmp/job/exit_code",
            remote_os="posix",
        )
        assert trigger.poll_interval == 5
        assert trigger.log_chunk_size == 65536
        assert trigger.log_offset == 0

    @pytest.mark.asyncio
    async def test_run_job_completed_success(self):
        """Test trigger when job completes successfully."""
        trigger = SSHRemoteJobTrigger(
            ssh_conn_id="test_conn",
            remote_host=None,
            job_id="test_job",
            job_dir="/tmp/job",
            log_file="/tmp/job/stdout.log",
            exit_code_file="/tmp/job/exit_code",
            remote_os="posix",
        )
        # Exit code 0 on the first poll -> single terminal success event.
        with mock.patch.object(trigger, "_check_completion", return_value=0):
            with mock.patch.object(trigger, "_read_log_chunk", return_value=("Final output\n", 100)):
                events = []
                async for event in trigger.run():
                    events.append(event)
                assert len(events) == 1
                assert events[0].payload["status"] == "success"
                assert events[0].payload["done"] is True
                assert events[0].payload["exit_code"] == 0
                assert events[0].payload["log_chunk"] == "Final output\n"

    @pytest.mark.asyncio
    async def test_run_job_completed_failure(self):
        """Test trigger when job completes with failure."""
        trigger = SSHRemoteJobTrigger(
            ssh_conn_id="test_conn",
            remote_host=None,
            job_id="test_job",
            job_dir="/tmp/job",
            log_file="/tmp/job/stdout.log",
            exit_code_file="/tmp/job/exit_code",
            remote_os="posix",
        )
        with mock.patch.object(trigger, "_check_completion", return_value=1):
            with mock.patch.object(trigger, "_read_log_chunk", return_value=("Error output\n", 50)):
                events = []
                async for event in trigger.run():
                    events.append(event)
                assert len(events) == 1
                assert events[0].payload["status"] == "failed"
                assert events[0].payload["done"] is True
                assert events[0].payload["exit_code"] == 1

    @pytest.mark.asyncio
    async def test_run_job_polls_until_completion(self):
        """Test trigger polls without yielding until job completes."""
        trigger = SSHRemoteJobTrigger(
            ssh_conn_id="test_conn",
            remote_host=None,
            job_id="test_job",
            job_dir="/tmp/job",
            log_file="/tmp/job/stdout.log",
            exit_code_file="/tmp/job/exit_code",
            remote_os="posix",
            poll_interval=0.01,
        )
        poll_count = 0

        async def mock_check_completion(_):
            nonlocal poll_count
            poll_count += 1
            # Return None (still running) for first 2 polls, then exit code 0
            if poll_count < 3:
                return None
            return 0

        with mock.patch.object(trigger, "_check_completion", side_effect=mock_check_completion):
            with mock.patch.object(trigger, "_read_log_chunk", return_value=("output\n", 50)):
                events = []
                async for event in trigger.run():
                    events.append(event)
                # Only one event should be yielded (the completion event)
                assert len(events) == 1
                assert events[0].payload["status"] == "success"
                assert events[0].payload["done"] is True
                assert events[0].payload["exit_code"] == 0
                # Should have polled 3 times
                assert poll_count == 3

    @pytest.mark.asyncio
    async def test_run_handles_exception(self):
        """Test trigger handles exceptions gracefully."""
        trigger = SSHRemoteJobTrigger(
            ssh_conn_id="test_conn",
            remote_host=None,
            job_id="test_job",
            job_dir="/tmp/job",
            log_file="/tmp/job/stdout.log",
            exit_code_file="/tmp/job/exit_code",
            remote_os="posix",
        )
        # A hard failure in polling must produce a single terminal error event.
        with mock.patch.object(trigger, "_check_completion", side_effect=Exception("Connection failed")):
            events = []
            async for event in trigger.run():
                events.append(event)
            assert len(events) == 1
            assert events[0].payload["status"] == "error"
            assert events[0].payload["done"] is True
            assert "Connection failed" in events[0].payload["message"]

    def test_get_hook(self):
        """Test hook creation."""
        trigger = SSHRemoteJobTrigger(
            ssh_conn_id="test_conn",
            remote_host="custom.host.com",
            job_id="test_job",
            job_dir="/tmp/job",
            log_file="/tmp/job/stdout.log",
            exit_code_file="/tmp/job/exit_code",
            remote_os="posix",
        )
        hook = trigger._get_hook()
        assert hook.ssh_conn_id == "test_conn"
        assert hook.host == "custom.host.com"
| {
"repo_id": "apache/airflow",
"file_path": "providers/ssh/tests/unit/ssh/triggers/test_ssh_remote_job.py",
"license": "Apache License 2.0",
"lines": 172,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/ssh/tests/unit/ssh/utils/test_remote_job.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import base64
import pytest
from airflow.providers.ssh.utils.remote_job import (
RemoteJobPaths,
build_posix_cleanup_command,
build_posix_completion_check_command,
build_posix_file_size_command,
build_posix_kill_command,
build_posix_log_tail_command,
build_posix_wrapper_command,
build_windows_cleanup_command,
build_windows_completion_check_command,
build_windows_file_size_command,
build_windows_kill_command,
build_windows_log_tail_command,
build_windows_wrapper_command,
generate_job_id,
)
class TestGenerateJobId:
    """Behaviour of generate_job_id: uniqueness, context embedding, sanitisation."""

    def test_generates_unique_ids(self):
        """Two calls with identical context must still produce different ids."""
        first = generate_job_id("dag1", "task1", "run1", 1)
        second = generate_job_id("dag1", "task1", "run1", 1)
        assert first != second

    def test_includes_context_info(self):
        """The id should embed the dag/task/try context fragments."""
        generated = generate_job_id("my_dag", "my_task", "manual__2024", 2)
        for fragment in ("af_", "my_dag", "my_task", "try2"):
            assert fragment in generated

    def test_sanitizes_special_characters(self):
        """Dashes, dots and colons must not survive into the context portion."""
        generated = generate_job_id("dag-with-dashes", "task.with.dots", "run:with:colons", 1)
        context_part = generated.split("_try")[0]
        assert "-" not in context_part
        assert "." not in context_part
        assert ":" not in context_part

    def test_suffix_length(self):
        """A custom suffix_length controls the size of the random suffix."""
        generated = generate_job_id("dag", "task", "run", 1, suffix_length=12)
        assert len(generated.split("_")[-1]) == 12
class TestRemoteJobPaths:
    """Path layout produced by RemoteJobPaths for both remote OS flavours."""

    def test_posix_default_paths(self):
        """Default POSIX layout lives under /tmp/airflow-ssh-jobs."""
        layout = RemoteJobPaths(job_id="test_job", remote_os="posix")
        job_root = "/tmp/airflow-ssh-jobs/test_job"
        assert layout.base_dir == "/tmp/airflow-ssh-jobs"
        assert layout.job_dir == job_root
        assert layout.log_file == job_root + "/stdout.log"
        assert layout.exit_code_file == job_root + "/exit_code"
        assert layout.pid_file == job_root + "/pid"
        assert layout.sep == "/"

    def test_windows_default_paths(self):
        """Windows defaults are rooted at $env:TEMP for portability."""
        layout = RemoteJobPaths(job_id="test_job", remote_os="windows")
        job_root = "$env:TEMP\\airflow-ssh-jobs\\test_job"
        assert layout.base_dir == "$env:TEMP\\airflow-ssh-jobs"
        assert layout.job_dir == job_root
        assert layout.log_file == job_root + "\\stdout.log"
        assert layout.exit_code_file == job_root + "\\exit_code"
        assert layout.pid_file == job_root + "\\pid"
        assert layout.sep == "\\"

    def test_custom_base_dir(self):
        """An explicit base_dir overrides the OS default root."""
        layout = RemoteJobPaths(job_id="test_job", remote_os="posix", base_dir="/custom/path")
        assert layout.base_dir == "/custom/path"
        assert layout.job_dir == "/custom/path/test_job"
class TestBuildPosixWrapperCommand:
    """Shape of the nohup-based POSIX job wrapper."""

    def test_basic_command(self):
        """The wrapper must create the job dir and background the script."""
        job_paths = RemoteJobPaths(job_id="test_job", remote_os="posix")
        command = build_posix_wrapper_command("/path/to/script.sh", job_paths)
        for expected in ("mkdir -p", "nohup bash -c", "/path/to/script.sh", "exit_code", "pid"):
            assert expected in command

    def test_with_environment(self):
        """Environment variables are exported before the command runs."""
        job_paths = RemoteJobPaths(job_id="test_job", remote_os="posix")
        command = build_posix_wrapper_command(
            "/path/to/script.sh",
            job_paths,
            environment={"MY_VAR": "my_value", "OTHER": "test"},
        )
        assert "export MY_VAR='my_value'" in command
        assert "export OTHER='test'" in command

    def test_escapes_quotes(self):
        """Single quotes inside the command must not break generation."""
        job_paths = RemoteJobPaths(job_id="test_job", remote_os="posix")
        command = build_posix_wrapper_command("echo 'hello world'", job_paths)
        assert command is not None
class TestBuildWindowsWrapperCommand:
    """Shape of the encoded-PowerShell Windows job wrapper."""

    @staticmethod
    def _decode_payload(wrapper):
        """Extract and decode the -EncodedCommand payload (base64, UTF-16-LE)."""
        payload = wrapper.split("-EncodedCommand ")[1]
        return base64.b64decode(payload).decode("utf-16-le")

    def test_basic_command(self):
        """The wrapper runs an encoded PowerShell script that sets up the job."""
        job_paths = RemoteJobPaths(job_id="test_job", remote_os="windows")
        wrapper = build_windows_wrapper_command("C:\\scripts\\test.ps1", job_paths)
        assert "powershell.exe" in wrapper
        assert "-EncodedCommand" in wrapper
        # Decode and verify script content
        decoded = self._decode_payload(wrapper)
        assert "New-Item -ItemType Directory" in decoded
        assert "Start-Process" in decoded

    def test_with_environment(self):
        """Passing environment variables still yields an encoded command."""
        job_paths = RemoteJobPaths(job_id="test_job", remote_os="windows")
        wrapper = build_windows_wrapper_command(
            "C:\\scripts\\test.ps1",
            job_paths,
            environment={"MY_VAR": "my_value"},
        )
        assert wrapper is not None
        assert "-EncodedCommand" in wrapper
class TestLogTailCommands:
    """Commands that read a chunk of the remote log from a byte offset."""

    def test_posix_log_tail(self):
        """POSIX tailing combines tail (seek) with head (length limit)."""
        command = build_posix_log_tail_command("/tmp/log.txt", 100, 1024)
        # A 0-based offset of 100 maps to tail's 1-based byte 101.
        assert "tail -c +101" in command
        assert "head -c 1024" in command
        assert "/tmp/log.txt" in command

    def test_windows_log_tail(self):
        """Windows tailing is an encoded PowerShell seek-and-read script."""
        command = build_windows_log_tail_command("C:\\temp\\log.txt", 100, 1024)
        assert "powershell.exe" in command
        assert "-EncodedCommand" in command
        payload = command.split("-EncodedCommand ")[1]
        script = base64.b64decode(payload).decode("utf-16-le")
        assert "Seek(100" in script
        assert "1024" in script
class TestFileSizeCommands:
    """Commands that probe the size of a remote file."""

    def test_posix_file_size(self):
        """POSIX size probing shells out to stat on the target file."""
        command = build_posix_file_size_command("/tmp/file.txt")
        assert "stat" in command
        assert "/tmp/file.txt" in command

    def test_windows_file_size(self):
        """Windows size probing reads Get-Item/Length via encoded PowerShell."""
        command = build_windows_file_size_command("C:\\temp\\file.txt")
        assert "powershell.exe" in command
        assert "-EncodedCommand" in command
        payload = command.split("-EncodedCommand ")[1]
        script = base64.b64decode(payload).decode("utf-16-le")
        assert "Get-Item" in script
        assert "Length" in script
class TestCompletionCheckCommands:
    """Commands that detect whether the remote job has written its exit code."""

    def test_posix_completion_check(self):
        """POSIX check tests the exit-code file is non-empty then cats it."""
        command = build_posix_completion_check_command("/tmp/exit_code")
        assert "test -s" in command
        assert "cat" in command

    def test_windows_completion_check(self):
        """Windows check uses Test-Path/Get-Content via encoded PowerShell."""
        command = build_windows_completion_check_command("C:\\temp\\exit_code")
        assert "powershell.exe" in command
        assert "-EncodedCommand" in command
        payload = command.split("-EncodedCommand ")[1]
        script = base64.b64decode(payload).decode("utf-16-le")
        assert "Test-Path" in script
        assert "Get-Content" in script
class TestKillCommands:
    """Commands that terminate the remote job via its recorded pid."""

    def test_posix_kill(self):
        """POSIX kill reads the pid file (cat) and signals the process."""
        command = build_posix_kill_command("/tmp/pid")
        assert "kill" in command
        assert "cat" in command

    def test_windows_kill(self):
        """Windows kill runs Stop-Process via encoded PowerShell."""
        command = build_windows_kill_command("C:\\temp\\pid")
        assert "powershell.exe" in command
        assert "-EncodedCommand" in command
        payload = command.split("-EncodedCommand ")[1]
        script = base64.b64decode(payload).decode("utf-16-le")
        assert "Stop-Process" in script
class TestCleanupCommands:
    """Commands that remove the remote job directory, with path validation."""

    def test_posix_cleanup(self):
        """POSIX cleanup removes the job directory recursively."""
        command = build_posix_cleanup_command("/tmp/airflow-ssh-jobs/job_123")
        assert "rm -rf" in command
        assert "/tmp/airflow-ssh-jobs/job_123" in command

    def test_windows_cleanup(self):
        """Windows cleanup is an encoded Remove-Item -Recurse script."""
        command = build_windows_cleanup_command("$env:TEMP\\airflow-ssh-jobs\\job_123")
        assert "powershell.exe" in command
        assert "-EncodedCommand" in command
        payload = command.split("-EncodedCommand ")[1]
        script = base64.b64decode(payload).decode("utf-16-le")
        assert "Remove-Item" in script
        assert "-Recurse" in script

    def test_posix_cleanup_rejects_invalid_path(self):
        """Paths outside the managed base directory must be refused."""
        with pytest.raises(ValueError, match="Invalid job directory"):
            build_posix_cleanup_command("/tmp/other_dir")

    def test_windows_cleanup_rejects_invalid_path(self):
        """The same guard applies on Windows."""
        with pytest.raises(ValueError, match="Invalid job directory"):
            build_windows_cleanup_command("C:\\temp\\other_dir")
| {
"repo_id": "apache/airflow",
"file_path": "providers/ssh/tests/unit/ssh/utils/test_remote_job.py",
"license": "Apache License 2.0",
"lines": 215,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:scripts/ci/prek/check_secrets_search_path_sync.py | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import ast
import sys
from pathlib import Path
from types import EllipsisType
# Repository root, resolved from this script's location (scripts/ci/prek/ is three levels deep).
AIRFLOW_ROOT = Path(__file__).parents[3].resolve()
# File declaring DEFAULT_SECRETS_SEARCH_PATH for airflow-core.
CORE_SECRETS_FILE = AIRFLOW_ROOT / "airflow-core" / "src" / "airflow" / "secrets" / "base_secrets.py"
# File declaring _SERVER_DEFAULT_SECRETS_SEARCH_PATH for the task SDK.
SDK_SECRETS_FILE = (
    AIRFLOW_ROOT / "task-sdk" / "src" / "airflow" / "sdk" / "execution_time" / "secrets" / "__init__.py"
)
def extract_from_file(
file_path: Path, constant_name: str
) -> list[str | bytes | int | float | complex | EllipsisType | None] | None:
"""Extract a list constant value from a Python file using AST parsing."""
try:
with open(file_path) as f:
tree = ast.parse(f.read(), filename=str(file_path))
for node in ast.walk(tree):
if isinstance(node, ast.Assign):
for target in node.targets:
if isinstance(target, ast.Name) and target.id == constant_name:
if isinstance(node.value, ast.List):
values = []
for elt in node.value.elts:
if isinstance(elt, ast.Constant):
values.append(elt.value)
return values
return None
except Exception as e:
print(f"Error parsing {file_path}: {e}", file=sys.stderr)
return None
def main() -> None:
    """Verify the two secrets search-path constants stay in sync; exit 1 on drift."""
    extracted = {}
    for constant, path in (
        ("DEFAULT_SECRETS_SEARCH_PATH", CORE_SECRETS_FILE),
        ("_SERVER_DEFAULT_SECRETS_SEARCH_PATH", SDK_SECRETS_FILE),
    ):
        value = extract_from_file(path, constant)
        if value is None:
            print(f"ERROR: Could not extract {constant} from {path}", file=sys.stderr)
            sys.exit(1)
        extracted[constant] = value
    # Success path: both constants resolved and equal.
    if extracted["DEFAULT_SECRETS_SEARCH_PATH"] == extracted["_SERVER_DEFAULT_SECRETS_SEARCH_PATH"]:
        sys.exit(0)
    print("\nERROR: Secrets search paths are not synchronized!", file=sys.stderr)
    print(
        "\nThe DEFAULT_SECRETS_SEARCH_PATH in airflow-core and "
        "_SERVER_DEFAULT_SECRETS_SEARCH_PATH in task-sdk must match.",
        file=sys.stderr,
    )
    print("\nPlease update either:", file=sys.stderr)
    print(f"  - {CORE_SECRETS_FILE}", file=sys.stderr)
    print(f"  - {SDK_SECRETS_FILE}", file=sys.stderr)
    sys.exit(1)
if __name__ == "__main__":
    main()
| {
"repo_id": "apache/airflow",
"file_path": "scripts/ci/prek/check_secrets_search_path_sync.py",
"license": "Apache License 2.0",
"lines": 81,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:shared/listeners/src/airflow_shared/listeners/listener.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
import pluggy
import structlog
if TYPE_CHECKING:
from pluggy._hooks import _HookRelay
# Module-level structlog logger used by the hook-call tracing callbacks.
log = structlog.get_logger(__name__)
def _before_hookcall(hook_name, hook_impls, kwargs):
    """Pluggy hook-monitoring callback run before every hook call.

    NOTE(review): the %-style placeholders assume positional-argument
    formatting is enabled in the structlog configuration — confirm.
    """
    log.debug("Calling %r with %r", hook_name, kwargs)
    log.debug("Hook impls: %s", hook_impls)
def _after_hookcall(outcome, hook_name, hook_impls, kwargs):
    """Pluggy hook-monitoring callback run after every hook call; logs the result."""
    log.debug("Result from %r: %s", hook_name, outcome.get_result())
class ListenerManager:
    """
    Registry of listener plugins plus the ``hook`` relay used to invoke them.

    This is the base infrastructure for the listener system: each component
    that wants listeners creates its own ``ListenerManager`` and registers
    the hook specifications relevant to it via :meth:`add_hookspecs`.
    """

    def __init__(self):
        # A single pluggy manager under the "airflow" project name; every
        # hook call is traced through the before/after monitoring callbacks.
        self.pm = pluggy.PluginManager("airflow")
        self.pm.add_hookcall_monitoring(_before_hookcall, _after_hookcall)

    def add_hookspecs(self, spec_module) -> None:
        """
        Register hook specs from a module.

        :param spec_module: A module containing functions decorated with @hookspec.
        """
        self.pm.add_hookspecs(spec_module)

    @property
    def has_listeners(self) -> bool:
        """True when at least one listener plugin is registered."""
        return len(self.pm.get_plugins()) > 0

    @property
    def hook(self) -> _HookRelay:
        """Return hook, on which plugin methods specified in spec can be called."""
        return self.pm.hook

    def add_listener(self, listener):
        """Register *listener*, ignoring duplicate registrations."""
        if not self.pm.is_registered(listener):
            self.pm.register(listener)

    def clear(self):
        """Unregister every currently registered listener plugin."""
        for plugin in list(self.pm.get_plugins()):
            self.pm.unregister(plugin)
| {
"repo_id": "apache/airflow",
"file_path": "shared/listeners/src/airflow_shared/listeners/listener.py",
"license": "Apache License 2.0",
"lines": 60,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:shared/listeners/tests/listeners/test_listener_manager.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow_shared.listeners import hookimpl
from airflow_shared.listeners.listener import ListenerManager
from airflow_shared.listeners.spec import lifecycle, taskinstance
class TestListenerManager:
    """Unit tests for the shared ListenerManager.

    The inline listener classes implement hook methods decorated with
    ``@hookimpl``; their method names (``on_starting``, ``on_task_instance_*``)
    must match the registered hook specs.
    """

    def test_initial_state_has_no_listeners(self):
        """Test that a new ListenerManager has no listeners."""
        lm = ListenerManager()
        assert not lm.has_listeners
        assert len(lm.pm.get_plugins()) == 0
    def test_add_hookspecs_registers_hooks(self):
        """Test that add_hookspecs makes hooks available."""
        lm = ListenerManager()
        lm.add_hookspecs(lifecycle)
        # Verify lifecycle hooks are now available
        assert hasattr(lm.hook, "on_starting")
        assert hasattr(lm.hook, "before_stopping")
    def test_add_multiple_hookspecs(self):
        """Test that multiple hookspecs can be registered."""
        lm = ListenerManager()
        lm.add_hookspecs(lifecycle)
        lm.add_hookspecs(taskinstance)
        # Verify hooks from both specs are available
        assert hasattr(lm.hook, "on_starting")
        assert hasattr(lm.hook, "on_task_instance_running")
    def test_add_listener(self):
        """Test listener registration."""
        class TestListener:
            def __init__(self):
                self.called = False
            @hookimpl
            def on_starting(self, component):
                self.called = True
        lm = ListenerManager()
        lm.add_hookspecs(lifecycle)
        listener = TestListener()
        lm.add_listener(listener)
        assert lm.has_listeners
        assert lm.pm.is_registered(listener)
    def test_duplicate_listener_registration(self):
        """Test adding same listener twice doesn't duplicate."""
        class TestListener:
            @hookimpl
            def on_starting(self, component):
                pass
        lm = ListenerManager()
        lm.add_hookspecs(lifecycle)
        listener = TestListener()
        lm.add_listener(listener)
        lm.add_listener(listener)
        # Should only be registered once
        assert len(lm.pm.get_plugins()) == 1
    def test_clear_listeners(self):
        """Test clearing listeners removes all registered listeners."""
        class TestListener:
            @hookimpl
            def on_starting(self, component):
                pass
        lm = ListenerManager()
        lm.add_hookspecs(lifecycle)
        # Two distinct instances so both count as separate plugins.
        listener1 = TestListener()
        listener2 = TestListener()
        lm.add_listener(listener1)
        lm.add_listener(listener2)
        assert lm.has_listeners
        assert len(lm.pm.get_plugins()) == 2
        lm.clear()
        assert not lm.has_listeners
        assert len(lm.pm.get_plugins()) == 0
    def test_hook_calling(self):
        """Test hooks can be called and listeners receive them."""
        class TestListener:
            def __init__(self):
                self.component_received = None
            @hookimpl
            def on_starting(self, component):
                self.component_received = component
        lm = ListenerManager()
        lm.add_hookspecs(lifecycle)
        listener = TestListener()
        lm.add_listener(listener)
        test_component = "test_component"
        lm.hook.on_starting(component=test_component)
        assert listener.component_received == test_component
    def test_taskinstance_hooks(self):
        """Test taskinstance hook specs work correctly."""
        class TaskInstanceListener:
            def __init__(self):
                # Records (event_name, *payload) tuples in call order.
                self.events = []
            @hookimpl
            def on_task_instance_running(self, previous_state, task_instance):
                self.events.append(("running", task_instance))
            @hookimpl
            def on_task_instance_success(self, previous_state, task_instance):
                self.events.append(("success", task_instance))
            @hookimpl
            def on_task_instance_failed(self, previous_state, task_instance, error):
                self.events.append(("failed", task_instance, error))
        lm = ListenerManager()
        lm.add_hookspecs(taskinstance)
        listener = TaskInstanceListener()
        lm.add_listener(listener)
        mock_ti = "mock_task_instance"
        lm.hook.on_task_instance_running(previous_state=None, task_instance=mock_ti)
        lm.hook.on_task_instance_success(previous_state=None, task_instance=mock_ti)
        lm.hook.on_task_instance_failed(previous_state=None, task_instance=mock_ti, error="test error")
        assert listener.events == [
            ("running", mock_ti),
            ("success", mock_ti),
            ("failed", mock_ti, "test error"),
        ]
| {
"repo_id": "apache/airflow",
"file_path": "shared/listeners/tests/listeners/test_listener_manager.py",
"license": "Apache License 2.0",
"lines": 128,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:task-sdk/src/airflow/sdk/listener.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from functools import cache
from airflow.sdk._shared.listeners.listener import ListenerManager
from airflow.sdk._shared.listeners.spec import lifecycle, taskinstance
from airflow.sdk.plugins_manager import integrate_listener_plugins
@cache
def get_listener_manager() -> ListenerManager:
    """
    Return the Task SDK listener manager (memoized via functools.cache).

    The manager is built once with the following hook specs registered:
    - lifecycle: on_starting, before_stopping
    - taskinstance: on_task_instance_running, on_task_instance_success, etc.
    Listener plugins are integrated before the manager is returned.
    """
    manager = ListenerManager()
    for spec_module in (lifecycle, taskinstance):
        manager.add_hookspecs(spec_module)
    integrate_listener_plugins(manager)  # type: ignore[arg-type]
    return manager
__all__ = ["get_listener_manager", "ListenerManager"]
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/listener.py",
"license": "Apache License 2.0",
"lines": 36,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/tests/unit/models/test_log.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from sqlalchemy import select
from sqlalchemy.orm import joinedload
from airflow.models.log import Log
from airflow.operators.empty import EmptyOperator
from airflow.utils.state import TaskInstanceState
pytestmark = pytest.mark.db_test
class TestLogTaskInstanceReproduction:
    """Regression test for the Log -> TaskInstance relationship join."""

    def test_log_task_instance_join_correctness(self, dag_maker, session):
        """Two dags sharing a task_id must not cross-match through the Log join.

        Creates the same ``task_id`` in two different dags, attaches one Log
        row to each task instance, and asserts that ``joinedload`` resolves
        each log to the task instance of the correct dag/run.
        """
        # Create dag_1 with a task
        with dag_maker("dag_1", session=session):
            EmptyOperator(task_id="common_task_id")
        dr1 = dag_maker.create_dagrun()
        ti1 = dr1.get_task_instance("common_task_id")
        ti1.state = TaskInstanceState.SUCCESS
        session.merge(ti1)
        session.commit()
        # Create dag_2 with the SAME task_id
        with dag_maker("dag_2", session=session):
            EmptyOperator(task_id="common_task_id")
        dr2 = dag_maker.create_dagrun()
        ti2 = dr2.get_task_instance("common_task_id")
        ti2.state = TaskInstanceState.FAILED
        session.merge(ti2)
        session.commit()
        # Create a log entry specifically for dag_1's task instance
        log = Log(
            event="test_event",
            task_instance=ti1,
        )
        session.add(log)
        session.commit()
        # Query with joinedload to trigger the relationship join
        stmt = select(Log).where(Log.id == log.id).options(joinedload(Log.task_instance))
        loaded_log = session.scalar(stmt)
        assert loaded_log.task_instance is not None
        assert loaded_log.task_instance.dag_id == "dag_1"
        assert loaded_log.task_instance.run_id == ti1.run_id
        # Verify incorrect join for second dag
        log2 = Log(
            event="test_event_2",
            task_instance=ti2,
        )
        session.add(log2)
        session.commit()
        stmt2 = select(Log).where(Log.id == log2.id).options(joinedload(Log.task_instance))
        loaded_log2 = session.scalar(stmt2)
        # This should fail if the join is ambiguous and picks the first one (dag_1)
        assert loaded_log2.task_instance is not None
        assert loaded_log2.task_instance.dag_id == "dag_2"
        assert loaded_log2.task_instance.run_id == ti2.run_id
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/models/test_log.py",
"license": "Apache License 2.0",
"lines": 69,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/celery/src/airflow/providers/celery/cli/definition.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""CLI commands for Celery executor."""
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.cli.cli_config import (
ARG_DAEMON,
ARG_LOG_FILE,
ARG_PID,
ARG_SKIP_SERVE_LOGS,
ARG_STDERR,
ARG_STDOUT,
ARG_VERBOSE,
ActionCommand,
Arg,
GroupCommand,
lazy_load_command,
)
from airflow.configuration import conf
if TYPE_CHECKING:
import argparse
# flower cli args
# NOTE: defaults below are resolved from the [celery] config section at import time.
ARG_BROKER_API = Arg(("-a", "--broker-api"), help="Broker API")
ARG_FLOWER_HOSTNAME = Arg(
    ("-H", "--hostname"),
    default=conf.get("celery", "FLOWER_HOST"),
    help="Set the hostname on which to run the server",
)
ARG_FLOWER_PORT = Arg(
    ("-p", "--port"),
    default=conf.getint("celery", "FLOWER_PORT"),
    type=int,
    help="The port on which to run the server",
)
ARG_FLOWER_CONF = Arg(("-c", "--flower-conf"), help="Configuration file for flower")
ARG_FLOWER_URL_PREFIX = Arg(
    ("-u", "--url-prefix"),
    default=conf.get("celery", "FLOWER_URL_PREFIX"),
    help="URL prefix for Flower",
)
ARG_FLOWER_BASIC_AUTH = Arg(
    ("-A", "--basic-auth"),
    default=conf.get("celery", "FLOWER_BASIC_AUTH"),
    help=(
        "Securing Flower with Basic Authentication. "
        "Accepts user:password pairs separated by a comma. "
        "Example: flower_basic_auth = user1:password1,user2:password2"
    ),
)
# worker cli args
ARG_AUTOSCALE = Arg(("-a", "--autoscale"), help="Minimum and Maximum number of worker to autoscale")
ARG_QUEUES = Arg(
    ("-q", "--queues"),
    help="Comma delimited list of queues to serve",
    default=conf.get("operators", "DEFAULT_QUEUE"),
)
ARG_CONCURRENCY = Arg(
    ("-c", "--concurrency"),
    type=int,
    help="The number of worker processes",
    default=conf.getint("celery", "worker_concurrency"),
)
ARG_CELERY_HOSTNAME = Arg(
    ("-H", "--celery-hostname"),
    help="Set the hostname of celery worker if you have multiple workers on a single machine",
)
ARG_UMASK = Arg(
    ("-u", "--umask"),
    help="Set the umask of celery worker in daemon mode",
)
# Flag-style options (store_true): mingle/gossip are opt-out behaviours.
ARG_WITHOUT_MINGLE = Arg(
    ("--without-mingle",),
    default=False,
    help="Don't synchronize with other workers at start-up",
    action="store_true",
)
ARG_WITHOUT_GOSSIP = Arg(
    ("--without-gossip",),
    default=False,
    help="Don't subscribe to other workers events",
    action="store_true",
)
ARG_TEAM = Arg(
    ("-t", "--team"),
    help="Team name for team-specific multi-team configuration (requires Airflow 3.2+)",
)
ARG_OUTPUT = Arg(
    (
        "-o",
        "--output",
    ),
    help="Output format. Allowed values: json, yaml, plain, table (default: table)",
    metavar="(table, json, yaml, plain)",
    choices=("table", "json", "yaml", "plain"),
    default="table",
)
# Variants of -H/-q used by the management subcommands, where the value is mandatory.
ARG_FULL_CELERY_HOSTNAME = Arg(
    ("-H", "--celery-hostname"),
    required=True,
    help="Specify the full celery hostname. example: celery@hostname",
)
ARG_REQUIRED_QUEUES = Arg(
    ("-q", "--queues"),
    help="Comma delimited list of queues to serve",
    required=True,
)
ARG_YES = Arg(
    ("-y", "--yes"),
    help="Do not prompt to confirm. Use with care!",
    action="store_true",
    default=False,
)
# Dotted module path holding the command callables; resolved via lazy_load_command
# so the implementation module is only imported when a command actually runs.
CELERY_CLI_COMMAND_PATH = "airflow.providers.celery.cli.celery_command"
# All celery subcommands, grouped under the "celery" CLI group below.
CELERY_COMMANDS = (
    ActionCommand(
        name="worker",
        help="Start a Celery worker node",
        func=lazy_load_command(f"{CELERY_CLI_COMMAND_PATH}.worker"),
        args=(
            ARG_QUEUES,
            ARG_CONCURRENCY,
            ARG_CELERY_HOSTNAME,
            ARG_TEAM,
            ARG_PID,
            ARG_DAEMON,
            ARG_UMASK,
            ARG_STDOUT,
            ARG_STDERR,
            ARG_LOG_FILE,
            ARG_AUTOSCALE,
            ARG_SKIP_SERVE_LOGS,
            ARG_WITHOUT_MINGLE,
            ARG_WITHOUT_GOSSIP,
            ARG_VERBOSE,
        ),
    ),
    ActionCommand(
        name="flower",
        help="Start a Celery Flower",
        func=lazy_load_command(f"{CELERY_CLI_COMMAND_PATH}.flower"),
        args=(
            ARG_FLOWER_HOSTNAME,
            ARG_FLOWER_PORT,
            ARG_FLOWER_CONF,
            ARG_FLOWER_URL_PREFIX,
            ARG_FLOWER_BASIC_AUTH,
            ARG_BROKER_API,
            ARG_PID,
            ARG_DAEMON,
            ARG_STDOUT,
            ARG_STDERR,
            ARG_LOG_FILE,
            ARG_VERBOSE,
        ),
    ),
    ActionCommand(
        name="stop",
        help="Stop the Celery worker gracefully",
        func=lazy_load_command(f"{CELERY_CLI_COMMAND_PATH}.stop_worker"),
        args=(ARG_PID, ARG_VERBOSE),
    ),
    # Worker-management commands: these address running workers by hostname/queues.
    ActionCommand(
        name="list-workers",
        help="List active celery workers",
        func=lazy_load_command(f"{CELERY_CLI_COMMAND_PATH}.list_workers"),
        args=(ARG_OUTPUT,),
    ),
    ActionCommand(
        name="shutdown-worker",
        help="Request graceful shutdown of celery workers",
        func=lazy_load_command(f"{CELERY_CLI_COMMAND_PATH}.shutdown_worker"),
        args=(ARG_FULL_CELERY_HOSTNAME,),
    ),
    ActionCommand(
        name="shutdown-all-workers",
        help="Request graceful shutdown of all active celery workers",
        func=lazy_load_command(f"{CELERY_CLI_COMMAND_PATH}.shutdown_all_workers"),
        args=(ARG_YES,),
    ),
    ActionCommand(
        name="add-queue",
        help="Subscribe Celery worker to specified queues",
        func=lazy_load_command(f"{CELERY_CLI_COMMAND_PATH}.add_queue"),
        args=(
            ARG_REQUIRED_QUEUES,
            ARG_FULL_CELERY_HOSTNAME,
        ),
    ),
    ActionCommand(
        name="remove-queue",
        help="Unsubscribe Celery worker from specified queues",
        func=lazy_load_command(f"{CELERY_CLI_COMMAND_PATH}.remove_queue"),
        args=(
            ARG_REQUIRED_QUEUES,
            ARG_FULL_CELERY_HOSTNAME,
        ),
    ),
    ActionCommand(
        name="remove-all-queues",
        help="Unsubscribe Celery worker from all its active queues",
        func=lazy_load_command(f"{CELERY_CLI_COMMAND_PATH}.remove_all_queues"),
        args=(ARG_FULL_CELERY_HOSTNAME,),
    ),
)
# Top-level CLI group exposed to Airflow's command parser.
CELERY_CLI_COMMANDS = [
    GroupCommand(
        name="celery",
        help="Celery components",
        description=(
            "Start celery components. Works only when using CeleryExecutor. For more information, "
            "see https://airflow.apache.org/docs/apache-airflow-providers-celery/stable/celery_executor.html"
        ),
        subcommands=CELERY_COMMANDS,
    ),
]
def get_celery_cli_commands() -> list[GroupCommand]:
    """Return CLI commands for Celery executor."""
    # Returns the module-level list as-is (no copy); callers register it with
    # the main Airflow CLI parser.
    return CELERY_CLI_COMMANDS
def get_parser() -> argparse.ArgumentParser:
    """
    Generate documentation; used by Sphinx.

    :meta private:
    """
    # Imported inside the function rather than at module top level.
    from airflow.cli.cli_parser import AirflowHelpFormatter, DefaultHelpParser, _add_command

    root_parser = DefaultHelpParser(prog="airflow", formatter_class=AirflowHelpFormatter)
    group_subparsers = root_parser.add_subparsers(dest="subcommand", metavar="GROUP_OR_COMMAND")
    for celery_group in get_celery_cli_commands():
        _add_command(group_subparsers, celery_group)
    return root_parser
| {
"repo_id": "apache/airflow",
"file_path": "providers/celery/src/airflow/providers/celery/cli/definition.py",
"license": "Apache License 2.0",
"lines": 243,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/celery/tests/unit/celery/cli/test_definition.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import importlib
import pytest
from airflow.cli import cli_parser
from airflow.providers.celery.cli.definition import CELERY_CLI_COMMANDS, CELERY_COMMANDS
from tests_common.test_utils.config import conf_vars
from tests_common.test_utils.version_compat import AIRFLOW_V_3_2_PLUS
class TestCeleryCliDefinition:
    """Tests for the Celery provider CLI command definitions.

    Checks that the ``celery`` command group and all of its subcommands are
    registered with the Airflow CLI parser and that their arguments parse as
    expected.
    """

    @pytest.fixture(autouse=True)
    def setup_parser(self):
        # Reload the CLI parser module so provider commands are (re)discovered
        # and the cached parser from earlier tests is discarded.
        if AIRFLOW_V_3_2_PLUS:
            importlib.reload(cli_parser)
            cli_parser.get_parser.cache_clear()
            self.arg_parser = cli_parser.get_parser()
        else:
            # Pre-3.2, celery commands are only registered when the configured
            # executor is CeleryExecutor, so force it via a config override.
            with conf_vars(
                {
                    (
                        "core",
                        "executor",
                    ): "CeleryExecutor",
                }
            ):
                importlib.reload(cli_parser)
                cli_parser.get_parser.cache_clear()
                self.arg_parser = cli_parser.get_parser()

    def test_celery_cli_commands_count(self):
        """Test that CELERY_CLI_COMMANDS contains exactly 1 GroupCommand."""
        assert len(CELERY_CLI_COMMANDS) == 1

    def test_celery_commands_count(self):
        """Test that CELERY_COMMANDS contains all 9 subcommands."""
        assert len(CELERY_COMMANDS) == 9

    @pytest.mark.parametrize(
        "command",
        [
            "worker",
            "flower",
            "stop",
            "list-workers",
            "shutdown-worker",
            "shutdown-all-workers",
            "add-queue",
            "remove-queue",
            "remove-all-queues",
        ],
    )
    def test_celery_subcommands_defined(self, command):
        """Test that all celery subcommands are properly defined."""
        params = ["celery", command, "--help"]
        with pytest.raises(SystemExit) as exc_info:
            self.arg_parser.parse_args(params)
        # --help exits with code 0
        assert exc_info.value.code == 0

    def test_worker_command_args(self):
        """Test worker command with various arguments."""
        params = [
            "celery",
            "worker",
            "--queues",
            "queue1,queue2",
            "--concurrency",
            "4",
            "--celery-hostname",
            "worker1",
        ]
        args = self.arg_parser.parse_args(params)
        assert args.queues == "queue1,queue2"
        assert args.concurrency == 4
        assert args.celery_hostname == "worker1"

    def test_flower_command_args(self):
        """Test flower command with various arguments."""
        params = [
            "celery",
            "flower",
            "--hostname",
            "localhost",
            "--port",
            "5555",
            "--url-prefix",
            "/flower",
        ]
        args = self.arg_parser.parse_args(params)
        assert args.hostname == "localhost"
        assert args.port == 5555
        assert args.url_prefix == "/flower"

    def test_list_workers_command_args(self):
        """Test list-workers command with output format."""
        params = ["celery", "list-workers", "--output", "json"]
        args = self.arg_parser.parse_args(params)
        assert args.output == "json"

    def test_shutdown_worker_command_args(self):
        """Test shutdown-worker command with celery hostname."""
        params = ["celery", "shutdown-worker", "--celery-hostname", "celery@worker1"]
        args = self.arg_parser.parse_args(params)
        assert args.celery_hostname == "celery@worker1"

    def test_shutdown_all_workers_command_args(self):
        """Test shutdown-all-workers command with yes flag."""
        params = ["celery", "shutdown-all-workers", "--yes"]
        args = self.arg_parser.parse_args(params)
        assert args.yes is True

    def test_add_queue_command_args(self):
        """Test add-queue command with required arguments."""
        params = [
            "celery",
            "add-queue",
            "--queues",
            "new_queue",
            "--celery-hostname",
            "celery@worker1",
        ]
        args = self.arg_parser.parse_args(params)
        assert args.queues == "new_queue"
        assert args.celery_hostname == "celery@worker1"

    def test_remove_queue_command_args(self):
        """Test remove-queue command with required arguments."""
        params = [
            "celery",
            "remove-queue",
            "--queues",
            "old_queue",
            "--celery-hostname",
            "celery@worker1",
        ]
        args = self.arg_parser.parse_args(params)
        assert args.queues == "old_queue"
        assert args.celery_hostname == "celery@worker1"

    def test_remove_all_queues_command_args(self):
        """Test remove-all-queues command with celery hostname."""
        params = ["celery", "remove-all-queues", "--celery-hostname", "celery@worker1"]
        args = self.arg_parser.parse_args(params)
        assert args.celery_hostname == "celery@worker1"

    def test_stop_command_args(self):
        """Test stop command with pid argument."""
        params = ["celery", "stop", "--pid", "/path/to/pid"]
        args = self.arg_parser.parse_args(params)
        assert args.pid == "/path/to/pid"
| {
"repo_id": "apache/airflow",
"file_path": "providers/celery/tests/unit/celery/cli/test_definition.py",
"license": "Apache License 2.0",
"lines": 152,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/cli/definition.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.cli.cli_config import (
ARG_DAG_ID,
ARG_OUTPUT_PATH,
ARG_VERBOSE,
ActionCommand,
Arg,
GroupCommand,
lazy_load_command,
positive_int,
)
from airflow.configuration import conf
from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_0_PLUS
if TYPE_CHECKING:
import argparse
try:
from airflow.cli.cli_config import ARG_LOGICAL_DATE
except ImportError: # 2.x compatibility.
from airflow.cli.cli_config import ( # type: ignore[attr-defined, no-redef]
ARG_EXECUTION_DATE as ARG_LOGICAL_DATE,
)
if AIRFLOW_V_3_0_PLUS:
from airflow.cli.cli_config import ARG_BUNDLE_NAME
ARG_COMPAT = ARG_BUNDLE_NAME
else:
from airflow.cli.cli_config import ARG_SUBDIR # type: ignore[attr-defined]
ARG_COMPAT = ARG_SUBDIR
# CLI Args
ARG_NAMESPACE = Arg(
    ("--namespace",),
    default=conf.get("kubernetes_executor", "namespace"),
    # The default is read from the [kubernetes_executor] section above, so the
    # help text must point users at that section (it previously said
    # `[kubernetes]`, which is not where the default comes from).
    help="Kubernetes Namespace. Default value is `[kubernetes_executor] namespace` in configuration.",
)
ARG_MIN_PENDING_MINUTES = Arg(
    ("--min-pending-minutes",),
    default=30,
    type=positive_int(allow_zero=False),
    # NOTE(review): positive_int(allow_zero=False) only enforces >= 1; the
    # "minimum value is 5(m)" claim in the help text is not validated here —
    # confirm where (or whether) that lower bound is enforced.
    help=(
        "Pending pods created before the time interval are to be cleaned up, "
        "measured in minutes. Default value is 30(m). The minimum value is 5(m)."
    ),
)
# CLI Commands
# Subcommands of the ``kubernetes`` group. ``func`` targets are loaded lazily
# so that importing this definition module does not import the command
# implementations.
KUBERNETES_COMMANDS = (
    ActionCommand(
        name="cleanup-pods",
        help=(
            "Clean up Kubernetes pods "
            "(created by KubernetesExecutor/KubernetesPodOperator) "
            "in evicted/failed/succeeded/pending states"
        ),
        func=lazy_load_command("airflow.providers.cncf.kubernetes.cli.kubernetes_command.cleanup_pods"),
        args=(ARG_NAMESPACE, ARG_MIN_PENDING_MINUTES, ARG_VERBOSE),
    ),
    ActionCommand(
        name="generate-dag-yaml",
        help="Generate YAML files for all tasks in DAG. Useful for debugging tasks without "
        "launching into a cluster",
        func=lazy_load_command("airflow.providers.cncf.kubernetes.cli.kubernetes_command.generate_pod_yaml"),
        # ARG_LOGICAL_DATE / ARG_COMPAT are version-dependent aliases resolved
        # at import time above (Airflow 2.x vs 3.x argument names).
        args=(ARG_DAG_ID, ARG_LOGICAL_DATE, ARG_COMPAT, ARG_OUTPUT_PATH, ARG_VERBOSE),
    ),
)
def get_kubernetes_cli_commands() -> list[GroupCommand]:
    """Return the ``kubernetes`` CLI command group for registration with Airflow."""
    kubernetes_group = GroupCommand(
        name="kubernetes",
        help="Tools to help run the KubernetesExecutor",
        subcommands=KUBERNETES_COMMANDS,
    )
    return [kubernetes_group]
def get_parser() -> argparse.ArgumentParser:
    """
    Generate documentation; used by Sphinx.

    :meta private:
    """
    # Imported inside the function rather than at module top level.
    from airflow.cli.cli_parser import AirflowHelpFormatter, DefaultHelpParser, _add_command

    sphinx_parser = DefaultHelpParser(prog="airflow", formatter_class=AirflowHelpFormatter)
    subcommand_parsers = sphinx_parser.add_subparsers(dest="subcommand", metavar="GROUP_OR_COMMAND")
    for kubernetes_group in get_kubernetes_cli_commands():
        _add_command(subcommand_parsers, kubernetes_group)
    return sphinx_parser
| {
"repo_id": "apache/airflow",
"file_path": "providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/cli/definition.py",
"license": "Apache License 2.0",
"lines": 98,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:providers/cncf/kubernetes/tests/unit/cncf/kubernetes/cli/test_definition.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import importlib
from datetime import datetime
from unittest.mock import MagicMock, patch
import pytest
from airflow.cli import cli_parser
from airflow.providers.cncf.kubernetes.cli.definition import (
KUBERNETES_COMMANDS,
get_kubernetes_cli_commands,
)
from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS, AIRFLOW_V_3_2_PLUS
class TestKubernetesCliDefinition:
    """Tests for the cncf.kubernetes provider CLI command definitions."""

    @pytest.fixture(autouse=True)
    def setup_parser(self):
        # Reload the CLI parser module so provider commands are (re)discovered
        # and the cached parser from earlier tests is discarded.
        if AIRFLOW_V_3_2_PLUS:
            importlib.reload(cli_parser)
            cli_parser.get_parser.cache_clear()
            self.arg_parser = cli_parser.get_parser()
        else:
            # Pre-3.2, kubernetes commands are only registered when a
            # KubernetesExecutor is among the configured executors, so mock
            # the executor discovery.
            with patch(
                "airflow.executors.executor_loader.ExecutorLoader.get_executor_names",
            ) as mock_get_executor_names:
                mock_get_executor_names.return_value = [
                    MagicMock(
                        name="KubernetesExecutor",
                        module_path="airflow.providers.cncf.kubernetes.executors.kubernetes_executor.KubernetesExecutor",
                    )
                ]
                importlib.reload(cli_parser)
                cli_parser.get_parser.cache_clear()
                self.arg_parser = cli_parser.get_parser()

    def test_kubernetes_cli_commands_count(self):
        """Test that get_kubernetes_cli_commands returns exactly 1 GroupCommand."""
        commands = get_kubernetes_cli_commands()
        assert len(commands) == 1

    def test_kubernetes_commands_count(self):
        """Test that KUBERNETES_COMMANDS contains all 2 subcommands."""
        assert len(KUBERNETES_COMMANDS) == 2

    @pytest.mark.parametrize(
        "command",
        [
            "cleanup-pods",
            "generate-dag-yaml",
        ],
    )
    def test_kubernetes_subcommands_defined(self, command):
        """Test that all kubernetes subcommands are properly defined."""
        params = ["kubernetes", command, "--help"]
        with pytest.raises(SystemExit) as exc_info:
            self.arg_parser.parse_args(params)
        # --help exits with code 0
        assert exc_info.value.code == 0

    def test_cleanup_pods_command_args(self):
        """Test cleanup-pods command with various arguments."""
        params = [
            "kubernetes",
            "cleanup-pods",
            "--namespace",
            "my-namespace",
            "--min-pending-minutes",
            "60",
        ]
        args = self.arg_parser.parse_args(params)
        assert args.namespace == "my-namespace"
        assert args.min_pending_minutes == 60

    def test_cleanup_pods_command_default_args(self):
        """Test cleanup-pods command with default arguments."""
        params = ["kubernetes", "cleanup-pods"]
        args = self.arg_parser.parse_args(params)
        # Should use default values from configuration
        assert hasattr(args, "namespace")
        assert args.min_pending_minutes == 30

    def test_generate_dag_yaml_command_args(self):
        """Test generate-dag-yaml command with various arguments."""
        # Argument spelling differs: Airflow 3 uses a --logical-date option,
        # Airflow 2 a positional execution date.
        if AIRFLOW_V_3_0_PLUS:
            params = [
                "kubernetes",
                "generate-dag-yaml",
                "my_dag",
                "--logical-date",
                "2024-01-01T00:00:00+00:00",
                "--output-path",
                "/tmp/output",
            ]
            args = self.arg_parser.parse_args(params)
            assert args.logical_date == datetime.fromisoformat("2024-01-01T00:00:00+00:00")
        else:
            params = [
                "kubernetes",
                "generate-dag-yaml",
                "--output-path",
                "/tmp/output",
                "my_dag",
                "2024-01-01T00:00:00+00:00",
            ]
            args = self.arg_parser.parse_args(params)
            assert args.execution_date == datetime.fromisoformat("2024-01-01T00:00:00+00:00")
        assert args.dag_id == "my_dag"
        assert args.output_path == "/tmp/output"
| {
"repo_id": "apache/airflow",
"file_path": "providers/cncf/kubernetes/tests/unit/cncf/kubernetes/cli/test_definition.py",
"license": "Apache License 2.0",
"lines": 115,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/edge3/src/airflow/providers/edge3/cli/definition.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.cli.cli_config import ARG_PID, ARG_VERBOSE, ActionCommand, Arg, GroupCommand, lazy_load_command
from airflow.configuration import conf
if TYPE_CHECKING:
import argparse
ARG_CONCURRENCY = Arg(
("-c", "--concurrency"),
type=int,
help="The number of worker processes",
default=conf.getint("edge", "worker_concurrency", fallback=8),
)
ARG_QUEUES = Arg(
("-q", "--queues"),
help="Comma delimited list of queues to serve, serve all queues if not provided.",
)
ARG_EDGE_HOSTNAME = Arg(
("-H", "--edge-hostname"),
help="Set the hostname of worker if you have multiple workers on a single machine",
)
ARG_REQUIRED_EDGE_HOSTNAME = Arg(
("-H", "--edge-hostname"),
help="Set the hostname of worker if you have multiple workers on a single machine",
required=True,
)
ARG_MAINTENANCE = Arg(("maintenance",), help="Desired maintenance state", choices=("on", "off"))
ARG_MAINTENANCE_COMMENT = Arg(
("-c", "--comments"),
help="Maintenance comments to report reason. Required if maintenance is turned on.",
)
ARG_REQUIRED_MAINTENANCE_COMMENT = Arg(
("-c", "--comments"),
help="Maintenance comments to report reason. Required if enabling maintenance",
required=True,
)
ARG_QUEUES_MANAGE = Arg(
("-q", "--queues"),
help="Comma delimited list of queues to add or remove.",
required=True,
)
ARG_WAIT_MAINT = Arg(
("-w", "--wait"),
default=False,
help="Wait until edge worker has reached desired state.",
action="store_true",
)
ARG_WAIT_STOP = Arg(
("-w", "--wait"),
default=False,
help="Wait until edge worker is shut down.",
action="store_true",
)
ARG_OUTPUT = Arg(
(
"-o",
"--output",
),
help="Output format. Allowed values: json, yaml, plain, table (default: table)",
metavar="(table, json, yaml, plain)",
choices=("table", "json", "yaml", "plain"),
default="table",
)
ARG_STATE = Arg(
(
"-s",
"--state",
),
nargs="+",
help="State of the edge worker",
)
ARG_DAEMON = Arg(
("-D", "--daemon"), help="Daemonize instead of running in the foreground", action="store_true"
)
ARG_UMASK = Arg(
("-u", "--umask"),
help="Set the umask of edge worker in daemon mode",
)
ARG_STDERR = Arg(("--stderr",), help="Redirect stderr to this file if run in daemon mode")
ARG_STDOUT = Arg(("--stdout",), help="Redirect stdout to this file if run in daemon mode")
ARG_LOG_FILE = Arg(("-l", "--log-file"), help="Location of the log file if run in daemon mode")
ARG_YES = Arg(
("-y", "--yes"),
help="Skip confirmation prompt and proceed with shutdown",
action="store_true",
default=False,
)
# All ``airflow edge ...`` subcommands. ``func`` targets are loaded lazily so
# that importing this definition module does not import the command
# implementations.
EDGE_COMMANDS: list[ActionCommand] = [
    ActionCommand(
        name="worker",
        help="Start Airflow Edge Worker.",
        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.worker"),
        args=(
            ARG_CONCURRENCY,
            ARG_QUEUES,
            ARG_EDGE_HOSTNAME,
            ARG_PID,
            ARG_VERBOSE,
            ARG_DAEMON,
            ARG_STDOUT,
            ARG_STDERR,
            ARG_LOG_FILE,
            ARG_UMASK,
        ),
    ),
    ActionCommand(
        name="status",
        help="Check for Airflow Local Edge Worker status.",
        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.status"),
        args=(
            ARG_PID,
            ARG_VERBOSE,
        ),
    ),
    ActionCommand(
        name="maintenance",
        help="Set or Unset maintenance mode of local edge worker.",
        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.maintenance"),
        args=(
            ARG_MAINTENANCE,
            ARG_MAINTENANCE_COMMENT,
            ARG_WAIT_MAINT,
            ARG_PID,
            ARG_VERBOSE,
        ),
    ),
    ActionCommand(
        name="stop",
        help="Stop a running local Airflow Edge Worker.",
        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.stop"),
        args=(
            ARG_WAIT_STOP,
            ARG_PID,
            ARG_VERBOSE,
        ),
    ),
    ActionCommand(
        name="list-workers",
        help="Query the db to list all registered edge workers.",
        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.list_edge_workers"),
        args=(
            ARG_OUTPUT,
            ARG_STATE,
        ),
    ),
    # Commands below manage *remote* workers and therefore require the
    # worker's hostname.
    ActionCommand(
        name="remote-edge-worker-request-maintenance",
        help="Put remote edge worker on maintenance.",
        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.put_remote_worker_on_maintenance"),
        args=(
            ARG_REQUIRED_EDGE_HOSTNAME,
            ARG_REQUIRED_MAINTENANCE_COMMENT,
        ),
    ),
    ActionCommand(
        name="remote-edge-worker-exit-maintenance",
        help="Remove remote edge worker from maintenance.",
        func=lazy_load_command(
            "airflow.providers.edge3.cli.edge_command.remove_remote_worker_from_maintenance"
        ),
        args=(ARG_REQUIRED_EDGE_HOSTNAME,),
    ),
    ActionCommand(
        name="remote-edge-worker-update-maintenance-comment",
        help="Update maintenance comments of the remote edge worker.",
        func=lazy_load_command(
            "airflow.providers.edge3.cli.edge_command.remote_worker_update_maintenance_comment"
        ),
        args=(
            ARG_REQUIRED_EDGE_HOSTNAME,
            ARG_REQUIRED_MAINTENANCE_COMMENT,
        ),
    ),
    ActionCommand(
        name="remove-remote-edge-worker",
        help="Remove remote edge worker entry from db.",
        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.remove_remote_worker"),
        args=(ARG_REQUIRED_EDGE_HOSTNAME,),
    ),
    ActionCommand(
        name="shutdown-remote-edge-worker",
        help="Initiate the shutdown of the remote edge worker.",
        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.remote_worker_request_shutdown"),
        args=(ARG_REQUIRED_EDGE_HOSTNAME,),
    ),
    ActionCommand(
        name="add-worker-queues",
        help="Add queues to an edge worker.",
        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.add_worker_queues"),
        args=(
            ARG_REQUIRED_EDGE_HOSTNAME,
            ARG_QUEUES_MANAGE,
        ),
    ),
    ActionCommand(
        name="remove-worker-queues",
        help="Remove queues from an edge worker.",
        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.remove_worker_queues"),
        args=(
            ARG_REQUIRED_EDGE_HOSTNAME,
            ARG_QUEUES_MANAGE,
        ),
    ),
    ActionCommand(
        name="shutdown-all-workers",
        help="Request graceful shutdown of all edge workers.",
        func=lazy_load_command("airflow.providers.edge3.cli.edge_command.shutdown_all_workers"),
        args=(ARG_YES,),
    ),
]
def get_edge_cli_commands() -> list[GroupCommand]:
    """Return the ``edge`` command group for registration with the Airflow CLI."""
    edge_group = GroupCommand(
        name="edge",
        help="Edge Worker components",
        description=(
            "Start and manage Edge Worker. Works only when using EdgeExecutor. For more information, "
            "see https://airflow.apache.org/docs/apache-airflow-providers-edge3/stable/edge_executor.html"
        ),
        subcommands=EDGE_COMMANDS,
    )
    return [edge_group]
def get_parser() -> argparse.ArgumentParser:
    """
    Generate documentation; used by Sphinx.

    :meta private:
    """
    # Imported inside the function rather than at module top level.
    from airflow.cli.cli_parser import AirflowHelpFormatter, DefaultHelpParser, _add_command

    cli_root = DefaultHelpParser(prog="airflow", formatter_class=AirflowHelpFormatter)
    command_parsers = cli_root.add_subparsers(dest="subcommand", metavar="GROUP_OR_COMMAND")
    for edge_group in get_edge_cli_commands():
        _add_command(command_parsers, edge_group)
    return cli_root
| {
"repo_id": "apache/airflow",
"file_path": "providers/edge3/src/airflow/providers/edge3/cli/definition.py",
"license": "Apache License 2.0",
"lines": 248,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/edge3/tests/unit/edge3/cli/test_definition.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import importlib
from unittest.mock import MagicMock, patch
import pytest
from airflow.cli import cli_parser
from airflow.providers.edge3.cli.definition import EDGE_COMMANDS, get_edge_cli_commands
from tests_common.test_utils.version_compat import AIRFLOW_V_3_2_PLUS
class TestEdgeCliDefinition:
    """Tests for the edge3 provider CLI command definitions.

    Checks that the ``edge`` command group and all of its subcommands are
    registered with the Airflow CLI parser and that their arguments parse as
    expected.
    """

    @pytest.fixture(autouse=True)
    def setup_parser(self):
        # Reload the CLI parser module so provider commands are (re)discovered
        # and the cached parser from earlier tests is discarded.
        if AIRFLOW_V_3_2_PLUS:
            importlib.reload(cli_parser)
            cli_parser.get_parser.cache_clear()
            self.arg_parser = cli_parser.get_parser()
        else:
            # Pre-3.2, edge commands are only registered when an EdgeExecutor
            # is among the configured executors, so mock the executor discovery.
            with patch(
                "airflow.executors.executor_loader.ExecutorLoader.get_executor_names",
            ) as mock_get_executor_names:
                mock_get_executor_names.return_value = [
                    MagicMock(
                        name="EdgeExecutor", module_path="airflow.providers.edge3.executors.EdgeExecutor"
                    )
                ]
                importlib.reload(cli_parser)
                cli_parser.get_parser.cache_clear()
                self.arg_parser = cli_parser.get_parser()

    def test_edge_cli_commands_count(self):
        """Test that get_edge_cli_commands returns exactly 1 GroupCommand."""
        commands = get_edge_cli_commands()
        assert len(commands) == 1

    def test_edge_commands_count(self):
        """Test that EDGE_COMMANDS contains all 13 subcommands."""
        assert len(EDGE_COMMANDS) == 13

    @pytest.mark.parametrize(
        "command",
        [
            "worker",
            "status",
            "maintenance",
            "stop",
            "list-workers",
            "remote-edge-worker-request-maintenance",
            "remote-edge-worker-exit-maintenance",
            "remote-edge-worker-update-maintenance-comment",
            "remove-remote-edge-worker",
            "shutdown-remote-edge-worker",
            "add-worker-queues",
            "remove-worker-queues",
            "shutdown-all-workers",
        ],
    )
    def test_edge_subcommands_defined(self, command):
        """Test that all edge subcommands are properly defined."""
        params = ["edge", command, "--help"]
        with pytest.raises(SystemExit) as exc_info:
            self.arg_parser.parse_args(params)
        # --help exits with code 0
        assert exc_info.value.code == 0

    def test_worker_command_args(self):
        """Test worker command with various arguments."""
        params = [
            "edge",
            "worker",
            "--queues",
            "queue1,queue2",
            "--concurrency",
            "4",
            "--edge-hostname",
            "edge-worker-1",
        ]
        args = self.arg_parser.parse_args(params)
        assert args.queues == "queue1,queue2"
        assert args.concurrency == 4
        assert args.edge_hostname == "edge-worker-1"

    def test_status_command_args(self):
        """Test status command with pid argument."""
        params = ["edge", "status", "--pid", "/path/to/pid"]
        args = self.arg_parser.parse_args(params)
        assert args.pid == "/path/to/pid"

    def test_maintenance_command_args_on(self):
        """Test maintenance command to enable maintenance mode."""
        params = [
            "edge",
            "maintenance",
            "on",
            "--comments",
            "Scheduled maintenance",
            "--wait",
        ]
        args = self.arg_parser.parse_args(params)
        assert args.maintenance == "on"
        assert args.comments == "Scheduled maintenance"
        assert args.wait is True

    def test_maintenance_command_args_off(self):
        """Test maintenance command to disable maintenance mode."""
        params = ["edge", "maintenance", "off"]
        args = self.arg_parser.parse_args(params)
        assert args.maintenance == "off"

    def test_stop_command_args(self):
        """Test stop command with wait argument."""
        params = ["edge", "stop", "--wait", "--pid", "/path/to/pid"]
        args = self.arg_parser.parse_args(params)
        assert args.wait is True
        assert args.pid == "/path/to/pid"

    def test_list_workers_command_args(self):
        """Test list-workers command with output format and state filter."""
        params = ["edge", "list-workers", "--output", "json", "--state", "running", "maintenance"]
        args = self.arg_parser.parse_args(params)
        assert args.output == "json"
        # --state takes one or more values (nargs="+")
        assert args.state == ["running", "maintenance"]

    def test_remote_edge_worker_request_maintenance_args(self):
        """Test remote-edge-worker-request-maintenance command with required arguments."""
        params = [
            "edge",
            "remote-edge-worker-request-maintenance",
            "--edge-hostname",
            "remote-worker-1",
            "--comments",
            "Emergency maintenance",
        ]
        args = self.arg_parser.parse_args(params)
        assert args.edge_hostname == "remote-worker-1"
        assert args.comments == "Emergency maintenance"

    def test_remote_edge_worker_exit_maintenance_args(self):
        """Test remote-edge-worker-exit-maintenance command with required hostname."""
        params = [
            "edge",
            "remote-edge-worker-exit-maintenance",
            "--edge-hostname",
            "remote-worker-1",
        ]
        args = self.arg_parser.parse_args(params)
        assert args.edge_hostname == "remote-worker-1"

    def test_remote_edge_worker_update_maintenance_comment_args(self):
        """Test remote-edge-worker-update-maintenance-comment command with required arguments."""
        params = [
            "edge",
            "remote-edge-worker-update-maintenance-comment",
            "--edge-hostname",
            "remote-worker-1",
            "--comments",
            "Updated maintenance reason",
        ]
        args = self.arg_parser.parse_args(params)
        assert args.edge_hostname == "remote-worker-1"
        assert args.comments == "Updated maintenance reason"

    def test_remove_remote_edge_worker_args(self):
        """Test remove-remote-edge-worker command with required hostname."""
        params = [
            "edge",
            "remove-remote-edge-worker",
            "--edge-hostname",
            "remote-worker-1",
        ]
        args = self.arg_parser.parse_args(params)
        assert args.edge_hostname == "remote-worker-1"

    def test_shutdown_remote_edge_worker_args(self):
        """Test shutdown-remote-edge-worker command with required hostname."""
        params = [
            "edge",
            "shutdown-remote-edge-worker",
            "--edge-hostname",
            "remote-worker-1",
        ]
        args = self.arg_parser.parse_args(params)
        assert args.edge_hostname == "remote-worker-1"

    def test_add_worker_queues_args(self):
        """Test add-worker-queues command with required arguments."""
        params = [
            "edge",
            "add-worker-queues",
            "--edge-hostname",
            "remote-worker-1",
            "--queues",
            "queue3,queue4",
        ]
        args = self.arg_parser.parse_args(params)
        assert args.edge_hostname == "remote-worker-1"
        assert args.queues == "queue3,queue4"

    def test_remove_worker_queues_args(self):
        """Test remove-worker-queues command with required arguments."""
        params = [
            "edge",
            "remove-worker-queues",
            "--edge-hostname",
            "remote-worker-1",
            "--queues",
            "queue1",
        ]
        args = self.arg_parser.parse_args(params)
        assert args.edge_hostname == "remote-worker-1"
        assert args.queues == "queue1"

    def test_shutdown_all_workers_args(self):
        """Test shutdown-all-workers command with yes flag."""
        params = ["edge", "shutdown-all-workers", "--yes"]
        args = self.arg_parser.parse_args(params)
        assert args.yes is True
| {
"repo_id": "apache/airflow",
"file_path": "providers/edge3/tests/unit/edge3/cli/test_definition.py",
"license": "Apache License 2.0",
"lines": 213,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:scripts/ci/prek/check_cli_definition_imports.py | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# /// script
# requires-python = ">=3.10,<3.11"
# dependencies = [
# "rich>=13.6.0",
# ]
# ///
"""
Check that CLI definition files only import from allowed modules.
CLI definition files (matching pattern */cli/definition.py) should only import
from 'airflow.configuration' or 'airflow.cli.cli_config' to avoid circular imports
and ensure clean separation of concerns.
"""
from __future__ import annotations
import argparse
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.resolve())) # make sure common_prek_utils is imported
from common_prek_utils import console, get_imports_from_file
# Allowed modules that can be imported in CLI definition files
# (matched as exact names or dotted prefixes by is_allowed_import).
ALLOWED_MODULES = {
    "airflow.configuration",
    "airflow.cli.cli_config",
}
# Standard library and __future__ modules are also allowed
# NOTE: this is an explicit allow-list rather than all of the stdlib; extend
# it when a definition file legitimately needs another stdlib module.
STDLIB_PREFIXES = (
    "argparse",
    "getpass",
    "textwrap",
    "typing",
    "collections",
    "functools",
    "itertools",
    "pathlib",
    "os",
    "sys",
    "re",
    "json",
    "dataclasses",
    "enum",
)
def get_provider_path_from_file(file_path: Path) -> str | None:
"""
Extract the provider path from a CLI definition file.
For example:
- providers/celery/src/airflow/providers/celery/cli/definition.py -> celery
- providers/cncf/kubernetes/src/airflow/providers/cncf/kubernetes/cli/definition.py -> cncf.kubernetes
"""
path_str = file_path.as_posix()
# Find the substring between "airflow/providers/" and "/cli/definition"
start_marker = "airflow/providers/"
end_marker = "/cli/definition"
start_idx = path_str.find(start_marker)
end_idx = path_str.find(end_marker)
if start_idx == -1 or end_idx == -1 or start_idx >= end_idx:
return None
# Extract the provider path and replace '/' with '.'
provider_path = path_str[start_idx + len(start_marker) : end_idx]
return provider_path.replace("/", ".")
def is_allowed_import(import_name: str, file_path: Path) -> bool:
    """Check if an import is allowed in CLI definition files."""

    def _matches(module: str) -> bool:
        # A module name also permits all of its submodules ("pkg" allows "pkg.x").
        return import_name == module or import_name.startswith(f"{module}.")

    # Explicitly allowed Airflow modules.
    if any(_matches(allowed) for allowed in ALLOWED_MODULES):
        return True
    # Standard library modules.
    if any(_matches(stdlib) for stdlib in STDLIB_PREFIXES):
        return True
    # A provider's CLI definition may use its own version_compat module.
    provider_path = get_provider_path_from_file(file_path)
    if provider_path and _matches(f"airflow.providers.{provider_path}.version_compat"):
        return True
    return False
def parse_args():
    """Build the argument parser and return the parsed command-line namespace."""
    arg_parser = argparse.ArgumentParser(
        description="Check that CLI definition files only import from allowed modules."
    )
    # prek/pre-commit passes the changed files as positional arguments.
    arg_parser.add_argument("files", nargs="*", type=Path, help="Python source files to check.")
    return arg_parser.parse_args()
def main() -> int:
    """
    Check the given files and report forbidden imports in CLI definition files.

    Exit codes: 0 = all checks passed (or nothing to check), 1 = forbidden
    imports were found, 2 = a file could not be parsed.
    """
    args = parse_args()
    if not args.files:
        console.print("[yellow]No files provided.[/]")
        return 0
    errors: list[str] = []
    # Only */cli/definition.py files are subject to this check; any other
    # file passed by the hook is ignored.
    cli_definition_files = [
        path
        for path in args.files
        if path.name == "definition.py" and len(path.parts) >= 2 and path.parts[-2] == "cli"
    ]
    if not cli_definition_files:
        console.print("[yellow]No CLI definition files found to check.[/]")
        return 0
    console.print(f"[blue]Checking {len(cli_definition_files)} CLI definition file(s)...[/]")
    for path in cli_definition_files:
        try:
            # Only top-level imports matter here: imports inside functions are
            # lazy and do not affect CLI startup cost or circular imports.
            imports = get_imports_from_file(path, only_top_level=True)
        except Exception as e:
            console.print(f"[red]Failed to parse {path}: {e}[/]")
            return 2
        forbidden_imports = [imp for imp in imports if not is_allowed_import(imp, path)]
        if forbidden_imports:
            errors.append(f"\n[red]{path}:[/]")
            errors.extend(f"  - {imp}" for imp in forbidden_imports)
    if errors:
        console.print("\n[red] Some CLI definition files contain forbidden imports![/]\n")
        console.print(
            "[yellow]CLI definition files (*/cli/definition.py) should only import from:[/]\n"
            " - airflow.configuration\n"
            " - airflow.cli.cli_config\n"
            " - Their own provider's version_compat module\n"
            f" - Standard library modules ({', '.join(STDLIB_PREFIXES)})\n"
        )
        console.print("[red]Found forbidden imports in:[/]")
        for error in errors:
            console.print(error)
        console.print(
            "\n[yellow]This restriction exists to:[/]\n"
            " - Keep CLI definitions lightweight and declarative to avoid slowdowns\n"
            " - Ensure clean separation between CLI structure and implementation\n"
        )
        return 1
    console.print("[green] All CLI definition files import only from allowed modules![/]")
    return 0
# Hook entry point: the process exit status signals pass (0) / fail (non-zero).
if __name__ == "__main__":
    sys.exit(main())
| {
"repo_id": "apache/airflow",
"file_path": "scripts/ci/prek/check_cli_definition_imports.py",
"license": "Apache License 2.0",
"lines": 152,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:scripts/in_container/benchmark_cli_latency.py | #!/usr/bin/env python3
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Benchmark script to measure CLI latency for different Auth Manager and Executor combinations.
This script:
1. Discovers all available Auth Managers and Executors from providers
2. Tests each combination by running 'airflow --help'
3. Measures response time for each combination
4. Generates a markdown report with results
"""
from __future__ import annotations
import os
import subprocess
import sys
import time
from pathlib import Path
# Add airflow to path so airflow.providers_manager can be imported from the
# in-repo sources even without an installed airflow package.
# NOTE(review): for a file located at scripts/in_container/, parents[3]
# resolves one level *above* the repository root; parents[2] would be the repo
# root that contains airflow-core -- confirm against the actual runtime layout.
AIRFLOW_SOURCES_DIR = Path(__file__).resolve().parents[3] / "airflow-core" / "src"
sys.path.insert(0, str(AIRFLOW_SOURCES_DIR))
def get_available_auth_managers() -> list[str]:
    """Get all available auth manager class names from providers."""
    # Imported lazily so the module can be imported before sys.path is set up.
    from airflow.providers_manager import ProvidersManager

    return ProvidersManager().auth_managers
def get_available_executors() -> list[str]:
    """Get all available executor class names: provider-supplied plus core executors."""
    from airflow.providers_manager import ProvidersManager

    provider_executors = ProvidersManager().executor_class_names
    # Core executors ship with airflow itself, not with a provider package.
    core_executors = (
        "airflow.executors.local_executor.LocalExecutor",
        "airflow.executors.sequential_executor.SequentialExecutor",
    )
    # De-duplicate and return a deterministic ordering.
    return sorted(set(provider_executors) | set(core_executors))
def measure_cli_latency(
    auth_manager: str | None, executor: str | None, runs: int = 3
) -> tuple[float, float, bool]:
    """
    Measure the latency of 'airflow --help' command.

    :param auth_manager: Auth manager class name (None for default)
    :param executor: Executor class name (None for default)
    :param runs: Number of runs to average
    :return: Tuple of (average_time, min_time, success); (0.0, 0.0, False)
        when no run completed successfully.
    """
    env = os.environ.copy()
    if auth_manager:
        env["AIRFLOW__CORE__AUTH_MANAGER"] = auth_manager
    if executor:
        env["AIRFLOW__CORE__EXECUTOR"] = executor
    times: list[float] = []
    success = True
    for _ in range(runs):
        # perf_counter is monotonic and high-resolution; time.time() is a wall
        # clock that can step or skew, making it unsuitable for benchmarking.
        start = time.perf_counter()
        try:
            result = subprocess.run(
                ["airflow", "--help"],
                env=env,
                capture_output=True,
                timeout=30,
                check=False,
            )
            elapsed = time.perf_counter() - start
            # Check if command succeeded
            if result.returncode != 0:
                success = False
                break
            times.append(elapsed)
        except Exception as e:
            # A bare `Exception` already covers TimeoutExpired; the original
            # `except (subprocess.TimeoutExpired, Exception)` was redundant.
            print(f"Error running command: {e}", file=sys.stderr)
            success = False
            break
    if not times:
        return 0.0, 0.0, False
    avg_time = sum(times) / len(times)
    min_time = min(times)
    return avg_time, min_time, success
def format_class_name(class_name: str | None) -> str:
    """Format class name for display: the last dotted component, or "Default" for None."""
    if class_name is None:
        return "Default"
    pieces = class_name.rsplit(".", 1)
    # Undotted names are returned unchanged.
    return pieces[-1] if len(pieces) > 1 else class_name
def generate_markdown_report(results: list[dict]) -> str:
    """Generate markdown formatted report."""
    header = (
        "# Airflow CLI Latency Benchmark",
        "",
        "Benchmark results for `airflow --help` command with different Auth Manager and Executor combinations.",
        "",
        f"Total combinations tested: {len(results)}",
        "",
        "## Results Table",
        "",
        "| Auth Manager | Executor | Avg Time (s) | Min Time (s) | Status |",
        "|--------------|----------|--------------|--------------|--------|",
    )
    out = list(header)
    # One table row per tested combination; failed runs show N/A timings.
    for entry in results:
        ok = entry["success"]
        out.append(
            "| {} | {} | {} | {} | {} |".format(
                format_class_name(entry["auth_manager"]),
                format_class_name(entry["executor"]),
                f"{entry['avg_time']:.3f}" if ok else "N/A",
                f"{entry['min_time']:.3f}" if ok else "N/A",
                "✅" if ok else "❌",
            )
        )
    out += ["", "## Summary Statistics", ""]
    passed = [r for r in results if r["success"]]
    if passed:
        timings = [r["avg_time"] for r in passed]
        out += [
            f"- **Successful combinations**: {len(passed)}/{len(results)}",
            f"- **Overall average time**: {sum(timings) / len(timings):.3f}s",
            f"- **Fastest time**: {min(timings):.3f}s",
            f"- **Slowest time**: {max(timings):.3f}s",
        ]
    else:
        out.append("- No successful combinations")
    out += ["", "---", "", "*Note: Each combination was run 3 times and averaged.*"]
    return "\n".join(out)
def main():
    """
    Run the benchmark end to end.

    Discovers auth managers and executors, times ``airflow --help`` for every
    combination (including the default configuration), prints a markdown
    report, and writes it to ``cli_latency_benchmark.md``.
    Returns 0 on success, 1 when provider discovery fails.
    """
    print("=" * 80)
    print("Airflow CLI Latency Benchmark")
    print("=" * 80)
    print()
    print("Discovering available Auth Managers and Executors...")
    try:
        auth_managers = get_available_auth_managers()
        executors = get_available_executors()
    except Exception as e:
        # Discovery touches the providers manager; treat any failure as fatal.
        print(f"Error discovering providers: {e}", file=sys.stderr)
        return 1
    print(f"Found {len(auth_managers)} Auth Managers")
    print(f"Found {len(executors)} Executors")
    print()
    # Add None to test default configuration
    auth_managers_to_test = [None] + auth_managers
    executors_to_test = [None] + executors
    total_combinations = len(auth_managers_to_test) * len(executors_to_test)
    print(f"Testing {total_combinations} combinations...")
    print()
    results = []
    count = 0
    # Full cartesian product of auth managers x executors.
    for auth_manager in auth_managers_to_test:
        for executor in executors_to_test:
            count += 1
            auth_display = format_class_name(auth_manager)
            executor_display = format_class_name(executor)
            print(
                f"[{count}/{total_combinations}] Testing: {auth_display} + {executor_display}...",
                end=" ",
                flush=True,
            )
            avg_time, min_time, success = measure_cli_latency(auth_manager, executor)
            results.append(
                {
                    "auth_manager": auth_manager,
                    "executor": executor,
                    "avg_time": avg_time,
                    "min_time": min_time,
                    "success": success,
                }
            )
            if success:
                print(f"✅ {avg_time:.3f}s (avg) / {min_time:.3f}s (min)")
            else:
                print("❌ Failed")
    print()
    print("=" * 80)
    print("Generating report...")
    print("=" * 80)
    print()
    report = generate_markdown_report(results)
    print(report)
    # Optionally save to file
    output_file = Path("cli_latency_benchmark.md")
    output_file.write_text(report)
    print()
    print(f"Report saved to: {output_file.absolute()}")
    return 0
# Script entry point: the exit status mirrors main()'s return code.
if __name__ == "__main__":
    sys.exit(main())
| {
"repo_id": "apache/airflow",
"file_path": "scripts/in_container/benchmark_cli_latency.py",
"license": "Apache License 2.0",
"lines": 218,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:shared/plugins_manager/src/airflow_shared/plugins_manager/plugins_manager.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Manages all plugins."""
from __future__ import annotations
import importlib
import importlib.machinery
import importlib.util
import inspect
import logging
import os
import sys
import types
from pathlib import Path
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
if sys.version_info >= (3, 12):
from importlib import metadata
else:
import importlib_metadata as metadata
from collections.abc import Generator
from types import ModuleType
from ..listeners.listener import ListenerManager
log = logging.getLogger(__name__)
class AirflowPluginSource:
    """
    Base class describing where a plugin was loaded from.

    Subclasses provide a plain-text (``__str__``) and an HTML (``__html__``)
    rendering of the source for display purposes.
    """

    def __str__(self):
        raise NotImplementedError

    def __html__(self):
        raise NotImplementedError
class PluginsDirectorySource(AirflowPluginSource):
    """Plugin source pointing at a file inside the configured plugins directory."""

    def __init__(self, path, plugins_folder: str):
        # Store the location relative to the plugins folder for compact display.
        self.path = os.path.relpath(path, plugins_folder)

    def __str__(self):
        return "$PLUGINS_FOLDER/" + self.path

    def __html__(self):
        return "<em>$PLUGINS_FOLDER/</em>" + self.path
class EntryPointSource(AirflowPluginSource):
    """Plugin source backed by a package entrypoint (the ``airflow.plugins`` group)."""

    def __init__(self, entrypoint: metadata.EntryPoint, dist: metadata.Distribution):
        # Record the distribution name/version plus the entrypoint spec for display.
        self.dist = dist.metadata["Name"]  # type: ignore[index]
        self.version = dist.version
        self.entrypoint = str(entrypoint)

    def __str__(self):
        return "{}=={}: {}".format(self.dist, self.version, self.entrypoint)

    def __html__(self):
        return "<em>{}=={}:</em> {}".format(self.dist, self.version, self.entrypoint)
class AirflowPluginException(Exception):
    """Raised when a plugin fails loading or validation."""
class AirflowPlugin:
    """
    Class used to define AirflowPlugin.

    Plugins subclass this and override the class-level lists below to expose
    extensions (macros, views, listeners, ...) to Airflow.
    """

    # Required unique plugin name; validate() rejects plugins without one.
    name: str | None = None
    # Filled in by the loader with where the plugin came from (directory/entrypoint).
    source: AirflowPluginSource | None = None
    # Callables exposed to templates as macros.
    macros: list[Any] = []
    admin_views: list[Any] = []
    flask_blueprints: list[Any] = []
    fastapi_apps: list[Any] = []
    fastapi_root_middlewares: list[Any] = []
    external_views: list[Any] = []
    react_apps: list[Any] = []
    menu_links: list[Any] = []
    appbuilder_views: list[Any] = []
    appbuilder_menu_items: list[Any] = []
    # A list of global operator extra links that can redirect users to
    # external systems. These extra links will be available on the
    # task page in the form of buttons.
    #
    # Note: the global operator extra link can be overridden at each
    # operator level.
    global_operator_extra_links: list[Any] = []
    # A list of operator extra links to override or add operator links
    # to existing Airflow Operators.
    #
    # These extra links will be available on the task page in form of
    # buttons.
    operator_extra_links: list[Any] = []
    # A list of timetable classes that can be used for Dag scheduling.
    timetables: list[Any] = []
    # A list of partition mapper classes exposed by the plugin.
    # (The previous comment here was a copy-paste of the timetables one.)
    partition_mappers: list[Any] = []
    # A list of listeners that can be used for tracking task and Dag states.
    listeners: list[ModuleType | object] = []
    # A list of hook lineage reader classes that can be used for reading lineage information from a hook.
    hook_lineage_readers: list[Any] = []
    # A list of priority weight strategy classes that can be used for calculating tasks weight priority.
    priority_weight_strategies: list[Any] = []

    @classmethod
    def validate(cls):
        """Validate if plugin has a name."""
        if not cls.name:
            raise AirflowPluginException("Your plugin needs a name.")

    @classmethod
    def on_load(cls, *args, **kwargs):
        """
        Execute when the plugin is loaded; This method is only called once during runtime.

        :param args: If future arguments are passed in on call.
        :param kwargs: If future arguments are passed in on call.
        """
def is_valid_plugin(plugin_obj) -> bool:
    """
    Check whether a potential object is a subclass of the AirflowPlugin class.

    :param plugin_obj: potential subclass of AirflowPlugin
    :return: Whether or not the obj is a valid subclass of
        AirflowPlugin
    """
    if not inspect.isclass(plugin_obj):
        return False
    # Temporarily here, we use a name base checking instead of issubclass() because the shared library
    # is accessed via different symlink paths in core (airflow._shared) and task sdk (airflow.sdk._shared).
    # Python treats these as different modules, so the AirflowPlugin class has different identities in each
    # context, and issubclass() would reject provider plugins that inherit the SDK's AirflowPlugin.
    # For now, validating by class name allows plugins defined against either copy to load.
    has_airflow_plugin_base = False
    for ancestor in plugin_obj.__mro__:
        if ancestor.__name__ == "AirflowPlugin" and "plugins_manager" in ancestor.__module__:
            has_airflow_plugin_base = True
            break
    # The base class itself is not a plugin; only proper subclasses qualify.
    if not has_airflow_plugin_base or plugin_obj.__name__ == "AirflowPlugin":
        return False
    plugin_obj.validate()
    return True
def _load_entrypoint_plugins() -> tuple[list[AirflowPlugin], dict[str, str]]:
    """
    Load and register plugins AirflowPlugin subclasses from the entrypoints.

    The entry_point group should be 'airflow.plugins'.
    """
    from ..module_loading import entry_points_with_dist

    log.debug("Loading plugins from entrypoints")
    loaded: list[AirflowPlugin] = []
    errors: dict[str, str] = {}
    for entry_point, dist in entry_points_with_dist("airflow.plugins"):
        log.debug("Importing entry_point plugin %s", entry_point.name)
        try:
            candidate = entry_point.load()
            if is_valid_plugin(candidate):
                instance: AirflowPlugin = candidate()
                instance.source = EntryPointSource(entry_point, dist)
                loaded.append(instance)
        except Exception as e:
            # A broken plugin must not break the rest of the load; record it.
            log.exception("Failed to import plugin %s", entry_point.name)
            errors[entry_point.module] = str(e)
    return loaded, errors
def _load_plugins_from_plugin_directory(
    plugins_folder: str,
    load_examples: bool = False,
    example_plugins_module: str | None = None,
    ignore_file_syntax: str = "glob",
) -> tuple[list[AirflowPlugin], dict[str, str]]:
    """
    Load and register Airflow Plugins from plugins directory.

    :param plugins_folder: directory scanned for ``*.py`` plugin files
    :param load_examples: also load plugins shipped with the example Dags
    :param example_plugins_module: dotted module containing example plugins;
        required when ``load_examples`` is True
    :param ignore_file_syntax: syntax of ``.airflowignore`` entries ("glob" or "regexp")
    :return: tuple of (loaded plugin instances, mapping of file path to load-error message)
    :raises ValueError: if ``plugins_folder`` is empty, or ``load_examples``
        is set without ``example_plugins_module``
    """
    from ..module_loading import find_path_from_directory

    if not plugins_folder:
        raise ValueError("Plugins folder is not set")
    log.debug("Loading plugins from directory: %s", plugins_folder)
    files = find_path_from_directory(plugins_folder, ".airflowignore", ignore_file_syntax)
    # Each location is (module name prefix, generator of candidate file paths).
    plugin_search_locations: list[tuple[str, Generator[str, None, None]]] = [("", files)]
    if load_examples:
        if not example_plugins_module:
            raise ValueError("example_plugins_module is required when load_examples is True")
        log.debug("Note: Loading plugins from examples as well: %s", plugins_folder)
        example_plugins = importlib.import_module(example_plugins_module)
        example_plugins_folder = next(iter(example_plugins.__path__))
        example_files = find_path_from_directory(example_plugins_folder, ".airflowignore")
        plugin_search_locations.append((example_plugins.__name__, example_files))
    plugins: list[AirflowPlugin] = []
    import_errors: dict[str, str] = {}
    for module_prefix, plugin_files in plugin_search_locations:
        for file_path in plugin_files:
            path = Path(file_path)
            # Only regular .py files are plugin candidates.
            if not path.is_file() or path.suffix != ".py":
                continue
            mod_name = f"{module_prefix}.{path.stem}" if module_prefix else path.stem
            try:
                # Import the file directly from its path (it is not necessarily
                # on sys.path) and register it in sys.modules under mod_name.
                loader = importlib.machinery.SourceFileLoader(mod_name, file_path)
                spec = importlib.util.spec_from_loader(mod_name, loader)
                if not spec:
                    log.error("Could not load spec for module %s at %s", mod_name, file_path)
                    continue
                mod = importlib.util.module_from_spec(spec)
                sys.modules[spec.name] = mod
                loader.exec_module(mod)
                # Instantiate every AirflowPlugin subclass defined in the module.
                for mod_attr_value in (m for m in mod.__dict__.values() if is_valid_plugin(m)):
                    plugin_instance: AirflowPlugin = mod_attr_value()
                    plugin_instance.source = PluginsDirectorySource(file_path, plugins_folder)
                    plugins.append(plugin_instance)
            except Exception as e:
                # A broken plugin file must not break the rest of the scan.
                log.exception("Failed to import plugin %s", file_path)
                import_errors[file_path] = str(e)
    return plugins, import_errors
def make_module(name: str, objects: list[Any]) -> ModuleType | None:
    """Create a new module exposing *objects* as attributes; None when *objects* is empty."""
    if not objects:
        return None
    log.debug("Creating module %s", name)
    lowered = name.lower()
    module = types.ModuleType(lowered)
    module._name = lowered.split(".")[-1]  # type: ignore
    module._objects = objects  # type: ignore
    # Each object becomes a module attribute under its own __name__.
    for obj in objects:
        setattr(module, obj.__name__, obj)
    return module
def integrate_macros_plugins(
    target_macros_module: ModuleType, macros_module_name_prefix: str, plugins: list[AirflowPlugin]
) -> None:
    """
    Register macros from plugins onto the target macros module.

    For each plugin with macros, creates a submodule and attaches it to
    the target module so macros can be accessed in templates as
    ``{{ macros.plugin_name.macro_func() }}``.
    """
    log.debug("Integrate Macros plugins")
    for plugin in plugins:
        if plugin.name is None:
            raise AirflowPluginException("Invalid plugin name")
        submodule = make_module(f"{macros_module_name_prefix}.{plugin.name}", plugin.macros)
        if submodule is None:
            # Plugin declares no macros; nothing to register.
            continue
        # Register in sys.modules so the submodule is importable, then attach
        # it to the macros module so templates can reach it.
        sys.modules[submodule.__name__] = submodule
        setattr(target_macros_module, plugin.name, submodule)
def integrate_listener_plugins(listener_manager: ListenerManager, plugins: list[AirflowPlugin]) -> None:
    """
    Register listeners from plugins with the listener manager.

    For each plugin with listeners, registers them with the provided
    ListenerManager.
    """
    for plugin in plugins:
        if plugin.name is None:
            raise AirflowPluginException("Invalid plugin name")
        for registered_listener in plugin.listeners:
            listener_manager.add_listener(registered_listener)
| {
"repo_id": "apache/airflow",
"file_path": "shared/plugins_manager/src/airflow_shared/plugins_manager/plugins_manager.py",
"license": "Apache License 2.0",
"lines": 246,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:shared/plugins_manager/tests/plugins_manager/test_plugins_manager.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import contextlib
import logging
import sys
from unittest import mock
import pytest
from airflow_shared.plugins_manager import (
EntryPointSource,
PluginsDirectorySource,
_load_entrypoint_plugins,
)
@pytest.fixture
def mock_metadata_distribution(mocker):
    """
    Return a context-manager factory that patches package distribution discovery.

    The patch target depends on the Python version: stdlib ``importlib.metadata``
    on 3.12+, the ``importlib_metadata`` backport otherwise — mirroring the
    TYPE_CHECKING import in the module under test.
    """
    # NOTE(review): the `mocker` argument appears unused; unittest.mock.patch
    # is used directly -- confirm whether pytest-mock is really required here.

    @contextlib.contextmanager
    def wrapper(*args, **kwargs):
        if sys.version_info < (3, 12):
            patch_fq = "importlib_metadata.distributions"
        else:
            patch_fq = "importlib.metadata.distributions"
        with mock.patch(patch_fq, *args, **kwargs) as m:
            yield m

    return wrapper
class TestPluginsDirectorySource:
    """Checks the text/HTML rendering of directory-based plugin sources."""

    def test_should_return_correct_path_name(self, tmp_path):
        # The source should display the file relative to the plugins folder.
        plugin_file = tmp_path / "test_plugins_manager.py"
        plugin_file.write_text("# test file")
        source = PluginsDirectorySource(str(plugin_file), str(tmp_path))
        assert source.path == "test_plugins_manager.py"
        assert str(source) == "$PLUGINS_FOLDER/test_plugins_manager.py"
        assert source.__html__() == "<em>$PLUGINS_FOLDER/</em>test_plugins_manager.py"
class TestEntryPointSource:
    """Checks the text/HTML rendering of entrypoint-based plugin sources."""

    def test_should_return_correct_source_details(self, mock_metadata_distribution):
        # Build a fake entrypoint + distribution pair resembling an installed package.
        mock_entrypoint = mock.Mock()
        mock_entrypoint.name = "test-entrypoint-plugin"
        mock_entrypoint.module = "module_name_plugin"
        mock_entrypoint.group = "airflow.plugins"
        mock_dist = mock.Mock()
        mock_dist.metadata = {"Name": "test-entrypoint-plugin"}
        mock_dist.version = "1.0.0"
        mock_dist.entry_points = [mock_entrypoint]
        # Exercise the loader with the patched distribution discovery.
        with mock_metadata_distribution(return_value=[mock_dist]):
            _load_entrypoint_plugins()
        source = EntryPointSource(mock_entrypoint, mock_dist)
        # The source captures str(entrypoint) plus "<dist>==<version>" for display.
        assert str(mock_entrypoint) == source.entrypoint
        assert "test-entrypoint-plugin==1.0.0: " + str(mock_entrypoint) == str(source)
        assert "<em>test-entrypoint-plugin==1.0.0:</em> " + str(mock_entrypoint) == source.__html__()
class TestPluginsManager:
    def test_entrypoint_plugin_errors_dont_raise_exceptions(self, mock_metadata_distribution, caplog):
        """
        Test that Airflow does not raise an error if there is any Exception because of a plugin.

        Failures must be logged (with traceback) and surfaced via the returned
        import_errors mapping instead of propagating.
        """
        mock_dist = mock.Mock()
        mock_dist.metadata = {"Name": "test-dist"}
        mock_entrypoint = mock.Mock()
        mock_entrypoint.name = "test-entrypoint"
        mock_entrypoint.group = "airflow.plugins"
        mock_entrypoint.module = "test.plugins.test_plugins_manager"
        # Simulate a plugin whose import blows up when loaded.
        mock_entrypoint.load.side_effect = ImportError("my_fake_module not found")
        mock_dist.entry_points = [mock_entrypoint]
        with (
            mock_metadata_distribution(return_value=[mock_dist]),
            caplog.at_level(logging.ERROR, logger="airflow_shared.plugins_manager.plugins_manager"),
        ):
            _, import_errors = _load_entrypoint_plugins()
        received_logs = caplog.text
        # Assert Traceback is shown too
        assert "Traceback (most recent call last):" in received_logs
        assert "my_fake_module not found" in received_logs
        assert "Failed to import plugin test-entrypoint" in received_logs
        # The failure is also reported programmatically, keyed by module name.
        assert (
            "test.plugins.test_plugins_manager",
            "my_fake_module not found",
        ) in import_errors.items()
| {
"repo_id": "apache/airflow",
"file_path": "shared/plugins_manager/tests/plugins_manager/test_plugins_manager.py",
"license": "Apache License 2.0",
"lines": 91,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:task-sdk/src/airflow/sdk/plugins_manager.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""SDK wrapper for plugins manager."""
from __future__ import annotations
import logging
from functools import cache
from typing import TYPE_CHECKING
from airflow import settings
from airflow.sdk._shared.module_loading import import_string
from airflow.sdk._shared.observability.metrics.stats import Stats
from airflow.sdk._shared.plugins_manager import (
AirflowPlugin,
_load_entrypoint_plugins,
_load_plugins_from_plugin_directory,
integrate_listener_plugins as _integrate_listener_plugins,
integrate_macros_plugins as _integrate_macros_plugins,
is_valid_plugin,
)
from airflow.sdk.configuration import conf
from airflow.sdk.providers_manager_runtime import ProvidersManagerTaskRuntime
if TYPE_CHECKING:
from airflow.sdk._shared.listeners.listener import ListenerManager
from airflow.sdk.lineage import HookLineageReader
log = logging.getLogger(__name__)
def _load_providers_plugins() -> tuple[list[AirflowPlugin], dict[str, str]]:
    """Load plugins from providers."""
    log.debug("Loading plugins from providers")
    providers_manager = ProvidersManagerTaskRuntime()
    providers_manager.initialize_providers_plugins()
    collected: list[AirflowPlugin] = []
    import_errors: dict[str, str] = {}
    for plugin_info in providers_manager.plugins:
        log.debug("Importing plugin %s from class %s", plugin_info.name, plugin_info.plugin_class)
        try:
            loaded = import_string(plugin_info.plugin_class)
            if is_valid_plugin(loaded):
                collected.append(loaded)
            else:
                log.warning("Plugin %s is not a valid plugin", plugin_info.name)
        except ImportError:
            # Skip broken provider plugins; other providers still load.
            log.exception(
                "Failed to load plugin %s from class name %s", plugin_info.name, plugin_info.plugin_class
            )
    return collected, import_errors
@cache
def _get_plugins() -> tuple[list[AirflowPlugin], dict[str, str]]:
    """
    Load plugins from plugins directory and entrypoints.

    Plugins are only loaded if they have not been previously loaded; the
    result is cached for the lifetime of the process.

    :return: tuple of (successfully loaded plugin instances, mapping of
        plugin source/name to the error raised while loading it)
    :raises ValueError: if the plugins folder is not configured
    """
    if not settings.PLUGINS_FOLDER:
        raise ValueError("Plugins folder is not set")
    log.debug("Loading plugins")
    plugins: list[AirflowPlugin] = []
    import_errors: dict[str, str] = {}
    loaded_plugins: set[str | None] = set()

    def __register_plugins(plugin_instances: list[AirflowPlugin], errors: dict[str, str]) -> None:
        # Register each plugin at most once; an earlier source wins over a
        # later one with the same plugin name.
        for plugin_instance in plugin_instances:
            if plugin_instance.name in loaded_plugins:
                # Skip only this duplicate. (Previously this was `return`,
                # which silently dropped every remaining plugin in the batch.)
                continue
            loaded_plugins.add(plugin_instance.name)
            try:
                plugin_instance.on_load()
                plugins.append(plugin_instance)
            except Exception as e:
                log.exception("Failed to load plugin %s", plugin_instance.name)
                name = str(plugin_instance.source) if plugin_instance.source else plugin_instance.name or ""
                import_errors[name] = str(e)
        import_errors.update(errors)

    with Stats.timer() as timer:
        load_examples = conf.getboolean("core", "LOAD_EXAMPLES")
        __register_plugins(
            *_load_plugins_from_plugin_directory(
                plugins_folder=settings.PLUGINS_FOLDER,
                load_examples=load_examples,
                example_plugins_module="airflow.example_dags.plugins" if load_examples else None,
            )
        )
        __register_plugins(*_load_entrypoint_plugins())
        if not settings.LAZY_LOAD_PROVIDERS:
            __register_plugins(*_load_providers_plugins())
    log.debug("Loading %d plugin(s) took %.2f ms", len(plugins), timer.duration)
    return plugins, import_errors
@cache
def integrate_macros_plugins() -> None:
    """Integrates macro plugins."""
    # Imported lazily to avoid a circular import at module load time.
    from airflow.sdk.execution_time import macros

    loaded_plugins, _ = _get_plugins()
    _integrate_macros_plugins(
        plugins=loaded_plugins,
        target_macros_module=macros,
        macros_module_name_prefix="airflow.sdk.execution_time.macros",
    )
def integrate_listener_plugins(listener_manager: ListenerManager) -> None:
    """Add listeners from plugins."""
    loaded_plugins, _ = _get_plugins()
    _integrate_listener_plugins(listener_manager, plugins=loaded_plugins)
@cache
def get_hook_lineage_readers_plugins() -> list[type[HookLineageReader]]:
    """Collect and get hook lineage reader classes registered by plugins."""
    log.debug("Initialize hook lineage readers plugins")
    plugins, _ = _get_plugins()
    readers: list[type[HookLineageReader]] = []
    for plugin in plugins:
        readers.extend(plugin.hook_lineage_readers)
    return readers
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/plugins_manager.py",
"license": "Apache License 2.0",
"lines": 118,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:shared/module_loading/src/airflow_shared/module_loading/file_discovery.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""File discovery utilities for finding files while respecting ignore patterns."""
from __future__ import annotations
import logging
import os
import re
from collections.abc import Generator
from pathlib import Path
from re import Pattern
from typing import NamedTuple, Protocol
from pathspec.patterns import GitWildMatchPattern
log = logging.getLogger(__name__)
class _IgnoreRule(Protocol):
    """
    Interface for ignore rules for structural subtyping.

    Implementations (regexp/glob) provide a ``compile`` factory and a
    ``match`` predicate; both are static since a rule is immutable data.
    """

    @staticmethod
    def compile(pattern: str, base_dir: Path, definition_file: Path) -> _IgnoreRule | None:
        """
        Build an ignore rule from the supplied pattern.

        ``base_dir`` and ``definition_file`` should be absolute paths.
        Returns None when the pattern is invalid.
        """

    @staticmethod
    def match(path: Path, rules: list[_IgnoreRule]) -> bool:
        """Match a candidate absolute path against a list of rules."""
class _RegexpIgnoreRule(NamedTuple):
    """Typed namedtuple with utility functions for regexp ignore rules."""

    pattern: Pattern
    base_dir: Path

    @staticmethod
    def compile(pattern: str, base_dir: Path, definition_file: Path) -> _IgnoreRule | None:
        """Build an ignore rule from the supplied regexp pattern and log a useful warning if it is invalid."""
        try:
            compiled = re.compile(pattern)
        except re.error as e:
            # An invalid pattern is skipped rather than aborting the scan.
            log.warning("Ignoring invalid regex '%s' from %s: %s", pattern, definition_file, e)
            return None
        return _RegexpIgnoreRule(compiled, base_dir)

    @staticmethod
    def match(path: Path, rules: list[_IgnoreRule]) -> bool:
        """Match a list of ignore rules against the supplied path."""
        for candidate in rules:
            if not isinstance(candidate, _RegexpIgnoreRule):
                raise ValueError(f"_RegexpIgnoreRule cannot match rules of type: {type(candidate)}")
            # Patterns are matched against the path relative to the rule's base dir.
            if candidate.pattern.search(str(path.relative_to(candidate.base_dir))) is not None:
                return True
        return False
class _GlobIgnoreRule(NamedTuple):
    """Typed namedtuple with utility functions for glob ignore rules."""

    # Compiled gitignore-style pattern.
    wild_match_pattern: GitWildMatchPattern
    # When set, the pattern is anchored at the directory of its definition file.
    relative_to: Path | None = None

    @staticmethod
    def compile(pattern: str, base_dir: Path, definition_file: Path) -> _IgnoreRule | None:
        """Build an ignore rule from the supplied glob pattern and log a useful warning if it is invalid."""
        relative_to: Path | None = None
        if pattern.strip() == "/":
            # "/" doesn't match anything in gitignore
            log.warning("Ignoring no-op glob pattern '/' from %s", definition_file)
            return None
        if pattern.startswith("/") or "/" in pattern.rstrip("/"):
            # See https://git-scm.com/docs/gitignore
            # > If there is a separator at the beginning or middle (or both) of the pattern, then the
            # > pattern is relative to the directory level of the particular .gitignore file itself.
            # > Otherwise the pattern may also match at any level below the .gitignore level.
            relative_to = definition_file.parent
        ignore_pattern = GitWildMatchPattern(pattern)
        return _GlobIgnoreRule(wild_match_pattern=ignore_pattern, relative_to=relative_to)

    @staticmethod
    def match(path: Path, rules: list[_IgnoreRule]) -> bool:
        """Match a list of ignore rules against the supplied path, accounting for exclusion rules and ordering."""
        # Whether ``path`` is a directory is invariant across rules: hoist the
        # stat() call out of the loop instead of repeating it for every rule.
        is_dir = path.is_dir()
        matched = False
        for rule in rules:
            if not isinstance(rule, _GlobIgnoreRule):
                raise ValueError(f"_GlobIgnoreRule cannot match rules of type: {type(rule)}")
            rel_obj = path.relative_to(rule.relative_to) if rule.relative_to else Path(path.name)
            # gitignore semantics: a directory path carries a trailing slash so
            # directory-only patterns (those ending in "/") can match it.
            if is_dir:
                rel_path = f"{rel_obj.as_posix()}/"
            else:
                rel_path = rel_obj.as_posix()
            if (
                rule.wild_match_pattern.include is not None
                and rule.wild_match_pattern.match_file(rel_path) is not None
            ):
                # Keep scanning: later rules override earlier ones, so a negated
                # pattern ("!foo", include=False) can un-ignore the path.
                matched = rule.wild_match_pattern.include
        return matched
def _find_path_from_directory(
    base_dir_path: str | os.PathLike[str],
    ignore_file_name: str,
    ignore_rule_type: type[_IgnoreRule],
) -> Generator[str, None, None]:
    """
    Recursively search the base path and return the list of file paths that should not be ignored.

    :param base_dir_path: the base path to be searched
    :param ignore_file_name: the file name containing regular expressions for files that should be ignored.
    :param ignore_rule_type: the concrete class for ignore rules, which implements the _IgnoreRule interface.
    :return: a generator of file paths which should not be ignored.
    """
    # A Dict of patterns, keyed using resolved, absolute paths
    patterns_by_dir: dict[Path, list[_IgnoreRule]] = {}
    for root, dirs, files in os.walk(base_dir_path, followlinks=True):
        # Start from the rules inherited from the parent directory (stored under
        # this dir's resolved path when the parent was visited); the walk root
        # itself starts with no rules.
        patterns: list[_IgnoreRule] = patterns_by_dir.get(Path(root).resolve(), [])
        ignore_file_path = Path(root) / ignore_file_name
        if ignore_file_path.is_file():
            with open(ignore_file_path) as ifile:
                # Strip trailing "#" comments from every line before compiling.
                patterns_to_match_excluding_comments = [
                    re.sub(r"\s*#.*", "", line) for line in ifile.read().split("\n")
                ]
                # append new patterns and filter out "None" objects, which are invalid patterns
                patterns += [
                    p
                    for p in [
                        ignore_rule_type.compile(pattern, Path(base_dir_path), ignore_file_path)
                        for pattern in patterns_to_match_excluding_comments
                        if pattern
                    ]
                    if p is not None
                ]
                # evaluation order of patterns is important with negation
                # so that later patterns can override earlier patterns
        # Prune ignored subdirectories in place so os.walk does not descend into them.
        dirs[:] = [subdir for subdir in dirs if not ignore_rule_type.match(Path(root) / subdir, patterns)]
        # explicit loop for infinite recursion detection since we are following symlinks in this walk
        for sd in dirs:
            dirpath = (Path(root) / sd).resolve()
            if dirpath in patterns_by_dir:
                raise RuntimeError(
                    "Detected recursive loop when walking DAG directory "
                    f"{base_dir_path}: {dirpath} has appeared more than once."
                )
            # Each subdirectory inherits a snapshot copy of the rules accumulated so far.
            patterns_by_dir.update({dirpath: patterns.copy()})
        for file in files:
            # The ignore file itself is never yielded.
            if file != ignore_file_name:
                abs_file_path = Path(root) / file
                if not ignore_rule_type.match(abs_file_path, patterns):
                    yield str(abs_file_path)
def find_path_from_directory(
    base_dir_path: str | os.PathLike[str],
    ignore_file_name: str,
    ignore_file_syntax: str = "glob",
) -> Generator[str, None, None]:
    """
    Recursively search the base path for a list of file paths that should not be ignored.

    :param base_dir_path: the base path to be searched
    :param ignore_file_name: the file name in which specifies the patterns of files/dirs to be ignored
    :param ignore_file_syntax: the syntax of patterns in the ignore file: regexp or glob (default: glob)
    :return: a generator of file paths.
    """
    # An unset/empty syntax falls back to the gitignore-style glob default.
    syntax = ignore_file_syntax or "glob"
    if syntax == "glob":
        return _find_path_from_directory(base_dir_path, ignore_file_name, _GlobIgnoreRule)
    if syntax == "regexp":
        return _find_path_from_directory(base_dir_path, ignore_file_name, _RegexpIgnoreRule)
    raise ValueError(f"Unsupported ignore_file_syntax: {ignore_file_syntax}")
| {
"repo_id": "apache/airflow",
"file_path": "shared/module_loading/src/airflow_shared/module_loading/file_discovery.py",
"license": "Apache License 2.0",
"lines": 164,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:shared/module_loading/tests/module_loading/test_file_discovery.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from pathlib import Path
import pytest
from airflow_shared.module_loading import find_path_from_directory
class TestFindPathFromDirectory:
    """Tests for ``find_path_from_directory`` symlink handling and .airflowignore negation."""

    @pytest.fixture
    def test_dir(self, tmp_path):
        """Build a tree where ``symlink`` points at ``folder`` and ``folder`` is ignored."""
        # create test tree with symlinks
        source = os.path.join(tmp_path, "folder")
        target = os.path.join(tmp_path, "symlink")
        py_file = os.path.join(source, "hello_world.py")
        ignore_file = os.path.join(tmp_path, ".airflowignore")
        os.mkdir(source)
        os.symlink(source, target)
        # write ignore files
        with open(ignore_file, "w") as f:
            f.write("folder")
        # write sample pyfile
        with open(py_file, "w") as f:
            f.write("print('hello world')")
        return tmp_path

    def test_find_path_from_directory_respects_symlinks_regexp_ignore(self, test_dir):
        """The ignored real directory is skipped, but the same file stays reachable via the symlink."""
        ignore_list_file = ".airflowignore"
        found = list(find_path_from_directory(test_dir, ignore_list_file, "regexp"))
        assert os.path.join(test_dir, "symlink", "hello_world.py") in found
        assert os.path.join(test_dir, "folder", "hello_world.py") not in found

    def test_find_path_from_directory_respects_symlinks_glob_ignore(self, test_dir):
        """Same as the regexp case above, but with glob ignore syntax."""
        ignore_list_file = ".airflowignore"
        found = list(find_path_from_directory(test_dir, ignore_list_file, ignore_file_syntax="glob"))
        assert os.path.join(test_dir, "symlink", "hello_world.py") in found
        assert os.path.join(test_dir, "folder", "hello_world.py") not in found

    def test_find_path_from_directory_fails_on_recursive_link(self, test_dir):
        """A symlink pointing back into its own parent must raise rather than loop forever."""
        # add a recursive link
        recursing_src = os.path.join(test_dir, "folder2", "recursor")
        recursing_tgt = os.path.join(test_dir, "folder2")
        os.mkdir(recursing_tgt)
        os.symlink(recursing_tgt, recursing_src)
        ignore_list_file = ".airflowignore"
        error_message = (
            f"Detected recursive loop when walking DAG directory {test_dir}: "
            f"{Path(recursing_tgt).resolve()} has appeared more than once."
        )
        with pytest.raises(RuntimeError, match=error_message):
            list(find_path_from_directory(test_dir, ignore_list_file, ignore_file_syntax="glob"))

    def test_airflowignore_negation_unignore_subfolder_file_glob(self, tmp_path):
        """Ensure negation rules can unignore a subfolder and a file inside it when using glob syntax.

        Patterns:
          * -> ignore everything
          !subfolder/ -> unignore the subfolder (must match directory rule)
          !subfolder/keep.py -> unignore a specific file inside the subfolder
        """
        dags_root = tmp_path / "dags"
        (dags_root / "subfolder").mkdir(parents=True)
        # files
        (dags_root / "drop.py").write_text("raise Exception('ignored')\n")
        (dags_root / "subfolder" / "keep.py").write_text("# should be discovered\n")
        (dags_root / "subfolder" / "drop.py").write_text("raise Exception('ignored')\n")
        (dags_root / ".airflowignore").write_text(
            "\n".join(
                [
                    "*",
                    "!subfolder/",
                    "!subfolder/keep.py",
                ]
            )
        )
        detected = set()
        for raw in find_path_from_directory(dags_root, ".airflowignore", "glob"):
            p = Path(raw)
            if p.is_file() and p.suffix == ".py":
                detected.add(p.relative_to(dags_root).as_posix())
        assert detected == {"subfolder/keep.py"}

    def test_airflowignore_negation_nested_with_globstar(self, tmp_path):
        """Negation with ** should work for nested subfolders."""
        dags_root = tmp_path / "dags"
        nested = dags_root / "a" / "b" / "subfolder"
        nested.mkdir(parents=True)
        # files
        (dags_root / "ignore_top.py").write_text("raise Exception('ignored')\n")
        (nested / "keep.py").write_text("# should be discovered\n")
        (nested / "drop.py").write_text("raise Exception('ignored')\n")
        (dags_root / ".airflowignore").write_text(
            "\n".join(
                [
                    "*",
                    "!a/",
                    "!a/b/",
                    "!**/subfolder/",
                    "!**/subfolder/keep.py",
                    "drop.py",
                ]
            )
        )
        detected = set()
        for raw in find_path_from_directory(dags_root, ".airflowignore", "glob"):
            p = Path(raw)
            if p.is_file() and p.suffix == ".py":
                detected.add(p.relative_to(dags_root).as_posix())
        assert detected == {"a/b/subfolder/keep.py"}
| {
"repo_id": "apache/airflow",
"file_path": "shared/module_loading/tests/module_loading/test_file_discovery.py",
"license": "Apache License 2.0",
"lines": 117,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/discord/src/airflow/providers/discord/notifications/embed.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Discord embed structure.
See:
https://discord.com/developers/docs/resources/message#embed-object-embed-structure
"""
from __future__ import annotations
from typing import Literal, TypedDict
from typing_extensions import NotRequired, Required
# Webhook embeds only ever use the "rich" type (see Embed.type below).
EmbedType = Literal["rich"]


class EmbedFooter(TypedDict):
    """
    EmbedFooter.

    :param text: Footer text.
    :param icon_url: Url of footer icon (only supports http(s) and attachments).
    :param proxy_icon_url: A proxy url of footer icon.

    See:
    https://discord.com/developers/docs/resources/message#embed-object-embed-footer-structure
    """

    # ``text`` is a required key (TypedDict default total=True); icon urls may be omitted.
    text: str
    icon_url: NotRequired[str]
    proxy_icon_url: NotRequired[str]
class EmbedField(TypedDict):
    """
    EmbedField.

    :param name: Name of the field.
    :param value: Value of the field.
    :param inline: Whether or not this field should display inline.

    See:
    https://discord.com/developers/docs/resources/message#embed-object-embed-field-structure
    """

    # ``name`` and ``value`` are required keys; only ``inline`` may be omitted.
    name: str
    value: str
    inline: NotRequired[bool]
class EmbedProvider(TypedDict, total=False):
    """
    EmbedProvider.

    :param name: Name of provider
    :param url: Url of provider

    See:
    https://discord.com/developers/docs/resources/message#embed-object-embed-provider-structure
    """

    # total=False: both keys are optional.
    name: str
    url: str
class EmbedAuthor(TypedDict, total=False):
    """
    EmbedAuthor.

    :param name: Name of author.
    :param url: Url of author (only supports http(s)).
    :param icon_url: Url of author icon (only supports http(s) and attachments).
    :param proxy_icon_url: A proxy url of author icon.

    See:
    https://discord.com/developers/docs/resources/message#embed-object-embed-author-structure
    """

    # ``name`` is the only required key; Required[...] overrides total=False.
    name: Required[str]
    url: str
    icon_url: str
    proxy_icon_url: str
class Embed(TypedDict, total=False):
    """
    Embed.

    :param title: The text that is placed above the description.
        Embed titles are limited to 256 characters.
    :param description: The part of the embed where most of the text is contained.
        Embed descriptions are limited to 2048 characters.
    :param type: Type of embed (always "rich" for webhook embeds).
    :param url: Url of embed.
    :param timestamp: Timestamp (ISO8601) of embed content.
    :param color: Color code of the embed.
    :param footer: Footer information. Footer text is limited to 2048 characters
    :param provider: Add provider information.
    :param author: Adds the author block to the embed, always located at the
        top of the embed.
    :param fields: Add fields information, max of 25 fields.

    See:
    https://discord.com/developers/docs/resources/message#embed-object-embed-structure
    """

    # All keys optional (total=False); Discord fills in defaults server-side.
    title: str
    description: str
    type: EmbedType
    url: str
    timestamp: str
    color: int
    footer: EmbedFooter
    provider: EmbedProvider
    author: EmbedAuthor
    fields: list[EmbedField]
| {
"repo_id": "apache/airflow",
"file_path": "providers/discord/src/airflow/providers/discord/notifications/embed.py",
"license": "Apache License 2.0",
"lines": 103,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
apache/airflow:providers/discord/tests/unit/discord/notifications/test_embed.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.providers.discord.notifications.embed import (
Embed,
EmbedAuthor,
EmbedField,
EmbedFooter,
EmbedProvider,
)
def test_embed_footer():
    """An EmbedFooter retains the text and icon url keys it was built with."""
    icon = "https://example.com/icon.png"
    footer = EmbedFooter(text="Test footer", icon_url=icon, proxy_icon_url=icon)
    assert footer["text"] == "Test footer"
    assert footer["icon_url"] == icon
    assert footer["proxy_icon_url"] == icon
def test_embed_field():
    """An EmbedField retains its name and value keys."""
    field = EmbedField(name="Test Field", value="Test Value")
    for key, expected in (("name", "Test Field"), ("value", "Test Value")):
        assert field[key] == expected
def test_embed_provider():
    """An EmbedProvider retains its name and url keys."""
    expected_name, expected_url = "Test Provider", "https://example.com"
    provider = EmbedProvider(name=expected_name, url=expected_url)
    assert provider["name"] == expected_name
    assert provider["url"] == expected_url
def test_embed_author():
    """An EmbedAuthor retains every author key it was built with."""
    icon = "https://example.com/icon.png"
    author = EmbedAuthor(
        name="Test Author",
        url="https://example.com",
        icon_url=icon,
        proxy_icon_url=icon,
    )
    assert author["name"] == "Test Author"
    assert author["url"] == "https://example.com"
    assert author["icon_url"] == icon
    assert author["proxy_icon_url"] == icon
def test_embed():
    """An Embed retains its title and description keys."""
    title, description = "Test Title", "Test Description"
    embed = Embed(title=title, description=description)
    assert embed["title"] == title
    assert embed["description"] == description
| {
"repo_id": "apache/airflow",
"file_path": "providers/discord/tests/unit/discord/notifications/test_embed.py",
"license": "Apache License 2.0",
"lines": 57,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:devel-common/src/tests_common/test_utils/taskinstance.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import copy
from typing import TYPE_CHECKING
from airflow.models.taskinstance import TaskInstance
from airflow.utils.session import NEW_SESSION
from tests_common.test_utils.compat import SerializedBaseOperator, SerializedMappedOperator
from tests_common.test_utils.dag import create_scheduler_dag
from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS, AIRFLOW_V_3_2_PLUS
try:
from airflow.serialization.serialized_objects import create_scheduler_operator
except ImportError:
create_scheduler_operator = lambda t: t
if TYPE_CHECKING:
from uuid import UUID
from jinja2 import Environment
from sqlalchemy.orm import Session
from airflow.sdk import Context
from airflow.sdk.types import Operator as SdkOperator, RuntimeTaskInstanceProtocol
from airflow.serialization.definitions.mappedoperator import Operator as SerializedOperator
__all__ = ["TaskInstanceWrapper", "create_task_instance", "render_template_fields", "run_task_instance"]
class TaskInstanceWrapper:
    """Compat wrapper delegating attribute access to a TaskInstance and adding ``run()``."""

    def __init__(self, ti: TaskInstance, task: SdkOperator) -> None:
        # Write straight into __dict__ so our own __setattr__ (which delegates
        # to the wrapped TaskInstance) is not triggered for these two slots.
        self.__dict__["__ti"] = ti
        self.__dict__["__task"] = task

    def __delattr__(self, name):
        delattr(self.__dict__["__ti"], name)

    def __setattr__(self, name, value):
        setattr(self.__dict__["__ti"], name, value)

    def __getattr__(self, name):
        return getattr(self.__dict__["__ti"], name)

    def __copy__(self):
        ti_copy = copy.copy(self.__dict__["__ti"])
        task_copy = copy.copy(self.__dict__["__task"])
        return TaskInstanceWrapper(ti_copy, task_copy)

    def run(self, **kwargs) -> RuntimeTaskInstanceProtocol:
        return run_task_instance(self.__dict__["__ti"], self.__dict__["__task"], **kwargs)

    def render_templates(self, **kwargs) -> SdkOperator:
        return render_template_fields(self.__dict__["__ti"], self.__dict__["__task"], **kwargs)

    def get_template_context(self) -> Context:
        return get_template_context(self.__dict__["__ti"], self.__dict__["__task"])
def create_task_instance(
    task: SdkOperator | SerializedOperator,
    *,
    dag_version_id: UUID,
    run_id: str | None = None,
    state: str | None = None,
    map_index: int = -1,
    ti_type: type[TaskInstance] = TaskInstance,
) -> TaskInstance:
    """
    Build a TaskInstance for *task*, serializing the task first when needed.

    :param task: an SDK operator or an already-serialized scheduler operator.
    :param dag_version_id: passed through to the TaskInstance on Airflow 3+.
    :param run_id: run id to associate the TI with.
    :param state: initial TI state.
    :param map_index: map index for mapped tasks (-1 for unmapped).
    :param ti_type: TaskInstance class to instantiate, for tests using a subclass.
    """
    if isinstance(task, (SerializedBaseOperator, SerializedMappedOperator)):
        # Already a scheduler-side operator: use it as-is.
        serialized_task = task
    elif sdk_dag := task.get_dag():
        # Round-trip the whole DAG through serialization so the TI sees the
        # scheduler's view of the task.
        serialized_task = create_scheduler_dag(sdk_dag).get_task(task.task_id)
    else:
        # DAG-less task: serialize just the operator (identity fallback when
        # create_scheduler_operator is unavailable -- see module-level import).
        serialized_task = create_scheduler_operator(task)
    if AIRFLOW_V_3_0_PLUS:
        return ti_type(
            serialized_task,
            dag_version_id=dag_version_id,
            run_id=run_id,
            state=state,
            map_index=map_index,
        )
    # Airflow 2.x TaskInstance does not accept dag_version_id.
    return ti_type(  # type: ignore[call-arg]
        serialized_task,
        run_id=run_id,
        state=state,
        map_index=map_index,
    )
def run_task_instance(
    ti: TaskInstance,
    task: SdkOperator,
    *,
    ignore_depends_on_past: bool = False,
    ignore_task_deps: bool = False,
    ignore_ti_state: bool = False,
    mark_success: bool = False,
    session=None,
) -> RuntimeTaskInstanceProtocol:
    """
    Run *ti* with *task*, bridging the pre-3.2 ``ti.run()`` API and the 3.2+ runner.

    On Airflow < 3.2 this simply attaches the task and calls ``ti.run()``. On
    3.2+ it transitions the TI state, executes the task via the SDK's
    ``_run_task``, refreshes *ti* from the DB, and re-raises any task error.

    :raises RuntimeError: if the 3.2+ runner finishes without producing a result.
    """
    # Only pass "session" when the caller supplied one, so downstream defaults apply.
    session_kwargs = {"session": session} if session else {}
    if not AIRFLOW_V_3_2_PLUS:
        ti.refresh_from_task(task)  # type: ignore[arg-type]
        ti.run(**session_kwargs)
        return ti
    if not ti.check_and_change_state_before_execution(
        ignore_depends_on_past=ignore_depends_on_past,
        ignore_task_deps=ignore_task_deps,
        ignore_ti_state=ignore_ti_state,
        mark_success=mark_success,
        **session_kwargs,
    ):
        # Dependencies not met (or state not runnable): return without executing.
        return ti
    from airflow.sdk.definitions.dag import _run_task

    # Session handling is a mess in tests; use a fresh ti to run the task.
    new_ti = TaskInstance.get_task_instance(
        dag_id=ti.dag_id,
        run_id=ti.run_id,
        task_id=ti.task_id,
        map_index=ti.map_index,
        **session_kwargs,
    )
    # Some tests don't even save the ti at all, in which case new_ti is None.
    taskrun_result = _run_task(ti=new_ti or ti, task=task)
    ti.refresh_from_db(**session_kwargs)  # Some tests expect side effects.
    if not taskrun_result:
        raise RuntimeError("task failed to finish with a result")
    if error := taskrun_result.error:
        # Surface the task's own failure to the caller, matching ti.run() behavior.
        raise error
    return taskrun_result.ti
def get_template_context(ti: TaskInstance, task: SdkOperator, *, session: Session = NEW_SESSION) -> Context:
    """
    Build a template context for *ti*/*task* across Airflow versions.

    Pre-3.2 this delegates to ``ti.get_template_context``; on 3.2+ it builds the
    context via the CLI helper and patches in accessors so templates can use
    ``conn``/``var`` without the execution API.
    """
    if not AIRFLOW_V_3_2_PLUS:
        ti.refresh_from_task(task)  # type: ignore[arg-type]
        return ti.get_template_context(session=session)
    from airflow.cli.commands.task_command import _get_template_context
    from airflow.utils.context import ConnectionAccessor, VariableAccessor

    # TODO: Move these to test_utils too.
    context = _get_template_context(ti, task)
    context["ti"].__dict__.update(xcom_push=ti.xcom_push, xcom_pull=ti.xcom_pull)  # Avoid execution API.
    context.update(  # type: ignore[call-arg] # https://github.com/python/mypy/issues/17750
        conn=ConnectionAccessor(),
        test_mode=ti.test_mode,
        var={
            "json": VariableAccessor(deserialize_json=True),
            "value": VariableAccessor(deserialize_json=False),
        },
    )
    return context
def render_template_fields(
    ti: TaskInstance,
    task: SdkOperator,
    *,
    context: Context | None = None,
    jinja_env: Environment | None = None,
    session: Session = NEW_SESSION,
) -> SdkOperator:
    """
    Render *task*'s templated fields and return the rendered task.

    On Airflow >= 3.2 the task renders itself against a template context; on
    older versions rendering goes through the TaskInstance and the rendered
    operator is read back from ``ti.task``.
    """
    if AIRFLOW_V_3_2_PLUS:
        task.render_template_fields(context or get_template_context(ti, task), jinja_env)
        return task
    ti.refresh_from_task(task)  # type: ignore[arg-type]
    ti.render_templates(context or ti.get_template_context(session=session), jinja_env)
    return ti.task  # type: ignore[return-value]
| {
"repo_id": "apache/airflow",
"file_path": "devel-common/src/tests_common/test_utils/taskinstance.py",
"license": "Apache License 2.0",
"lines": 156,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/src/airflow/api_fastapi/execution_api/versions/v2026_03_31.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any
from cadwyn import ResponseInfo, VersionChange, convert_response_to_previous_version_for, schema
from airflow.api_fastapi.common.types import UtcDateTime
from airflow.api_fastapi.execution_api.datamodels.taskinstance import (
DagRun,
TIDeferredStatePayload,
TIRunContext,
)
class ModifyDeferredTaskKwargsToJsonValue(VersionChange):
    """Change the types of `trigger_kwargs` and `next_kwargs` in TIDeferredStatePayload to JsonValue."""

    description = __doc__
    # cadwyn applies these instructions when serving a previous API version, so
    # older clients still see the fields with their original (narrower) types.
    instructions_to_migrate_to_previous_version = (
        schema(TIDeferredStatePayload).field("trigger_kwargs").had(type=dict[str, Any] | str),
        schema(TIDeferredStatePayload).field("next_kwargs").had(type=dict[str, Any]),
    )
class RemoveUpstreamMapIndexesField(VersionChange):
    """Remove upstream_map_indexes field from TIRunContext - now computed by Task SDK."""

    description = __doc__
    # The field still exists in earlier versions, so it is re-declared (as an
    # optional mapping) when migrating the schema back to a previous version.
    instructions_to_migrate_to_previous_version = (
        schema(TIRunContext)
        .field("upstream_map_indexes")
        .existed_as(type=dict[str, int | list[int] | None] | None),
    )

    @convert_response_to_previous_version_for(TIRunContext)  # type: ignore[arg-type]
    def add_upstream_map_indexes_field(response: ResponseInfo) -> None:  # type: ignore[misc]
        """Add upstream_map_indexes field with None for older API versions."""
        # The value is no longer computed server-side, so old clients get an
        # explicit None rather than a missing key.
        response.body["upstream_map_indexes"] = None
class AddNoteField(VersionChange):
    """Add note parameter to DagRun Model."""

    description = __doc__
    # For previous API versions the field simply does not exist on the schema.
    instructions_to_migrate_to_previous_version = (schema(DagRun).field("note").didnt_exist,)

    @convert_response_to_previous_version_for(TIRunContext)  # type: ignore[arg-type]
    def remove_note_field(response: ResponseInfo) -> None:  # type: ignore[misc]
        """Remove note field for older API versions."""
        # The DagRun is nested inside TIRunContext responses; drop the key only
        # when the nested structure is actually present.
        if "dag_run" in response.body and isinstance(response.body["dag_run"], dict):
            response.body["dag_run"].pop("note", None)
class MakeDagRunStartDateNullable(VersionChange):
    """Make DagRun.start_date field nullable for runs that haven't started yet."""

    description = __doc__
    # Previous versions declared start_date as a non-nullable UtcDateTime.
    instructions_to_migrate_to_previous_version = (schema(DagRun).field("start_date").had(type=UtcDateTime),)

    @convert_response_to_previous_version_for(TIRunContext)  # type: ignore[arg-type]
    def ensure_start_date_in_ti_run_context(response: ResponseInfo) -> None:  # type: ignore[misc]
        """
        Ensure start_date is never None in DagRun for previous API versions.

        Older Task SDK clients expect start_date to be non-nullable. When the
        DagRun hasn't started yet (e.g. queued), fall back to run_after.
        """
        dag_run = response.body.get("dag_run")
        if isinstance(dag_run, dict) and dag_run.get("start_date") is None:
            dag_run["start_date"] = dag_run.get("run_after")

    @convert_response_to_previous_version_for(DagRun)  # type: ignore[arg-type]
    def ensure_start_date_in_dag_run(response: ResponseInfo) -> None:  # type: ignore[misc]
        """Ensure start_date is never None in direct DagRun responses for previous API versions."""
        # Same run_after fallback as above, for responses that are a bare DagRun.
        if response.body.get("start_date") is None:
            response.body["start_date"] = response.body.get("run_after")
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/execution_api/versions/v2026_03_31.py",
"license": "Apache License 2.0",
"lines": 72,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/src/airflow/sdk/serde/serializers/uuid.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.sdk.module_loading import qualname
if TYPE_CHECKING:
import uuid
from airflow.sdk.serde import U
__version__ = 1
serializers = ["uuid.UUID"]
deserializers = serializers
def serialize(o: object) -> tuple[U, str, int, bool]:
    """Serialize a UUID object into its canonical string form."""
    import uuid

    # Anything that is not a UUID is reported as "not handled by this serializer".
    if not isinstance(o, uuid.UUID):
        return "", "", 0, False
    return str(o), qualname(o), __version__, True
def deserialize(cls: type, version: int, data: str) -> uuid.UUID:
    """Rebuild a UUID object from its serialized string form."""
    import uuid

    if cls is not uuid.UUID or not isinstance(data, str):
        raise TypeError(f"cannot deserialize {qualname(cls)} from {type(data)}")
    return uuid.UUID(data)
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/serde/serializers/uuid.py",
"license": "Apache License 2.0",
"lines": 37,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/src/airflow/serialization/definitions/xcom_arg.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from collections.abc import Iterator, Sequence
from functools import singledispatch
from typing import TYPE_CHECKING, Any
import attrs
from sqlalchemy import func, or_, select
from sqlalchemy.orm import Session
from airflow.models.referencemixin import ReferenceMixin
from airflow.models.xcom import XCOM_RETURN_KEY
from airflow.serialization.definitions.notset import NOTSET, is_arg_set
from airflow.utils.db import exists_query
from airflow.utils.state import State
__all__ = ["SchedulerXComArg", "deserialize_xcom_arg", "get_task_map_length"]
if TYPE_CHECKING:
from airflow.serialization.definitions.dag import SerializedDAG
from airflow.serialization.definitions.mappedoperator import Operator
from airflow.typing_compat import Self
class SchedulerXComArg:
    """
    Reference to an XCom value pushed from another operator.

    This is the safe counterpart to :class:`airflow.sdk.XComArg`.
    """

    @classmethod
    def _deserialize(cls, data: dict[str, Any], dag: SerializedDAG) -> Self:
        """
        Deserialize an XComArg.

        The implementation should be the inverse function to ``serialize``,
        implementing given a data dict converted from this XComArg derivative,
        how the original XComArg should be created. DAG serialization relies on
        additional information added in ``serialize_xcom_arg`` to dispatch data
        dicts to the correct ``_deserialize`` information, so this function does
        not need to validate whether the incoming data contains correct keys.
        """
        raise NotImplementedError("This class should not be instantiated directly")

    @classmethod
    def iter_xcom_references(cls, arg: Any) -> Iterator[tuple[Operator, str]]:
        """
        Return XCom references in an arbitrary value.

        Recursively traverse ``arg`` and look for XComArg instances in any
        collection objects, and instances with ``template_fields`` set.
        """
        # Imported locally rather than at module top level -- presumably to
        # avoid an import cycle between serialization modules; TODO confirm.
        from airflow.serialization.definitions.baseoperator import SerializedBaseOperator
        from airflow.serialization.definitions.mappedoperator import SerializedMappedOperator

        if isinstance(arg, ReferenceMixin):
            # A reference object yields its own references directly.
            yield from arg.iter_references()
        elif isinstance(arg, (tuple, set, list)):
            for elem in arg:
                yield from cls.iter_xcom_references(elem)
        elif isinstance(arg, dict):
            # Only dict values are traversed, not keys.
            for elem in arg.values():
                yield from cls.iter_xcom_references(elem)
        elif isinstance(arg, (SerializedMappedOperator, SerializedBaseOperator)):
            # Operators are searched only through their templated attributes.
            for attr in arg.template_fields:
                yield from cls.iter_xcom_references(getattr(arg, attr))

    def iter_references(self) -> Iterator[tuple[Operator, str]]:
        raise NotImplementedError("This class should not be instantiated directly")
@attrs.define
class SchedulerPlainXComArg(SchedulerXComArg):
    """Reference to a single ``(operator, key)`` XCom value."""

    operator: Operator
    key: str

    @classmethod
    def _deserialize(cls, data: dict[str, Any], dag: SerializedDAG) -> Self:
        """Rebuild the reference by looking the task up on the DAG."""
        task = dag.get_task(data["task_id"])
        return cls(task, data["key"])

    def iter_references(self) -> Iterator[tuple[Operator, str]]:
        """A plain arg references exactly one operator/key pair."""
        yield self.operator, self.key
@attrs.define
class SchedulerMapXComArg(SchedulerXComArg):
    """Reference produced by mapping callables over another XComArg."""

    arg: SchedulerXComArg
    callables: Sequence[str]

    @classmethod
    def _deserialize(cls, data: dict[str, Any], dag: SerializedDAG) -> Self:
        # The callables are deliberately kept in serialized form: they are
        # shown in the UI, and displaying a function object is useless.
        inner = deserialize_xcom_arg(data["arg"], dag)
        return cls(inner, data["callables"])

    def iter_references(self) -> Iterator[tuple[Operator, str]]:
        """Delegate entirely to the wrapped argument."""
        yield from self.arg.iter_references()
@attrs.define
class SchedulerConcatXComArg(SchedulerXComArg):
    """Reference to the concatenation of several XComArgs."""

    args: Sequence[SchedulerXComArg]

    @classmethod
    def _deserialize(cls, data: dict[str, Any], dag: SerializedDAG) -> Self:
        deserialized = [deserialize_xcom_arg(item, dag) for item in data["args"]]
        return cls(deserialized)

    def iter_references(self) -> Iterator[tuple[Operator, str]]:
        """Yield references from every constituent argument, in order."""
        for item in self.args:
            yield from item.iter_references()
@attrs.define
class SchedulerZipXComArg(SchedulerXComArg):
    """Reference to several XComArgs zipped together, with an optional fill value."""

    args: Sequence[SchedulerXComArg]
    fillvalue: Any

    @classmethod
    def _deserialize(cls, data: dict[str, Any], dag: SerializedDAG) -> Self:
        deserialized = [deserialize_xcom_arg(item, dag) for item in data["args"]]
        # A missing fillvalue means "no fill" (NOTSET sentinel).
        fill = data.get("fillvalue", NOTSET)
        return cls(deserialized, fillvalue=fill)

    def iter_references(self) -> Iterator[tuple[Operator, str]]:
        """Yield references from every zipped argument, in order."""
        for item in self.args:
            yield from item.iter_references()
@singledispatch
def get_task_map_length(xcom_arg: SchedulerXComArg, run_id: str, *, session: Session) -> int | None:
    """Fallback implementation; every concrete XComArg subclass registers a specialised one."""
    raise NotImplementedError(f"get_task_map_length not implemented for {type(xcom_arg)}")
@get_task_map_length.register
def _(xcom_arg: SchedulerPlainXComArg, run_id: str, *, session: Session) -> int | None:
    """
    Resolve the expansion length contributed by a plain XCom reference.

    Returns ``None`` when the length cannot be determined yet, i.e. the
    upstream mapped task still has unfinished task instances.
    """
    # Imported locally rather than at module top — presumably to avoid
    # import cycles at load time; TODO(review) confirm.
    from airflow.models.taskinstance import TaskInstance
    from airflow.models.taskmap import TaskMap
    from airflow.models.xcom import XComModel
    from airflow.serialization.definitions.mappedoperator import is_mapped
    dag_id = xcom_arg.operator.dag_id
    task_id = xcom_arg.operator.task_id
    if is_mapped(xcom_arg.operator):
        # Upstream is itself mapped: its length is the number of XCom return
        # values pushed by its expanded TIs, valid only once all have finished.
        unfinished_ti_exists = exists_query(
            TaskInstance.dag_id == dag_id,
            TaskInstance.run_id == run_id,
            TaskInstance.task_id == task_id,
            # Special NULL treatment is needed because 'state' can be NULL.
            # The "IN" part would produce "NULL NOT IN ..." and eventually
            # "NULL = NULL", which is a big no-no in SQL.
            or_(
                TaskInstance.state.is_(None),
                TaskInstance.state.in_(s.value for s in State.unfinished if s is not None),
            ),
            session=session,
        )
        if unfinished_ti_exists:
            return None  # Not all of the expanded tis are done yet.
        # Count return-value XComs across the expanded (map_index >= 0) TIs.
        query = select(func.count(XComModel.map_index)).where(
            XComModel.dag_id == dag_id,
            XComModel.run_id == run_id,
            XComModel.task_id == task_id,
            XComModel.map_index >= 0,
            XComModel.key == XCOM_RETURN_KEY,
        )
    else:
        # Unmapped upstream: the length was recorded in TaskMap; map_index < 0
        # selects the per-task record rather than per-map-index ones.
        query = select(TaskMap.length).where(
            TaskMap.dag_id == dag_id,
            TaskMap.run_id == run_id,
            TaskMap.task_id == task_id,
            TaskMap.map_index < 0,
        )
    return session.scalar(query)
@get_task_map_length.register
def _(xcom_arg: SchedulerMapXComArg, run_id: str, *, session: Session) -> int | None:
    """A ``map`` transform never changes length; defer to the wrapped argument."""
    return get_task_map_length(xcom_arg.arg, run_id, session=session)
@get_task_map_length.register
def _(xcom_arg: SchedulerZipXComArg, run_id: str, *, session: Session) -> int | None:
    """Length of a ``zip``: longest arg when a fillvalue is set, else the shortest."""
    lengths = [get_task_map_length(item, run_id, session=session) for item in xcom_arg.args]
    if any(length is None for length in lengths):
        # If any of the referenced XComs is not ready, we are not ready either.
        return None
    return max(lengths) if is_arg_set(xcom_arg.fillvalue) else min(lengths)
@get_task_map_length.register
def _(xcom_arg: SchedulerConcatXComArg, run_id: str, *, session: Session) -> int | None:
    """Length of a ``concat`` is the sum of all constituent lengths."""
    lengths = [get_task_map_length(item, run_id, session=session) for item in xcom_arg.args]
    if any(length is None for length in lengths):
        # If any of the referenced XComs is not ready, we are not ready either.
        return None
    return sum(lengths)
def deserialize_xcom_arg(data: dict[str, Any], dag: SerializedDAG):
    """DAG serialization interface: dispatch on the serialized ``type`` discriminator."""
    xcom_arg_class = _XCOM_ARG_TYPES[data.get("type", "")]
    return xcom_arg_class._deserialize(data, dag)
# Mapping from the serialized "type" discriminator to the concrete
# SchedulerXComArg subclass used for deserialization. The empty string
# (also used when the key is absent) denotes a plain XComArg.
_XCOM_ARG_TYPES: dict[str, type[SchedulerXComArg]] = {
    "": SchedulerPlainXComArg,
    "concat": SchedulerConcatXComArg,
    "map": SchedulerMapXComArg,
    "zip": SchedulerZipXComArg,
}
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/serialization/definitions/xcom_arg.py",
"license": "Apache License 2.0",
"lines": 186,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/src/airflow/serialization/definitions/operatorlink.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
from typing import TYPE_CHECKING
import attrs
from airflow.models.xcom import XComModel
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.session import create_session
if TYPE_CHECKING:
from airflow.models.taskinstancekey import TaskInstanceKey
from airflow.serialization.definitions.mappedoperator import Operator
@attrs.define()
class XComOperatorLink(LoggingMixin):
    """
    Generic operator link class that can retrieve link only using XCOMs.

    Used while deserializing operators.
    """

    # Display name of the link; also the key used to look it up.
    name: str
    # XCom key under which the task pushed the link value.
    xcom_key: str

    def get_link(self, operator: Operator, *, ti_key: TaskInstanceKey) -> str:
        """
        Retrieve the link from the XComs.

        :param operator: The Airflow operator object this link is associated to.
        :param ti_key: TaskInstance ID to return link for.
        :return: link to external system, but by pulling it from XComs
        """
        self.log.info(
            "Attempting to retrieve link from XComs with key: %s for task id: %s", self.xcom_key, ti_key
        )
        with create_session() as session:
            # Fetch only the value column for the exact
            # (dag, run, task, map_index, key) coordinate; at most one row.
            result = session.execute(
                XComModel.get_many(
                    key=self.xcom_key,
                    run_id=ti_key.run_id,
                    dag_ids=ti_key.dag_id,
                    task_ids=ti_key.task_id,
                    map_indexes=ti_key.map_index,
                ).with_only_columns(XComModel.value)
            ).first()
        if not result:
            # Nothing was pushed under this key; an empty string disables the link.
            self.log.debug(
                "No link with name: %s present in XCom as key: %s, returning empty link",
                self.name,
                self.xcom_key,
            )
            return ""
        # Imported here rather than at module top — presumably to avoid an
        # import cycle; TODO(review) confirm.
        from airflow.serialization.stringify import (
            StringifyNotSupportedError,
            stringify as stringify_xcom,
        )
        try:
            parsed_value = json.loads(result.value)
        except (ValueError, TypeError):
            # Handling for cases when types do not need to be deserialized
            # (e.g. when value is a simple string link).
            parsed_value = result.value
        try:
            return str(stringify_xcom(parsed_value))
        except StringifyNotSupportedError:
            # If stringify doesn't support the type, return the raw value as a string.
            # This avoids the XComModel.deserialize_value() call that could
            # instantiate arbitrary classes from untrusted XCom data.
            return str(parsed_value)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/serialization/definitions/operatorlink.py",
"license": "Apache License 2.0",
"lines": 78,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/src/airflow/serialization/definitions/baseoperator.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
import functools
from typing import TYPE_CHECKING, Any
import methodtools
from airflow.serialization.definitions.node import DAGNode
from airflow.serialization.definitions.param import SerializedParamsDict
from airflow.serialization.enums import DagAttributeTypes
from airflow.task.priority_strategy import PriorityWeightStrategy, validate_and_load_priority_weight_strategy
from airflow.ti_deps.deps.mapped_task_upstream_dep import MappedTaskUpstreamDep
from airflow.ti_deps.deps.not_in_retry_period_dep import NotInRetryPeriodDep
from airflow.ti_deps.deps.not_previously_skipped_dep import NotPreviouslySkippedDep
from airflow.ti_deps.deps.prev_dagrun_dep import PrevDagrunDep
from airflow.ti_deps.deps.trigger_rule_dep import TriggerRuleDep
if TYPE_CHECKING:
from collections.abc import Collection, Iterable, Iterator, Sequence
from airflow.models.taskinstance import TaskInstance
from airflow.sdk import Context
from airflow.serialization.definitions.dag import SerializedDAG
from airflow.serialization.definitions.mappedoperator import SerializedMappedOperator
from airflow.serialization.definitions.operatorlink import XComOperatorLink
from airflow.serialization.definitions.taskgroup import SerializedMappedTaskGroup, SerializedTaskGroup
from airflow.task.trigger_rule import TriggerRule
from airflow.ti_deps.deps.base_ti_dep import BaseTIDep
from airflow.triggers.base import StartTriggerArgs
# Scheduling dependencies that every serialized operator carries by default.
# A frozenset: the collection is shared by all operator instances (assigned
# in SerializedBaseOperator.__init__) and must not be mutated.
DEFAULT_OPERATOR_DEPS: frozenset[BaseTIDep] = frozenset(
    (
        NotInRetryPeriodDep(),
        PrevDagrunDep(),
        TriggerRuleDep(),
        NotPreviouslySkippedDep(),
        MappedTaskUpstreamDep(),
    )
)
class SerializedBaseOperator(DAGNode):
    """
    Serialized representation of a BaseOperator instance.

    See :mod:`~airflow.serialization.serialized_objects.OperatorSerialization`
    for more details on operator serialization.
    """

    # NOTE(review): the class-level values below serve as defaults that
    # deserialization shadows with instance attributes. The mutable ones
    # (set(), {}, [], SerializedParamsDict()) are shared across all
    # instances until shadowed, so they must never be mutated in place —
    # confirm deserialization always reassigns rather than mutates.
    _can_skip_downstream: bool
    _is_empty: bool
    _needs_expansion: bool
    _task_display_name: str | None = None
    _weight_rule: str | PriorityWeightStrategy = "downstream"
    allow_nested_operators: bool = True
    dag: SerializedDAG | None = None
    depends_on_past: bool = False
    do_xcom_push: bool = True
    doc: str | None = None
    doc_md: str | None = None
    doc_json: str | None = None
    doc_yaml: str | None = None
    doc_rst: str | None = None
    downstream_task_ids: set[str] = set()
    email: str | Sequence[str] | None = None
    # These two are deprecated.
    email_on_retry: bool = True
    email_on_failure: bool = True
    execution_timeout: datetime.timedelta | None = None
    executor: str | None = None
    executor_config: dict = {}
    ignore_first_depends_on_past: bool = False
    inlets: Sequence = []
    is_setup: bool = False
    is_teardown: bool = False
    map_index_template: str | None = None
    max_active_tis_per_dag: int | None = None
    max_active_tis_per_dagrun: int | None = None
    max_retry_delay: datetime.timedelta | float | None = None
    multiple_outputs: bool = False
    # Boolean flags for callback existence
    has_on_execute_callback: bool = False
    has_on_failure_callback: bool = False
    has_on_retry_callback: bool = False
    has_on_success_callback: bool = False
    has_on_skipped_callback: bool = False
    operator_extra_links: Collection[XComOperatorLink] = []
    on_failure_fail_dagrun: bool = False
    outlets: Sequence = []
    owner: str = "airflow"
    params: SerializedParamsDict = SerializedParamsDict()
    pool: str = "default_pool"
    pool_slots: int = 1
    priority_weight: int = 1
    queue: str = "default"
    render_template_as_native_obj: bool | None = None
    resources: dict[str, Any] | None = None
    retries: int = 0
    retry_delay: datetime.timedelta = datetime.timedelta(seconds=300)
    retry_exponential_backoff: float = 0
    run_as_user: str | None = None
    task_group: SerializedTaskGroup | None = None
    start_date: datetime.datetime | None = None
    end_date: datetime.datetime | None = None
    start_from_trigger: bool = False
    start_trigger_args: StartTriggerArgs | None = None
    task_type: str = "BaseOperator"
    template_ext: Sequence[str] = []
    template_fields: Collection[str] = []
    template_fields_renderers: dict[str, str] = {}
    trigger_rule: str | TriggerRule = "all_success"
    # TODO: Remove the following, they aren't used anymore
    ui_color: str = "#fff"
    ui_fgcolor: str = "#000"
    wait_for_downstream: bool = False
    wait_for_past_depends_before_skipping: bool = False

    # Distinguishes plain serialized operators from SerializedMappedOperator.
    is_mapped = False

    def __init__(self, *, task_id: str, _airflow_from_mapped: bool = False) -> None:
        """
        :param task_id: Task ID of the deserialized operator.
        :param _airflow_from_mapped: True when this instance was created by
            unmapping a mapped operator (sets BaseOperator's mangled flag).
        """
        super().__init__()
        # Name-mangled to match BaseOperator's private __from_mapped attribute.
        self._BaseOperator__from_mapped = _airflow_from_mapped
        self.task_id = task_id
        self.deps = DEFAULT_OPERATOR_DEPS
        self._operator_name: str | None = None

    # Disable hashing.
    __hash__ = None  # type: ignore[assignment]

    def __eq__(self, other) -> bool:
        # Equality is intentionally unsupported on serialized operators.
        return NotImplemented

    def __repr__(self) -> str:
        return f"<SerializedTask({self.task_type}): {self.task_id}>"

    @classmethod
    def get_serialized_fields(cls):
        """Fields to deserialize from the serialized JSON object."""
        return frozenset(
            (
                "_logger_name",
                "_needs_expansion",
                "_task_display_name",
                "allow_nested_operators",
                "depends_on_past",
                "do_xcom_push",
                "doc",
                "doc_json",
                "doc_md",
                "doc_rst",
                "doc_yaml",
                "downstream_task_ids",
                "email",
                "email_on_failure",
                "email_on_retry",
                "end_date",
                "execution_timeout",
                "executor",
                "executor_config",
                "ignore_first_depends_on_past",
                "inlets",
                "is_setup",
                "is_teardown",
                "map_index_template",
                "max_active_tis_per_dag",
                "max_active_tis_per_dagrun",
                "max_retry_delay",
                "multiple_outputs",
                "has_on_execute_callback",
                "has_on_failure_callback",
                "has_on_retry_callback",
                "has_on_skipped_callback",
                "has_on_success_callback",
                "on_failure_fail_dagrun",
                "outlets",
                "owner",
                "params",
                "pool",
                "pool_slots",
                "priority_weight",
                "queue",
                "render_template_as_native_obj",
                "resources",
                "retries",
                "retry_delay",
                "retry_exponential_backoff",
                "run_as_user",
                "start_date",
                "start_from_trigger",
                "start_trigger_args",
                "task_id",
                "task_type",
                "template_ext",
                "template_fields",
                "template_fields_renderers",
                "trigger_rule",
                "ui_color",
                "ui_fgcolor",
                "wait_for_downstream",
                "wait_for_past_depends_before_skipping",
                "weight_rule",
            )
        )

    @property
    def node_id(self) -> str:
        """Operators are identified in the DAG graph by their task_id."""
        return self.task_id

    def get_dag(self) -> SerializedDAG | None:
        """Return the DAG this operator is attached to, or None."""
        return self.dag

    @property
    def roots(self) -> Sequence[DAGNode]:
        """Required by DAGNode."""
        return [self]

    @property
    def leaves(self) -> Sequence[DAGNode]:
        """Required by DAGNode."""
        return [self]

    @functools.cached_property
    def operator_extra_link_dict(self) -> dict[str, XComOperatorLink]:
        """All extra links for the operator."""
        return {link.name: link for link in self.operator_extra_links}

    @functools.cached_property
    def global_operator_extra_link_dict(self) -> dict[str, Any]:
        """All global extra links."""
        from airflow import plugins_manager
        return {link.name: link for link in plugins_manager.get_global_operator_extra_links()}

    @functools.cached_property
    def extra_links(self) -> list[str]:
        # Sorted union of operator-specific and globally-registered link names.
        return sorted(set(self.operator_extra_link_dict).union(self.global_operator_extra_link_dict))

    def get_extra_links(self, ti: TaskInstance, name: str) -> str | None:
        """
        For an operator, gets the URLs that the ``extra_links`` entry points to.

        :meta private:

        :raise ValueError: The error message of a ValueError will be passed on through to
            the fronted to show up as a tooltip on the disabled link.
        :param ti: The TaskInstance for the URL being searched for.
        :param name: The name of the link we're looking for the URL for. Should be
            one of the options specified in ``extra_links``.
        """
        # Operator-specific links take precedence over global ones.
        link = self.operator_extra_link_dict.get(name) or self.global_operator_extra_link_dict.get(name)
        if not link:
            return None
        return link.get_link(self, ti_key=ti.key)

    @property
    def inherits_from_empty_operator(self) -> bool:
        # Deserialized flag: whether the original operator was an EmptyOperator.
        return self._is_empty

    @property
    def inherits_from_skipmixin(self) -> bool:
        # Deserialized flag: whether the original operator could skip downstream tasks.
        return self._can_skip_downstream

    @property
    def operator_name(self) -> str:
        # Overwrites operator_name of BaseOperator to use _operator_name instead of
        # __class__.operator_name.
        return self._operator_name or self.task_type

    @operator_name.setter
    def operator_name(self, operator_name: str):
        self._operator_name = operator_name

    @property
    def task_display_name(self) -> str:
        """Human-readable task name, falling back to the task_id."""
        return self._task_display_name or self.task_id

    def expand_start_trigger_args(self, *, context: Context) -> StartTriggerArgs | None:
        # Unmapped operators have nothing to expand; return the stored args as-is.
        return self.start_trigger_args

    @property
    def weight_rule(self) -> PriorityWeightStrategy:
        """Return the priority weight strategy, loading it from its string name if needed."""
        if isinstance(self._weight_rule, PriorityWeightStrategy):
            return self._weight_rule
        return validate_and_load_priority_weight_strategy(self._weight_rule)

    def __getattr__(self, name):
        # Handle missing attributes with task_type instead of SerializedBaseOperator
        # Don't intercept special methods that Python internals might check
        if name.startswith("__") and name.endswith("__"):
            # For special methods, raise the original error
            raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'")
        # For regular attributes, use task_type in the error message
        raise AttributeError(f"'{self.task_type}' object has no attribute '{name}'")

    def serialize_for_task_group(self) -> tuple[DagAttributeTypes, Any]:
        """Serialize as a task-group child: just the type tag and the task_id."""
        return DagAttributeTypes.OP, self.task_id

    def expand_start_from_trigger(self, *, context: Context) -> bool:
        """
        Get the start_from_trigger value of the current abstract operator.

        Since a BaseOperator is not mapped to begin with, this simply returns
        the original value of start_from_trigger.

        :meta private:
        """
        return self.start_from_trigger

    def _iter_all_mapped_downstreams(self) -> Iterator[SerializedMappedOperator | SerializedMappedTaskGroup]:
        """
        Return mapped nodes that are direct dependencies of the current task.

        For now, this walks the entire DAG to find mapped nodes that has this
        current task as an upstream. We cannot use ``downstream_list`` since it
        only contains operators, not task groups. In the future, we should
        provide a way to record an DAG node's all downstream nodes instead.

        Note that this does not guarantee the returned tasks actually use the
        current task for task mapping, but only checks those task are mapped
        operators, and are downstreams of the current task.

        To get a list of tasks that uses the current task for task mapping, use
        :meth:`iter_mapped_dependants` instead.
        """
        from airflow.serialization.definitions.mappedoperator import SerializedMappedOperator
        from airflow.serialization.definitions.taskgroup import SerializedMappedTaskGroup, SerializedTaskGroup

        def _walk_group(group: SerializedTaskGroup) -> Iterable[tuple[str, DAGNode]]:
            """
            Recursively walk children in a task group.

            This yields all direct children (including both tasks and task
            groups), and all children of any task groups.
            """
            for key, child in group.children.items():
                yield key, child
                if isinstance(child, SerializedTaskGroup):
                    yield from _walk_group(child)

        if not (dag := self.dag):
            raise RuntimeError("Cannot check for mapped dependants when not attached to a DAG")
        for key, child in _walk_group(dag.task_group):
            if key == self.node_id:
                continue  # Skip ourselves.
            if not isinstance(child, SerializedMappedOperator | SerializedMappedTaskGroup):
                continue
            if self.node_id in child.upstream_task_ids:
                yield child

    def iter_mapped_dependants(self) -> Iterator[SerializedMappedOperator | SerializedMappedTaskGroup]:
        """
        Return mapped nodes that depend on the current task the expansion.

        For now, this walks the entire DAG to find mapped nodes that has this
        current task as an upstream. We cannot use ``downstream_list`` since it
        only contains operators, not task groups. In the future, we should
        provide a way to record an DAG node's all downstream nodes instead.
        """
        return (
            downstream
            for downstream in self._iter_all_mapped_downstreams()
            if any(p.node_id == self.node_id for p in downstream.iter_mapped_dependencies())
        )

    # TODO (GH-52141): Copied from sdk. Find a better place for this to live in.
    def iter_mapped_task_groups(self) -> Iterator[SerializedMappedTaskGroup]:
        """
        Return mapped task groups this task belongs to.

        Groups are returned from the innermost to the outmost.

        :meta private:
        """
        if (group := self.task_group) is None:
            return
        yield from group.iter_mapped_task_groups()

    # TODO (GH-52141): Copied from sdk. Find a better place for this to live in.
    def get_closest_mapped_task_group(self) -> SerializedMappedTaskGroup | None:
        """
        Get the mapped task group "closest" to this task in the DAG.

        :meta private:
        """
        return next(self.iter_mapped_task_groups(), None)

    # TODO (GH-52141): Copied from sdk. Find a better place for this to live in.
    def get_needs_expansion(self) -> bool:
        """
        Return true if the task is MappedOperator or is in a mapped task group.

        :meta private:
        """
        return self._needs_expansion

    # TODO (GH-52141): Copied from sdk. Find a better place for this to live in.
    @methodtools.lru_cache(maxsize=1)
    def get_parse_time_mapped_ti_count(self) -> int:
        """
        Return the number of mapped task instances that can be created on DAG run creation.

        This only considers literal mapped arguments, and would return *None*
        when any non-literal values are used for mapping.

        :raise NotFullyPopulated: If non-literal mapped arguments are encountered.
        :raise NotMapped: If the operator is neither mapped, nor has any parent
            mapped task groups.
        :return: Total number of mapped TIs this task should have.
        """
        from airflow.exceptions import NotMapped

        group = self.get_closest_mapped_task_group()
        if group is None:
            raise NotMapped()
        return group.get_parse_time_mapped_ti_count()
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/serialization/definitions/baseoperator.py",
"license": "Apache License 2.0",
"lines": 374,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/src/airflow/serialization/definitions/node.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import abc
from typing import TYPE_CHECKING
from airflow._shared.dagnode.node import GenericDAGNode
if TYPE_CHECKING:
from collections.abc import Sequence
from airflow.serialization.definitions.dag import SerializedDAG # noqa: F401
from airflow.serialization.definitions.mappedoperator import Operator # noqa: F401
from airflow.serialization.definitions.taskgroup import SerializedTaskGroup # noqa: F401
__all__ = ["DAGNode"]
class DAGNode(GenericDAGNode["SerializedDAG", "Operator", "SerializedTaskGroup"], metaclass=abc.ABCMeta):  # type: ignore[type-var]
    """
    Base class for a node in the graph of a workflow.

    A node may be an operator or task group, either mapped or unmapped.

    Note: type: ignore is used because SerializedBaseOperator and SerializedTaskGroup
    don't have explicit type annotations for all attributes required by TaskProtocol
    and TaskGroupProtocol (they inherit them from GenericDAGNode). This is acceptable
    because they are implemented correctly at runtime.
    """

    @property
    @abc.abstractmethod
    def roots(self) -> Sequence[DAGNode]:
        """Nodes this node contributes at its "start"; concrete subclasses must implement."""
        raise NotImplementedError()

    @property
    @abc.abstractmethod
    def leaves(self) -> Sequence[DAGNode]:
        """Nodes this node contributes at its "end"; concrete subclasses must implement."""
        raise NotImplementedError()
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/serialization/definitions/node.py",
"license": "Apache License 2.0",
"lines": 43,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:shared/dagnode/src/airflow_shared/dagnode/node.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Any, Generic, Protocol, TypeVar
import structlog
if TYPE_CHECKING:
import sys
from collections.abc import Collection, Iterable
# Replicate `airflow.typing_compat.Self` to avoid illegal imports
if sys.version_info >= (3, 11):
from typing import Self
else:
from typing_extensions import Self
from ..logging.types import Logger
class DagProtocol(Protocol):
    """Protocol defining the minimum interface required for Dag generic type."""

    # Unique identifier of the DAG.
    dag_id: str
    # Mapping of task_id -> task object for every task in the DAG.
    task_dict: dict[str, Any]

    def get_task(self, tid: str) -> Any:
        """Retrieve a task by its task ID."""
        ...
class TaskProtocol(Protocol):
    """Protocol defining the minimum interface required for Task generic type."""

    # Unique identifier of the task within its DAG.
    task_id: str
    # Setup/teardown markers used by the upstream-traversal helpers below.
    is_setup: bool
    is_teardown: bool
    # Direct downstream task objects, and their IDs.
    downstream_list: Iterable[Self]
    downstream_task_ids: set[str]
class TaskGroupProtocol(Protocol):
    """Protocol defining the minimum interface required for TaskGroup generic type."""

    # Node ID of the task group.
    node_id: str
    # Whether child node IDs carry the group prefix ("group_id.task_id").
    prefix_group_id: bool
# Type variables bound to the protocols above; subclasses of GenericDAGNode
# substitute their concrete DAG, task, and task-group implementations.
Dag = TypeVar("Dag", bound=DagProtocol)
Task = TypeVar("Task", bound=TaskProtocol)
TaskGroup = TypeVar("TaskGroup", bound=TaskGroupProtocol)
class GenericDAGNode(Generic[Dag, Task, TaskGroup]):
    """
    Generic class for a node in the graph of a workflow.

    A node may be an operator or task group, either mapped or unmapped.
    """

    dag: Dag | None
    task_group: TaskGroup | None
    downstream_group_ids: set[str | None]
    upstream_task_ids: set[str]
    downstream_task_ids: set[str]
    # Optional logger-name overrides; resolved lazily in ``log``.
    _log_config_logger_name: str | None = None
    _logger_name: str | None = None
    _cached_logger: Logger | None = None

    def __init__(self):
        super().__init__()
        # A fresh node has no relationships yet.
        self.upstream_task_ids = set()
        self.downstream_task_ids = set()

    @property
    def log(self) -> Logger:
        """Return the structlog logger for this node, creating and caching it on first use."""
        if self._cached_logger is not None:
            return self._cached_logger
        typ = type(self)
        # Default to the fully qualified class name unless _logger_name overrides it.
        logger_name: str = (
            self._logger_name if self._logger_name is not None else f"{typ.__module__}.{typ.__qualname__}"
        )
        if self._log_config_logger_name:
            # Nest under the configured parent logger when one is set.
            logger_name = (
                f"{self._log_config_logger_name}.{logger_name}"
                if logger_name
                else self._log_config_logger_name
            )
        self._cached_logger = structlog.get_logger(logger_name)
        return self._cached_logger

    @property
    def dag_id(self) -> str:
        """Return the dag_id of the attached Dag, or a placeholder when detached."""
        if self.dag:
            return self.dag.dag_id
        return "_in_memory_dag_"

    @property
    def node_id(self) -> str:
        """Unique identifier of this node; concrete subclasses must implement."""
        raise NotImplementedError()

    @property
    def label(self) -> str | None:
        """Return the display label: the node ID without its task-group prefix."""
        tg = self.task_group
        if tg and tg.node_id and tg.prefix_group_id:
            # "task_group_id.task_id" -> "task_id"
            return self.node_id[len(tg.node_id) + 1 :]
        return self.node_id

    @property
    def upstream_list(self) -> Iterable[Task]:
        """Return the direct upstream tasks, resolved via the attached Dag."""
        if not self.dag:
            raise RuntimeError(f"Operator {self} has not been assigned to a Dag yet")
        return [self.dag.get_task(tid) for tid in self.upstream_task_ids]

    @property
    def downstream_list(self) -> Iterable[Task]:
        """Return the direct downstream tasks, resolved via the attached Dag."""
        if not self.dag:
            raise RuntimeError(f"Operator {self} has not been assigned to a Dag yet")
        return [self.dag.get_task(tid) for tid in self.downstream_task_ids]

    def has_dag(self) -> bool:
        """Return True if this node is attached to a Dag."""
        return self.dag is not None

    def get_dag(self) -> Dag | None:
        """Return the attached Dag, or None."""
        return self.dag

    def get_direct_relative_ids(self, upstream: bool = False) -> set[str]:
        """Get set of the direct relative ids to the current task, upstream or downstream."""
        if upstream:
            return self.upstream_task_ids
        return self.downstream_task_ids

    def get_direct_relatives(self, upstream: bool = False) -> Iterable[Task]:
        """Get list of the direct relatives to the current task, upstream or downstream."""
        if upstream:
            return self.upstream_list
        return self.downstream_list

    def get_flat_relative_ids(self, *, upstream: bool = False, depth: int | None = None) -> set[str]:
        """
        Get a flat set of relative IDs, upstream or downstream.

        Will recurse each relative found in the direction specified.

        :param upstream: Whether to look for upstream or downstream relatives.
        :param depth: Maximum number of levels to traverse. If None, traverses all levels.
            Must be non-negative.
        """
        if depth is not None and depth < 0:
            raise ValueError(f"depth must be non-negative, got {depth}")
        dag = self.get_dag()
        if not dag:
            return set()
        relatives: set[str] = set()
        # This is intentionally implemented as a loop, instead of calling
        # get_direct_relative_ids() recursively, since Python has significant
        # limitation on stack level, and a recursive implementation can blow up
        # if a DAG contains very long routes.
        task_ids_to_trace = self.get_direct_relative_ids(upstream)
        levels_remaining = depth
        while task_ids_to_trace:
            # if depth is set we have bounded traversal and should break when
            # there are no more levels remaining
            if levels_remaining is not None and levels_remaining <= 0:
                break
            task_ids_to_trace_next: set[str] = set()
            for task_id in task_ids_to_trace:
                if task_id in relatives:
                    # Already visited; avoids re-expanding shared ancestors.
                    continue
                task_ids_to_trace_next.update(dag.task_dict[task_id].get_direct_relative_ids(upstream))
                relatives.add(task_id)
            task_ids_to_trace = task_ids_to_trace_next
            if levels_remaining is not None:
                levels_remaining -= 1
        return relatives

    def get_flat_relatives(self, upstream: bool = False, depth: int | None = None) -> Collection[Task]:
        """
        Get a flat list of relatives, either upstream or downstream.

        :param upstream: Whether to look for upstream or downstream relatives.
        :param depth: Maximum number of levels to traverse. If None, traverses all levels.
            Must be non-negative.
        """
        dag = self.get_dag()
        if not dag:
            return set()
        return [
            dag.task_dict[task_id] for task_id in self.get_flat_relative_ids(upstream=upstream, depth=depth)
        ]

    def get_upstreams_follow_setups(self, depth: int | None = None) -> Iterable[Task]:
        """
        All upstreams and, for each upstream setup, its respective teardowns.

        :param depth: Maximum number of levels to traverse. If None, traverses all levels.
            Must be non-negative.
        """
        for task in self.get_flat_relatives(upstream=True, depth=depth):
            yield task
            if task.is_setup:
                # Include each setup's teardown(s), except this node itself.
                for t in task.downstream_list:
                    if t.is_teardown and t != self:
                        yield t

    def get_upstreams_only_setups_and_teardowns(self) -> Iterable[Task]:
        """
        Only *relevant* upstream setups and their teardowns.

        This method is meant to be used when we are clearing the task (non-upstream) and we need
        to add in the *relevant* setups and their teardowns.

        Relevant in this case means, the setup has a teardown that is downstream of ``self``,
        or the setup has no teardowns.
        """
        downstream_teardown_ids = {
            x.task_id for x in self.get_flat_relatives(upstream=False) if x.is_teardown
        }
        for task in self.get_flat_relatives(upstream=True):
            if not task.is_setup:
                continue
            has_no_teardowns = not any(x.is_teardown for x in task.downstream_list)
            # if task has no teardowns or has teardowns downstream of self
            if has_no_teardowns or task.downstream_task_ids.intersection(downstream_teardown_ids):
                yield task
                for t in task.downstream_list:
                    if t.is_teardown and t != self:
                        yield t

    def get_upstreams_only_setups(self) -> Iterable[Task]:
        """
        Return relevant upstream setups.

        This method is meant to be used when we are checking task dependencies where we need
        to wait for all the upstream setups to complete before we can run the task.
        """
        for task in self.get_upstreams_only_setups_and_teardowns():
            if task.is_setup:
                yield task
| {
"repo_id": "apache/airflow",
"file_path": "shared/dagnode/src/airflow_shared/dagnode/node.py",
"license": "Apache License 2.0",
"lines": 211,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:shared/dagnode/tests/dagnode/test_node.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
import attrs
import pytest
from airflow_shared.dagnode.node import GenericDAGNode
class Task:
    """Task type for tests; a bare placeholder carrying no behavior."""
@attrs.define
class TaskGroup:
    """Task group type for tests."""

    # Fixed group id so tests can assert against a known prefix.
    node_id: str = attrs.field(init=False, default="test_group_id")
    # Whether child labels should be stripped of the group prefix;
    # tests pass True/False here, so the annotation is bool.
    prefix_group_id: bool
class Dag:
    """Dag type for tests."""

    # Fixed id asserted by TestDAGNode.test_dag_id.
    dag_id = "test_dag_id"
class ConcreteDAGNode(GenericDAGNode[Dag, Task, TaskGroup]):
    """Concrete DAGNode variant for tests."""

    # No dag/task_group attached by default; individual tests assign them.
    dag = None
    task_group = None

    @property
    def node_id(self) -> str:
        # Prefixed with the group id so label-prefix stripping can be asserted.
        return "test_group_id.test_node_id"
class TestDAGNode:
    """Behavioral tests for the shared DAG-node base via ConcreteDAGNode."""

    @pytest.fixture
    def node(self) -> ConcreteDAGNode:
        """Provide a fresh node instance per test."""
        return ConcreteDAGNode()

    def test_log(self, node: ConcreteDAGNode) -> None:
        # The logger is created lazily on first access and then cached.
        assert node._cached_logger is None
        with mock.patch("structlog.get_logger") as mock_get_logger:
            first_access = node.log
        assert first_access is node._cached_logger
        # Logger name is derived from the node's module and class name.
        assert mock_get_logger.mock_calls == [mock.call("tests.dagnode.test_node.ConcreteDAGNode")]

    def test_dag_id(self, node: ConcreteDAGNode) -> None:
        # Without an attached dag, a placeholder id is reported.
        assert node.dag is None
        assert node.dag_id == "_in_memory_dag_"
        node.dag = Dag()
        assert node.dag_id == "test_dag_id"

    @pytest.mark.parametrize(
        ("prefix_group_id", "expected_label"),
        [(True, "test_node_id"), (False, "test_group_id.test_node_id")],
    )
    def test_label(self, node: ConcreteDAGNode, prefix_group_id: bool, expected_label: str) -> None:
        # With no task group attached, the label is the full node id.
        assert node.task_group is None
        assert node.label == "test_group_id.test_node_id"
        node.task_group = TaskGroup(prefix_group_id)
        assert node.label == expected_label
| {
"repo_id": "apache/airflow",
"file_path": "shared/dagnode/tests/dagnode/test_node.py",
"license": "Apache License 2.0",
"lines": 62,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/serialization/definitions/dag.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import copy
import functools
import itertools
import operator
import re
import weakref
from typing import TYPE_CHECKING, TypedDict, cast, overload
import attrs
import structlog
from sqlalchemy import func, or_, select, tuple_
from airflow._shared.timezones.timezone import coerce_datetime
from airflow.configuration import conf as airflow_conf
from airflow.exceptions import AirflowException, TaskNotFound
from airflow.models.dag import DagModel
from airflow.models.dag_version import DagVersion
from airflow.models.dagrun import DagRun
from airflow.models.deadline import Deadline
from airflow.models.deadline_alert import DeadlineAlert as DeadlineAlertModel
from airflow.models.taskinstancekey import TaskInstanceKey
from airflow.models.tasklog import LogTemplate
from airflow.sdk._shared.observability.metrics.stats import Stats
from airflow.serialization.decoders import decode_deadline_alert
from airflow.serialization.definitions.deadline import DeadlineAlertFields, SerializedReferenceModels
from airflow.serialization.definitions.param import SerializedParamsDict
from airflow.serialization.enums import DagAttributeTypes as DAT, Encoding
from airflow.timetables.base import DagRunInfo, DataInterval, TimeRestriction
from airflow.utils.session import NEW_SESSION, provide_session
from airflow.utils.state import DagRunState, TaskInstanceState
from airflow.utils.types import DagRunType
if TYPE_CHECKING:
import datetime
from collections.abc import Collection, Iterable, Sequence
from typing import Any, Literal
from pendulum.tz.timezone import FixedTimezone, Timezone
from pydantic import NonNegativeInt
from sqlalchemy.orm import Session
from typing_extensions import TypeIs
from airflow.models.taskinstance import TaskInstance
from airflow.sdk import DAG
from airflow.serialization.definitions.taskgroup import SerializedTaskGroup
from airflow.serialization.serialized_objects import LazyDeserializedDAG, SerializedOperator
from airflow.timetables.base import Timetable
from airflow.utils.types import DagRunTriggeredByType
# Module-level structured logger.
log = structlog.get_logger(__name__)

# TODO (GH-52141): Share definition with SDK?
class EdgeInfoType(TypedDict):
    """
    Extra metadata that the Dag can store about an edge.

    This is duplicated from SDK.
    """

    # Optional display label for the edge.
    label: str | None
@attrs.define(eq=False, hash=False, slots=False)
class SerializedDAG:
    """
    Serialized representation of a ``DAG`` instance.

    A stringified DAG can only be used in the scope of scheduler and webserver.
    Fields that are not serializable, such as functions and customer defined
    classes, are casted to strings.
    """

    dag_id: str
    # Display name falls back to the dag_id when not explicitly provided.
    dag_display_name: str = attrs.field(default=attrs.Factory(operator.attrgetter("dag_id"), takes_self=True))

    # Default values of fields below should match schema default.
    access_control: dict[str, dict[str, Collection[str]]] | None = None
    catchup: bool = False
    dagrun_timeout: datetime.timedelta | None = None
    deadline: list[str] | None = None
    default_args: dict[str, Any] = attrs.field(factory=dict)
    allowed_run_types: list[str] | None = None
    description: str | None = None
    disable_bundle_versioning: bool = False
    doc_md: str | None = None
    edge_info: dict[str, dict[str, EdgeInfoType]] = attrs.field(factory=dict)
    end_date: datetime.datetime | None = None
    fail_fast: bool = False
    has_on_failure_callback: bool = False
    has_on_success_callback: bool = False
    is_paused_upon_creation: bool | None = None
    max_active_runs: int = 16
    max_active_tasks: int = 16
    max_consecutive_failed_dag_runs: int = 0
    owner_links: dict[str, str] = attrs.field(factory=dict)
    params: SerializedParamsDict = attrs.field(factory=SerializedParamsDict)
    partial: bool = False
    render_template_as_native_obj: bool = False
    start_date: datetime.datetime | None = None
    tags: set[str] = attrs.field(factory=set)
    template_searchpath: tuple[str, ...] | None = None

    # These are set dynamically during deserialization.
    task_dict: dict[str, SerializedOperator] = attrs.field(init=False)
    task_group: SerializedTaskGroup = attrs.field(init=False)
    timetable: Timetable = attrs.field(init=False)
    timezone: FixedTimezone | Timezone = attrs.field(init=False)

    # Only on serialized dag.
    last_loaded: datetime.datetime = attrs.field(init=False)

    # Determine the relative fileloc based only on the serialize dag.
    _processor_dags_folder: str = attrs.field(init=False)
    def __init__(self, *, dag_id: str) -> None:
        """Create a serialized-DAG shell; remaining fields are filled during deserialization."""
        self.__attrs_init__(dag_id=dag_id, dag_display_name=dag_id)  # type: ignore[attr-defined]
def __repr__(self) -> str:
return f"<SerializedDAG: {self.dag_id}>"
@classmethod
def get_serialized_fields(cls) -> frozenset[str]:
return frozenset(
{
"access_control",
"catchup",
"dag_display_name",
"dag_id",
"dagrun_timeout",
"deadline",
"default_args",
"allowed_run_types",
"description",
"disable_bundle_versioning",
"doc_md",
"edge_info",
"end_date",
"fail_fast",
"fileloc",
"is_paused_upon_creation",
"max_active_runs",
"max_active_tasks",
"max_consecutive_failed_dag_runs",
"owner_links",
"relative_fileloc",
"render_template_as_native_obj",
"start_date",
"tags",
"task_group",
"timetable",
"timezone",
}
)
    @classmethod
    @provide_session
    def bulk_write_to_db(
        cls,
        bundle_name: str,
        bundle_version: str | None,
        dags: Collection[DAG | LazyDeserializedDAG],
        parse_duration: float | None = None,
        session: Session = NEW_SESSION,
    ) -> None:
        """
        Ensure the DagModel rows for the given dags are up-to-date in the dag table in the DB.

        :param bundle_name: Name of the bundle the dags belong to.
        :param bundle_version: Version of that bundle, if any.
        :param dags: the DAG objects to save to the DB
        :param parse_duration: Parse timing passed through to the DAG-row update.
        :param session: Database session (injected by ``provide_session``).
        :return: None
        """
        if not dags:
            return

        from airflow.dag_processing.collection import AssetModelOperation, DagModelOperation
        from airflow.serialization.serialized_objects import LazyDeserializedDAG

        log.info("Bulk-writing dags to db", count=len(dags))
        dag_op = DagModelOperation(
            bundle_name=bundle_name,
            bundle_version=bundle_version,
            dags={d.dag_id: LazyDeserializedDAG.from_dag(d) for d in dags},
        )
        # First sync the DAG rows themselves...
        orm_dags = dag_op.add_dags(session=session)
        dag_op.update_dags(orm_dags, parse_duration, session=session)
        # ...then the asset/alias rows collected from those DAGs.
        asset_op = AssetModelOperation.collect(dag_op.dags)
        orm_assets = asset_op.sync_assets(session=session)
        orm_asset_aliases = asset_op.sync_asset_aliases(session=session)
        session.flush()  # This populates id so we can create fks in later calls.
        orm_dags = dag_op.find_orm_dags(session=session)  # Refetch so relationship is up to date.
        asset_op.add_dag_asset_references(orm_dags, orm_assets, session=session)
        asset_op.add_dag_asset_alias_references(orm_dags, orm_asset_aliases, session=session)
        asset_op.add_dag_asset_name_uri_references(session=session)
        asset_op.add_task_asset_references(orm_dags, orm_assets, session=session)
        asset_op.activate_assets_if_possible(orm_assets.values(), session=session)
        session.flush()  # Activation is needed when we add trigger references.
        asset_op.add_asset_trigger_references(orm_assets, session=session)
        dag_op.update_dag_asset_expression(orm_dags=orm_dags, orm_assets=orm_assets)
        session.flush()
@property
def tasks(self) -> Sequence[SerializedOperator]:
return list(self.task_dict.values())
@property
def task_ids(self) -> list[str]:
return list(self.task_dict)
@property
def roots(self) -> list[SerializedOperator]:
return [task for task in self.tasks if not task.upstream_list]
@property
def owner(self) -> str:
return ", ".join({t.owner for t in self.tasks})
def has_task(self, task_id: str) -> bool:
return task_id in self.task_dict
def get_task(self, task_id: str) -> SerializedOperator:
if task_id in self.task_dict:
return self.task_dict[task_id]
raise TaskNotFound(f"Task {task_id} not found")
@property
def task_group_dict(self):
return {k: v for k, v in self.task_group.get_task_group_dict().items() if k is not None}
    def partial_subset(
        self,
        task_ids: str | Iterable[str],
        include_downstream: bool = False,
        include_upstream: bool = True,
        include_direct_upstream: bool = False,
        exclude_original: bool = False,
        depth: int | None = None,
    ):
        """
        Return a copy of this DAG restricted to a subset of its tasks.

        :param task_ids: A single string matched as a *substring* of task ids,
            or an iterable of exact task ids.
        :param include_downstream: Also pull in all downstream relatives of matched tasks.
        :param include_upstream: Also pull in all upstream relatives (following setups).
        :param include_direct_upstream: Pull in only the one-hop upstream tasks.
        :param exclude_original: Drop the originally matched tasks, keeping only
            the relatives added by the flags above.
        :param depth: Maximum relative-traversal depth; ``None`` means unlimited.
        """
        from airflow.serialization.definitions.baseoperator import SerializedBaseOperator
        from airflow.serialization.definitions.mappedoperator import SerializedMappedOperator

        def is_task(obj) -> TypeIs[SerializedOperator]:
            # True for either concrete operator flavor (mapped or plain).
            return isinstance(obj, (SerializedMappedOperator, SerializedBaseOperator))

        # deep-copying self.task_dict and self.task_group takes a long time, and we don't want all
        # the tasks anyway, so we copy the tasks manually later
        memo = {id(self.task_dict): None, id(self.task_group): None}
        dag = copy.deepcopy(self, memo)

        if isinstance(task_ids, str):
            # NOTE: substring containment, not equality, for the single-string form.
            matched_tasks = [t for t in self.tasks if task_ids in t.task_id]
        else:
            matched_tasks = [t for t in self.tasks if t.task_id in task_ids]

        also_include_ids: set[str] = set()
        for t in matched_tasks:
            if include_downstream:
                for rel in t.get_flat_relatives(upstream=False, depth=depth):
                    also_include_ids.add(rel.task_id)
                    if rel not in matched_tasks:  # if it's in there, we're already processing it
                        # need to include setups and teardowns for tasks that are in multiple
                        # non-collinear setup/teardown paths
                        if not rel.is_setup and not rel.is_teardown:
                            also_include_ids.update(
                                x.task_id for x in rel.get_upstreams_only_setups_and_teardowns()
                            )
            if include_upstream:
                also_include_ids.update(x.task_id for x in t.get_upstreams_follow_setups(depth=depth))
            else:
                if not t.is_setup and not t.is_teardown:
                    also_include_ids.update(x.task_id for x in t.get_upstreams_only_setups_and_teardowns())
            if t.is_setup and not include_downstream:
                # A setup kept without its downstream still carries its teardowns.
                also_include_ids.update(x.task_id for x in t.downstream_list if x.is_teardown)

        also_include: list[SerializedOperator] = [self.task_dict[x] for x in also_include_ids]
        direct_upstreams: list[SerializedOperator] = []
        if include_direct_upstream:
            for t in itertools.chain(matched_tasks, also_include):
                direct_upstreams.extend(u for u in t.upstream_list if is_task(u))

        # Make sure to not recursively deepcopy the dag or task_group while copying the task.
        # task_group is reset later
        def _deepcopy_task(t) -> SerializedOperator:
            memo.setdefault(id(t.task_group), None)
            return copy.deepcopy(t, memo)

        # Compiling the unique list of tasks that made the cut
        if exclude_original:
            matched_tasks = []
        dag.task_dict = {
            t.task_id: _deepcopy_task(t)
            for t in itertools.chain(matched_tasks, also_include, direct_upstreams)
        }

        def filter_task_group(group, parent_group):
            """Exclude tasks not included in the partial dag from the given TaskGroup."""
            # We want to deepcopy _most but not all_ attributes of the task group, so we create a shallow copy
            # and then manually deep copy the instances. (memo argument to deepcopy only works for instances
            # of classes, not "native" properties of an instance)
            copied = copy.copy(group)
            memo[id(group.children)] = {}
            if parent_group:
                memo[id(group.parent_group)] = parent_group
            for attr in type(group).__slots__:
                value = getattr(group, attr)
                value = copy.deepcopy(value, memo)
                object.__setattr__(copied, attr, value)

            # Children point back at the group through a weak proxy so the
            # copied structure does not create reference cycles.
            proxy = weakref.proxy(copied)

            for child in group.children.values():
                if is_task(child):
                    if child.task_id in dag.task_dict:
                        task = copied.children[child.task_id] = dag.task_dict[child.task_id]
                        task.task_group = proxy
                else:
                    filtered_child = filter_task_group(child, proxy)
                    # Only include this child TaskGroup if it is non-empty.
                    if filtered_child.children:
                        copied.children[child.group_id] = filtered_child
            return copied

        object.__setattr__(dag, "task_group", filter_task_group(self.task_group, None))

        # Removing upstream/downstream references to tasks and TaskGroups that did not make
        # the cut.
        groups = dag.task_group.get_task_group_dict()
        for g in groups.values():
            g.upstream_group_ids.intersection_update(groups)
            g.downstream_group_ids.intersection_update(groups)
            g.upstream_task_ids.intersection_update(dag.task_dict)
            g.downstream_task_ids.intersection_update(dag.task_dict)

        for t in dag.tasks:
            # Removing upstream/downstream references to tasks that did not
            # make the cut
            t.upstream_task_ids.intersection_update(dag.task_dict)
            t.downstream_task_ids.intersection_update(dag.task_dict)
        # Mark the copy partial when any task was dropped.
        dag.partial = len(dag.tasks) < len(self.tasks)
        return dag
    @functools.cached_property
    def _time_restriction(self) -> TimeRestriction:
        """Compute the scheduling time window (earliest, latest, catchup) for this DAG."""
        # Earliest: the minimum over the DAG start_date and all task start_dates.
        start_dates = [t.start_date for t in self.tasks if t.start_date]
        if self.start_date is not None:
            start_dates.append(self.start_date)
        earliest = None
        if start_dates:
            earliest = coerce_datetime(min(start_dates))
        latest = coerce_datetime(self.end_date)
        end_dates = [t.end_date for t in self.tasks if t.end_date]
        if len(end_dates) == len(self.tasks):  # not exists null end_date
            # Only when *every* task has an end_date can the maximum of the
            # task ends (plus the DAG end) replace the DAG-level end_date.
            if self.end_date is not None:
                end_dates.append(self.end_date)
            if end_dates:
                latest = coerce_datetime(max(end_dates))
        return TimeRestriction(earliest, latest, self.catchup)
def next_dagrun_info(
self,
*,
last_automated_run_info: DagRunInfo | None,
restricted: bool = True,
) -> DagRunInfo | None:
"""
Get the DagRunInfo object for the next run of this dag.
This calculates the interval or partition and when it can be scheduled, according to the
dag's timetable, start_date, end_date, etc. This doesn't check max
active run or any other "max_active_tasks" type limits, but only
performs calculations based on the various date and interval fields of
this dag and its tasks.
:param last_automated_run_info: The latest run info of
existing "automated" DagRuns for this dag (scheduled or backfill,
but not manual).
:param restricted: If set to *False* (default is *True*), ignore
``start_date``, ``end_date``, and ``catchup`` specified on the DAG
or tasks.
:return: DagRunInfo of the next dagrun, or None if a dagrun is not
going to be scheduled.
"""
if restricted:
restriction = self._time_restriction
else:
restriction = TimeRestriction(earliest=None, latest=None, catchup=True)
try:
info = self.timetable.next_dagrun_info_v2(
last_dagrun_info=last_automated_run_info,
restriction=restriction,
)
log.info(
"get next_dagrun_info_v2",
last_automated_run_info=last_automated_run_info,
next_info=info,
)
return info
except Exception:
log.exception(
"Failed to fetch run info",
last_run_info=last_automated_run_info,
dag_id=self.dag_id,
)
return None
def iter_dagrun_infos_between(
self,
earliest: datetime.datetime | None,
latest: datetime.datetime,
) -> Iterable[DagRunInfo]:
"""
Yield DagRunInfo using this DAG's timetable between given interval.
DagRunInfo instances yielded if their ``logical_date`` is not earlier
than ``earliest``, nor later than ``latest``. The instances are ordered
by their ``logical_date`` from earliest to latest.
# TODO: AIP-76 see issue https://github.com/apache/airflow/issues/60455
"""
if earliest is None:
earliest = self._time_restriction.earliest
if earliest is None:
raise ValueError("earliest was None and we had no value in time_restriction to fallback on")
earliest = coerce_datetime(earliest)
latest = coerce_datetime(latest)
restriction = TimeRestriction(earliest, latest, catchup=True)
info = None
try:
while True:
info = self.timetable.next_dagrun_info_v2(
last_dagrun_info=info,
restriction=restriction,
)
if info:
yield info
else:
break
except Exception:
log.exception(
"Failed to fetch run info",
last_dagrun_info=info,
dag_id=self.dag_id,
)
@provide_session
def get_concurrency_reached(self, session=NEW_SESSION) -> bool:
"""Return a boolean indicating whether the max_active_tasks limit for this DAG has been reached."""
from airflow.models.taskinstance import TaskInstance
total_tasks = session.scalar(
select(func.count(TaskInstance.task_id)).where(
TaskInstance.dag_id == self.dag_id,
TaskInstance.state == TaskInstanceState.RUNNING,
)
)
return total_tasks >= self.max_active_tasks
    @provide_session
    def create_dagrun(
        self,
        *,
        run_id: str,
        logical_date: datetime.datetime | None = None,
        data_interval: tuple[datetime.datetime, datetime.datetime] | None = None,
        run_after: datetime.datetime,
        conf: dict | None = None,
        run_type: DagRunType,
        triggered_by: DagRunTriggeredByType,
        triggering_user_name: str | None = None,
        state: DagRunState,
        start_date: datetime.datetime | None = None,
        creating_job_id: int | None = None,
        backfill_id: NonNegativeInt | None = None,
        partition_key: str | None = None,
        note: str | None = None,
        session: Session = NEW_SESSION,
    ) -> DagRun:
        """
        Create a run for this DAG to run its tasks.

        :param run_id: ID of the dag_run
        :param logical_date: date of execution
        :param data_interval: Data interval of the run; must be None when
            ``logical_date`` is None.
        :param run_after: the datetime before which dag won't run
        :param conf: Dict containing configuration/parameters to pass to the DAG
        :param run_type: Kind of run (scheduled, manual, backfill, ...).
        :param triggered_by: the entity which triggers the dag_run
        :param triggering_user_name: the user name who triggers the dag_run
        :param state: Initial state of the run.
        :param start_date: the date this dag run should be evaluated
        :param creating_job_id: ID of the job creating this DagRun
        :param backfill_id: ID of the backfill run if one exists
        :param partition_key: Optional partition key recorded on the run.
        :param note: Optional note attached to the run.
        :param session: Unused. Only added in compatibility with database isolation mode
        :return: The created DAG run.

        :meta private:
        """
        from airflow.models.dagrun import RUN_ID_REGEX

        log.info(
            "creating dag run",
            run_after=run_after,
            run_id=run_id,
            logical_date=logical_date,
            partition_key=partition_key,
        )
        logical_date = coerce_datetime(logical_date)

        # For manual runs where logical_date is None, ensure no data_interval is set.
        if logical_date is None and data_interval is not None:
            raise ValueError("data_interval must be None when logical_date is None")

        if data_interval and not isinstance(data_interval, DataInterval):
            data_interval = DataInterval(*map(coerce_datetime, data_interval))

        if isinstance(run_type, DagRunType):
            pass
        elif isinstance(run_type, str):  # Ensure the input value is valid.
            run_type = DagRunType(run_type)
        else:
            raise ValueError(f"run_type should be a DagRunType, not {type(run_type)}")

        if not isinstance(run_id, str):
            raise ValueError(f"`run_id` should be a str, not {type(run_id)}")

        # This is also done on the DagRun model class, but SQLAlchemy column
        # validator does not work well for some reason.
        if not re.match(RUN_ID_REGEX, run_id):
            regex = airflow_conf.get("scheduler", "allowed_run_id_pattern").strip()
            if not regex or not re.match(regex, run_id):
                raise ValueError(
                    f"The run_id provided '{run_id}' does not match regex pattern "
                    f"'{regex}' or '{RUN_ID_REGEX}'"
                )

        # Prevent a manual run from using an ID that looks like a scheduled run.
        if run_type == DagRunType.MANUAL:
            if (inferred_run_type := DagRunType.from_run_id(run_id)) != DagRunType.MANUAL:
                raise ValueError(
                    f"A {run_type.value} DAG run cannot use ID {run_id!r} since it "
                    f"is reserved for {inferred_run_type.value} runs"
                )

        # todo: AIP-78 add verification that if run type is backfill then we have a backfill id

        # Merge run conf into the DAG params and validate before persisting.
        copied_params = self.params.deep_merge(conf)
        copied_params.validate()

        orm_dagrun = _create_orm_dagrun(
            dag=self,
            run_id=run_id,
            logical_date=logical_date,
            data_interval=data_interval,
            run_after=coerce_datetime(run_after),
            start_date=coerce_datetime(start_date),
            conf=conf,
            state=state,
            run_type=run_type,
            creating_job_id=creating_job_id,
            backfill_id=backfill_id,
            triggered_by=triggered_by,
            triggering_user_name=triggering_user_name,
            partition_key=partition_key,
            note=note,
            session=session,
        )

        if self.deadline:
            self._process_dagrun_deadline_alerts(orm_dagrun, session)

        return orm_dagrun
    def _process_dagrun_deadline_alerts(
        self,
        orm_dagrun: DagRun,
        session: Session,
    ) -> None:
        """
        Process deadline alerts for a newly created DagRun.

        Creates Deadline records for any DeadlineAlerts that reference DAGRUN.

        :param orm_dagrun: The newly created DagRun
        :param session: Database session
        """
        # Import here to avoid circular dependency
        from airflow.models.serialized_dag import SerializedDagModel

        # Get the serialized_dag ID for this DAG
        serialized_dag_id = session.scalar(
            select(SerializedDagModel.id).where(
                SerializedDagModel.dag_version_id == orm_dagrun.created_dag_version_id
            )
        )
        if not serialized_dag_id:
            # No serialized row for this dag version; nothing to attach alerts to.
            return

        # Query deadline alerts by serialized_dag_id
        deadline_alert_records = session.scalars(
            select(DeadlineAlertModel).where(DeadlineAlertModel.serialized_dag_id == serialized_dag_id)
        ).all()

        for deadline_alert in deadline_alert_records:
            if not deadline_alert:
                continue
            # Round-trip the stored alert through the serialization decoder so
            # we get a usable reference/interval/callback object.
            deserialized_deadline_alert = decode_deadline_alert(
                {
                    Encoding.TYPE: DAT.DEADLINE_ALERT,
                    Encoding.VAR: {
                        DeadlineAlertFields.REFERENCE: deadline_alert.reference,
                        DeadlineAlertFields.INTERVAL: deadline_alert.interval,
                        DeadlineAlertFields.CALLBACK: deadline_alert.callback_def,
                    },
                }
            )

            if isinstance(deserialized_deadline_alert.reference, SerializedReferenceModels.TYPES.DAGRUN):
                deadline_time = deserialized_deadline_alert.reference.evaluate_with(
                    session=session,
                    interval=deserialized_deadline_alert.interval,
                    # TODO : Pretty sure we can drop these last two; verify after testing is complete
                    dag_id=self.dag_id,
                    run_id=orm_dagrun.run_id,
                )
                if deadline_time is not None:
                    session.add(
                        Deadline(
                            deadline_time=deadline_time,
                            callback=deserialized_deadline_alert.callback,
                            dagrun_id=orm_dagrun.id,
                            deadline_alert_id=deadline_alert.id,
                            dag_id=orm_dagrun.dag_id,
                        )
                    )
                    Stats.incr("deadline_alerts.deadline_created", tags={"dag_id": self.dag_id})
    @provide_session
    def set_task_instance_state(
        self,
        *,
        task_id: str,
        map_indexes: Collection[int] | None = None,
        run_id: str | None = None,
        state: TaskInstanceState,
        upstream: bool = False,
        downstream: bool = False,
        future: bool = False,
        past: bool = False,
        commit: bool = True,
        session=NEW_SESSION,
    ) -> list[TaskInstance]:
        """
        Set the state of a TaskInstance and clear downstream tasks in failed or upstream_failed state.

        :param task_id: Task ID of the TaskInstance
        :param map_indexes: Only set TaskInstance if its map_index matches.
            If None (default), all mapped TaskInstances of the task are set.
        :param run_id: The run_id of the TaskInstance
        :param state: State to set the TaskInstance to
        :param upstream: Include all upstream tasks of the given task_id
        :param downstream: Include all downstream tasks of the given task_id
        :param future: Include all future TaskInstances of the given task_id
        :param commit: Commit changes
        :param past: Include all past TaskInstances of the given task_id
        :return: The altered TaskInstances.
        """
        from airflow.api.common.mark_tasks import set_state

        task = self.get_task(task_id)
        task.dag = self

        tasks_to_set_state: list[SerializedOperator | tuple[SerializedOperator, int]]
        if map_indexes is None:
            tasks_to_set_state = [task]
        else:
            tasks_to_set_state = [(task, map_index) for map_index in map_indexes]

        altered = set_state(
            tasks=tasks_to_set_state,
            run_id=run_id,
            upstream=upstream,
            downstream=downstream,
            future=future,
            past=past,
            state=state,
            commit=commit,
            session=session,
        )
        if not commit:
            # Dry-run: report what would change without touching downstreams.
            return altered

        # Clear downstream tasks that are in failed/upstream_failed state to resume them.
        # Flush the session so that the tasks marked success are reflected in the db.
        session.flush()
        subset = self.partial_subset(
            task_ids={task_id},
            include_downstream=True,
            include_upstream=False,
        )
        # Raises an error if not found
        dr_id, logical_date = session.execute(
            select(DagRun.id, DagRun.logical_date).where(
                DagRun.run_id == run_id, DagRun.dag_id == self.dag_id
            )
        ).one()
        # Now we want to clear downstreams of tasks that had their state set...
        clear_kwargs = {
            "only_failed": True,
            "session": session,
            # Exclude the task itself from being cleared.
            "exclude_task_ids": frozenset((task_id,)),
        }
        if not future and not past:  # Simple case 1: we're only dealing with exactly one run.
            clear_kwargs["run_id"] = run_id
            subset.clear(**clear_kwargs)
        elif future and past:  # Simple case 2: we're clearing ALL runs.
            subset.clear(**clear_kwargs)
        else:  # Complex cases: we may have more than one run, based on a date range.
            # Make 'future' and 'past' make some sense when multiple runs exist
            # for the same logical date. We order runs by their id and only
            # clear runs have larger/smaller ids.
            exclude_run_id_stmt = select(DagRun.run_id).where(DagRun.logical_date == logical_date)
            if future:
                clear_kwargs["start_date"] = logical_date
                exclude_run_id_stmt = exclude_run_id_stmt.where(DagRun.id > dr_id)
            else:
                clear_kwargs["end_date"] = logical_date
                exclude_run_id_stmt = exclude_run_id_stmt.where(DagRun.id < dr_id)
            subset.clear(exclude_run_ids=frozenset(session.scalars(exclude_run_id_stmt)), **clear_kwargs)
        return altered
@overload
def _get_task_instances(
self,
*,
task_ids: Collection[str | tuple[str, int]] | None,
start_date: datetime.datetime | None,
end_date: datetime.datetime | None,
run_id: str | None,
state: TaskInstanceState | Sequence[TaskInstanceState],
exclude_task_ids: Collection[str | tuple[str, int]] | None,
exclude_run_ids: frozenset[str] | None,
session: Session,
) -> Iterable[TaskInstance]: ... # pragma: no cover
@overload
def _get_task_instances(
self,
*,
task_ids: Collection[str | tuple[str, int]] | None,
as_pk_tuple: Literal[True],
start_date: datetime.datetime | None,
end_date: datetime.datetime | None,
run_id: str | None,
state: TaskInstanceState | Sequence[TaskInstanceState],
exclude_task_ids: Collection[str | tuple[str, int]] | None,
exclude_run_ids: frozenset[str] | None,
session: Session,
) -> set[TaskInstanceKey]: ... # pragma: no cover
    def _get_task_instances(
        self,
        *,
        task_ids: Collection[str | tuple[str, int]] | None,
        as_pk_tuple: Literal[True, None] = None,
        start_date: datetime.datetime | None,
        end_date: datetime.datetime | None,
        run_id: str | None,
        state: TaskInstanceState | Sequence[TaskInstanceState],
        exclude_task_ids: Collection[str | tuple[str, int]] | None,
        exclude_run_ids: frozenset[str] | None,
        session: Session,
    ) -> Iterable[TaskInstance] | set[TaskInstanceKey]:
        """
        Query task instances of this DAG matching the given filters.

        Returns full ``TaskInstance`` rows, or a set of ``TaskInstanceKey``
        when ``as_pk_tuple`` is True.
        """
        from airflow.models.taskinstance import TaskInstance

        # If we are looking at dependent dags we want to avoid UNION calls
        # in SQL (it doesn't play nice with fields that have no equality operator,
        # like JSON types), we instead build our result set separately.
        #
        # This will be empty if we are only looking at one dag, in which case
        # we can return the filtered TI query object directly.
        result: set[TaskInstanceKey] = set()

        # Do we want full objects, or just the primary columns?
        if as_pk_tuple:
            tis_pk = select(
                TaskInstance.dag_id,
                TaskInstance.task_id,
                TaskInstance.run_id,
                TaskInstance.map_index,
            )
            tis_pk = tis_pk.join(TaskInstance.dag_run)
        else:
            tis_full = select(TaskInstance)
            tis_full = tis_full.join(TaskInstance.dag_run)

        # Apply common filters
        def apply_filters(query):
            # A partial DAG only knows about a subset of task ids, so restrict
            # the query to that subset.
            if self.partial:
                query = query.where(
                    TaskInstance.dag_id == self.dag_id, TaskInstance.task_id.in_(self.task_ids)
                )
            else:
                query = query.where(TaskInstance.dag_id == self.dag_id)
            if run_id:
                query = query.where(TaskInstance.run_id == run_id)
            if start_date:
                query = query.where(DagRun.logical_date >= start_date)
            if task_ids is not None:
                # Use the selector condition directly without intermediate variable
                query = query.where(TaskInstance.ti_selector_condition(task_ids))
            if end_date:
                query = query.where(DagRun.logical_date <= end_date)
            return query

        if as_pk_tuple:
            tis_pk = apply_filters(tis_pk)
        else:
            tis_full = apply_filters(tis_full)

        def apply_state_filter(query):
            if state:
                if isinstance(state, (str, TaskInstanceState)):
                    query = query.where(TaskInstance.state == state)
                elif len(state) == 1:
                    query = query.where(TaskInstance.state == state[0])
                else:
                    # this is required to deal with NULL values
                    if None in state:
                        if all(x is None for x in state):
                            query = query.where(TaskInstance.state.is_(None))
                        else:
                            not_none_state = [s for s in state if s]
                            query = query.where(
                                or_(TaskInstance.state.in_(not_none_state), TaskInstance.state.is_(None))
                            )
                    else:
                        query = query.where(TaskInstance.state.in_(state))
            if exclude_run_ids:
                query = query.where(TaskInstance.run_id.not_in(exclude_run_ids))
            return query

        if as_pk_tuple:
            tis_pk = apply_state_filter(tis_pk)
        else:
            tis_full = apply_state_filter(tis_full)

        if result or as_pk_tuple:
            # Only execute the `ti` query if we have also collected some other results
            if as_pk_tuple:
                tis_query = session.execute(tis_pk).all()
                result.update(TaskInstanceKey(**cols._mapping) for cols in tis_query)
            else:
                result.update(ti.key for ti in session.scalars(tis_full))

            if exclude_task_ids is not None:
                # Drop excluded tasks whether they were given as plain ids or
                # (task_id, map_index) pairs.
                result = {
                    task
                    for task in result
                    if task.task_id not in exclude_task_ids
                    and (task.task_id, task.map_index) not in exclude_task_ids
                }

        if as_pk_tuple:
            return result
        if result:
            # We've been asked for objects, lets combine it all back in to a result set
            ti_filters = TaskInstance.filter_for_tis(result)
            if ti_filters is not None:
                tis_final = select(TaskInstance).where(ti_filters)
                return session.scalars(tis_final)
        elif exclude_task_ids is None:
            pass  # Disable filter if not set.
        elif isinstance(next(iter(exclude_task_ids), None), str):
            tis_full = tis_full.where(TaskInstance.task_id.notin_(exclude_task_ids))
        else:
            tis_full = tis_full.where(
                tuple_(TaskInstance.task_id, TaskInstance.map_index).not_in(exclude_task_ids)
            )
        return session.scalars(tis_full)
# Overload: dry_run=True with a specific run_id — returns the matched
# TaskInstances without clearing anything.
@overload
def clear(
    self,
    *,
    dry_run: Literal[True],
    task_ids: Collection[str | tuple[str, int]] | None = None,
    run_id: str,
    only_failed: bool = False,
    only_running: bool = False,
    dag_run_state: DagRunState = DagRunState.QUEUED,
    session: Session = NEW_SESSION,
    exclude_task_ids: frozenset[str] | frozenset[tuple[str, int]] | None = frozenset(),
    exclude_run_ids: frozenset[str] | None = frozenset(),
    run_on_latest_version: bool = False,
) -> list[TaskInstance]: ...  # pragma: no cover
# Overload: dry_run=False with a specific run_id — clears and returns the count.
@overload
def clear(
    self,
    *,
    task_ids: Collection[str | tuple[str, int]] | None = None,
    run_id: str,
    only_failed: bool = False,
    only_running: bool = False,
    dag_run_state: DagRunState = DagRunState.QUEUED,
    dry_run: Literal[False] = False,
    session: Session = NEW_SESSION,
    exclude_task_ids: frozenset[str] | frozenset[tuple[str, int]] | None = frozenset(),
    exclude_run_ids: frozenset[str] | None = frozenset(),
    run_on_latest_version: bool = False,
) -> int: ...  # pragma: no cover
# Overload: dry_run=True with a date range — returns the matched TaskInstances
# without clearing anything.
@overload
def clear(
    self,
    *,
    dry_run: Literal[True],
    task_ids: Collection[str | tuple[str, int]] | None = None,
    start_date: datetime.datetime | None = None,
    end_date: datetime.datetime | None = None,
    only_failed: bool = False,
    only_running: bool = False,
    dag_run_state: DagRunState = DagRunState.QUEUED,
    session: Session = NEW_SESSION,
    exclude_task_ids: frozenset[str] | frozenset[tuple[str, int]] | None = frozenset(),
    exclude_run_ids: frozenset[str] | None = frozenset(),
    run_on_latest_version: bool = False,
) -> list[TaskInstance]: ...  # pragma: no cover
# Overload: dry_run=False with a date range — clears and returns the count.
@overload
def clear(
    self,
    *,
    task_ids: Collection[str | tuple[str, int]] | None = None,
    start_date: datetime.datetime | None = None,
    end_date: datetime.datetime | None = None,
    only_failed: bool = False,
    only_running: bool = False,
    dag_run_state: DagRunState = DagRunState.QUEUED,
    dry_run: Literal[False] = False,
    session: Session = NEW_SESSION,
    exclude_task_ids: frozenset[str] | frozenset[tuple[str, int]] | None = frozenset(),
    exclude_run_ids: frozenset[str] | None = frozenset(),
    run_on_latest_version: bool = False,
) -> int: ...  # pragma: no cover
@provide_session
def clear(
    self,
    task_ids: Collection[str | tuple[str, int]] | None = None,
    *,
    run_id: str | None = None,
    start_date: datetime.datetime | None = None,
    end_date: datetime.datetime | None = None,
    only_failed: bool = False,
    only_running: bool = False,
    dag_run_state: DagRunState = DagRunState.QUEUED,
    dry_run: bool = False,
    session: Session = NEW_SESSION,
    exclude_task_ids: frozenset[str] | frozenset[tuple[str, int]] | None = frozenset(),
    exclude_run_ids: frozenset[str] | None = frozenset(),
    run_on_latest_version: bool = False,
) -> int | Iterable[TaskInstance]:
    """
    Clear a set of task instances associated with the current dag for a specified date range.

    :param task_ids: List of task ids or (``task_id``, ``map_index``) tuples to clear
    :param run_id: The run_id for which the tasks should be cleared
    :param start_date: The minimum logical_date to clear
    :param end_date: The maximum logical_date to clear
    :param only_failed: Only clear failed tasks
    :param only_running: Only clear running tasks.
    :param dag_run_state: state to set DagRun to. If set to False, dagrun state will not
        be changed.
    :param dry_run: Find the tasks to clear but don't clear them.
    :param run_on_latest_version: whether to run on latest serialized DAG and Bundle version
    :param session: The sqlalchemy session to use
    :param exclude_task_ids: A set of ``task_id`` or (``task_id``, ``map_index``)
        tuples that should not be cleared
    :param exclude_run_ids: A set of ``run_id`` values whose runs should not be cleared
    :return: the matched ``TaskInstance`` list when ``dry_run`` is True,
        otherwise the number of task instances cleared
    """
    from airflow.models.taskinstance import clear_task_instances

    state: list[TaskInstanceState] = []
    if only_failed:
        state += [TaskInstanceState.FAILED, TaskInstanceState.UPSTREAM_FAILED]
    if only_running:
        # Yes, having `+=` doesn't make sense, but this was the existing behaviour
        state += [TaskInstanceState.RUNNING]

    # Materialize once; previously the iterable was copied into a list here and
    # then copied a second time when handed to clear_task_instances().
    tis = list(
        self._get_task_instances(
            task_ids=task_ids,
            start_date=start_date,
            end_date=end_date,
            run_id=run_id,
            state=state,
            session=session,
            exclude_task_ids=exclude_task_ids,
            exclude_run_ids=exclude_run_ids,
        )
    )
    if dry_run:
        return tis
    if not tis:
        return 0
    clear_task_instances(
        tis,
        session,
        dag_run_state=dag_run_state,
        run_on_latest_version=run_on_latest_version,
    )
    session.flush()
    return len(tis)
@classmethod
def clear_dags(
    cls,
    dags: Iterable[SerializedDAG],
    *,
    start_date=None,
    end_date=None,
    only_failed=False,
    only_running=False,
    dag_run_state=DagRunState.QUEUED,
    dry_run: bool = False,
):
    """Clear task instances for every DAG in *dags*.

    With ``dry_run=True`` the matched task instances of all DAGs are returned
    as one combined list and nothing is cleared; otherwise each DAG is cleared
    and the total number of cleared task instances is returned.
    """

    def _clear_one(dag, *, dry: bool):
        # Forward the shared filters to each DAG's own clear().
        return dag.clear(
            start_date=start_date,
            end_date=end_date,
            only_failed=only_failed,
            only_running=only_running,
            dag_run_state=dag_run_state,
            dry_run=dry,
        )

    if dry_run:
        matched: list = []
        for dag in dags:
            matched.extend(_clear_one(dag, dry=True))
        return matched
    return sum(_clear_one(dag, dry=False) for dag in dags)
def get_edge_info(self, upstream_task_id: str, downstream_task_id: str) -> EdgeInfoType:
    """Return edge information for the given pair of tasks or an empty edge if there is no information."""
    # Note - older serialized dags may not have edge_info being a dict at all,
    # so guard before attempting nested lookups.
    no_info = cast("EdgeInfoType", {})
    if not self.edge_info:
        return no_info
    downstream_map = self.edge_info.get(upstream_task_id, {})
    return downstream_map.get(downstream_task_id, no_info)
@provide_session
def _create_orm_dagrun(
    *,
    dag: SerializedDAG,
    run_id: str,
    logical_date: datetime.datetime | None,
    data_interval: DataInterval | None,
    run_after: datetime.datetime,
    start_date: datetime.datetime | None,
    conf: Any,
    state: DagRunState | None,
    run_type: DagRunType,
    creating_job_id: int | None,
    backfill_id: NonNegativeInt | None,
    triggered_by: DagRunTriggeredByType,
    triggering_user_name: str | None = None,
    partition_key: str | None = None,
    note: str | None = None,
    session: Session = NEW_SESSION,
) -> DagRun:
    """
    Create, persist, and integrity-check a ``DagRun`` row for *dag*.

    Looks up the latest serialized ``DagVersion`` (raising when the dag was
    never serialized), records the current bundle version unless bundle
    versioning is disabled for the dag, flushes the new row, and creates the
    associated task instances via ``verify_integrity``.

    :raises AirflowException: if no serialized DagVersion exists for ``dag.dag_id``
    :return: the newly created, flushed ``DagRun`` with ``run.dag`` set to *dag*
    """
    bundle_version = None
    if not dag.disable_bundle_versioning:
        bundle_version = session.scalar(
            select(DagModel.bundle_version).where(DagModel.dag_id == dag.dag_id),
        )
    dag_version = DagVersion.get_latest_version(dag.dag_id, session=session)
    if not dag_version:
        raise AirflowException(f"Cannot create DagRun for DAG {dag.dag_id} because the dag is not serialized")

    run = DagRun(
        dag_id=dag.dag_id,
        run_id=run_id,
        logical_date=logical_date,
        start_date=start_date,
        run_after=run_after,
        conf=conf,
        state=state,
        run_type=run_type,
        creating_job_id=creating_job_id,
        data_interval=data_interval,
        triggered_by=triggered_by,
        triggering_user_name=triggering_user_name,
        backfill_id=backfill_id,
        bundle_version=bundle_version,
        partition_key=partition_key,
        note=note,
    )
    # Load defaults into the following two fields to ensure result can be serialized detached
    max_log_template_id = session.scalar(select(func.max(LogTemplate.__table__.c.id)))
    run.log_template_id = int(max_log_template_id) if max_log_template_id is not None else 0
    run.created_dag_version = dag_version
    run.consumed_asset_events = []
    session.add(run)
    session.flush()
    run.dag = dag
    # create the associated task instances
    # state is None at the moment of creation
    run.verify_integrity(session=session, dag_version_id=dag_version.id)
    return run
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/serialization/definitions/dag.py",
"license": "Apache License 2.0",
"lines": 1026,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:devel-common/src/sphinx_exts/pagefind_search/builder.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Pagefind index builder and static file handler."""
from __future__ import annotations
import asyncio
import logging
import re
from pathlib import Path
from typing import TYPE_CHECKING
import anyio
from pagefind.index import IndexConfig, PagefindIndex
from sphinx.util.fileutil import copy_asset
if TYPE_CHECKING:
from sphinx.application import Sphinx
logger = logging.getLogger(__name__)
def add_content_weights_lightweight(
    output_dir: Path, glob_pattern: str, exclude_patterns: list[str] | None = None
) -> int:
    """Add data-pagefind-weight attributes using simple regex replacement.

    :param output_dir: Output directory containing the built HTML files
    :param glob_pattern: Glob pattern selecting the HTML files to process
    :param exclude_patterns: Path-prefix patterns to skip (matched after stripping a trailing ``/*``)
    :return: Number of files that were actually modified
    """
    excludes = exclude_patterns or []
    # Maximum valid weights (0.0-10.0 range, quadratic scale); see
    # https://pagefind.app/docs/weighting/ — 10.0 ~= 100x impact, 9.0 ~= 81x.
    # The negative lookahead keeps already-weighted tags untouched.
    weighted_tags = [
        (re.compile(r"<title(?![^>]*data-pagefind-weight)"), '<title data-pagefind-weight="10.0"'),
        (re.compile(r"<h1(?![^>]*data-pagefind-weight)"), '<h1 data-pagefind-weight="9.0"'),
    ]
    processed = 0
    for candidate in output_dir.glob(glob_pattern):
        if not candidate.is_file():
            continue
        rel = str(candidate.relative_to(output_dir))
        # Exclusion uses simple prefix matching against the pattern stem.
        if any(rel.startswith(pattern.rstrip("/*")) for pattern in excludes):
            continue
        try:
            original = candidate.read_text(encoding="utf-8")
            updated = original
            for regex, replacement in weighted_tags:
                updated = regex.sub(replacement, updated)
            if updated != original:
                candidate.write_text(updated, encoding="utf-8")
                processed += 1
        except Exception as err:
            logger.warning("Failed to add weights to %s: %s", candidate, err)
    return processed
async def build_pagefind_index(app: Sphinx) -> dict[str, int]:
    """Build Pagefind search index using Python API.

    Optionally injects content weights into the built HTML first, then indexes
    either file-by-file (when ``pagefind_exclude_patterns`` is configured) or
    via a single fast directory pass.

    :param app: the Sphinx application whose ``outdir`` holds the built HTML
    :return: a dict with ``page_count`` — the number of pages indexed
    """
    output_dir = Path(app.builder.outdir)
    pagefind_dir = output_dir / "_pagefind"

    # Add content weighting if enabled
    if getattr(app.config, "pagefind_content_weighting", True):
        logger.info("Adding content weights to HTML files...")
        exclude_patterns = getattr(app.config, "pagefind_exclude_patterns", [])
        files_processed = add_content_weights_lightweight(
            output_dir, app.config.pagefind_glob, exclude_patterns
        )
        logger.info("Added content weights to %s files", files_processed)

    config = IndexConfig(
        root_selector=app.config.pagefind_root_selector,
        exclude_selectors=app.config.pagefind_exclude_selectors,
        output_path=str(pagefind_dir),
        verbose=app.config.pagefind_verbose,
        force_language=app.config.language or "en",
        keep_index_url=False,
        write_playground=getattr(app.config, "pagefind_enable_playground", False),
    )
    logger.info("Building Pagefind search index...")
    exclude_patterns = getattr(app.config, "pagefind_exclude_patterns", [])
    if exclude_patterns:
        # Need to index files individually to apply exclusion patterns
        logger.info("Indexing with exclusion patterns: %s", exclude_patterns)
        indexed = 0
        skipped = 0
        async with PagefindIndex(config=config) as index:

            def _glob_html_files():
                # Globbing is synchronous filesystem work; run it off the event loop.
                return list(output_dir.glob(app.config.pagefind_glob))

            html_files = await anyio.to_thread.run_sync(_glob_html_files)
            for html_file in html_files:
                if not await anyio.Path(html_file).is_file():
                    continue
                relative_path = html_file.relative_to(output_dir)
                relative_str = str(relative_path)
                # Check if path matches any exclude pattern (prefix matching)
                if any(relative_str.startswith(pattern.rstrip("/*")) for pattern in exclude_patterns):
                    skipped += 1
                    continue
                try:
                    content = await anyio.Path(html_file).read_text(encoding="utf-8")
                    await index.add_html_file(
                        content=content,
                        source_path=str(html_file),
                        url=str(relative_path),
                    )
                    indexed += 1
                except Exception as e:
                    logger.warning("Failed to index %s: %s", relative_path, e)
            logger.info("Pagefind indexed %s pages (excluded %s)", indexed, skipped)
            if app.config.pagefind_custom_records:
                for record in app.config.pagefind_custom_records:
                    try:
                        await index.add_custom_record(**record)
                    except Exception as e:
                        logger.warning("Failed to add custom record: %s", e)
        return {"page_count": indexed}
    else:
        # No exclusions - use fast directory indexing
        async with PagefindIndex(config=config) as index:
            result = await index.add_directory(path=str(output_dir), glob=app.config.pagefind_glob)
            page_count = result.get("page_count", 0)
            logger.info("Pagefind indexed %s pages", page_count)
            if app.config.pagefind_custom_records:
                for record in app.config.pagefind_custom_records:
                    try:
                        await index.add_custom_record(**record)
                    except Exception as e:
                        logger.warning("Failed to add custom record: %s", e)
        return {"page_count": page_count}
def build_index_finished(app: Sphinx, exception: Exception | None) -> None:
    """Run Pagefind indexing once the HTML build has completed.

    Intended for Sphinx's ``build-finished`` event. A failed build, a non-HTML
    builder, or ``pagefind_enabled=False`` skips indexing; an indexing failure
    (including zero indexed pages) is surfaced as a ``RuntimeError``.
    """
    if exception:
        logger.info("Skipping Pagefind indexing due to build errors")
        return
    if app.builder.format != "html":
        return
    if not app.config.pagefind_enabled:
        logger.info("Pagefind indexing disabled (pagefind_enabled=False)")
        return
    try:
        stats = asyncio.run(build_pagefind_index(app))
        indexed = stats.get("page_count", 0)
        if indexed == 0:
            # NOTE: raised inside the try, so the handler below re-wraps it.
            raise RuntimeError("Pagefind indexing failed: no pages were indexed")
        logger.info("✓ Pagefind index created with %s pages", indexed)
    except Exception as err:
        logger.exception("Failed to build Pagefind index")
        raise RuntimeError(f"Pagefind indexing failed: {err}") from err
def copy_static_files(app: Sphinx, exception: Exception | None) -> None:
    """Copy the extension's CSS and JS assets into the build's _static directory.

    No-op when the build failed or the builder is not producing HTML.
    """
    if exception or app.builder.format != "html":
        return
    static_dir = Path(app.builder.outdir) / "_static"
    extension_static = Path(__file__).parent / "static"
    # Same treatment for both assets: ensure the target subdir exists, then
    # copy the file only when it is actually shipped with the extension.
    for subdir, filename in (("css", "pagefind.css"), ("js", "search.js")):
        src = extension_static / subdir / filename
        dest = static_dir / subdir
        dest.mkdir(parents=True, exist_ok=True)
        if src.exists():
            copy_asset(str(src), str(dest))
| {
"repo_id": "apache/airflow",
"file_path": "devel-common/src/sphinx_exts/pagefind_search/builder.py",
"license": "Apache License 2.0",
"lines": 174,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/common/sql/tests/system/common/sql/example_sql_value_check.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow import DAG
from airflow.providers.common.sql.operators.sql import SQLValueCheckOperator
from airflow.sdk.timezone import datetime
# Connection details used as default_args for the example; in a real deployment
# these would come from a configured Airflow Connection, not hard-coded values.
connection_args = {
    "conn_id": "sales_db",
    "conn_type": "Postgres",
    "host": "postgres",
    "schema": "postgres",
    "login": "postgres",
    "password": "postgres",
    "port": 5432,
}

with DAG(
    "example_sql_value_check_query",
    description="Example DAG for SQLValueCheckOperator.",
    default_args=connection_args,
    start_date=datetime(2025, 12, 15),
    schedule=None,
    catchup=False,
) as dag:
    """
    ### Example SQL value check DAG
    Runs the SQLValueCheckOperator against the Airflow metadata DB.
    """
    # [START howto_operator_sql_value_check]
    # NOTE(review): "threshhold_check" is misspelled, but the task_id is a
    # runtime identifier — confirm downstream references before renaming.
    value_check = SQLValueCheckOperator(
        task_id="threshhold_check",
        conn_id="sales_db",
        sql="SELECT count(distinct(customer_id)) FROM sales LIMIT 50;",
        pass_value=40,
        tolerance=5,
    )
    # [END howto_operator_sql_value_check]

from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
| {
"repo_id": "apache/airflow",
"file_path": "providers/common/sql/tests/system/common/sql/example_sql_value_check.py",
"license": "Apache License 2.0",
"lines": 54,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/serialization/stringify.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any, TypeVar
T = TypeVar("T", bool, float, int, dict, list, str, tuple, set)
class StringifyNotSupportedError(ValueError):
    """
    Raised when stringify() cannot handle a serialized object and full deserialization is required.

    This typically occurs when trying to stringify Airflow classes that need to be fully deserialized
    using XComModel.deserialize_value() instead. Subclasses ValueError so callers
    catching ValueError keep working.
    """
# Keys of the new-style serialized-dict envelope: {CLASSNAME: ..., VERSION: ..., DATA: ...}.
CLASSNAME = "__classname__"
VERSION = "__version__"
DATA = "__data__"
# Keys/markers of the legacy envelope, normalized by _convert() below.
OLD_TYPE = "__type"
OLD_DATA = "__var"
OLD_DICT = "dict"
# Version assigned when converting a legacy payload that carried none.
DEFAULT_VERSION = 0
# Types returned as-is by stringify(); bool is matched via int.
_primitives = (int, bool, float, str)
# Collection types stringify() recurses into without an envelope.
_builtin_collections = (frozenset, list, set, tuple)
def _convert(old: dict) -> dict:
    """Normalize an old-style serialized dict into the new-style envelope."""
    if OLD_TYPE not in old or OLD_DATA not in old:
        # Not an old-style payload; hand it back untouched.
        return old
    if old[OLD_TYPE] == OLD_DICT:
        # Old-style plain dicts were stored unwrapped, so no envelope is needed.
        return old[OLD_DATA]
    return {CLASSNAME: old[OLD_TYPE], VERSION: DEFAULT_VERSION, DATA: old[OLD_DATA]}
def decode(d: dict[str, Any]) -> tuple[str, int, Any]:
    """Split a serialized dict into its (classname, version, data) triple.

    :raises ValueError: when the classname is not a str or the version is not an int
    """
    name = d[CLASSNAME]
    ver = d[VERSION]
    if not (isinstance(name, str) and isinstance(ver, int)):
        raise ValueError(f"cannot decode {d!r}")
    return name, ver, d.get(DATA)
def _stringify_builtin_collection(classname: str, value: Any):
    """Render a serialized tuple/set/frozenset payload as ``(i1,i2,...)``.

    Returns None when *classname* is not one of the builtin collection types or
    the payload is not a collection, letting the caller fall back to the
    generic ``classname@version=...`` representation.
    """
    builtin_names = ("builtins.tuple", "builtins.set", "builtins.frozenset")
    if classname in builtin_names and isinstance(value, (list, tuple, set, frozenset)):
        rendered = ",".join(str(stringify(item)) for item in value)
        return f"({rendered})"
    return None
def stringify(o: T | None) -> object:
    """
    Convert a serialized object to a human readable representation.

    Mirrors deserialize(full=False) exactly: primitives, collections, and plain
    dicts come back as themselves (recursively stringified); only an enveloped
    serialized object (carrying ``__classname__``) collapses to a string.

    :raises TypeError: when the envelope carries an empty classname
    :raises StringifyNotSupportedError: for ``airflow.*`` classes, which require
        full deserialization instead
    """
    if o is None or isinstance(o, _primitives):
        return o
    # tuples, sets are included here for backwards compatibility
    if isinstance(o, _builtin_collections):
        converted = [stringify(item) for item in o]
        if isinstance(o, tuple):
            return tuple(converted)
        if isinstance(o, set):
            return set(converted)
        # frozensets (and lists) come back as plain lists
        return converted
    if not isinstance(o, dict):
        # already a deserialized object — return it unchanged
        return o

    normalized = _convert(o)
    if CLASSNAME not in normalized or VERSION not in normalized:
        # plain dict with no type hint
        return {str(k): stringify(v) for k, v in normalized.items()}

    classname, version, payload = decode(normalized)
    if not classname:
        raise TypeError("classname cannot be empty")
    # Early detection for `airflow.` classes. These classes will need full
    # deserialization, not just stringification.
    if isinstance(classname, str) and classname.startswith("airflow."):
        raise StringifyNotSupportedError(
            f"Cannot stringify Airflow class '{classname}'. "
            f"Use XComModel.deserialize_value() to deserialize Airflow classes."
        )
    shortcut = _stringify_builtin_collection(classname, payload)
    if shortcut is not None:
        return shortcut

    # Generic string representation: classname@version=N(<body>).
    if isinstance(payload, _primitives):
        body = f"{payload}"
    elif isinstance(payload, _builtin_collections):
        # deserialized values can be != str
        body = ",".join(str(stringify(v)) for v in payload)
    elif isinstance(payload, dict):
        body = ",".join(f"{k}={stringify(v)}" for k, v in payload.items())
    else:
        body = ""
    return f"{classname}@version={version}({body})"
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/serialization/stringify.py",
"license": "Apache License 2.0",
"lines": 107,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/tests/unit/serialization/test_stringify.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from dataclasses import dataclass
from typing import ClassVar
import pytest
from airflow._shared.module_loading import qualname
from airflow.sdk.serde import serialize
from airflow.serialization.stringify import CLASSNAME, VERSION, stringify
@dataclass
class W:
    # Minimal versioned payload; nested inside V for the round-trip test below.
    __version__: ClassVar[int] = 2
    x: int
@dataclass
class V:
    # Composite versioned payload exercising nested dataclass, list, tuple,
    # and int fields in one serialized object.
    __version__: ClassVar[int] = 1
    w: W
    s: list
    t: tuple
    c: int
class TestStringify:
    """Unit tests for airflow.serialization.stringify.stringify()."""

    # Existing ported over tests
    def test_stringify(self):
        i = V(W(10), ["l1", "l2"], (1, 2), 10)
        e = serialize(i)
        s = stringify(e)
        assert f"{qualname(V)}@version={V.__version__}" in s
        # asdict from dataclasses removes class information
        assert "w={'x': 10}" in s
        assert "s=['l1', 'l2']" in s
        assert "t=(1,2)" in s
        assert "c=10" in s
        e["__data__"]["t"] = (1, 2)
        s = stringify(e)
        assert "t=(1, 2)" in s

    @pytest.mark.parametrize(
        ("value", "expected"),
        [
            (123, "dummy@version=1(123)"),
            ([1], "dummy@version=1(1)"),
        ],
    )
    def test_serde_stringify_primitives(self, value, expected):
        e = {CLASSNAME: "dummy", VERSION: 1, "__data__": value}
        assert stringify(e) == expected

    # New tests

    # None and primitives pass through unchanged.
    def test_stringify_none(self):
        assert stringify(None) is None

    @pytest.mark.parametrize(
        "value",
        [
            42,
            "hello",
            True,
            False,
            3.14,
        ],
    )
    def test_stringify_primitives(self, value):
        assert stringify(value) == value

    # Raw (non-enveloped) collections keep their container type,
    # except frozenset which becomes a plain list.
    def test_stringify_raw_list(self):
        result = stringify([1, 2, 3])
        assert result == [1, 2, 3]
        assert isinstance(result, list)

    def test_stringify_raw_tuple(self):
        result = stringify((1, 2, 3))
        assert result == (1, 2, 3)
        assert isinstance(result, tuple)

    def test_stringify_raw_set(self):
        result = stringify({1, 2, 3})
        assert result == {1, 2, 3}
        assert isinstance(result, set)

    def test_stringify_raw_frozenset(self):
        result = stringify(frozenset({1, 2, 3}))
        assert result == [1, 2, 3]
        assert isinstance(result, list)

    def test_stringify_plain_dict(self):
        result = stringify({"key": "value", "num": 42})
        assert result == {"key": "value", "num": 42}
        assert isinstance(result, dict)

    # Enveloped builtin collections collapse to a "(...)" string.
    def test_stringify_serialized_tuple(self):
        e = {CLASSNAME: "builtins.tuple", VERSION: 1, "__data__": [1, 2, 3]}
        result = stringify(e)
        assert result == "(1,2,3)"
        assert isinstance(result, str)

    def test_stringify_serialized_set(self):
        e = {CLASSNAME: "builtins.set", VERSION: 1, "__data__": [1, 2, 3]}
        result = stringify(e)
        assert result == "(1,2,3)"
        assert isinstance(result, str)

    def test_stringify_serialized_frozenset(self):
        e = {CLASSNAME: "builtins.frozenset", VERSION: 1, "__data__": [1, 2, 3]}
        result = stringify(e)
        assert result == "(1,2,3)"
        assert isinstance(result, str)

    def test_stringify_nested_serialized(self):
        e = {
            CLASSNAME: "test.Outer",
            VERSION: 1,
            "__data__": {
                "inner": {CLASSNAME: "builtins.tuple", VERSION: 1, "__data__": [1, 2, 3]},
            },
        }
        result = stringify(e)
        assert "test.Outer@version=1" in result
        assert "inner=(1,2,3)" in result

    # Legacy (old-style) envelopes are converted before stringifying.
    def test_stringify_old_style_tuple(self):
        e = {"__type": "builtins.tuple", "__var": [1, 2, 3]}
        result = stringify(e)
        assert result == "(1,2,3)"

    def test_stringify_old_style_dict(self):
        e = {"__type": "dict", "__var": {"key": "value"}}
        result = stringify(e)
        assert result == {"key": "value"}
        assert isinstance(result, dict)

    def test_stringify_list_with_serialized_tuple(self):
        e = [{CLASSNAME: "builtins.tuple", VERSION: 1, "__data__": [1, 2]}]
        result = stringify(e)
        assert result == ["(1,2)"]
        assert isinstance(result, list)

    def test_stringify_dict_with_serialized_tuple(self):
        e = {"key": {CLASSNAME: "builtins.tuple", VERSION: 1, "__data__": [1, 2]}}
        result = stringify(e)
        assert result == {"key": "(1,2)"}
        assert isinstance(result, dict)

    # Empty containers round-trip as themselves.
    def test_stringify_empty_list(self):
        assert stringify([]) == []

    def test_stringify_empty_tuple(self):
        assert stringify(()) == ()

    def test_stringify_empty_set(self):
        assert stringify(set()) == set()

    def test_stringify_empty_dict(self):
        assert stringify({}) == {}

    def test_stringify_dict_with_none_value(self):
        result = stringify({"key": None})
        assert result == {"key": None}

    def test_stringify_list_with_none(self):
        result = stringify([None, 1, None])
        assert result == [None, 1, None]

    def test_stringify_custom_object(self):
        e = {
            CLASSNAME: "deltalake.table.DeltaTable",
            VERSION: 1,
            "__data__": {"table_uri": "s3://bucket/path", "version": 0},
        }
        result = stringify(e)
        assert "deltalake.table.DeltaTable@version=1" in result
        assert "table_uri=s3://bucket/path" in result
        assert "version=0" in result

    def test_stringify_empty_classname_error(self):
        e = {CLASSNAME: "", VERSION: 1, "__data__": {}}
        with pytest.raises(TypeError, match="classname cannot be empty"):
            stringify(e)

    def test_stringify_already_deserialized_object(self):
        # Non-dict, non-collection objects are assumed deserialized already.
        class CustomObj:
            def __init__(self):
                self.x = 10

        obj = CustomObj()
        result = stringify(obj)
        assert result is obj

    def test_stringify_nested_plain_dict(self):
        e = {"outer": {"inner": "value", "num": 42}}
        result = stringify(e)
        assert result == {"outer": {"inner": "value", "num": 42}}

    def test_stringify_recursive_collection(self):
        e = [[1, 2], [3, 4]]
        result = stringify(e)
        assert result == [[1, 2], [3, 4]]

    def test_stringify_dict_with_nested_serialized(self):
        e = {
            "key1": {CLASSNAME: "builtins.tuple", VERSION: 1, "__data__": [1, 2]},
            "key2": "value",
        }
        result = stringify(e)
        assert result == {"key1": "(1,2)", "key2": "value"}

    def test_error_thrown_for_airflow_classes(self):
        # StringifyNotSupportedError subclasses ValueError, hence the match here.
        from airflow.sdk import AssetAlias

        e = serialize(AssetAlias("x"))
        with pytest.raises(ValueError, match="Cannot stringify Airflow class"):
            stringify(e)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/serialization/test_stringify.py",
"license": "Apache License 2.0",
"lines": 195,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:task-sdk/src/airflow/sdk/serde/serializers/bignum.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.sdk.module_loading import qualname
if TYPE_CHECKING:
import decimal
from airflow.sdk.serde import U
# Fully qualified class names this serializer module handles.
serializers = ["decimal.Decimal"]
# Deserialization supports exactly the same classes.
deserializers = serializers
# Format version written by serialize() and checked by deserialize().
__version__ = 1
def serialize(o: object) -> tuple[U, str, int, bool]:
    """Serialize a ``Decimal`` to an int (when integral) or a float.

    Returns the standard serde tuple ``(data, classname, version, handled)``;
    ``handled`` is False for non-Decimal input.
    """
    import decimal

    if not isinstance(o, decimal.Decimal):
        return "", "", 0, False
    classname = qualname(o)
    exponent = o.as_tuple().exponent
    if isinstance(exponent, int) and exponent >= 0:
        # No digits after the decimal point — representable exactly as int.
        return int(o), classname, __version__, True
    # Technically lossy due to floating point errors, but the best we
    # can do without implementing a custom encode function.
    return float(o), classname, __version__, True
def deserialize(cls: type, version: int, data: object) -> decimal.Decimal:
    """Rebuild a ``Decimal`` from its serialized int/float representation.

    :raises TypeError: when the payload version is newer than this module
        supports, or when *cls* is not ``Decimal``
    """
    import decimal

    if version > __version__:
        raise TypeError(f"serialized {version} of {qualname(cls)} > {__version__}")
    if cls is not decimal.Decimal:
        raise TypeError(f"do not know how to deserialize {qualname(cls)}")
    # Round-trip through str so both int and float payloads reconstruct cleanly.
    return decimal.Decimal(str(data))
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/serde/serializers/bignum.py",
"license": "Apache License 2.0",
"lines": 44,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:task-sdk/src/airflow/sdk/serde/serializers/builtin.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, cast
from airflow.sdk.module_loading import qualname
if TYPE_CHECKING:
from airflow.sdk.serde import U
__version__ = 1
serializers = ["builtins.frozenset", "builtins.set", "builtins.tuple"]
deserializers = serializers
stringifiers = serializers
def serialize(o: object) -> tuple[U, str, int, bool]:
    """Serialize a tuple/set/frozenset by flattening it into a plain list."""
    as_list = list(cast("list", o))
    return as_list, qualname(o), __version__, True
def deserialize(cls: type, version: int, data: list) -> tuple | set | frozenset:
    """Rebuild a tuple, set or frozenset from its serialized list form."""
    if version > __version__:
        raise TypeError(f"serialized version {version} is newer than class version {__version__}")
    # Only the three builtin collection types are supported; each can be
    # constructed directly from the list payload.
    if cls in (tuple, set, frozenset):
        return cls(data)
    raise TypeError(f"do not know how to deserialize {qualname(cls)}")
def stringify(classname: str, version: int, data: list) -> str:
    """Render the serialized collection as a parenthesised, comma-joined string."""
    if classname not in stringifiers:
        raise TypeError(f"do not know how to stringify {classname}")
    joined = ",".join(map(str, data))
    return f"({joined})"
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/serde/serializers/builtin.py",
"license": "Apache License 2.0",
"lines": 43,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/src/airflow/sdk/serde/serializers/datetime.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.sdk._shared.timezones.timezone import parse_timezone
from airflow.sdk.module_loading import qualname
from airflow.sdk.serde.serializers.timezone import (
deserialize as deserialize_timezone,
serialize as serialize_timezone,
)
if TYPE_CHECKING:
import datetime
from airflow.sdk.serde import U
__version__ = 2
serializers = [
"datetime.date",
"datetime.datetime",
"datetime.timedelta",
"pendulum.datetime.DateTime",
"pendulum.date.Date",
]
deserializers = serializers
TIMESTAMP = "timestamp"
TIMEZONE = "tz"
def serialize(o: object) -> tuple[U, str, int, bool]:
    """Serialize datetime, date and timedelta values (incl. pendulum subclasses)."""
    from datetime import date, datetime, timedelta

    # datetime must be tested before date: datetime is a date subclass.
    if isinstance(o, datetime):
        tzinfo = serialize_timezone(o.tzinfo) if o.tzinfo else None
        payload = {TIMESTAMP: o.timestamp(), TIMEZONE: tzinfo}
        return payload, qualname(o), __version__, True
    if isinstance(o, date):
        return o.isoformat(), qualname(o), __version__, True
    if isinstance(o, timedelta):
        return o.total_seconds(), qualname(o), __version__, True
    return "", "", 0, False
def deserialize(cls: type, version: int, data: dict | str) -> datetime.date | datetime.timedelta:
    """Rebuild a date/datetime/timedelta (or pendulum Date/DateTime) from serialized data.

    :param cls: the concrete class to reconstruct
    :param version: format version the payload was written with (1 or 2)
    :param data: a ``{"timestamp": ..., "tz": ...}`` dict for datetimes, an ISO
        date string for dates, or seconds (str/float) for timedeltas
    :raises TypeError: when *cls*/*data* is not a supported combination
    """
    import datetime

    from pendulum import Date, DateTime

    tz: datetime.tzinfo | None = None
    if isinstance(data, dict) and TIMEZONE in data:
        if version == 1:
            # try to deserialize unsupported timezones
            # (these DST abbreviations are mapped to fixed offsets / CET
            # because parse_timezone cannot resolve them directly)
            timezone_mapping = {
                "EDT": parse_timezone(-4 * 3600),
                "CDT": parse_timezone(-5 * 3600),
                "MDT": parse_timezone(-6 * 3600),
                "PDT": parse_timezone(-7 * 3600),
                "CEST": parse_timezone("CET"),
            }
            if data[TIMEZONE] in timezone_mapping:
                tz = timezone_mapping[data[TIMEZONE]]
            else:
                tz = parse_timezone(data[TIMEZONE])
        else:
            # Version 2: the tz entry appears to be the tuple produced by the
            # timezone serializer, indexed as [0]=payload, [1]=classname,
            # [2]=version — TODO confirm against serializers/timezone.serialize.
            tz = (
                deserialize_timezone(data[TIMEZONE][1], data[TIMEZONE][2], data[TIMEZONE][0])
                if data[TIMEZONE]
                else None
            )
    if cls is datetime.datetime and isinstance(data, dict):
        return datetime.datetime.fromtimestamp(float(data[TIMESTAMP]), tz=tz)
    if cls is DateTime and isinstance(data, dict):
        return DateTime.fromtimestamp(float(data[TIMESTAMP]), tz=tz)
    if cls is datetime.timedelta and isinstance(data, str | float):
        return datetime.timedelta(seconds=float(data))
    if cls is Date and isinstance(data, str):
        return Date.fromisoformat(data)
    if cls is datetime.date and isinstance(data, str):
        return datetime.date.fromisoformat(data)
    raise TypeError(f"unknown date/time format {qualname(cls)}")
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/serde/serializers/datetime.py",
"license": "Apache License 2.0",
"lines": 85,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/src/airflow/sdk/serde/serializers/deltalake.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.sdk.module_loading import qualname
serializers = ["deltalake.table.DeltaTable"]
deserializers = serializers
stringifiers = serializers
if TYPE_CHECKING:
from airflow.sdk.serde import U
__version__ = 1
def serialize(o: object) -> tuple[U, str, int, bool]:
    """Serialize a DeltaTable as its URI, version and encrypted storage options."""
    from deltalake.table import DeltaTable

    if not isinstance(o, DeltaTable):
        return "", "", 0, False
    from airflow.sdk.crypto import get_fernet

    # we encrypt the information here until we have as part of the
    # storage options can have sensitive information
    fernet = get_fernet()
    encrypted_options = {
        k: fernet.encrypt(v.encode("utf-8")).decode("utf-8")
        for k, v in (o._storage_options or {}).items()
    }
    payload = {
        "table_uri": o.table_uri,
        "version": o.version(),
        "storage_options": encrypted_options,
    }
    return payload, qualname(o), __version__, True
def deserialize(cls: type, version: int, data: dict):
    """Recreate a DeltaTable from its serialized URI, version and options."""
    from deltalake.table import DeltaTable

    from airflow.sdk.crypto import get_fernet

    if version > __version__:
        raise TypeError("serialized version is newer than class version")
    if cls is DeltaTable:
        fernet = get_fernet()
        decrypted_options = {
            k: fernet.decrypt(v.encode("utf-8")).decode("utf-8")
            for k, v in data["storage_options"].items()
        }
        return DeltaTable(
            data["table_uri"],
            version=data["version"],
            # An empty mapping is passed to DeltaTable as None instead.
            storage_options=decrypted_options or None,
        )
    raise TypeError(f"do not know how to deserialize {qualname(cls)}")
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/serde/serializers/deltalake.py",
"license": "Apache License 2.0",
"lines": 59,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/src/airflow/sdk/serde/serializers/iceberg.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.sdk.module_loading import qualname
serializers = ["pyiceberg.table.Table"]
deserializers = serializers
stringifiers = serializers
if TYPE_CHECKING:
from airflow.sdk.serde import U
__version__ = 1
def serialize(o: object) -> tuple[U, str, int, bool]:
    """Serialize a pyiceberg Table as its identifier plus encrypted catalog properties."""
    from pyiceberg.table import Table

    if not isinstance(o, Table):
        return "", "", 0, False
    from airflow.sdk.crypto import get_fernet

    # we encrypt the catalog information here until we have
    # global catalog management in airflow and the properties
    # can have sensitive information
    fernet = get_fernet()
    encrypted_properties = {
        k: fernet.encrypt(v.encode("utf-8")).decode("utf-8")
        for k, v in o.catalog.properties.items()
    }
    payload = {
        "identifier": o._identifier,
        "catalog_properties": encrypted_properties,
    }
    return payload, qualname(o), __version__, True
def deserialize(cls: type, version: int, data: dict):
    """Reload a pyiceberg Table by reconnecting to its catalog."""
    from pyiceberg.catalog import load_catalog
    from pyiceberg.table import Table

    from airflow.sdk.crypto import get_fernet

    if version > __version__:
        raise TypeError("serialized version is newer than class version")
    if cls is not Table:
        raise TypeError(f"do not know how to deserialize {qualname(cls)}")
    fernet = get_fernet()
    catalog_properties = {
        k: fernet.decrypt(v.encode("utf-8")).decode("utf-8")
        for k, v in data["catalog_properties"].items()
    }
    identifier = data["identifier"]
    # identifier[0] names the catalog; [1] and [2] address the table in it.
    catalog = load_catalog(identifier[0], **catalog_properties)
    return catalog.load_table((identifier[1], identifier[2]))
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/serde/serializers/iceberg.py",
"license": "Apache License 2.0",
"lines": 57,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/src/airflow/sdk/serde/serializers/kubernetes.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from typing import TYPE_CHECKING
from airflow.sdk.module_loading import qualname
# lazy loading for performance reasons
serializers = [
"kubernetes.client.models.v1_resource_requirements.V1ResourceRequirements",
"kubernetes.client.models.v1_pod.V1Pod",
]
if TYPE_CHECKING:
from airflow.sdk.serde import U
__version__ = 1
deserializers: list[type[object]] = []
log = logging.getLogger(__name__)
def serialize(o: object) -> tuple[U, str, int, bool]:
    """Serialize V1Pod/V1ResourceRequirements through PodGenerator, best effort."""
    from kubernetes.client import models as k8s

    if not k8s:
        return "", "", 0, False
    if not isinstance(o, (k8s.V1Pod, k8s.V1ResourceRequirements)):
        return "", "", 0, False
    from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator

    def safe_get_name(pod):
        # Used from the except handler below, so it must not fail under any
        # circumstances, e.g. accessing a non-existing attribute.
        try:
            return pod.metadata.name
        except Exception:
            return None

    try:
        return PodGenerator.serialize_pod(o), qualname(o), __version__, True
    except Exception:
        # Serialization is best effort: log and fall through to "not handled".
        log.warning("Serialization failed for pod %s", safe_get_name(o))
        log.debug("traceback for serialization error", exc_info=True)
        return "", "", 0, False
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/serde/serializers/kubernetes.py",
"license": "Apache License 2.0",
"lines": 51,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/src/airflow/sdk/serde/serializers/numpy.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Any
from airflow.sdk.module_loading import qualname
# lazy loading for performance reasons
serializers = [
"numpy.int8",
"numpy.int16",
"numpy.int32",
"numpy.int64",
"numpy.uint8",
"numpy.uint16",
"numpy.uint32",
"numpy.uint64",
"numpy.float64",
"numpy.float32",
"numpy.float16",
"numpy.complex128",
"numpy.complex64",
"numpy.bool",
"numpy.bool_",
]
if TYPE_CHECKING:
from airflow.sdk.serde import U
deserializers = serializers
__version__ = 1
def serialize(o: object) -> tuple[U, str, int, bool]:
    """Serialize numpy scalar types into plain Python int/bool/float."""
    import numpy as np

    if np is None:
        return "", "", 0, False
    classname = qualname(o)
    tail = (classname, __version__, True)
    integer_types = (
        np.int_,
        np.intc,
        np.intp,
        np.int8,
        np.int16,
        np.int32,
        np.int64,
        np.uint8,
        np.uint16,
        np.uint32,
        np.uint64,
    )
    if isinstance(o, integer_types):
        return int(o), *tail
    # np.bool only exists on newer numpy; np.bool_ covers older releases.
    if (hasattr(np, "bool") and isinstance(o, np.bool)) or isinstance(o, np.bool_):
        return bool(o), *tail
    if isinstance(o, (np.float16, np.float32, np.float64, np.complex64, np.complex128)):
        return float(o), *tail
    return "", "", 0, False
def deserialize(cls: type, version: int, data: str) -> Any:
    """Reconstruct a numpy scalar by calling the target class on its payload."""
    if version <= __version__:
        # numpy scalar constructors accept the serialized primitive directly.
        return cls(data)
    raise TypeError("serialized version is newer than class version")
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/serde/serializers/numpy.py",
"license": "Apache License 2.0",
"lines": 72,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/src/airflow/sdk/serde/serializers/pandas.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.sdk.module_loading import qualname
# lazy loading for performance reasons
serializers = [
"pandas.core.frame.DataFrame",
]
deserializers = serializers
if TYPE_CHECKING:
import pandas as pd
from airflow.sdk.serde import U
__version__ = 1
def serialize(o: object) -> tuple[U, str, int, bool]:
    """Serialize a DataFrame into a hex-encoded, snappy-compressed parquet blob."""
    import pandas as pd
    import pyarrow as pa
    from pyarrow import parquet as pq

    if not isinstance(o, pd.DataFrame):
        return "", "", 0, False
    # for now, we *always* serialize into in memory
    # until we have a generic backend that manages
    # sinks
    sink = pa.BufferOutputStream()
    pq.write_table(pa.Table.from_pandas(o), sink, compression="snappy")
    encoded = sink.getvalue().hex().decode("utf-8")
    return encoded, qualname(o), __version__, True
def deserialize(cls: type, version: int, data: object) -> pd.DataFrame:
    """Rebuild a DataFrame from its hex-encoded parquet representation."""
    if version > __version__:
        raise TypeError(f"serialized {version} of {qualname(cls)} > {__version__}")
    import pandas as pd

    if cls is not pd.DataFrame:
        raise TypeError(f"do not know how to deserialize {qualname(cls)}")
    if not isinstance(data, str):
        raise TypeError(f"serialized {qualname(cls)} has wrong data type {type(data)}")
    from io import BytesIO

    from pyarrow import parquet as pq

    # The payload is the hex encoding of a parquet file written in memory.
    with BytesIO(bytes.fromhex(data)) as buffer:
        return pq.read_table(buffer).to_pandas()
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/serde/serializers/pandas.py",
"license": "Apache License 2.0",
"lines": 55,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/src/airflow/sdk/serde/serializers/pydantic.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.sdk.module_loading import qualname
from airflow.sdk.serde.typing import is_pydantic_model
if TYPE_CHECKING:
from airflow.sdk.serde import U
serializers = [
"pydantic.main.BaseModel",
]
deserializers = serializers
__version__ = 1
def serialize(o: object) -> tuple[U, str, int, bool]:
    """
    Serialize a Pydantic BaseModel instance into a dict of built-in types.

    Returns the model_dump() payload, the model's qualified name, the
    serializer version and True when handled; a falsy 4-tuple otherwise.
    """
    if is_pydantic_model(o):
        return o.model_dump(mode="json"), qualname(o), __version__, True  # type: ignore
    return "", "", 0, False
def deserialize(cls: type, version: int, data: dict):
    """
    Deserialize a dict produced by ``BaseModel.model_dump`` back into *cls*.

    :param cls: the concrete Pydantic model class
    :param version: serialization version (must not exceed __version__)
    :param data: dictionary of built-in types, typically from model_dump()
    :return: an instance of *cls*
    :raises TypeError: for unsupported versions or non-Pydantic classes
    """
    if version > __version__:
        raise TypeError(f"Serialized version {version} is newer than the supported version {__version__}")
    if is_pydantic_model(cls):
        # Validation-based reconstruction; restores nested models as well.
        return cls.model_validate(data)  # type: ignore
    raise TypeError(f"No deserializer found for {qualname(cls)}")
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/serde/serializers/pydantic.py",
"license": "Apache License 2.0",
"lines": 59,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/src/airflow/sdk/serde/serializers/timezone.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
from typing import TYPE_CHECKING, Any, cast
from airflow.sdk.module_loading import qualname
if TYPE_CHECKING:
from airflow.sdk.serde import U
serializers = [
"pendulum.tz.timezone.FixedTimezone",
"pendulum.tz.timezone.Timezone",
"zoneinfo.ZoneInfo",
]
deserializers = serializers
__version__ = 1
def serialize(o: object) -> tuple[U, str, int, bool]:
    """
    Encode a timezone object for serialization.

    Only timezones implementing Pendulum's Timezone interface are supported.
    As much information as possible is preserved so the value round-trips
    through ``deserialize``. UTC is special-cased: Pendulum models it as a
    FixedTimezone with offset 0, but passing 0 back into ``pendulum.timezone``
    yields ``+00:00`` rather than UTC, so it is stored as the string "UTC".
    """
    from pendulum.tz.timezone import FixedTimezone

    classname = qualname(o)
    if isinstance(o, FixedTimezone):
        # Offset 0 is UTC (see docstring); other offsets serialize as ints.
        payload = "UTC" if o.offset == 0 else o.offset
        return payload, classname, __version__, True
    tz_name = _get_tzinfo_name(cast("datetime.tzinfo", o))
    if tz_name is not None:
        return tz_name, classname, __version__, True
    if cast("datetime.tzinfo", o).utcoffset(None) == datetime.timedelta(0):
        # Any other zero-offset tzinfo is normalized to a fixed UTC.
        return "UTC", qualname(FixedTimezone), __version__, True
    return "", "", 0, False
def deserialize(cls: type, version: int, data: object) -> Any:
    """Rebuild a timezone from its serialized name or integer offset."""
    from zoneinfo import ZoneInfo

    from airflow.sdk._shared.timezones.timezone import parse_timezone

    if not isinstance(data, (str, int)):
        raise TypeError(f"{data} is not of type int or str but of {type(data)}")
    if version > __version__:
        raise TypeError(f"serialized {version} of {qualname(cls)} > {__version__}")
    # ZoneInfo targets reconstruct directly from the zone key; everything
    # else (names and fixed offsets) goes through parse_timezone.
    if cls is ZoneInfo and isinstance(data, str):
        return ZoneInfo(data)
    return parse_timezone(data)
# ported from pendulum.tz.timezone._get_tzinfo_name
def _get_tzinfo_name(tzinfo: datetime.tzinfo | None) -> str | None:
if tzinfo is None:
return None
if hasattr(tzinfo, "key"):
# zoneinfo timezone
return tzinfo.key
if hasattr(tzinfo, "name"):
# Pendulum timezone
return tzinfo.name
if hasattr(tzinfo, "zone"):
# pytz timezone
return tzinfo.zone
return None
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/serde/serializers/timezone.py",
"license": "Apache License 2.0",
"lines": 76,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/src/airflow/sdk/serde/typing.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from dataclasses import is_dataclass
from typing import Any
def is_pydantic_model(cls: Any) -> bool:
    """
    Return True if the class is a pydantic.main.BaseModel.

    Checking is done by attributes as it is significantly faster than
    using isinstance.
    """
    # Pydantic V2 models always carry __pydantic_fields__ (a dict of
    # FieldInfo) and __pydantic_validator__ (set when the model is built).
    # Pydantic dataclasses expose these too, so real dataclasses are
    # excluded to avoid misdetecting them as models.
    looks_pydantic = hasattr(cls, "__pydantic_fields__") and hasattr(cls, "__pydantic_validator__")
    return looks_pydantic and not is_dataclass(cls)
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/serde/typing.py",
"license": "Apache License 2.0",
"lines": 33,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/tests/unit/serialization/definitions/test_assets.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.api_fastapi.execution_api.datamodels.asset import AssetProfile
from airflow.serialization.definitions.assets import (
SerializedAsset,
SerializedAssetAlias,
SerializedAssetAll,
SerializedAssetAny,
SerializedAssetUniqueKey,
)
# Shared fixture: a fully-populated serialized asset reused by the tests below.
asset1 = SerializedAsset(
    name="asset-1",
    uri="s3://bucket1/data1/",
    group="group-1",
    extra={},
    watchers=[],
)
def test_asset_iter_assets():
    """A plain asset yields exactly one (unique-key, asset) pair."""
    expected_key = SerializedAssetUniqueKey("asset-1", "s3://bucket1/data1/")
    assert list(asset1.iter_assets()) == [(expected_key, asset1)]
def test_asset_iter_asset_aliases():
    """iter_asset_aliases finds aliases at every nesting depth.

    Aliases 1-5 are spread across nested All/Any conditions so the expected
    result checks they come back in definition order regardless of depth.
    """
    base_asset = SerializedAssetAll(
        [
            SerializedAssetAlias(name="example-alias-1", group=""),
            SerializedAsset(name="1", uri="foo://1/", group="", extra={}, watchers=[]),
            SerializedAssetAny(
                [
                    SerializedAsset(name="2", uri="test://2/", group="", extra={}, watchers=[]),
                    SerializedAssetAlias(name="example-alias-2", group=""),
                    SerializedAsset(name="3", uri="test://3/", group="", extra={}, watchers=[]),
                    SerializedAssetAll(
                        [
                            SerializedAssetAlias("example-alias-3", group=""),
                            SerializedAsset(name="4", uri="test://4/", group="", extra={}, watchers=[]),
                            SerializedAssetAlias("example-alias-4", group=""),
                        ],
                    ),
                ],
            ),
            SerializedAssetAll(
                [
                    SerializedAssetAlias("example-alias-5", group=""),
                    SerializedAsset(name="5", uri="test://5/", group="", extra={}, watchers=[]),
                ],
            ),
        ],
    )
    assert list(base_asset.iter_asset_aliases()) == [
        (f"example-alias-{i}", SerializedAssetAlias(name=f"example-alias-{i}", group="")) for i in range(1, 6)
    ]
def test_asset_alias_as_expression():
    """as_expression wraps the alias attributes under an "alias" key."""
    expected = {"alias": {"name": "test_name", "group": "test"}}
    assert SerializedAssetAlias(name="test_name", group="test").as_expression() == expected
class TestSerializedAssetUniqueKey:
    """Round-trip and conversion behaviour of SerializedAssetUniqueKey."""

    def test_from_asset(self):
        expected = SerializedAssetUniqueKey(name="asset-1", uri="s3://bucket1/data1/")
        assert SerializedAssetUniqueKey.from_asset(asset1) == expected

    def test_from_str(self):
        parsed = SerializedAssetUniqueKey.from_str('{"name": "test", "uri": "test://test/"}')
        assert parsed == SerializedAssetUniqueKey(name="test", uri="test://test/")

    def test_to_str(self):
        encoded = SerializedAssetUniqueKey(name="test", uri="test://test/").to_str()
        assert encoded == '{"name": "test", "uri": "test://test/"}'

    def test_asprofile(self):
        profile = SerializedAssetUniqueKey(name="test", uri="test://test/").asprofile()
        assert profile == AssetProfile(name="test", uri="test://test/", type="Asset")
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/serialization/definitions/test_assets.py",
"license": "Apache License 2.0",
"lines": 83,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:scripts/ci/prek/check_version_consistency.py | #!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# /// script
# requires-python = ">=3.10,<3.11"
# dependencies = [
# "rich>=13.6.0",
# "packaging>=25.0",
# "tomli>=2.0.1",
# ]
# ///
from __future__ import annotations
import ast
import re
import sys
from pathlib import Path
try:
import tomllib
except ImportError:
import tomli as tomllib
from packaging.specifiers import SpecifierSet
from packaging.version import Version
sys.path.insert(0, str(Path(__file__).parent.resolve()))
from common_prek_utils import (
AIRFLOW_CORE_SOURCES_PATH,
AIRFLOW_ROOT_PATH,
AIRFLOW_TASK_SDK_SOURCES_PATH,
console,
)
def read_airflow_version() -> str:
    """Read Airflow version from airflow-core/src/airflow/__init__.py.

    Parses the module with ``ast`` instead of importing it, so the check works
    without Airflow's dependencies installed.

    :raises RuntimeError: if no ``__version__`` assignment is found.
    """
    ast_obj = ast.parse((AIRFLOW_CORE_SOURCES_PATH / "airflow" / "__init__.py").read_text())
    for node in ast_obj.body:
        if isinstance(node, ast.Assign):
            target = node.targets[0]
            # Guard on ast.Name: attribute/tuple targets have no `.id` and
            # would otherwise raise AttributeError (the old `type: ignore`
            # was masking exactly that).
            if isinstance(target, ast.Name) and target.id == "__version__":
                return ast.literal_eval(node.value)
    raise RuntimeError("Couldn't find __version__ in airflow-core/src/airflow/__init__.py")
def read_task_sdk_version() -> str:
    """Read Task SDK version from task-sdk/src/airflow/sdk/__init__.py"""
    init_path = AIRFLOW_TASK_SDK_SOURCES_PATH / "airflow" / "sdk" / "__init__.py"
    module_ast = ast.parse(init_path.read_text())
    # Scan top-level assignments only; __version__ is defined at module level.
    for statement in module_ast.body:
        if not isinstance(statement, ast.Assign):
            continue
        if statement.targets[0].id == "__version__":  # type: ignore[attr-defined]
            return ast.literal_eval(statement.value)
    raise RuntimeError("Couldn't find __version__ in task-sdk/src/airflow/sdk/__init__.py")
def read_airflow_version_from_pyproject() -> str:
    """Read Airflow version from airflow-core/pyproject.toml"""
    pyproject_path = AIRFLOW_ROOT_PATH / "airflow-core" / "pyproject.toml"
    with pyproject_path.open("rb") as pyproject_file:
        project_table = tomllib.load(pyproject_file).get("project", {})
    version = project_table.get("version")
    if version:
        return str(version)
    raise RuntimeError("Couldn't find version in airflow-core/pyproject.toml")
def read_task_sdk_dependency_constraint() -> str:
    """Read Task SDK dependency constraint from airflow-core/pyproject.toml"""
    pyproject_path = AIRFLOW_ROOT_PATH / "airflow-core" / "pyproject.toml"
    with pyproject_path.open("rb") as pyproject_file:
        dependencies = tomllib.load(pyproject_file).get("project", {}).get("dependencies", [])
    for dep in dependencies:
        # The package name is everything before the first comparison operator.
        name = re.split(r"[<>=!]", dep)[0].strip().strip("\"'")
        if name != "apache-airflow-task-sdk":
            continue
        # Everything after the package name is the version constraint.
        constraint_match = re.search(r"apache-airflow-task-sdk\s*(.*)", dep)
        if constraint_match:
            return constraint_match.group(1).strip().strip("\"'")
        return dep
    raise RuntimeError("Couldn't find apache-airflow-task-sdk dependency in airflow-core/pyproject.toml")
def read_root_pyproject_version() -> str:
    """Read version from root pyproject.toml"""
    pyproject_path = AIRFLOW_ROOT_PATH / "pyproject.toml"
    with pyproject_path.open("rb") as pyproject_file:
        project_table = tomllib.load(pyproject_file).get("project", {})
    version = project_table.get("version")
    if version:
        return str(version)
    raise RuntimeError("Couldn't find version in pyproject.toml")
def read_root_airflow_core_dependency() -> str:
    """Read apache-airflow-core dependency from root pyproject.toml"""
    pyproject_path = AIRFLOW_ROOT_PATH / "pyproject.toml"
    with pyproject_path.open("rb") as pyproject_file:
        dependencies = tomllib.load(pyproject_file).get("project", {}).get("dependencies", [])
    for dep in dependencies:
        # The package name is everything before the first comparison operator.
        name = re.split(r"[<>=!]", dep)[0].strip().strip("\"'")
        if name != "apache-airflow-core":
            continue
        # Everything after the package name is the version constraint.
        constraint_match = re.search(r"apache-airflow-core\s*(.*)", dep)
        if constraint_match:
            return constraint_match.group(1).strip().strip("\"'")
        return dep
    raise RuntimeError("Couldn't find apache-airflow-core dependency in pyproject.toml")
def read_root_task_sdk_dependency_constraint() -> str:
    """Read Task SDK dependency constraint from root pyproject.toml"""
    pyproject_path = AIRFLOW_ROOT_PATH / "pyproject.toml"
    with pyproject_path.open("rb") as pyproject_file:
        dependencies = tomllib.load(pyproject_file).get("project", {}).get("dependencies", [])
    for dep in dependencies:
        # The package name is everything before the first comparison operator.
        name = re.split(r"[<>=!]", dep)[0].strip().strip("\"'")
        if name != "apache-airflow-task-sdk":
            continue
        # Everything after the package name is the version constraint.
        constraint_match = re.search(r"apache-airflow-task-sdk\s*(.*)", dep)
        if constraint_match:
            return constraint_match.group(1).strip().strip("\"'")
        return dep
    raise RuntimeError("Couldn't find apache-airflow-task-sdk dependency in pyproject.toml")
def check_version_in_constraint(version: str, constraint: str) -> bool:
    """Check if version satisfies the constraint"""
    try:
        return Version(version) in SpecifierSet(constraint)
    except Exception as e:
        # An unparsable constraint or version is treated as "does not satisfy".
        console.print(f"[red]Error parsing constraint '{constraint}': {e}[/red]")
        return False
def get_minimum_version_from_constraint(constraint: str) -> str | None:
    """
    Extract the minimum version from a constraint string.
    Returns the highest >= or > version requirement, or None if not found.
    """
    try:
        lower_bounds = [
            specifier.version for specifier in SpecifierSet(constraint) if specifier.operator in (">=", ">")
        ]
        # The effective minimum is the highest of all lower bounds.
        return max(lower_bounds, key=Version) if lower_bounds else None
    except Exception:
        return None
def check_constraint_matches_version(version: str, constraint: str) -> tuple[bool, str | None]:
    """
    Check if the constraint's minimum version matches the actual version.
    Returns (is_match, min_version_from_constraint)
    """
    minimum = get_minimum_version_from_constraint(constraint)
    # No lower bound at all means the constraint cannot match the version.
    matches = minimum is not None and Version(minimum) == Version(version)
    return (matches, minimum)
def main() -> None:
    """Validate that Airflow and Task SDK versions agree across all declaration sites.

    Reads the versions from both ``__init__.py`` files, ``airflow-core/pyproject.toml``
    and the root ``pyproject.toml`` (including dependency constraints), collects every
    mismatch so all problems are reported in one run, and exits with status 1 if any
    inconsistency was found.
    """
    errors: list[str] = []
    # Read versions
    try:
        airflow_version_init = read_airflow_version()
        airflow_version_pyproject = read_airflow_version_from_pyproject()
        root_pyproject_version = read_root_pyproject_version()
        root_airflow_core_dep = read_root_airflow_core_dependency()
        task_sdk_version_init = read_task_sdk_version()
        task_sdk_constraint = read_task_sdk_dependency_constraint()
        root_task_sdk_constraint = read_root_task_sdk_dependency_constraint()
    except Exception as e:
        # Fail fast: every later check needs all of the values above.
        console.print(f"[red]Error reading versions: {e}[/red]")
        sys.exit(1)
    # Check Airflow version consistency
    if airflow_version_init != airflow_version_pyproject:
        errors.append(
            f"Airflow version mismatch:\n"
            f" airflow-core/src/airflow/__init__.py: {airflow_version_init}\n"
            f" airflow-core/pyproject.toml: {airflow_version_pyproject}"
        )
    # Check root pyproject.toml version matches Airflow version
    if airflow_version_init != root_pyproject_version:
        errors.append(
            f"Root pyproject.toml version mismatch:\n"
            f" airflow-core/src/airflow/__init__.py: {airflow_version_init}\n"
            f" pyproject.toml: {root_pyproject_version}"
        )
    # Check root pyproject.toml apache-airflow-core dependency matches Airflow version exactly
    expected_core_dep = f"=={airflow_version_init}"
    if root_airflow_core_dep != expected_core_dep:
        errors.append(
            f"Root pyproject.toml apache-airflow-core dependency mismatch:\n"
            f" Expected: apache-airflow-core=={airflow_version_init}\n"
            f" Found: apache-airflow-core{root_airflow_core_dep}"
        )
    # Check Task SDK version is within constraint in airflow-core/pyproject.toml
    if not check_version_in_constraint(task_sdk_version_init, task_sdk_constraint):
        errors.append(
            f"Task SDK version does not satisfy constraint in airflow-core/pyproject.toml:\n"
            f" task-sdk/src/airflow/sdk/__init__.py: {task_sdk_version_init}\n"
            f" airflow-core/pyproject.toml constraint: apache-airflow-task-sdk{task_sdk_constraint}"
        )
    # Check Task SDK constraint minimum version matches actual version in airflow-core/pyproject.toml
    constraint_matches, min_version = check_constraint_matches_version(
        task_sdk_version_init, task_sdk_constraint
    )
    if not constraint_matches:
        errors.append(
            f"Task SDK constraint minimum version does not match actual version in airflow-core/pyproject.toml:\n"
            f" task-sdk/src/airflow/sdk/__init__.py: {task_sdk_version_init}\n"
            f" airflow-core/pyproject.toml constraint minimum: {min_version}\n"
            f" Expected constraint to have minimum version: >= {task_sdk_version_init}"
        )
    # Check Task SDK version is within constraint in root pyproject.toml
    if not check_version_in_constraint(task_sdk_version_init, root_task_sdk_constraint):
        errors.append(
            f"Task SDK version does not satisfy constraint in pyproject.toml:\n"
            f" task-sdk/src/airflow/sdk/__init__.py: {task_sdk_version_init}\n"
            f" pyproject.toml constraint: apache-airflow-task-sdk{root_task_sdk_constraint}"
        )
    # Check Task SDK constraint minimum version matches actual version in root pyproject.toml
    root_constraint_matches, root_min_version = check_constraint_matches_version(
        task_sdk_version_init, root_task_sdk_constraint
    )
    if not root_constraint_matches:
        errors.append(
            f"Task SDK constraint minimum version does not match actual version in pyproject.toml:\n"
            f" task-sdk/src/airflow/sdk/__init__.py: {task_sdk_version_init}\n"
            f" pyproject.toml constraint minimum: {root_min_version}\n"
            f" Expected constraint to have minimum version: >= {task_sdk_version_init}"
        )
    # Verify constraints match between airflow-core and root pyproject.toml
    if task_sdk_constraint != root_task_sdk_constraint:
        errors.append(
            f"Task SDK constraint mismatch between pyproject.toml files:\n"
            f" airflow-core/pyproject.toml: apache-airflow-task-sdk{task_sdk_constraint}\n"
            f" pyproject.toml: apache-airflow-task-sdk{root_task_sdk_constraint}"
        )
    # Report results
    if errors:
        console.print("[red]Version consistency check failed:[/red]\n")
        for error in errors:
            console.print(f"[red]{error}[/red]\n")
        console.print(
            "[yellow]Please ensure versions are consistent:\n"
            " 1. Set the Airflow version in airflow-core/src/airflow/__init__.py\n"
            " 2. Set the Airflow version in airflow-core/pyproject.toml\n"
            " 3. Set the Airflow version in pyproject.toml\n"
            " 4. Set apache-airflow-core==<version> in pyproject.toml dependencies\n"
            " 5. Set the Task SDK version in task-sdk/src/airflow/sdk/__init__.py\n"
            " 6. Update the Task SDK version constraint in airflow-core/pyproject.toml to include the Task SDK version\n"
            " 7. Update the Task SDK version constraint in pyproject.toml to include the Task SDK version[/yellow]"
        )
        sys.exit(1)
if __name__ == "__main__":
main()
| {
"repo_id": "apache/airflow",
"file_path": "scripts/ci/prek/check_version_consistency.py",
"license": "Apache License 2.0",
"lines": 255,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:helm-tests/tests/helm_tests/airflow_core/test_pdb_triggerer.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import jmespath
from chart_utils.helm_template_generator import render_chart
class TestTriggererPdb:
    """Tests Triggerer PDB."""

    # All tests render the same single template.
    PDB_TEMPLATE = "templates/triggerer/triggerer-poddisruptionbudget.yaml"

    def test_should_pass_validation_with_just_pdb_enabled(self):
        """Rendering must succeed with only the PDB flag turned on."""
        render_chart(
            values={"triggerer": {"podDisruptionBudget": {"enabled": True}}},
            show_only=[self.PDB_TEMPLATE],
        )

    def test_should_add_component_specific_labels(self):
        """Labels configured on the triggerer component end up on the PDB metadata."""
        values = {
            "triggerer": {
                "podDisruptionBudget": {"enabled": True},
                "labels": {"test_label": "test_label_value"},
            },
        }
        docs = render_chart(values=values, show_only=[self.PDB_TEMPLATE])
        labels = jmespath.search("metadata.labels", docs[0])
        assert "test_label" in labels
        assert labels["test_label"] == "test_label_value"

    def test_should_pass_validation_with_pdb_enabled_and_min_available_param(self):
        """minAvailable with maxUnavailable unset must pass schema validation."""
        pdb_config = {"maxUnavailable": None, "minAvailable": 1}
        render_chart(
            values={"triggerer": {"podDisruptionBudget": {"enabled": True, "config": pdb_config}}},
            show_only=[self.PDB_TEMPLATE],
        )  # checks that no validation exception is raised
| {
"repo_id": "apache/airflow",
"file_path": "helm-tests/tests/helm_tests/airflow_core/test_pdb_triggerer.py",
"license": "Apache License 2.0",
"lines": 50,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:helm-tests/tests/helm_tests/airflow_core/test_pdb_worker.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import jmespath
import pytest
from chart_utils.helm_template_generator import render_chart
class TestWorkerPdb:
    """Tests Worker PDB.

    The worker PDB can be enabled either directly under ``workers`` or under
    ``workers.celery``; the shared ``ENABLED_PARAMS`` list (previously repeated
    inline four times) parametrizes every test over both spellings.
    """

    # Both supported locations for enabling the PDB without extra config.
    ENABLED_PARAMS = [
        {"podDisruptionBudget": {"enabled": True}},
        {"celery": {"podDisruptionBudget": {"enabled": True}}},
    ]

    @pytest.mark.parametrize("workers_values", ENABLED_PARAMS)
    def test_pod_disruption_budget_enabled(self, workers_values):
        """Enabling the PDB renders exactly one document."""
        docs = render_chart(
            values={"workers": workers_values},
            show_only=["templates/workers/worker-poddisruptionbudget.yaml"],
        )
        assert len(docs) == 1

    @pytest.mark.parametrize("workers_values", ENABLED_PARAMS)
    def test_pod_disruption_budget_name(self, workers_values):
        """The PDB is named after the release."""
        docs = render_chart(
            values={"workers": workers_values},
            show_only=["templates/workers/worker-poddisruptionbudget.yaml"],
        )
        assert jmespath.search("metadata.name", docs[0]) == "release-name-worker-pdb"

    @pytest.mark.parametrize("workers_values", ENABLED_PARAMS)
    def test_should_add_component_specific_labels(self, workers_values):
        """Component-level labels are propagated to the PDB metadata."""
        docs = render_chart(
            values={
                "workers": {
                    **workers_values,
                    "labels": {"test_label": "test_label_value"},
                },
            },
            show_only=["templates/workers/worker-poddisruptionbudget.yaml"],
        )
        assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value"

    @pytest.mark.parametrize("workers_values", ENABLED_PARAMS)
    def test_pod_disruption_budget_config_max_unavailable_default(self, workers_values):
        """Without explicit config, maxUnavailable defaults to 1 and minAvailable is unset."""
        docs = render_chart(
            values={"workers": workers_values},
            show_only=["templates/workers/worker-poddisruptionbudget.yaml"],
        )
        assert jmespath.search("spec.maxUnavailable", docs[0]) == 1
        assert jmespath.search("spec.minAvailable", docs[0]) is None

    # The third case sets different values at both levels; the expected result
    # of 2 shows the ``workers.celery`` setting wins over the ``workers`` level.
    @pytest.mark.parametrize(
        "workers_values",
        [
            {"podDisruptionBudget": {"enabled": True, "config": {"maxUnavailable": 2}}},
            {"celery": {"podDisruptionBudget": {"enabled": True, "config": {"maxUnavailable": 2}}}},
            {
                "podDisruptionBudget": {"enabled": True, "config": {"maxUnavailable": 3}},
                "celery": {"podDisruptionBudget": {"enabled": True, "config": {"maxUnavailable": 2}}},
            },
        ],
    )
    def test_pod_disruption_budget_config_max_unavailable_overwrite(self, workers_values):
        """Explicit maxUnavailable overrides the default."""
        docs = render_chart(
            values={"workers": workers_values},
            show_only=["templates/workers/worker-poddisruptionbudget.yaml"],
        )
        assert jmespath.search("spec.maxUnavailable", docs[0]) == 2
        assert jmespath.search("spec.minAvailable", docs[0]) is None

    # Clearing maxUnavailable (None) while setting minAvailable switches the
    # rendered spec to minAvailable-only; again the celery level wins.
    @pytest.mark.parametrize(
        "workers_values",
        [
            {"podDisruptionBudget": {"enabled": True, "config": {"maxUnavailable": None, "minAvailable": 1}}},
            {
                "celery": {
                    "podDisruptionBudget": {
                        "enabled": True,
                        "config": {"maxUnavailable": None, "minAvailable": 1},
                    }
                }
            },
            {
                "podDisruptionBudget": {"enabled": True, "config": {"maxUnavailable": 2, "minAvailable": 3}},
                "celery": {
                    "podDisruptionBudget": {
                        "enabled": True,
                        "config": {"maxUnavailable": None, "minAvailable": 1},
                    }
                },
            },
        ],
    )
    def test_pod_disruption_budget_config_min_available_set(self, workers_values):
        """Setting minAvailable (with maxUnavailable cleared) renders minAvailable only."""
        docs = render_chart(
            values={"workers": workers_values},
            show_only=["templates/workers/worker-poddisruptionbudget.yaml"],
        )
        assert jmespath.search("spec.maxUnavailable", docs[0]) is None
        assert jmespath.search("spec.minAvailable", docs[0]) == 1
| {
"repo_id": "apache/airflow",
"file_path": "helm-tests/tests/helm_tests/airflow_core/test_pdb_worker.py",
"license": "Apache License 2.0",
"lines": 128,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:dev/breeze/tests/test_release_command.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from unittest import mock
from unittest.mock import patch
import pytest
from airflow_breeze.commands.release_command import find_latest_release_candidate
def is_ci_environment() -> bool:
    """Check if running in CI environment by checking the CI environment variable."""
    truthy_values = {"true", "1", "yes"}
    # An unset CI variable counts as "not in CI".
    return os.environ.get("CI", "").lower() in truthy_values
class TestFindLatestReleaseCandidate:
    """Test the find_latest_release_candidate function."""

    @staticmethod
    def _dev_repo(tmp_path, *subdirs):
        """Create a fake SVN dev repo under tmp_path with the given subdirectories."""
        repo = tmp_path / "dev" / "airflow"
        repo.mkdir(parents=True)
        for name in subdirs:
            (repo / name).mkdir()
        return repo

    @mock.patch.dict(os.environ, {"CI": "false"})
    def test_find_latest_rc_single_candidate(self, tmp_path):
        """Test finding release candidate when only one exists."""
        repo = self._dev_repo(tmp_path, "3.1.7rc1")
        assert find_latest_release_candidate("3.1.7", str(repo), component="airflow") == "3.1.7rc1"

    @mock.patch.dict(os.environ, {"CI": "false"})
    def test_find_latest_rc_multiple_candidates(self, tmp_path):
        """Test finding latest release candidate when multiple exist."""
        # rc10 must sort above rc3 (numeric, not lexicographic, ordering).
        repo = self._dev_repo(tmp_path, "3.1.7rc1", "3.1.7rc2", "3.1.7rc3", "3.1.7rc10")
        assert find_latest_release_candidate("3.1.7", str(repo), component="airflow") == "3.1.7rc10"

    @mock.patch.dict(os.environ, {"CI": "false"})
    def test_find_latest_rc_ignores_other_versions(self, tmp_path):
        """Test that function ignores RCs for other versions."""
        repo = self._dev_repo(tmp_path, "3.0.4rc1", "3.1.7rc1", "3.1.7rc2", "3.0.6rc1")
        assert find_latest_release_candidate("3.1.7", str(repo), component="airflow") == "3.1.7rc2"

    @mock.patch.dict(os.environ, {"CI": "false"})
    def test_find_latest_rc_ignores_non_rc_directories(self, tmp_path):
        """Test that function ignores directories that don't match RC pattern."""
        # "3.1.7" is a final-release directory and "some-other-dir" is unrelated.
        repo = self._dev_repo(tmp_path, "3.1.7rc1", "3.1.7", "some-other-dir")
        assert find_latest_release_candidate("3.1.7", str(repo), component="airflow") == "3.1.7rc1"

    @mock.patch.dict(os.environ, {"CI": "false"})
    def test_find_latest_rc_no_match(self, tmp_path):
        """Test that function returns None when no matching RC found."""
        repo = self._dev_repo(tmp_path, "3.0.4rc1")
        assert find_latest_release_candidate("3.1.5", str(repo), component="airflow") is None

    @mock.patch.dict(os.environ, {"CI": "false"})
    def test_find_latest_rc_directory_not_exists(self, tmp_path):
        """Test that function returns None when directory doesn't exist."""
        missing_repo = tmp_path / "dev" / "airflow"  # deliberately never created
        assert find_latest_release_candidate("3.1.7", str(missing_repo), component="airflow") is None

    @mock.patch.dict(os.environ, {"CI": "false"})
    def test_find_latest_rc_empty_directory(self, tmp_path):
        """Test that function returns None when directory is empty."""
        repo = self._dev_repo(tmp_path)
        assert find_latest_release_candidate("3.1.7", str(repo), component="airflow") is None

    @mock.patch.dict(os.environ, {"CI": "false"})
    def test_find_latest_rc_task_sdk_component(self, tmp_path):
        """Test finding release candidate for task-sdk component."""
        repo = self._dev_repo(tmp_path)
        sdk_dir = repo / "task-sdk"
        sdk_dir.mkdir()
        for name in ("1.0.5rc1", "1.0.5rc2", "1.0.5rc3"):
            (sdk_dir / name).mkdir()
        assert find_latest_release_candidate("1.0.5", str(repo), component="task-sdk") == "1.0.5rc3"

    @mock.patch.dict(os.environ, {"CI": "false"})
    def test_find_latest_rc_task_sdk_ignores_airflow_rcs(self, tmp_path):
        """Test that task-sdk component ignores airflow RCs."""
        # Airflow RC at the top level must not be picked up for task-sdk.
        repo = self._dev_repo(tmp_path, "3.1.7rc1")
        (repo / "task-sdk").mkdir()
        (repo / "task-sdk" / "1.1.7rc1").mkdir()
        assert find_latest_release_candidate("1.1.7", str(repo), component="task-sdk") == "1.1.7rc1"

    @mock.patch.dict(os.environ, {"CI": "false"})
    def test_find_latest_rc_handles_oserror(self, tmp_path):
        """Test that function handles OSError gracefully."""
        dev_repo = tmp_path / "dev" / "airflow"
        simulation_repo = tmp_path / "release" / "airflow"
        dev_repo.mkdir(parents=True)
        simulation_repo.mkdir(parents=True)
        with patch("os.listdir", side_effect=OSError("Permission denied")):
            result = find_latest_release_candidate(
                "3.1.5", str(dev_repo), component="airflow", svn_simulation_repo=simulation_repo
            )
            assert result is None
class FakeDirEntry:
    """Minimal stand-in for ``os.DirEntry`` exposing only ``name`` and ``is_dir()``."""

    def __init__(self, name: str, *, is_dir: bool):
        self.name = name
        self._directory = is_dir

    def is_dir(self) -> bool:
        """Return whether this fake entry represents a directory."""
        return self._directory
@pytest.fixture
def release_cmd():
    """Lazy import the release command module."""
    # Imported inside the fixture so collection does not pay the import cost.
    from airflow_breeze.commands import release_command

    return release_command
def test_remove_old_release_only_collects_release_directories(monkeypatch, release_cmd):
    """remove_old_release must consider only directory entries matching the release pattern."""
    version = "3.1.7"
    task_sdk_version = "1.0.5"
    svn_release_repo = "/svn/release/repo"
    # Arrange: entries include current release, old release directories, a matching "file", and non-release directory.
    entries = [
        FakeDirEntry(version, is_dir=True),  # current release: should be skipped
        FakeDirEntry("3.0.4", is_dir=True),  # old release dir: should be included
        FakeDirEntry("3.0.3", is_dir=True),  # old release dir: should be included
        FakeDirEntry("3.0.2", is_dir=False),  # matches pattern but not a directory: excluded
        FakeDirEntry("task-sdk", is_dir=True),  # task-sdk directory: will be scanned separately
        FakeDirEntry("not-a-release", is_dir=True),  # directory but not matching pattern: excluded
    ]
    # Task SDK directory entries
    task_sdk_entries = [
        FakeDirEntry(task_sdk_version, is_dir=True),  # current task-sdk release: should be skipped
        FakeDirEntry("1.0.4", is_dir=True),  # old task-sdk release: should be included
        FakeDirEntry("1.0.3", is_dir=True),  # old task-sdk release: should be included
    ]
    # Recorders for everything the function under test touches.
    chdir_calls: list[str] = []
    console_messages: list[str] = []
    run_command_calls: list[list[str]] = []
    getcwd_calls: list[int] = []
    path_exists_calls: list[str] = []

    def fake_confirm_action(prompt: str, **_kwargs) -> bool:
        # First prompt decides whether we scan. We want to.
        if prompt == "Do you want to look for old releases to remove?":
            return True
        # For each candidate, we decline removal to avoid running svn commands.
        if prompt.startswith("Remove old release "):
            return False
        raise AssertionError(f"Unexpected confirm prompt: {prompt}")

    def fake_getcwd() -> str:
        getcwd_calls.append(1)
        return "/original/dir"

    def fake_path_exists(path: str) -> bool:
        # Pretend only the task-sdk subdirectory exists on disk.
        path_exists_calls.append(path)
        return path == "/svn/release/repo/task-sdk"

    def fake_scandir(path=None):
        if path == "/svn/release/repo/task-sdk":
            return iter(task_sdk_entries)
        return iter(entries)

    monkeypatch.setattr(release_cmd.os, "getcwd", fake_getcwd)
    monkeypatch.setattr(release_cmd.os, "chdir", lambda p: chdir_calls.append(p))
    monkeypatch.setattr(release_cmd.os, "scandir", fake_scandir)
    monkeypatch.setattr(release_cmd.os.path, "exists", fake_path_exists)
    monkeypatch.setattr(release_cmd.os.path, "join", lambda *args: "/".join(args))
    monkeypatch.setattr(release_cmd, "confirm_action", fake_confirm_action)
    monkeypatch.setattr(release_cmd, "console_print", lambda msg="": console_messages.append(str(msg)))
    monkeypatch.setattr(release_cmd, "run_command", lambda cmd, **_kwargs: run_command_calls.append(cmd))
    # Act
    release_cmd.remove_old_release(
        version=version, task_sdk_version=task_sdk_version, svn_release_repo=svn_release_repo
    )
    # Assert: only directory entries matching RELEASE_PATTERN, excluding current version, and sorted.
    assert svn_release_repo in chdir_calls
    assert "/original/dir" in chdir_calls
    assert "The following old Airflow releases should be removed: ['3.0.3', '3.0.4']" in console_messages
    assert (
        "The following old Task SDK releases should be removed: ['task-sdk/1.0.3', 'task-sdk/1.0.4']"
        in console_messages
    )
    # All removals were declined, so no svn command may have run.
    assert run_command_calls == []
def test_remove_old_release_returns_early_when_user_declines(monkeypatch, release_cmd):
    """If the operator declines the initial prompt, nothing else may run."""
    prompts_seen: list[str] = []

    def decline_everything(prompt: str, **_kwargs) -> bool:
        prompts_seen.append(prompt)
        return False

    def fail_if_called(*_args, **_kwargs):
        raise AssertionError("This should not have been called when user declines the initial prompt.")

    monkeypatch.setattr(release_cmd, "confirm_action", decline_everything)
    # Every filesystem/output/command hook must stay untouched after the decline.
    for forbidden_os_call in ("getcwd", "chdir", "scandir"):
        monkeypatch.setattr(release_cmd.os, forbidden_os_call, fail_if_called)
    monkeypatch.setattr(release_cmd, "console_print", fail_if_called)
    monkeypatch.setattr(release_cmd, "run_command", fail_if_called)

    release_cmd.remove_old_release(
        version="3.1.7", task_sdk_version="1.0.5", svn_release_repo="/svn/release/repo"
    )

    assert prompts_seen == ["Do you want to look for old releases to remove?"]
def test_remove_old_release_removes_confirmed_old_releases(monkeypatch, release_cmd):
    """Only releases the operator confirms are svn-removed and committed."""
    version = "3.1.5"
    task_sdk_version = "1.1.5"
    svn_release_repo = "/svn/release/repo"
    # Airflow release dirs: the current version must be skipped automatically.
    entries = [
        FakeDirEntry("3.1.4", is_dir=True),
        FakeDirEntry(version, is_dir=True),
        FakeDirEntry("3.1.0", is_dir=True),
    ]
    # Task SDK release dirs: same layout, scanned via the task-sdk subdirectory.
    task_sdk_entries = [
        FakeDirEntry("1.1.4", is_dir=True),
        FakeDirEntry(task_sdk_version, is_dir=True),
        FakeDirEntry("1.1.0", is_dir=True),
    ]
    chdir_calls: list[str] = []
    console_messages: list[str] = []
    run_command_calls: list[tuple[list[str], dict]] = []
    confirm_prompts: list[str] = []
    getcwd_calls: list[int] = []

    def fake_confirm_action(prompt: str, **_kwargs) -> bool:
        # Confirm only the removal of 3.1.0; decline everything else.
        confirm_prompts.append(prompt)
        if prompt == "Do you want to look for old releases to remove?":
            return True
        if prompt == "Remove old release 3.1.0?":
            return True
        if prompt == "Remove old release 3.1.4?":
            return False
        if prompt.startswith("Remove old release task-sdk/"):
            return False
        raise AssertionError(f"Unexpected confirm prompt: {prompt}")

    def fake_getcwd() -> str:
        getcwd_calls.append(1)
        return "/original/dir"

    def fake_path_exists(path: str) -> bool:
        # Pretend only the task-sdk subdirectory exists on disk.
        return path == "/svn/release/repo/task-sdk"

    def fake_scandir(path=None):
        if path == "/svn/release/repo/task-sdk":
            return iter(task_sdk_entries)
        return iter(entries)

    monkeypatch.setattr(release_cmd.os, "getcwd", fake_getcwd)
    monkeypatch.setattr(release_cmd.os, "chdir", lambda path: chdir_calls.append(path))
    monkeypatch.setattr(release_cmd.os, "scandir", fake_scandir)
    monkeypatch.setattr(release_cmd.os.path, "exists", fake_path_exists)
    monkeypatch.setattr(release_cmd.os.path, "join", lambda *args: "/".join(args))
    monkeypatch.setattr(release_cmd, "confirm_action", fake_confirm_action)
    monkeypatch.setattr(release_cmd, "console_print", lambda msg="": console_messages.append(str(msg)))

    def fake_run_command(cmd: list[str], **kwargs):
        run_command_calls.append((cmd, kwargs))

    monkeypatch.setattr(release_cmd, "run_command", fake_run_command)
    release_cmd.remove_old_release(
        version=version, task_sdk_version=task_sdk_version, svn_release_repo=svn_release_repo
    )
    # The original working directory must be restored after the svn work.
    assert chdir_calls == [svn_release_repo, "/original/dir"]
    assert confirm_prompts == [
        "Do you want to look for old releases to remove?",
        "Remove old release 3.1.0?",
        "Remove old release 3.1.4?",
        "Remove old release task-sdk/1.1.0?",
        "Remove old release task-sdk/1.1.4?",
    ]
    assert "The following old Airflow releases should be removed: ['3.1.0', '3.1.4']" in console_messages
    assert (
        "The following old Task SDK releases should be removed: ['task-sdk/1.1.0', 'task-sdk/1.1.4']"
        in console_messages
    )
    assert "Removing old release 3.1.0" in console_messages
    assert "Removing old release 3.1.4" in console_messages
    assert "[success]Old releases removed" in console_messages
    # Only 3.1.0 was confirmed, so we should run rm+commit for 3.1.0 only.
    assert run_command_calls == [
        (["svn", "rm", "3.1.0"], {"check": True}),
        (["svn", "commit", "-m", "Remove old release: 3.1.0"], {"check": True}),
    ]
def test_remove_old_release_no_old_releases(monkeypatch, release_cmd):
    """When only the current releases exist, nothing is removed or committed."""
    version = "3.1.7"
    task_sdk_version = "1.0.5"
    svn_release_repo = "/svn/release/repo"
    # Only current release exists
    entries = [
        FakeDirEntry(version, is_dir=True),
        FakeDirEntry("task-sdk", is_dir=True),  # task-sdk directory exists
    ]
    task_sdk_entries = [
        FakeDirEntry(task_sdk_version, is_dir=True),  # Only current task-sdk release
    ]
    chdir_calls: list[str] = []
    console_messages: list[str] = []
    run_command_calls: list[list[str]] = []

    def fake_confirm_action(prompt: str, **_kwargs) -> bool:
        # Only the initial scan prompt is expected; any removal prompt is a bug.
        if prompt == "Do you want to look for old releases to remove?":
            return True
        raise AssertionError(f"Unexpected confirm prompt: {prompt}")

    def fake_getcwd() -> str:
        return "/original/dir"

    def fake_path_exists(path: str) -> bool:
        return path == "/svn/release/repo/task-sdk"

    def fake_scandir(path=None):
        if path == "/svn/release/repo/task-sdk":
            return iter(task_sdk_entries)
        return iter(entries)

    monkeypatch.setattr(release_cmd.os, "getcwd", fake_getcwd)
    monkeypatch.setattr(release_cmd.os, "chdir", lambda path: chdir_calls.append(path))
    monkeypatch.setattr(release_cmd.os, "scandir", fake_scandir)
    monkeypatch.setattr(release_cmd.os.path, "exists", fake_path_exists)
    monkeypatch.setattr(release_cmd.os.path, "join", lambda *args: "/".join(args))
    monkeypatch.setattr(release_cmd, "confirm_action", fake_confirm_action)
    monkeypatch.setattr(release_cmd, "console_print", lambda msg="": console_messages.append(str(msg)))
    monkeypatch.setattr(release_cmd, "run_command", lambda cmd, **_kwargs: run_command_calls.append(cmd))
    release_cmd.remove_old_release(
        version=version, task_sdk_version=task_sdk_version, svn_release_repo=svn_release_repo
    )
    assert "No old releases to remove." in console_messages
    assert run_command_calls == []
    assert chdir_calls == [svn_release_repo, "/original/dir"]
def test_remove_old_release_task_sdk_only(monkeypatch, release_cmd):
    """Old task-sdk releases are offered for removal even when no old Airflow releases exist.

    Removal of 1.0.3 is confirmed and 1.0.4 declined, so svn rm/commit must run
    for 1.0.3 only.
    """
    version = "3.1.7"
    task_sdk_version = "1.0.5"
    svn_release_repo = "/svn/release/repo"
    # Only current Airflow release exists, but old Task SDK releases exist
    entries = [
        FakeDirEntry(version, is_dir=True),
    ]
    task_sdk_entries = [
        FakeDirEntry(task_sdk_version, is_dir=True), # current task-sdk release
        FakeDirEntry("1.0.4", is_dir=True), # old task-sdk release
        FakeDirEntry("1.0.3", is_dir=True), # old task-sdk release
    ]
    chdir_calls: list[str] = []
    console_messages: list[str] = []
    run_command_calls: list[tuple[list[str], dict]] = []
    confirm_prompts: list[str] = []
    def fake_confirm_action(prompt: str, **_kwargs) -> bool:
        confirm_prompts.append(prompt)
        if prompt == "Do you want to look for old releases to remove?":
            return True
        if prompt == "Remove old release task-sdk/1.0.3?":
            return True
        if prompt == "Remove old release task-sdk/1.0.4?":
            return False
        raise AssertionError(f"Unexpected confirm prompt: {prompt}")
    def fake_getcwd() -> str:
        return "/original/dir"
    def fake_path_exists(path: str) -> bool:
        return path == "/svn/release/repo/task-sdk"
    def fake_scandir(path=None):
        if path == "/svn/release/repo/task-sdk":
            return iter(task_sdk_entries)
        return iter(entries)
    monkeypatch.setattr(release_cmd.os, "getcwd", fake_getcwd)
    monkeypatch.setattr(release_cmd.os, "chdir", lambda path: chdir_calls.append(path))
    monkeypatch.setattr(release_cmd.os, "scandir", fake_scandir)
    monkeypatch.setattr(release_cmd.os.path, "exists", fake_path_exists)
    monkeypatch.setattr(release_cmd.os.path, "join", lambda *args: "/".join(args))
    monkeypatch.setattr(release_cmd, "confirm_action", fake_confirm_action)
    monkeypatch.setattr(release_cmd, "console_print", lambda msg="": console_messages.append(str(msg)))
    def fake_run_command(cmd: list[str], **kwargs):
        run_command_calls.append((cmd, kwargs))
    monkeypatch.setattr(release_cmd, "run_command", fake_run_command)
    release_cmd.remove_old_release(
        version=version, task_sdk_version=task_sdk_version, svn_release_repo=svn_release_repo
    )
    assert chdir_calls == [svn_release_repo, "/original/dir"]
    assert (
        "The following old Task SDK releases should be removed: ['task-sdk/1.0.3', 'task-sdk/1.0.4']"
        in console_messages
    )
    # NOTE(review): both "Removing" messages are asserted even though 1.0.4 was
    # declined — presumably the message is printed before the per-item prompt;
    # the run_command assertion below confirms only 1.0.3 was actually removed.
    assert "Removing old release task-sdk/1.0.3" in console_messages
    assert "Removing old release task-sdk/1.0.4" in console_messages
    assert "[success]Old releases removed" in console_messages
    assert run_command_calls == [
        (["svn", "rm", "task-sdk/1.0.3"], {"check": True}),
        (["svn", "commit", "-m", "Remove old release: task-sdk/1.0.3"], {"check": True}),
    ]
def test_remove_old_release_no_task_sdk_version(monkeypatch, release_cmd):
    """When task_sdk_version is None, only Airflow releases are scanned and mentioned."""
    current_version = "3.1.7"
    svn_repo = "/svn/release/repo"
    dir_entries = [
        FakeDirEntry(current_version, is_dir=True),
        FakeDirEntry("3.0.4", is_dir=True),  # stale release that should be offered for removal
    ]
    recorded_chdirs: list[str] = []
    printed: list[str] = []
    executed: list[list[str]] = []

    def fake_confirm(prompt: str, **_ignored) -> bool:
        # Agree to scan, decline every individual removal, reject anything else.
        if prompt == "Do you want to look for old releases to remove?":
            return True
        if prompt.startswith("Remove old release "):
            return False
        raise AssertionError(f"Unexpected confirm prompt: {prompt}")

    monkeypatch.setattr(release_cmd.os, "getcwd", lambda: "/original/dir")
    monkeypatch.setattr(release_cmd.os, "chdir", recorded_chdirs.append)
    monkeypatch.setattr(release_cmd.os, "scandir", lambda: iter(dir_entries))
    monkeypatch.setattr(release_cmd, "confirm_action", fake_confirm)
    monkeypatch.setattr(release_cmd, "console_print", lambda msg="": printed.append(str(msg)))
    monkeypatch.setattr(release_cmd, "run_command", lambda cmd, **_ignored: executed.append(cmd))

    release_cmd.remove_old_release(
        version=current_version, task_sdk_version=None, svn_release_repo=svn_repo
    )

    assert "The following old Airflow releases should be removed: ['3.0.4']" in printed
    assert "task-sdk" not in " ".join(printed).lower()
    assert executed == []
    assert recorded_chdirs == [svn_repo, "/original/dir"]
| {
"repo_id": "apache/airflow",
"file_path": "dev/breeze/tests/test_release_command.py",
"license": "Apache License 2.0",
"lines": 430,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:dev/breeze/tests/test_release_candidate_command.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from unittest import mock
import pytest
def is_ci_environment() -> bool:
    """Check if running in CI environment by checking the CI environment variable."""
    ci_flag = os.environ.get("CI", "")
    return ci_flag.lower() in {"true", "1", "yes"}
class FakeDirEntry:
    """Minimal stand-in for ``os.DirEntry``: exposes ``name`` and ``is_dir()``."""

    def __init__(self, name: str, *, is_dir: bool):
        self.name = name
        self._directory = is_dir

    def is_dir(self) -> bool:
        return self._directory
@pytest.fixture
def rc_cmd():
    """Lazy import the rc command module.

    The import lives inside the fixture body — presumably so that collecting
    this test module does not require the breeze package to be importable
    (TODO confirm against the test suite's collection setup).
    """
    import airflow_breeze.commands.release_candidate_command as module
    return module
def test_clone_asf_repo(monkeypatch, rc_cmd):
    """Test that clone_asf_repo behaves correctly based on CI environment."""
    # NOTE(review): unlike the tests below, CI is NOT patched here, so which
    # assertion branch runs depends on the real environment's CI variable.
    version = "2.10.0rc3"
    repo_root = "/repo/root"
    chdir_calls: list[str] = []
    console_messages: list[str] = []
    run_command_calls: list[tuple[list[str], dict]] = []
    confirm_prompts: list[str] = []
    # Check if we're in CI environment
    is_ci = is_ci_environment()
    def fake_confirm_action(prompt: str, **kwargs):
        confirm_prompts.append(prompt)
        return True
    def fake_chdir(path: str):
        chdir_calls.append(path)
    def fake_run_command(cmd: list[str], **kwargs):
        # Record both the command and its keyword arguments for later inspection.
        run_command_calls.append((cmd, kwargs))
        return None
    monkeypatch.setattr(rc_cmd.os, "chdir", fake_chdir)
    monkeypatch.setattr(rc_cmd, "confirm_action", fake_confirm_action)
    monkeypatch.setattr(rc_cmd, "console_print", lambda msg="": console_messages.append(str(msg)))
    monkeypatch.setattr(rc_cmd, "run_command", fake_run_command)
    rc_cmd.clone_asf_repo(version=version, repo_root=repo_root)
    assert confirm_prompts == ["Do you want to clone asf repo?"]
    assert chdir_calls == [repo_root]
    if is_ci:
        # In CI, should simulate SVN checkout
        assert "[info]Running in CI environment - simulating SVN checkout" in console_messages
        assert "[success]Simulated ASF repo checkout in CI" in console_messages
        # Should use mkdir -p to create directory structure
        assert any(
            cmd == ["mkdir", "-p", f"{repo_root}/asf-dist/dev/airflow"] and kwargs.get("check") is True
            for cmd, kwargs in run_command_calls
        )
        # Should NOT have any svn commands
        assert not any(len(cmd) >= 1 and cmd[0] == "svn" for cmd, kwargs in run_command_calls)
    else:
        # In normal environment, should use actual SVN commands
        assert "[success]Cloned ASF repo successfully" in console_messages
        assert "[info]Running in CI environment - simulating SVN checkout" not in console_messages
        # Should have SVN checkout command
        assert any(
            cmd[0] == "svn" and cmd[1] == "checkout" and "https://dist.apache.org/repos/dist" in cmd
            for cmd, kwargs in run_command_calls
        )
        # Should have SVN update command
        assert any(cmd[0] == "svn" and cmd[1] == "update" for cmd, kwargs in run_command_calls)
def test_remove_old_releases_only_collects_rc_directories(monkeypatch, rc_cmd):
    """Only directory entries matching the RC pattern (excluding the current RC) are collected."""
    version = "2.10.0rc3"
    task_sdk_version = "1.0.6rc3"
    repo_root = "/repo/root"
    # Arrange: entries include current RC, old RC directories, a matching "file", and non-RC directory.
    entries = [
        FakeDirEntry(version, is_dir=True), # current RC: should be skipped
        FakeDirEntry("2.10.0rc2", is_dir=True), # old RC dir: should be included
        FakeDirEntry("2.10.0rc1", is_dir=True), # old RC dir: should be included
        FakeDirEntry("2.10.0rc0", is_dir=False), # matches pattern but not a directory: excluded
        FakeDirEntry("not-a-rc", is_dir=True), # directory but not matching pattern: excluded
    ]
    chdir_calls: list[str] = []
    console_messages: list[str] = []
    run_command_calls: list[list[str]] = []
    def fake_confirm_action(prompt: str, **_kwargs) -> bool:
        # First prompt decides whether we scan. We want to.
        if prompt == "Do you want to look for old RCs to remove?":
            return True
        # For each candidate, we decline removal to avoid running svn commands.
        if prompt.startswith("Remove old RC ") or prompt.startswith("Remove old Task SDK RC "):
            return False
        raise AssertionError(f"Unexpected confirm prompt: {prompt}")
    def fake_path_exists(path: str) -> bool:
        # Task SDK path doesn't exist in this test
        return False
    monkeypatch.setattr(rc_cmd.os, "chdir", lambda path: chdir_calls.append(path))
    monkeypatch.setattr(rc_cmd.os, "scandir", lambda: iter(entries))
    monkeypatch.setattr(rc_cmd.os.path, "exists", fake_path_exists)
    monkeypatch.setattr(rc_cmd, "confirm_action", fake_confirm_action)
    monkeypatch.setattr(rc_cmd, "console_print", lambda msg="": console_messages.append(str(msg)))
    monkeypatch.setattr(rc_cmd, "run_command", lambda cmd, **_kwargs: run_command_calls.append(cmd))
    # Act
    rc_cmd.remove_old_releases(version=version, task_sdk_version=task_sdk_version, repo_root=repo_root)
    # Assert: only directory entries matching RC_PATTERN, excluding current version, and sorted.
    assert f"{repo_root}/asf-dist/dev/airflow" in chdir_calls
    assert repo_root in chdir_calls
    assert (
        "The following old Airflow releases should be removed: ['2.10.0rc1', '2.10.0rc2']" in console_messages
    )
    assert run_command_calls == []
def test_remove_old_releases_returns_early_when_user_declines(monkeypatch, rc_cmd):
    """Declining the initial scan prompt must return before any filesystem access."""
    seen_prompts: list[str] = []

    def decline_everything(prompt: str, **_kwargs) -> bool:
        seen_prompts.append(prompt)
        return False

    def forbidden(*_args, **_kwargs):
        raise AssertionError("This should not have been called when user declines the initial prompt.")

    monkeypatch.setattr(rc_cmd, "confirm_action", decline_everything)
    monkeypatch.setattr(rc_cmd.os, "chdir", forbidden)
    monkeypatch.setattr(rc_cmd.os, "scandir", forbidden)
    monkeypatch.setattr(rc_cmd, "console_print", forbidden)
    monkeypatch.setattr(rc_cmd, "run_command", forbidden)

    rc_cmd.remove_old_releases(version="2.10.0rc3", task_sdk_version="1.0.6rc3", repo_root="/repo/root")

    assert seen_prompts == ["Do you want to look for old RCs to remove?"]
@mock.patch.dict(os.environ, {"CI": "true"})
def test_remove_old_releases_removes_confirmed_old_releases(monkeypatch, rc_cmd):
    """Test that remove_old_releases works correctly based on CI environment."""
    version = "3.1.5rc3"
    task_sdk_version = "1.0.6rc3"
    repo_root = "/repo/root"
    # Check if we're in CI environment
    # NOTE(review): the decorator above patches CI="true", so is_ci is always
    # True here and the else branch below cannot execute as written.
    is_ci = is_ci_environment()
    # Unsorted on purpose to verify sorting before prompting/removing.
    entries = [
        FakeDirEntry("3.1.5rc2", is_dir=True),
        FakeDirEntry(version, is_dir=True),
        FakeDirEntry("3.1.0rc1", is_dir=True),
    ]
    chdir_calls: list[str] = []
    console_messages: list[str] = []
    run_command_calls: list[tuple[list[str], dict]] = []
    confirm_prompts: list[str] = []
    def fake_confirm_action(prompt: str, **_kwargs) -> bool:
        confirm_prompts.append(prompt)
        if prompt == "Do you want to look for old RCs to remove?":
            return True
        if prompt == "Remove old RC 3.1.0rc1?":
            return True
        if prompt == "Remove old RC 3.1.5rc2?":
            return False
        raise AssertionError(f"Unexpected confirm prompt: {prompt}")
    def fake_path_exists(path: str) -> bool:
        # Task SDK path doesn't exist in this test
        return False
    monkeypatch.setattr(rc_cmd.os, "chdir", lambda path: chdir_calls.append(path))
    monkeypatch.setattr(rc_cmd.os, "scandir", lambda: iter(entries))
    monkeypatch.setattr(rc_cmd.os.path, "exists", fake_path_exists)
    monkeypatch.setattr(rc_cmd, "confirm_action", fake_confirm_action)
    monkeypatch.setattr(rc_cmd, "console_print", lambda msg="": console_messages.append(str(msg)))
    def fake_run_command(cmd: list[str], **kwargs):
        run_command_calls.append((cmd, kwargs))
    monkeypatch.setattr(rc_cmd, "run_command", fake_run_command)
    rc_cmd.remove_old_releases(version=version, task_sdk_version=task_sdk_version, repo_root=repo_root)
    assert chdir_calls == [f"{repo_root}/asf-dist/dev/airflow", repo_root]
    # Candidates are prompted in sorted order, regardless of scandir order.
    assert confirm_prompts == [
        "Do you want to look for old RCs to remove?",
        "Remove old RC 3.1.0rc1?",
        "Remove old RC 3.1.5rc2?",
    ]
    assert (
        "The following old Airflow releases should be removed: ['3.1.0rc1', '3.1.5rc2']" in console_messages
    )
    assert "Removing old Airflow release 3.1.0rc1" in console_messages
    assert "Removing old Airflow release 3.1.5rc2" in console_messages
    assert "[success]Old releases removed" in console_messages
    if is_ci:
        # In CI, should simulate SVN commands
        assert "[success]Old releases removed" in console_messages
        assert (
            "The following old Airflow releases should be removed: ['3.1.0rc1', '3.1.5rc2']"
            in console_messages
        )
        assert "Removing old Airflow release 3.1.0rc1" in console_messages
        assert "Removing old Airflow release 3.1.5rc2" in console_messages
        # Should NOT have any actual svn commands (only rc1 was confirmed)
        assert run_command_calls == [
            (
                [
                    "svn",
                    "rm",
                    "3.1.0rc1",
                ],
                {
                    "check": True,
                },
            ),
            (
                [
                    "svn",
                    "commit",
                    "-m",
                    "Remove old release: 3.1.0rc1",
                ],
                {
                    "check": True,
                },
            ),
        ]
    else:
        # Only rc1 was confirmed, so we should run rm+commit for rc1 only.
        assert run_command_calls == [
            (["svn", "rm", "3.1.0rc1"], {"check": True}),
            (["svn", "commit", "-m", "Remove old release: 3.1.0rc1"], {"check": True}),
        ]
def test_remove_old_releases_removes_task_sdk_releases(monkeypatch, rc_cmd):
    """Old Task SDK RCs are scanned and announced alongside Airflow RCs.

    All removals are declined, so no svn commands should run; the test checks
    that both trees were visited and both candidate lists were printed.
    """
    version = "3.1.5rc3"
    task_sdk_version = "1.0.6rc3"
    repo_root = "/repo/root"
    # Airflow entries
    airflow_entries = [
        FakeDirEntry(version, is_dir=True),
        FakeDirEntry("3.1.5rc2", is_dir=True),
    ]
    # Task SDK entries
    task_sdk_entries = [
        FakeDirEntry(task_sdk_version, is_dir=True), # current RC: should be skipped
        FakeDirEntry("1.0.6rc2", is_dir=True), # old RC dir: should be included
        FakeDirEntry("1.0.6rc1", is_dir=True), # old RC dir: should be included
    ]
    chdir_calls: list[str] = []
    console_messages: list[str] = []
    run_command_calls: list[tuple[list[str], dict]] = []
    confirm_prompts: list[str] = []
    scandir_call_count = 0
    def fake_confirm_action(prompt: str, **_kwargs) -> bool:
        confirm_prompts.append(prompt)
        if prompt == "Do you want to look for old RCs to remove?":
            return True
        # Decline all removals to avoid running svn commands
        if prompt.startswith("Remove old RC ") or prompt.startswith("Remove old Task SDK RC "):
            return False
        raise AssertionError(f"Unexpected confirm prompt: {prompt}")
    def fake_path_exists(path: str) -> bool:
        # Task SDK path exists in this test
        return path == f"{repo_root}/asf-dist/dev/airflow/task-sdk"
    def fake_scandir():
        nonlocal scandir_call_count
        scandir_call_count += 1
        # First call is for Airflow, second is for Task SDK
        if scandir_call_count == 1:
            return iter(airflow_entries)
        if scandir_call_count == 2:
            return iter(task_sdk_entries)
        raise AssertionError("Unexpected scandir call")
    monkeypatch.setattr(rc_cmd.os, "chdir", lambda path: chdir_calls.append(path))
    monkeypatch.setattr(rc_cmd.os, "scandir", fake_scandir)
    monkeypatch.setattr(rc_cmd.os.path, "exists", fake_path_exists)
    monkeypatch.setattr(rc_cmd, "confirm_action", fake_confirm_action)
    monkeypatch.setattr(rc_cmd, "console_print", lambda msg="": console_messages.append(str(msg)))
    monkeypatch.setattr(rc_cmd, "run_command", lambda cmd, **_kwargs: run_command_calls.append((cmd, {})))
    rc_cmd.remove_old_releases(version=version, task_sdk_version=task_sdk_version, repo_root=repo_root)
    assert f"{repo_root}/asf-dist/dev/airflow" in chdir_calls
    assert f"{repo_root}/asf-dist/dev/airflow/task-sdk" in chdir_calls
    assert repo_root in chdir_calls
    assert "The following old Airflow releases should be removed: ['3.1.5rc2']" in console_messages
    assert (
        "The following old Task SDK releases should be removed: ['1.0.6rc1', '1.0.6rc2']" in console_messages
    )
    assert "[success]Old releases removed" in console_messages
    # No removals were confirmed, so no svn commands should be run
    assert run_command_calls == []
@mock.patch.dict(os.environ, {"CI": "true"})
def test_remove_old_releases_removes_both_airflow_and_task_sdk_releases(monkeypatch, rc_cmd):
    """Test that remove_old_releases works correctly based on CI environment."""
    version = "3.1.5rc3"
    task_sdk_version = "1.0.6rc3"
    repo_root = "/repo/root"
    # Check if we're in CI environment
    # NOTE(review): the decorator above patches CI="true", so is_ci is always
    # True here and the else branch below cannot execute as written.
    is_ci = is_ci_environment()
    # Airflow entries
    airflow_entries = [
        FakeDirEntry(version, is_dir=True),
        FakeDirEntry("3.1.5rc2", is_dir=True),
    ]
    # Task SDK entries
    task_sdk_entries = [
        FakeDirEntry(task_sdk_version, is_dir=True),
        FakeDirEntry("1.0.6rc2", is_dir=True),
        FakeDirEntry("1.0.6rc1", is_dir=True),
    ]
    chdir_calls: list[str] = []
    console_messages: list[str] = []
    run_command_calls: list[tuple[list[str], dict]] = []
    confirm_prompts: list[str] = []
    scandir_call_count = 0
    def fake_confirm_action(prompt: str, **_kwargs) -> bool:
        confirm_prompts.append(prompt)
        if prompt == "Do you want to look for old RCs to remove?":
            return True
        # Confirm removal of one Airflow and one Task SDK release
        if prompt == "Remove old RC 3.1.5rc2?":
            return True
        if prompt == "Remove old Task SDK RC 1.0.6rc1?":
            return True
        # Decline others
        if prompt.startswith("Remove old RC ") or prompt.startswith("Remove old Task SDK RC "):
            return False
        raise AssertionError(f"Unexpected confirm prompt: {prompt}")
    def fake_path_exists(path: str) -> bool:
        return path == f"{repo_root}/asf-dist/dev/airflow/task-sdk"
    def fake_scandir():
        # First call scans the Airflow tree, second the Task SDK tree.
        nonlocal scandir_call_count
        scandir_call_count += 1
        if scandir_call_count == 1:
            return iter(airflow_entries)
        if scandir_call_count == 2:
            return iter(task_sdk_entries)
        raise AssertionError("Unexpected scandir call")
    monkeypatch.setattr(rc_cmd.os, "chdir", lambda path: chdir_calls.append(path))
    monkeypatch.setattr(rc_cmd.os, "scandir", fake_scandir)
    monkeypatch.setattr(rc_cmd.os.path, "exists", fake_path_exists)
    monkeypatch.setattr(rc_cmd, "confirm_action", fake_confirm_action)
    monkeypatch.setattr(rc_cmd, "console_print", lambda msg="": console_messages.append(str(msg)))
    def fake_run_command(cmd: list[str], **kwargs):
        run_command_calls.append((cmd, kwargs))
    monkeypatch.setattr(rc_cmd, "run_command", fake_run_command)
    rc_cmd.remove_old_releases(version=version, task_sdk_version=task_sdk_version, repo_root=repo_root)
    assert chdir_calls == [
        f"{repo_root}/asf-dist/dev/airflow",
        f"{repo_root}/asf-dist/dev/airflow/task-sdk",
        repo_root,
    ]
    assert "The following old Airflow releases should be removed: ['3.1.5rc2']" in console_messages
    assert (
        "The following old Task SDK releases should be removed: ['1.0.6rc1', '1.0.6rc2']" in console_messages
    )
    assert "Removing old Airflow release 3.1.5rc2" in console_messages
    assert "Removing old Task SDK release 1.0.6rc1" in console_messages
    assert "Removing old Task SDK release 1.0.6rc2" in console_messages
    assert "[success]Old releases removed" in console_messages
    if is_ci:
        # In CI, should simulate SVN commands
        assert "The following old Airflow releases should be removed: ['3.1.5rc2']" in console_messages
        assert "Removing old Airflow release 3.1.5rc2" in console_messages
        assert (
            "The following old Task SDK releases should be removed: ['1.0.6rc1', '1.0.6rc2']"
            in console_messages
        )
        assert "Removing old Task SDK release 1.0.6rc1" in console_messages
        # Should NOT have any actual svn commands
        assert run_command_calls == [
            (
                [
                    "svn",
                    "rm",
                    "3.1.5rc2",
                ],
                {
                    "check": True,
                },
            ),
            (
                [
                    "svn",
                    "commit",
                    "-m",
                    "Remove old release: 3.1.5rc2",
                ],
                {
                    "check": True,
                },
            ),
            (
                [
                    "svn",
                    "rm",
                    "1.0.6rc1",
                ],
                {
                    "check": True,
                },
            ),
            (
                [
                    "svn",
                    "commit",
                    "-m",
                    "Remove old Task SDK release: 1.0.6rc1",
                ],
                {
                    "check": True,
                },
            ),
        ]
    else:
        # Both Airflow and Task SDK removals were confirmed
        assert run_command_calls == [
            (["svn", "rm", "3.1.5rc2"], {"check": True}),
            (["svn", "commit", "-m", "Remove old release: 3.1.5rc2"], {"check": True}),
            (["svn", "rm", "1.0.6rc1"], {"check": True}),
            (["svn", "commit", "-m", "Remove old Task SDK release: 1.0.6rc1"], {"check": True}),
        ]
def test_move_artifacts_to_svn_returns_early_when_user_declines(monkeypatch, rc_cmd):
    """Declining the initial prompt must abort before any side effect happens."""
    seen_prompts: list[str] = []

    def decline_everything(prompt: str, **_kwargs):
        seen_prompts.append(prompt)
        return False

    def forbidden(*_args, **_kwargs):
        raise AssertionError("This should not have been called when user declines the initial prompt.")

    monkeypatch.setattr(rc_cmd, "confirm_action", decline_everything)
    monkeypatch.setattr(rc_cmd.os, "chdir", forbidden)
    monkeypatch.setattr(rc_cmd, "console_print", forbidden)
    monkeypatch.setattr(rc_cmd, "run_command", forbidden)

    rc_cmd.move_artifacts_to_svn(
        version="2.10.0rc3",
        version_without_rc="2.10.0",
        task_sdk_version="1.0.6rc3",
        task_sdk_version_without_rc="1.0.6",
        repo_root="/repo/root",
    )

    assert seen_prompts == ["Do you want to move artifacts to SVN?"]
@mock.patch.dict(os.environ, {"CI": "true"})
def test_move_artifacts_to_svn_completes_successfully(monkeypatch, rc_cmd):
    """Test that function completes successfully when user confirms based on CI environment."""
    version = "2.10.0rc3"
    version_without_rc = "2.10.0"
    task_sdk_version = "1.0.6rc3"
    task_sdk_version_without_rc = "1.0.6"
    repo_root = "/repo/root"
    # Check if we're in CI environment
    # NOTE(review): the decorator above patches CI="true", so is_ci is always
    # True here and the else branch below cannot execute as written.
    is_ci = is_ci_environment()
    chdir_calls: list[str] = []
    console_messages: list[str] = []
    run_command_calls: list[tuple[list[str] | str, dict]] = []
    confirm_prompts: list[str] = []
    def fake_confirm_action(prompt: str, **kwargs):
        confirm_prompts.append(prompt)
        return True
    def fake_chdir(path: str):
        chdir_calls.append(path)
    def fake_run_command(cmd: list[str] | str, **kwargs):
        # cmd may be a list (exec-style) or a string (shell=True mv commands).
        run_command_calls.append((cmd, kwargs))
    monkeypatch.setattr(rc_cmd, "confirm_action", fake_confirm_action)
    monkeypatch.setattr(rc_cmd.os, "chdir", fake_chdir)
    monkeypatch.setattr(rc_cmd, "console_print", lambda msg="": console_messages.append(str(msg)))
    monkeypatch.setattr(rc_cmd, "run_command", fake_run_command)
    rc_cmd.move_artifacts_to_svn(
        version=version,
        version_without_rc=version_without_rc,
        task_sdk_version=task_sdk_version,
        task_sdk_version_without_rc=task_sdk_version_without_rc,
        repo_root=repo_root,
    )
    assert confirm_prompts == ["Do you want to move artifacts to SVN?"]
    assert chdir_calls == [f"{repo_root}/asf-dist/dev/airflow"]
    if is_ci:
        # In CI, should use mkdir -p instead of svn mkdir
        assert (
            "[info]Running in CI environment - executing mkdir (override dry-run mode if specified)"
            in console_messages
        )
        assert any(
            cmd == ["mkdir", "-p", version] and kwargs.get("check") is True
            for cmd, kwargs in run_command_calls
        )
        assert any(
            cmd == ["mkdir", "-p", f"task-sdk/{task_sdk_version}"] and kwargs.get("check") is True
            for cmd, kwargs in run_command_calls
        )
        # Should NOT have any svn mkdir commands
        assert not any(
            isinstance(cmd, list) and len(cmd) >= 2 and cmd[0] == "svn" and cmd[1] == "mkdir"
            for cmd, kwargs in run_command_calls
        )
    else:
        # Verify svn mkdir for airflow version
        assert any(
            cmd == ["svn", "mkdir", version] and kwargs.get("check") is True
            for cmd, kwargs in run_command_calls
        )
        # Verify svn mkdir for task-sdk version
        assert any(
            cmd == ["svn", "mkdir", f"task-sdk/{task_sdk_version}"] for cmd, kwargs in run_command_calls
        )
    # Verify mv commands run normally in both cases
    assert any(
        cmd == f"mv {repo_root}/dist/*{version_without_rc}* {version}/"
        and kwargs.get("check") is True
        and kwargs.get("shell") is True
        for cmd, kwargs in run_command_calls
    )
    assert any(
        cmd == f"mv {repo_root}/dist/*{task_sdk_version_without_rc}* task-sdk/{task_sdk_version}/"
        and kwargs.get("check") is True
        and kwargs.get("shell") is True
        for cmd, kwargs in run_command_calls
    )
    assert "[success]Moved artifacts to SVN" in console_messages
def test_push_artifacts_to_asf_repo_returns_early_when_user_declines(monkeypatch, rc_cmd):
    """Test that function returns early when user declines initial prompt."""
    version = "2.10.0rc3"
    task_sdk_version = "1.0.6rc3"
    repo_root = "/repo/root"
    confirm_prompts: list[str] = []
    def fake_confirm_action(prompt: str, **kwargs):
        confirm_prompts.append(prompt)
        # Any confirm_action call with abort=True (other than the initial push
        # prompt) would terminate the process in the real flow; mimic that so
        # an unexpected abort-style prompt fails the test visibly.
        if kwargs.get("abort") and not prompt.startswith("Do you want to push"):
            # Simulate abort behavior
            import sys
            sys.exit(1)
        return False
    def should_not_be_called(*_args, **_kwargs):
        raise AssertionError("This should not have been called when user declines the initial prompt.")
    monkeypatch.setattr(rc_cmd, "confirm_action", fake_confirm_action)
    monkeypatch.setattr(rc_cmd, "get_dry_run", lambda: False)
    monkeypatch.setattr(rc_cmd.os, "chdir", should_not_be_called)
    monkeypatch.setattr(rc_cmd, "console_print", should_not_be_called)
    monkeypatch.setattr(rc_cmd, "run_command", should_not_be_called)
    rc_cmd.push_artifacts_to_asf_repo(version=version, task_sdk_version=task_sdk_version, repo_root=repo_root)
    assert confirm_prompts == ["Do you want to push artifacts to ASF repo?"]
@mock.patch.dict(os.environ, {"CI": "true"})
def test_push_artifacts_to_asf_repo_completes_successfully(monkeypatch, rc_cmd):
    """Test that function completes successfully when user confirms all prompts based on CI environment."""
    version = "2.10.0rc3"
    task_sdk_version = "1.0.6rc3"
    repo_root = "/repo/root"
    # Check if we're in CI environment
    # NOTE(review): the decorator above patches CI="true", so is_ci is always
    # True here and the else branch below cannot execute as written.
    is_ci = is_ci_environment()
    chdir_calls: list[str] = []
    console_messages: list[str] = []
    run_command_calls: list[tuple[list[str] | str, dict]] = []
    confirm_prompts: list[str] = []
    def fake_confirm_action(prompt: str, **kwargs):
        confirm_prompts.append(prompt)
        return True
    def fake_chdir(path: str):
        chdir_calls.append(path)
    def fake_run_command(cmd: list[str] | str, **kwargs):
        run_command_calls.append((cmd, kwargs))
    monkeypatch.setattr(rc_cmd, "confirm_action", fake_confirm_action)
    monkeypatch.setattr(rc_cmd, "get_dry_run", lambda: False)
    monkeypatch.setattr(rc_cmd.os, "chdir", fake_chdir)
    monkeypatch.setattr(rc_cmd, "console_print", lambda msg="": console_messages.append(str(msg)))
    monkeypatch.setattr(rc_cmd, "run_command", fake_run_command)
    rc_cmd.push_artifacts_to_asf_repo(version=version, task_sdk_version=task_sdk_version, repo_root=repo_root)
    assert confirm_prompts == [
        "Do you want to push artifacts to ASF repo?",
        "Do you want to continue?",
        "Do you want to continue?",
    ]
    assert chdir_calls == [f"{repo_root}/asf-dist/dev/airflow"]
    # Both version directories are listed before pushing.
    ls_calls = [(cmd, kwargs) for cmd, kwargs in run_command_calls if cmd == ["ls"]]
    assert len(ls_calls) == 2 # Two ls calls
    assert any(kwargs.get("cwd") == f"{repo_root}/asf-dist/dev/airflow/{version}" for cmd, kwargs in ls_calls)
    assert any(
        kwargs.get("cwd") == f"{repo_root}/asf-dist/dev/airflow/task-sdk/{task_sdk_version}"
        for cmd, kwargs in ls_calls
    )
    if is_ci:
        # In CI, should simulate SVN commands
        assert "Airflow Version Files to push to svn:" in console_messages
        assert "Task SDK Version Files to push to svn:" in console_messages
        assert "[success]Files pushed to svn" in console_messages
        assert (
            "Verify that the files are available here: https://dist.apache.org/repos/dist/dev/airflow/"
            in console_messages
        )
        # There are some SVN traces in the code but still they are just printing and not actually running any SVN commands in CI
    else:
        # In normal environment, should execute SVN commands
        assert any(
            cmd == f"svn add {version}/* task-sdk/{task_sdk_version}/*" for cmd, kwargs in run_command_calls
        )
        assert any(
            cmd
            == ["svn", "commit", "-m", f"Add artifacts for Airflow {version} and Task SDK {task_sdk_version}"]
            for cmd, kwargs in run_command_calls
        )
        assert "[success]Files pushed to svn" in console_messages
        assert (
            "Verify that the files are available here: https://dist.apache.org/repos/dist/dev/airflow/"
            in console_messages
        )
| {
"repo_id": "apache/airflow",
"file_path": "dev/breeze/tests/test_release_candidate_command.py",
"license": "Apache License 2.0",
"lines": 604,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/tests/unit/api_fastapi/common/test_types.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from pydantic import ValidationError
from airflow.api_fastapi.common.types import OklchColor
class TestOklchColor:
    """Validation tests for the OklchColor pydantic type."""

    @pytest.mark.parametrize(
        ("input_str", "expected"),
        [
            ("oklch(0.637 0.237 25.331)", (0.637, 0.237, 25.331, "oklch(0.637 0.237 25.331)")),
            ("oklch(1 0.230 25.331)", (1.0, 0.23, 25.331, "oklch(1.0 0.23 25.331)")),
        ],
    )
    def test_valid_oklch(self, input_str, expected):
        """Valid strings parse into l/c/h components and round-trip via model_dump.

        ``expected`` is (lightness, chroma, hue, serialized form) — note the
        second case shows that serialization normalizes numbers (1 -> 1.0).
        """
        color = OklchColor.model_validate(input_str)
        assert color.lightness == pytest.approx(expected[0])
        assert color.chroma == pytest.approx(expected[1])
        assert color.hue == pytest.approx(expected[2])
        assert color.model_dump() == expected[3]

    @pytest.mark.parametrize(
        ("input_str", "error_message"),
        [
            # Component range violations.
            ("oklch(-0.1 0.15 240)", "Invalid lightness: -0.1 Must be between 0 and 1"),
            ("oklch(1.5 0.15 240)", "Invalid lightness: 1.5 Must be between 0 and 1"),
            ("oklch(0.5 -0.1 240)", "Invalid chroma: -0.1 Must be between 0 and 0.5"),
            ("oklch(0.5 0.6 240)", "Invalid chroma: 0.6 Must be between 0 and 0.5"),
            ("oklch(0.5 0.15 -10)", "Invalid hue: -10.0 Must be between 0 and 360"),
            ("oklch(0.5 0.15 400)", "Invalid hue: 400.0 Must be between 0 and 360"),
            # Structural/format violations (wrong scheme, wrong arity, non-numeric).
            ("rgb(255, 0, 0)", "Invalid OKLCH format: rgb(255, 0, 0) Expected format oklch(l c h)"),
            ("oklch(0.5 0.15)", "Invalid OKLCH format: oklch(0.5 0.15) Expected format oklch(l c h)"),
            (
                "oklch(0.5 0.15 240 0.5)",
                "Invalid OKLCH format: oklch(0.5 0.15 240 0.5) Expected format oklch(l c h)",
            ),
            (
                "oklch(abc 0.15 240)",
                "Invalid OKLCH format: oklch(abc 0.15 240) Expected format oklch(l c h)",
            ),
            (
                "oklch(10 0. 240)",
                "Invalid OKLCH format: oklch(10 0. 240) Expected format oklch(l c h)",
            ),
            (
                "oklch(10 3 .240)",
                "Invalid OKLCH format: oklch(10 3 .240) Expected format oklch(l c h)",
            ),
            (
                "oklch(. 3 240)",
                "Invalid OKLCH format: oklch(. 3 240) Expected format oklch(l c h)",
            ),
        ],
    )
    def test_invalid_oklch(self, input_str, error_message):
        """Invalid strings raise ValidationError with the expected message fragment."""
        with pytest.raises(ValidationError) as exc_info:
            OklchColor.model_validate(input_str)
        assert error_message in str(exc_info.value)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/api_fastapi/common/test_types.py",
"license": "Apache License 2.0",
"lines": 71,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:dev/breeze/src/airflow_breeze/utils/docker_compose_utils.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Utilities for managing Airflow docker-compose environments in tests."""
from __future__ import annotations
import os
import sys
import tempfile
import time
import urllib.error
import urllib.request
from collections.abc import Callable
from pathlib import Path
from shutil import copyfile
import yaml
from cryptography.fernet import Fernet
from airflow_breeze.utils.console import get_console
from airflow_breeze.utils.run_utils import run_command
def setup_airflow_docker_compose_environment(
    docker_compose_source: Path,
    tmp_dir: Path | None = None,
    env_vars: dict[str, str] | None = None,
    docker_compose_modifications: Callable[[dict, Path], dict] | None = None,
) -> tuple[Path, Path]:
    """Set up a temporary directory with docker-compose files for Airflow.

    Copies the compose file into ``tmp_dir`` (creating a fresh temporary directory
    when none is given), creates the standard Airflow bind-mount sub-directories,
    and writes a ``.env`` file with the given environment variables, generating
    ``FERNET_KEY`` and ``AIRFLOW_UID`` defaults when they are missing.

    :param docker_compose_source: Path to the docker-compose.yaml file to copy.
    :param tmp_dir: Target directory; a new temporary directory is created when None.
    :param env_vars: Environment variables written to ``.env``; the caller's dict
        is not mutated.
    :param docker_compose_modifications: Optional callback receiving the parsed
        compose config and ``tmp_dir``; its return value is written back to the
        compose file.
    :return: Tuple of (tmp_dir, path of the generated .env file).
    """
    if tmp_dir is None:
        tmp_dir = Path(tempfile.mkdtemp(prefix="airflow-docker-compose-"))
    docker_compose_path = tmp_dir / "docker-compose.yaml"
    copyfile(docker_compose_source, docker_compose_path)
    for subdir in ("dags", "logs", "plugins", "config"):
        (tmp_dir / subdir).mkdir(exist_ok=True)
    # Copy so the defaults injected below do not leak back into the caller's dict.
    env_vars = dict(env_vars) if env_vars else {}
    if "FERNET_KEY" not in env_vars:
        env_vars["FERNET_KEY"] = Fernet.generate_key().decode()
    if "AIRFLOW_UID" not in env_vars:
        env_vars["AIRFLOW_UID"] = str(os.getuid())
    dot_env_file = tmp_dir / ".env"
    env_content = "\n".join([f"{key}={value}" for key, value in env_vars.items()])
    dot_env_file.write_text(env_content + "\n")
    if docker_compose_modifications:
        with open(docker_compose_path) as f:
            compose_config = yaml.safe_load(f)
        compose_config = docker_compose_modifications(compose_config, tmp_dir)
        with open(docker_compose_path, "w") as f:
            yaml.dump(compose_config, f, default_flow_style=False)
    return tmp_dir, dot_env_file
def start_docker_compose_and_wait_for_health(
    tmp_dir: Path,
    airflow_base_url: str = "http://localhost:8080",
    max_wait: int = 180,
    check_interval: int = 5,
) -> int:
    """Start docker-compose and wait for Airflow to be healthy.

    Polls the health endpoint every ``check_interval`` seconds until it returns
    HTTP 200 or ``max_wait`` seconds have elapsed.

    :param tmp_dir: Directory containing the docker-compose.yaml to run.
    :param airflow_base_url: Base URL where the Airflow API server is expected.
    :param max_wait: Maximum number of seconds to wait for a healthy response.
    :param check_interval: Seconds to sleep between health checks.
    :return: 0 on success, a non-zero exit code on failure.
    """
    health_check_url = f"{airflow_base_url}/api/v2/monitor/health"
    get_console().print("[info]Starting Airflow services with docker-compose...[/]")
    compose_up_result = run_command(
        ["docker", "compose", "up", "-d"], cwd=tmp_dir, check=False, verbose_override=True
    )
    if compose_up_result.returncode != 0:
        get_console().print("[error]Failed to start docker-compose[/]")
        return compose_up_result.returncode
    get_console().print(f"[info]Waiting for Airflow at {health_check_url}...[/]")
    elapsed = 0
    while elapsed < max_wait:
        try:
            response = urllib.request.urlopen(health_check_url, timeout=5)
            if response.status == 200:
                get_console().print("[success]Airflow is ready![/]")
                return 0
        except Exception:
            # Not reachable yet (connection refused, HTTP error, timeout) - keep polling.
            # HTTPError/URLError are both Exception subclasses, so one clause suffices.
            pass
        # Sleep and advance the clock on *every* unsuccessful attempt. Previously
        # these lines lived inside the except block, so a reachable endpoint that
        # answered with a non-200 status caused a busy loop that never timed out.
        time.sleep(check_interval)
        elapsed += check_interval
        if elapsed % 15 == 0:
            get_console().print(f"[info]Still waiting... ({elapsed}s/{max_wait}s)[/]")
    get_console().print(f"[error]Airflow did not become ready within {max_wait} seconds[/]")
    get_console().print("[info]Docker compose logs:[/]")
    run_command(["docker", "compose", "logs"], cwd=tmp_dir, check=False)
    return 1
def stop_docker_compose(tmp_dir: Path, remove_volumes: bool = True) -> None:
    """Tear down the docker-compose services started from ``tmp_dir``.

    :param tmp_dir: Directory containing the docker-compose.yaml file.
    :param remove_volumes: When True, also remove the associated volumes.
    """
    get_console().print("[info]Stopping docker-compose services...[/]")
    down_command = ["docker", "compose", "down"] + (["-v"] if remove_volumes else [])
    run_command(down_command, cwd=tmp_dir, check=False)
    get_console().print("[success]Docker-compose cleaned up.[/]")
def ensure_image_exists_and_build_if_needed(image_name: str, python: str) -> None:
    """Make sure ``image_name`` is available locally, pulling or building it if needed.

    Resolution order when ``docker inspect`` cannot find the image:
    pull from Docker Hub (only for ``apache/airflow:*`` names), then fall back to
    ``breeze prod-image build`` and tag the freshly built image as ``image_name``.
    Exits the process with status 1 when building, tagging, or inspecting fails
    for a reason other than the image simply not existing.

    :param image_name: Fully qualified image reference to make available.
    :param python: Python version passed to ``breeze prod-image build``.
    """
    inspect_result = run_command(
        ["docker", "inspect", image_name], check=False, capture_output=True, text=True
    )
    if inspect_result.returncode != 0:
        get_console().print(f"[info]Image {image_name} not found locally[/]")
        # "no such object" distinguishes a genuinely missing image from other
        # inspect failures (e.g. daemon errors), which are fatal below.
        if "no such object" in inspect_result.stderr.lower():
            # Check if it looks like a Docker Hub image (apache/airflow:*)
            if image_name.startswith("apache/airflow:"):
                get_console().print(f"[info]Pulling image from Docker Hub: {image_name}[/]")
                pull_result = run_command(["docker", "pull", image_name], check=False)
                if pull_result.returncode == 0:
                    get_console().print(f"[success]Successfully pulled {image_name}[/]")
                    return
                # Pull failed: fall through to building the image locally.
                get_console().print(f"[warning]Failed to pull {image_name}, will try to build[/]")
            get_console().print(f"[info]Building image with: breeze prod-image build --python {python}[/]")
            build_result = run_command(["breeze", "prod-image", "build", "--python", python], check=False)
            if build_result.returncode != 0:
                get_console().print("[error]Failed to build image[/]")
                sys.exit(1)
            get_console().print(f"[info]Tagging the built image as {image_name}[/]")
            # Breeze tags its output as <registry>/airflow:latest; look it up so we
            # can re-tag it under the name the compose file expects.
            list_images_result = run_command(
                [
                    "docker",
                    "images",
                    "--format",
                    "{{.Repository}}:{{.Tag}}",
                    "--filter",
                    "reference=*/airflow:latest",
                ],
                check=False,
                capture_output=True,
                text=True,
            )
            if list_images_result.returncode == 0 and list_images_result.stdout.strip():
                # If several images match, the first listed one is used.
                built_image = list_images_result.stdout.strip().split("\n")[0]
                get_console().print(f"[info]Found built image: {built_image}[/]")
                tag_result = run_command(["docker", "tag", built_image, image_name], check=False)
                if tag_result.returncode != 0:
                    get_console().print(f"[error]Failed to tag image {built_image} as {image_name}[/]")
                    sys.exit(1)
                get_console().print(f"[success]Successfully tagged {built_image} as {image_name}[/]")
            else:
                get_console().print("[warning]Could not find built image to tag. Docker compose may fail.[/]")
        else:
            get_console().print(f"[error]Failed to inspect image {image_name}[/]")
            sys.exit(1)
| {
"repo_id": "apache/airflow",
"file_path": "dev/breeze/src/airflow_breeze/utils/docker_compose_utils.py",
"license": "Apache License 2.0",
"lines": 146,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:shared/module_loading/tests/module_loading/test_module_loading.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import functools
import pytest
from airflow_shared.module_loading import import_string, is_valid_dotpath, qualname
def _import_string():
pass
def _sample_function():
pass
class TestModuleImport:
    """Tests for ``import_string`` resolution of dotted paths."""
    def test_import_string(self):
        """Resolving a valid dotted path returns the object; bad paths raise ImportError."""
        # The dotted path must match this test module's import path exactly.
        cls = import_string("module_loading.test_module_loading._import_string")
        assert cls == _import_string
        # Test exceptions raised
        with pytest.raises(ImportError):
            import_string("no_dots_in_path")
        msg = 'Module "module_loading.test_module_loading" does not define a "nonexistent" attribute'
        with pytest.raises(ImportError, match=msg):
            import_string("module_loading.test_module_loading.nonexistent")
class TestModuleLoading:
    """Tests for the ``is_valid_dotpath`` syntax check."""
    @pytest.mark.parametrize(
        ("path", "expected"),
        [
            pytest.param("valid_path", True, id="module_no_dots"),
            pytest.param("valid.dot.path", True, id="standard_dotpath"),
            pytest.param("package.sub_package.module", True, id="dotpath_with_underscores"),
            pytest.param("MyPackage.MyClass", True, id="mixed_case_path"),
            pytest.param("invalid..path", False, id="consecutive_dots_fails"),
            pytest.param(".invalid.path", False, id="leading_dot_fails"),
            pytest.param("invalid.path.", False, id="trailing_dot_fails"),
            pytest.param("1invalid.path", False, id="leading_number_fails"),
            pytest.param(42, False, id="not_a_string"),
        ],
    )
    def test_is_valid_dotpath(self, path, expected):
        """Accepts identifier-like dotted paths and rejects malformed or non-string input."""
        assert is_valid_dotpath(path) == expected
class TestQualname:
    """Tests for ``qualname`` rendering of functions, classes, instances and partials.

    NOTE: the expected strings embed the ``__qualname__`` of locals defined inside
    these test methods, so the method and local names must not be renamed.
    """
    def test_qualname_default_includes_module(self):
        """Test that qualname() by default includes the module path."""
        result = qualname(_sample_function)
        assert result == "module_loading.test_module_loading._sample_function"
    def test_qualname_exclude_module_simple_function(self):
        """Test that exclude_module=True returns only the function name."""
        result = qualname(_sample_function, exclude_module=True)
        assert result == "_sample_function"
    def test_qualname_exclude_module_nested_function(self):
        """Test that exclude_module=True works with nested functions."""
        def outer():
            def inner():
                pass
            return inner
        inner_func = outer()
        result = qualname(inner_func, exclude_module=True)
        # Nested defs keep their full __qualname__ including "<locals>" segments.
        assert (
            result
            == "TestQualname.test_qualname_exclude_module_nested_function.<locals>.outer.<locals>.inner"
        )
    def test_qualname_exclude_module_functools_partial(self):
        """Test that exclude_module=True handles functools.partial correctly."""
        def base_func(x, y):
            pass
        partial_func = functools.partial(base_func, x=1)
        result = qualname(partial_func, exclude_module=True)
        # The partial is unwrapped to the underlying function's qualname.
        assert result == "TestQualname.test_qualname_exclude_module_functools_partial.<locals>.base_func"
    def test_qualname_exclude_module_class(self):
        """Test that exclude_module=True works with classes."""
        class MyClass:
            pass
        result = qualname(MyClass, exclude_module=True)
        assert result == "TestQualname.test_qualname_exclude_module_class.<locals>.MyClass"
    def test_qualname_exclude_module_instance(self):
        """Test that exclude_module=True works with class instances."""
        class MyClass:
            pass
        instance = MyClass()
        result = qualname(instance, exclude_module=True)
        # Instances resolve through their type's qualname.
        assert result == "TestQualname.test_qualname_exclude_module_instance.<locals>.MyClass"
    def test_qualname_use_qualname_still_includes_module(self):
        """Test that use_qualname=True still includes module prefix."""
        result = qualname(_sample_function, use_qualname=True)
        assert result == "module_loading.test_module_loading._sample_function"
| {
"repo_id": "apache/airflow",
"file_path": "shared/module_loading/tests/module_loading/test_module_loading.py",
"license": "Apache License 2.0",
"lines": 97,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_12_08.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from cadwyn import VersionChange, endpoint
class MovePreviousRunEndpoint(VersionChange):
    """Add new previous-run endpoint and migrate old endpoint."""
    description = __doc__
    # Per the attribute name, these instructions describe the *previous* API
    # version relative to this one: the dag_id-less endpoint did not exist yet,
    # and the per-dag endpoint still existed.
    instructions_to_migrate_to_previous_version = (
        endpoint("/dag-runs/previous", ["GET"]).didnt_exist,
        endpoint("/dag-runs/{dag_id}/previous", ["GET"]).existed,
    )
class AddDagRunDetailEndpoint(VersionChange):
    """Add dag run detail endpoint."""
    description = __doc__
    # Per the attribute name: in the previous API version the detail endpoint
    # did not exist yet.
    instructions_to_migrate_to_previous_version = (
        endpoint("/dag-runs/{dag_id}/{run_id}", ["GET"]).didnt_exist,
    )
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_12_08.py",
"license": "Apache License 2.0",
"lines": 31,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/tests/unit/api_fastapi/execution_api/versions/v2025_11_07/test_dag_runs.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow._shared.timezones import timezone
from airflow.utils.state import DagRunState
pytestmark = pytest.mark.db_test
@pytest.fixture
def ver_client(client):
    """Return the shared test client pinned to API version 2025-11-07."""
    client.headers.update({"Airflow-API-Version": "2025-11-07"})
    return client
def test_get_previous_dag_run_redirect(ver_client, session, dag_maker):
    """The 2025-11-07 per-dag previous-run endpoint returns the latest run before the given logical date."""
    with dag_maker(dag_id="test_dag_id", session=session, serialized=True):
        pass
    # Three runs with increasing logical dates; run2 is the most recent one
    # strictly before the queried date.
    dag_maker.create_dagrun(
        state=DagRunState.SUCCESS,
        logical_date=timezone.datetime(2025, 1, 1),
        run_id="run1",
    )
    dag_maker.create_dagrun(
        state=DagRunState.FAILED,
        logical_date=timezone.datetime(2025, 1, 5),
        run_id="run2",
    )
    dag_maker.create_dagrun(
        state=DagRunState.SUCCESS,
        logical_date=timezone.datetime(2025, 1, 10),
        run_id="run3",
    )
    session.commit()
    response = ver_client.get(
        "/execution/dag-runs/test_dag_id/previous",
        params={"logical_date": timezone.datetime(2025, 1, 10).isoformat()},
    )
    assert response.status_code == 200
    result = response.json()
    assert result["dag_id"] == "test_dag_id"
    assert result["run_id"] == "run2"  # Most recent before 2025-01-10
    assert result["state"] == "failed"
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/tests/unit/api_fastapi/execution_api/versions/v2025_11_07/test_dag_runs.py",
"license": "Apache License 2.0",
"lines": 53,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:providers/keycloak/src/airflow/providers/keycloak/auth_manager/cli/utils.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import functools
from collections.abc import Callable
from typing import Any
def dry_run_message_wrap(func: Callable) -> Callable:
    """Wrap CLI commands to display dry-run messages around the command execution."""

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # The CLI args namespace arrives either as the first positional argument
        # or as the "args" keyword argument.
        namespace = args[0] if args else kwargs.get("args")
        is_dry_run = getattr(namespace, "dry_run", False)
        if is_dry_run:
            print(
                "Performing dry run. "
                "It will check the connection to Keycloak but won't create any resources.\n"
            )
        outcome = func(*args, **kwargs)
        if is_dry_run:
            print("Dry run completed.")
        return outcome

    return wrapper
def dry_run_preview(preview_func: Callable[..., None]) -> Callable:
    """
    Handle dry-run preview logic for create functions.

    The decorated function gains a ``_dry_run`` keyword: when truthy, only
    ``preview_func`` runs (with the remaining arguments) and ``None`` is
    returned; otherwise the wrapped function runs normally. ``_dry_run`` is
    stripped before either callable is invoked.

    :param preview_func: Function to call for previewing what would be created.
        Should accept the same arguments as the decorated function.
    """

    def decorator(func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs) -> Any:
            # Remove the control flag so neither callable ever sees it.
            if kwargs.pop("_dry_run", False):
                preview_func(*args, **kwargs)
                return None
            return func(*args, **kwargs)

        return wrapper

    return decorator
| {
"repo_id": "apache/airflow",
"file_path": "providers/keycloak/src/airflow/providers/keycloak/auth_manager/cli/utils.py",
"license": "Apache License 2.0",
"lines": 62,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:providers/keycloak/tests/unit/keycloak/auth_manager/cli/test_utils.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import MagicMock
from airflow.providers.keycloak.auth_manager.cli.utils import dry_run_message_wrap, dry_run_preview
class TestDryRunMessageWrap:
    """Tests for the ``dry_run_message_wrap`` decorator."""
    def test_prints_messages_when_dry_run_true(self, capsys):
        """When args.dry_run is True, both dry-run messages are printed and the result passes through."""
        @dry_run_message_wrap
        def test_func(args):
            return "executed"
        # MagicMock stands in for the argparse namespace the CLI passes in.
        args = MagicMock()
        args.dry_run = True
        result = test_func(args)
        captured = capsys.readouterr()
        assert "Performing dry run" in captured.out
        assert "Dry run completed" in captured.out
        assert result == "executed"
class TestDryRunPreview:
    """Tests for the ``dry_run_preview`` decorator."""

    def test_calls_preview_when_dry_run_true(self):
        """With _dry_run=True only the preview runs and None is returned."""
        invocations = []

        def record_preview(*args, **kwargs):
            invocations.append(True)

        @dry_run_preview(record_preview)
        def actual_func(*args, **kwargs):
            return "actual"

        assert actual_func(_dry_run=True) is None
        assert len(invocations) == 1

    def test_calls_actual_when_dry_run_false(self):
        """With _dry_run=False the wrapped function runs normally."""

        @dry_run_preview(lambda *a, **k: None)
        def actual_func(*args, **kwargs):
            return "actual"

        assert actual_func(_dry_run=False) == "actual"
| {
"repo_id": "apache/airflow",
"file_path": "providers/keycloak/tests/unit/keycloak/auth_manager/cli/test_utils.py",
"license": "Apache License 2.0",
"lines": 48,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:helm-tests/tests/helm_tests/apiserver/test_hpa_apiserver.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import jmespath
import pytest
from chart_utils.helm_template_generator import render_chart
class TestAPIServerHPA:
    """Tests HPA rendering for the api-server component of the Helm chart."""

    def test_hpa_disabled_by_default(self):
        """Disabled by default."""
        docs = render_chart(
            values={},
            show_only=["templates/api-server/api-server-hpa.yaml"],
        )
        assert docs == []

    def test_should_add_component_specific_labels(self):
        """Component-level labels propagate to the HPA metadata."""
        docs = render_chart(
            values={
                "airflowVersion": "3.0.2",
                "apiServer": {
                    "hpa": {"enabled": True},
                    "labels": {"test_label": "test_label_value"},
                },
            },
            show_only=["templates/api-server/api-server-hpa.yaml"],
        )
        assert "test_label" in jmespath.search("metadata.labels", docs[0])
        assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value"

    @pytest.mark.parametrize(
        ("min_replicas", "max_replicas"),
        [
            (None, None),
            (2, 8),
        ],
    )
    def test_min_max_replicas(self, min_replicas, max_replicas):
        """Verify minimum and maximum replicas (chart defaults are 1 and 5)."""
        docs = render_chart(
            values={
                "airflowVersion": "3.0.2",
                "apiServer": {
                    "hpa": {
                        "enabled": True,
                        **({"minReplicaCount": min_replicas} if min_replicas else {}),
                        **({"maxReplicaCount": max_replicas} if max_replicas else {}),
                    }
                },
            },
            show_only=["templates/api-server/api-server-hpa.yaml"],
        )
        # Parenthesize the conditional expressions: the previous form
        # `assert x == 1 if min_replicas is None else min_replicas` parsed as
        # `(x == 1) if ... else min_replicas`, so for non-None parameters the
        # assertion only checked the truthiness of the parameter itself and the
        # rendered chart values were never actually compared.
        assert jmespath.search("spec.minReplicas", docs[0]) == (1 if min_replicas is None else min_replicas)
        assert jmespath.search("spec.maxReplicas", docs[0]) == (5 if max_replicas is None else max_replicas)

    def test_hpa_behavior(self):
        """Verify HPA behavior."""
        expected_behavior = {
            "scaleDown": {
                "stabilizationWindowSeconds": 300,
                "policies": [{"type": "Percent", "value": 100, "periodSeconds": 15}],
            }
        }
        docs = render_chart(
            values={
                "airflowVersion": "3.0.2",
                "apiServer": {
                    "hpa": {
                        "enabled": True,
                        "behavior": expected_behavior,
                    },
                },
            },
            show_only=["templates/api-server/api-server-hpa.yaml"],
        )
        assert jmespath.search("spec.behavior", docs[0]) == expected_behavior

    @pytest.mark.parametrize(
        ("metrics", "expected_metrics"),
        [
            # default metrics
            (
                None,
                {
                    "type": "Resource",
                    "resource": {"name": "cpu", "target": {"type": "Utilization", "averageUtilization": 50}},
                },
            ),
            # custom metric
            (
                [
                    {
                        "type": "Pods",
                        "pods": {
                            "metric": {"name": "custom"},
                            "target": {"type": "Utilization", "averageUtilization": 50},
                        },
                    }
                ],
                {
                    "type": "Pods",
                    "pods": {
                        "metric": {"name": "custom"},
                        "target": {"type": "Utilization", "averageUtilization": 50},
                    },
                },
            ),
        ],
    )
    def test_should_use_hpa_metrics(self, metrics, expected_metrics):
        """Custom metrics replace the default CPU utilization metric."""
        docs = render_chart(
            values={
                "airflowVersion": "3.0.2",
                "apiServer": {
                    "hpa": {"enabled": True, **({"metrics": metrics} if metrics else {})},
                },
            },
            show_only=["templates/api-server/api-server-hpa.yaml"],
        )
        assert jmespath.search("spec.metrics[0]", docs[0]) == expected_metrics
| {
"repo_id": "apache/airflow",
"file_path": "helm-tests/tests/helm_tests/apiserver/test_hpa_apiserver.py",
"license": "Apache License 2.0",
"lines": 130,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
apache/airflow:airflow-core/src/airflow/observability/metrics/datadog_logger.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow._shared.observability.metrics import datadog_logger
from airflow.configuration import conf
if TYPE_CHECKING:
from airflow._shared.observability.metrics.datadog_logger import SafeDogStatsdLogger
def get_dogstatsd_logger(cls) -> SafeDogStatsdLogger:
    """Build a ``SafeDogStatsdLogger`` from the ``[metrics]`` section of the Airflow config."""
    metrics_options = {
        "tags_in_string": conf.get("metrics", "statsd_datadog_tags"),
        "host": conf.get("metrics", "statsd_host"),
        "port": conf.getint("metrics", "statsd_port"),
        "namespace": conf.get("metrics", "statsd_prefix"),
        "datadog_metrics_tags": conf.getboolean("metrics", "statsd_datadog_metrics_tags", fallback=True),
        "statsd_disabled_tags": conf.get("metrics", "statsd_disabled_tags", fallback=None),
        "metrics_allow_list": conf.get("metrics", "metrics_allow_list", fallback=None),
        "metrics_block_list": conf.get("metrics", "metrics_block_list", fallback=None),
        "stat_name_handler": conf.getimport("metrics", "stat_name_handler"),
        "statsd_influxdb_enabled": conf.getboolean("metrics", "statsd_influxdb_enabled", fallback=False),
    }
    return datadog_logger.get_dogstatsd_logger(cls, **metrics_options)
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/observability/metrics/datadog_logger.py",
"license": "Apache License 2.0",
"lines": 36,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/src/airflow/observability/metrics/otel_logger.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow._shared.observability.metrics import otel_logger
from airflow.configuration import conf
if TYPE_CHECKING:
from airflow._shared.observability.metrics.otel_logger import SafeOtelLogger
def get_otel_logger() -> SafeOtelLogger:
    """Build a ``SafeOtelLogger`` from the ``[metrics]`` section of the Airflow config."""
    # The otel_port / otel_interval_milliseconds options were deprecated, so
    # their defaults are not applied automatically; read them only when the
    # user has set them explicitly and fall back to None otherwise.
    port = conf.getint("metrics", "otel_port") if conf.has_option("metrics", "otel_port") else None
    conf_interval = (
        conf.getfloat("metrics", "otel_interval_milliseconds")
        if conf.has_option("metrics", "otel_interval_milliseconds")
        else None
    )
    return otel_logger.get_otel_logger(
        host=conf.get("metrics", "otel_host", fallback=None),  # ex: "breeze-otel-collector"
        port=port,  # ex: 4318
        prefix=conf.get("metrics", "otel_prefix", fallback=None),  # ex: "airflow"
        ssl_active=conf.getboolean("metrics", "otel_ssl_active", fallback=False),
        # PeriodicExportingMetricReader will default to an interval of 60000 millis.
        conf_interval=conf_interval,  # ex: 30000
        debug=conf.getboolean("metrics", "otel_debugging_on", fallback=False),
        service_name=conf.get("metrics", "otel_service", fallback=None),
        metrics_allow_list=conf.get("metrics", "metrics_allow_list", fallback=None),
        metrics_block_list=conf.get("metrics", "metrics_block_list", fallback=None),
        stat_name_handler=conf.getimport("metrics", "stat_name_handler", fallback=None),
        statsd_influxdb_enabled=conf.getboolean("metrics", "statsd_influxdb_enabled", fallback=False),
    )
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/observability/metrics/otel_logger.py",
"license": "Apache License 2.0",
"lines": 46,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/src/airflow/observability/metrics/statsd_logger.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from typing import TYPE_CHECKING
from airflow._shared.configuration import AirflowConfigException
from airflow._shared.observability.metrics import statsd_logger
from airflow.configuration import conf
if TYPE_CHECKING:
from airflow._shared.observability.metrics.statsd_logger import SafeStatsdLogger
log = logging.getLogger(__name__)
def get_statsd_logger() -> SafeStatsdLogger:
    """Build a ``SafeStatsdLogger``, honoring a custom StatsD client class when configured."""
    custom_client_class = conf.getimport("metrics", "statsd_custom_client_path", fallback=None)
    # no need to check for the scheduler/statsd_on -> this method is only called when it is set
    # and previously it would crash with None is callable if it was called without it.
    from statsd import StatsClient

    if custom_client_class is None:
        stats_class = StatsClient
    else:
        # A custom client must subclass statsd.StatsClient so it keeps the same API.
        if not issubclass(custom_client_class, StatsClient):
            raise AirflowConfigException(
                "Your custom StatsD client must extend the statsd.StatsClient in order to ensure "
                "backwards compatibility."
            )
        log.info("Successfully loaded custom StatsD client")
        stats_class = custom_client_class
    return statsd_logger.get_statsd_logger(
        stats_class=stats_class,
        host=conf.get("metrics", "statsd_host"),
        port=conf.getint("metrics", "statsd_port"),
        prefix=conf.get("metrics", "statsd_prefix"),
        ipv6=conf.getboolean("metrics", "statsd_ipv6", fallback=False),
        influxdb_tags_enabled=conf.getboolean("metrics", "statsd_influxdb_enabled", fallback=False),
        statsd_disabled_tags=conf.get("metrics", "statsd_disabled_tags", fallback=None),
        metrics_allow_list=conf.get("metrics", "metrics_allow_list", fallback=None),
        metrics_block_list=conf.get("metrics", "metrics_block_list", fallback=None),
        stat_name_handler=conf.getimport("metrics", "stat_name_handler"),
        statsd_influxdb_enabled=conf.getboolean("metrics", "statsd_influxdb_enabled", fallback=False),
    )
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/observability/metrics/statsd_logger.py",
"license": "Apache License 2.0",
"lines": 52,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/src/airflow/observability/trace.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from collections.abc import Callable
from socket import socket
from typing import TYPE_CHECKING
from airflow._shared.observability.traces.base_tracer import EmptyTrace, Tracer
from airflow.configuration import conf
log = logging.getLogger(__name__)
class _TraceMeta(type):
    """
    Metaclass that makes ``Trace`` a lazily-initialized singleton proxy.

    Attribute access on the ``Trace`` class is forwarded to a tracer instance
    created on first use from a configurable factory.
    """

    # Callable producing the concrete tracer; set lazily by configure_factory().
    factory: Callable[[], Tracer] | None = None
    # Cached singleton tracer (or EmptyTrace when initialization failed).
    instance: Tracer | EmptyTrace | None = None

    def __new__(cls, name, bases, attrs):
        return super().__new__(cls, name, bases, attrs)

    def __getattr__(cls, name: str):
        # Forward attribute access to the lazily created tracer instance.
        if not cls.factory:
            # Lazy initialization of the factory
            cls.configure_factory()
        if not cls.instance:
            cls._initialize_instance()
        return getattr(cls.instance, name)

    def _initialize_instance(cls):
        """Initialize the trace instance, falling back to EmptyTrace on failure."""
        # BUGFIX: the module does ``from socket import socket`` (the class), which has
        # no ``gaierror`` attribute, so the original ``except (socket.gaierror, ...)``
        # itself raised AttributeError the moment a lookup error actually occurred.
        from socket import gaierror

        if not cls.factory:
            # __call__ can reach this point before __getattr__ configured the factory.
            cls.configure_factory()
        try:
            cls.instance = cls.factory()
        except (gaierror, ImportError) as e:
            log.error("Could not configure Trace: %s. Using EmptyTrace instead.", e)
            cls.instance = EmptyTrace()

    def __call__(cls, *args, **kwargs):
        """Ensure the class behaves as a singleton."""
        if not cls.instance:
            cls._initialize_instance()
        return cls.instance

    def configure_factory(cls):
        """Configure the trace factory based on settings."""
        otel_on = conf.getboolean("traces", "otel_on")
        if otel_on:
            from airflow.observability.traces import otel_tracer

            cls.factory = staticmethod(
                lambda use_simple_processor=False: otel_tracer.get_otel_tracer(cls, use_simple_processor)
            )
        else:
            # EmptyTrace is a class and not inherently callable.
            # Using a lambda ensures it can be invoked as a callable factory.
            # staticmethod ensures the lambda is treated as a standalone function
            # and avoids passing `cls` as an implicit argument.
            cls.factory = staticmethod(lambda: EmptyTrace())

    def get_constant_tags(cls) -> str | None:
        """Get constant tags to add to all traces."""
        return conf.get("traces", "tags", fallback=None)
if TYPE_CHECKING:
    # Presented to type checkers as an EmptyTrace instance (the common tracer
    # interface); at runtime the metaclass supplies the real tracer lazily.
    Trace: EmptyTrace
else:

    class Trace(metaclass=_TraceMeta):
        """Empty class for Trace - we use metaclass to inject the right one."""
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/observability/trace.py",
"license": "Apache License 2.0",
"lines": 70,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/src/airflow/observability/traces/otel_tracer.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow._shared.observability.traces import otel_tracer
from airflow.configuration import conf
if TYPE_CHECKING:
from airflow._shared.observability.traces.otel_tracer import OtelTrace
def get_otel_tracer(cls, use_simple_processor: bool = False) -> OtelTrace:
    """Build an ``OtelTrace`` from the ``[traces]`` section of the Airflow configuration."""
    # The config values have been deprecated and therefore,
    # if the user hasn't added them to the config, the default values won't be used.
    # A fallback is needed to avoid an exception.
    otel_port = conf.getint("traces", "otel_port") if conf.has_option("traces", "otel_port") else None
    options = {
        "host": conf.get("traces", "otel_host", fallback=None),
        "port": otel_port,
        "ssl_active": conf.getboolean("traces", "otel_ssl_active", fallback=False),
        "otel_service": conf.get("traces", "otel_service", fallback=None),
        "debug": conf.getboolean("traces", "otel_debugging_on", fallback=False),
    }
    return otel_tracer.get_otel_tracer(cls, use_simple_processor, **options)
def get_otel_tracer_for_task(cls) -> OtelTrace:
    """Return an ``OtelTrace`` for task processes (built with ``use_simple_processor=True``)."""
    task_tracer = get_otel_tracer(cls, use_simple_processor=True)
    return task_tracer
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/observability/traces/otel_tracer.py",
"license": "Apache License 2.0",
"lines": 41,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:shared/observability/src/airflow_shared/observability/exceptions.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Exceptions for observability."""
from __future__ import annotations
class InvalidStatsNameException(Exception):
    """Raised when the name of a stat fails validation."""
| {
"repo_id": "apache/airflow",
"file_path": "shared/observability/src/airflow_shared/observability/exceptions.py",
"license": "Apache License 2.0",
"lines": 21,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:shared/observability/src/airflow_shared/observability/traces/utils.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import calendar
import logging
# NOTE(review): these look like default/placeholder values for trace and span ids —
# confirm their intended semantics against the callers before relying on them.
TRACE_ID = 0
SPAN_ID = 16
log = logging.getLogger(__name__)
def datetime_to_nano(datetime) -> int | None:
    """Convert a datetime (naive values treated as UTC) to integer nanoseconds since the epoch."""
    if not datetime:
        return None
    if datetime.tzinfo is not None:
        # Timezone-aware: the built-in timestamp already accounts for the offset.
        seconds = datetime.timestamp()
    else:
        # Naive: interpret as UTC, adding back the sub-second component.
        seconds = calendar.timegm(datetime.timetuple()) + datetime.microsecond / 1e6
    return int(seconds * 1e9)
def parse_traceparent(traceparent_str: str | None = None) -> dict:
    """Split a W3C traceparent header (``version-trace_id-parent_id-flags``) into a dict."""
    if traceparent_str is None:
        return {}
    parts = traceparent_str.split("-")
    if len(parts) != 4:
        raise ValueError("The traceparent string does not have the correct format.")
    version, trace_id, parent_id, flags = parts
    return {"version": version, "trace_id": trace_id, "parent_id": parent_id, "flags": flags}
def parse_tracestate(tracestate_str: str | None = None) -> dict:
    """
    Parse a W3C tracestate header (``key1=value1,key2=value2``) into a dict.

    Entries without an ``=`` are skipped; an empty or ``None`` header yields ``{}``.
    Keys and values are stripped of surrounding whitespace.
    """
    if not tracestate_str:
        return {}
    result: dict[str, str] = {}
    for pair in tracestate_str.split(","):
        if "=" in pair:
            # Split only on the first "=" so values that themselves contain "="
            # (e.g. base64 padding) don't raise ValueError from tuple unpacking.
            key, value = pair.split("=", 1)
            result[key.strip()] = value.strip()
    return result
| {
"repo_id": "apache/airflow",
"file_path": "shared/observability/src/airflow_shared/observability/traces/utils.py",
"license": "Apache License 2.0",
"lines": 53,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/src/airflow/sdk/observability/metrics/datadog_logger.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.sdk._shared.observability.metrics import datadog_logger
from airflow.sdk.configuration import conf
if TYPE_CHECKING:
from airflow.sdk._shared.observability.metrics.datadog_logger import SafeDogStatsdLogger
def get_dogstatsd_logger(cls) -> SafeDogStatsdLogger:
    """Assemble a ``SafeDogStatsdLogger`` from the ``[metrics]`` section of the configuration."""
    options = {
        "tags_in_string": conf.get("metrics", "statsd_datadog_tags"),
        "host": conf.get("metrics", "statsd_host"),
        "port": conf.getint("metrics", "statsd_port"),
        "namespace": conf.get("metrics", "statsd_prefix"),
        "datadog_metrics_tags": conf.getboolean("metrics", "statsd_datadog_metrics_tags", fallback=True),
        "statsd_disabled_tags": conf.get("metrics", "statsd_disabled_tags", fallback=None),
        "metrics_allow_list": conf.get("metrics", "metrics_allow_list", fallback=None),
        "metrics_block_list": conf.get("metrics", "metrics_block_list", fallback=None),
        "stat_name_handler": conf.getimport("metrics", "stat_name_handler"),
        "statsd_influxdb_enabled": conf.getboolean("metrics", "statsd_influxdb_enabled", fallback=False),
    }
    return datadog_logger.get_dogstatsd_logger(cls, **options)
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/observability/metrics/datadog_logger.py",
"license": "Apache License 2.0",
"lines": 36,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:task-sdk/src/airflow/sdk/observability/metrics/otel_logger.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.sdk._shared.observability.metrics import otel_logger
from airflow.sdk.configuration import conf
if TYPE_CHECKING:
from airflow.sdk._shared.observability.metrics.otel_logger import SafeOtelLogger
def get_otel_logger() -> SafeOtelLogger:
    """Build a ``SafeOtelLogger`` from the ``[metrics]`` section of the configuration."""
    # The config values have been deprecated, so when absent from the user's
    # config the defaults are not applied; read them defensively to avoid an
    # exception.
    port = conf.getint("metrics", "otel_port") if conf.has_option("metrics", "otel_port") else None
    interval = (
        conf.getfloat("metrics", "otel_interval_milliseconds")
        if conf.has_option("metrics", "otel_interval_milliseconds")
        else None
    )
    return otel_logger.get_otel_logger(
        host=conf.get("metrics", "otel_host", fallback=None),  # ex: "breeze-otel-collector"
        port=port,  # ex: 4318
        prefix=conf.get("metrics", "otel_prefix", fallback=None),  # ex: "airflow"
        ssl_active=conf.getboolean("metrics", "otel_ssl_active", fallback=False),
        # PeriodicExportingMetricReader will default to an interval of 60000 millis.
        conf_interval=interval,  # ex: 30000
        debug=conf.getboolean("metrics", "otel_debugging_on", fallback=False),
        service_name=conf.get("metrics", "otel_service", fallback=None),
        metrics_allow_list=conf.get("metrics", "metrics_allow_list", fallback=None),
        metrics_block_list=conf.get("metrics", "metrics_block_list", fallback=None),
        stat_name_handler=conf.getimport("metrics", "stat_name_handler", fallback=None),
        statsd_influxdb_enabled=conf.getboolean("metrics", "statsd_influxdb_enabled", fallback=False),
    )
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/observability/metrics/otel_logger.py",
"license": "Apache License 2.0",
"lines": 46,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:task-sdk/src/airflow/sdk/observability/metrics/statsd_logger.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from typing import TYPE_CHECKING
from airflow.sdk._shared.configuration import AirflowConfigException
from airflow.sdk._shared.observability.metrics import statsd_logger
from airflow.sdk.configuration import conf
if TYPE_CHECKING:
from airflow.sdk._shared.observability.metrics.statsd_logger import SafeStatsdLogger
log = logging.getLogger(__name__)
def get_statsd_logger() -> SafeStatsdLogger:
    """Resolve the StatsD client class (custom or default) and build a ``SafeStatsdLogger``."""
    from statsd import StatsClient

    # no need to check for the scheduler/statsd_on -> this method is only called when it is set
    # and previously it would crash with None is callable if it was called without it.
    stats_class = conf.getimport("metrics", "statsd_custom_client_path", fallback=None)
    if not stats_class:
        stats_class = StatsClient
    else:
        # A custom client must still be a StatsClient so downstream calls keep working.
        if not issubclass(stats_class, StatsClient):
            raise AirflowConfigException(
                "Your custom StatsD client must extend the statsd.StatsClient in order to ensure "
                "backwards compatibility."
            )
        log.info("Successfully loaded custom StatsD client")
    return statsd_logger.get_statsd_logger(
        stats_class=stats_class,
        host=conf.get("metrics", "statsd_host"),
        port=conf.getint("metrics", "statsd_port"),
        prefix=conf.get("metrics", "statsd_prefix"),
        ipv6=conf.getboolean("metrics", "statsd_ipv6", fallback=False),
        influxdb_tags_enabled=conf.getboolean("metrics", "statsd_influxdb_enabled", fallback=False),
        statsd_disabled_tags=conf.get("metrics", "statsd_disabled_tags", fallback=None),
        metrics_allow_list=conf.get("metrics", "metrics_allow_list", fallback=None),
        metrics_block_list=conf.get("metrics", "metrics_block_list", fallback=None),
        stat_name_handler=conf.getimport("metrics", "stat_name_handler"),
        statsd_influxdb_enabled=conf.getboolean("metrics", "statsd_influxdb_enabled", fallback=False),
    )
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/observability/metrics/statsd_logger.py",
"license": "Apache License 2.0",
"lines": 52,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:task-sdk/src/airflow/sdk/observability/trace.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from collections.abc import Callable
from socket import socket
from typing import TYPE_CHECKING
from airflow.sdk._shared.observability.traces.base_tracer import EmptyTrace, Tracer
from airflow.sdk.configuration import conf
log = logging.getLogger(__name__)
class _TraceMeta(type):
    """
    Metaclass that makes ``Trace`` a lazily-initialized singleton proxy.

    Attribute access on the ``Trace`` class is forwarded to a tracer instance
    created on first use from a configurable factory.
    """

    # Callable producing the concrete tracer; set lazily by configure_factory().
    factory: Callable[[], Tracer] | None = None
    # Cached singleton tracer (or EmptyTrace when initialization failed).
    instance: Tracer | EmptyTrace | None = None

    def __new__(cls, name, bases, attrs):
        return super().__new__(cls, name, bases, attrs)

    def __getattr__(cls, name: str):
        # Forward attribute access to the lazily created tracer instance.
        if not cls.factory:
            # Lazy initialization of the factory
            cls.configure_factory()
        if not cls.instance:
            cls._initialize_instance()
        return getattr(cls.instance, name)

    def _initialize_instance(cls):
        """Initialize the trace instance, falling back to EmptyTrace on failure."""
        # BUGFIX: the module does ``from socket import socket`` (the class), which has
        # no ``gaierror`` attribute, so the original ``except (socket.gaierror, ...)``
        # itself raised AttributeError the moment a lookup error actually occurred.
        from socket import gaierror

        if not cls.factory:
            # __call__ can reach this point before __getattr__ configured the factory.
            cls.configure_factory()
        try:
            cls.instance = cls.factory()
        except (gaierror, ImportError) as e:
            log.error("Could not configure Trace: %s. Using EmptyTrace instead.", e)
            cls.instance = EmptyTrace()

    def __call__(cls, *args, **kwargs):
        """Ensure the class behaves as a singleton."""
        if not cls.instance:
            cls._initialize_instance()
        return cls.instance

    def configure_factory(cls):
        """Configure the trace factory based on settings."""
        otel_on = conf.getboolean("traces", "otel_on")
        if otel_on:
            from airflow.sdk.observability.traces import otel_tracer

            cls.factory = staticmethod(
                lambda use_simple_processor=False: otel_tracer.get_otel_tracer(cls, use_simple_processor)
            )
        else:
            # EmptyTrace is a class and not inherently callable.
            # Using a lambda ensures it can be invoked as a callable factory.
            # staticmethod ensures the lambda is treated as a standalone function
            # and avoids passing `cls` as an implicit argument.
            cls.factory = staticmethod(lambda: EmptyTrace())

    def get_constant_tags(cls) -> str | None:
        """Get constant tags to add to all traces."""
        return conf.get("traces", "tags", fallback=None)
if TYPE_CHECKING:
    # Presented to type checkers as an EmptyTrace instance (the common tracer
    # interface); at runtime the metaclass supplies the real tracer lazily.
    Trace: EmptyTrace
else:

    class Trace(metaclass=_TraceMeta):
        """Empty class for Trace - we use metaclass to inject the right one."""
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/observability/trace.py",
"license": "Apache License 2.0",
"lines": 70,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/src/airflow/sdk/observability/traces/otel_tracer.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.sdk._shared.observability.traces import otel_tracer
from airflow.sdk.configuration import conf
if TYPE_CHECKING:
from airflow.sdk._shared.observability.traces.otel_tracer import OtelTrace
def get_otel_tracer(cls, use_simple_processor: bool = False) -> OtelTrace:
    """Create an ``OtelTrace`` using the ``[traces]`` section of the SDK configuration."""
    # The config values have been deprecated and therefore,
    # if the user hasn't added them to the config, the default values won't be used.
    # A fallback is needed to avoid an exception.
    resolved_port = conf.getint("traces", "otel_port") if conf.has_option("traces", "otel_port") else None
    tracer_options = {
        "host": conf.get("traces", "otel_host", fallback=None),
        "port": resolved_port,
        "ssl_active": conf.getboolean("traces", "otel_ssl_active", fallback=False),
        "otel_service": conf.get("traces", "otel_service", fallback=None),
        "debug": conf.getboolean("traces", "otel_debugging_on", fallback=False),
    }
    return otel_tracer.get_otel_tracer(cls, use_simple_processor, **tracer_options)
def get_otel_tracer_for_task(cls) -> OtelTrace:
    """Return an ``OtelTrace`` for task processes (built with ``use_simple_processor=True``)."""
    task_tracer = get_otel_tracer(cls, use_simple_processor=True)
    return task_tracer
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/observability/traces/otel_tracer.py",
"license": "Apache License 2.0",
"lines": 41,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:airflow-core/src/airflow/serialization/decoders.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
from typing import TYPE_CHECKING, Any, TypeVar
import dateutil.relativedelta
from airflow._shared.module_loading import import_string
from airflow.serialization.definitions.assets import (
SerializedAsset,
SerializedAssetAlias,
SerializedAssetAll,
SerializedAssetAny,
SerializedAssetBase,
SerializedAssetNameRef,
SerializedAssetUriRef,
SerializedAssetWatcher,
)
from airflow.serialization.definitions.deadline import (
DeadlineAlertFields,
SerializedDeadlineAlert,
SerializedReferenceModels,
)
from airflow.serialization.enums import DagAttributeTypes as DAT, Encoding
from airflow.serialization.helpers import (
find_registered_custom_partition_mapper,
find_registered_custom_timetable,
is_core_partition_mapper_import_path,
is_core_timetable_import_path,
)
if TYPE_CHECKING:
from airflow.partition_mappers.base import PartitionMapper
from airflow.timetables.base import Timetable as CoreTimetable
# NOTE(review): generic type variable; not referenced in this part of the module —
# presumably used by annotations elsewhere in the file. Confirm before removing.
R = TypeVar("R")
def decode_relativedelta(var: dict[str, Any]) -> dateutil.relativedelta.relativedelta:
    """Decode a relativedelta object."""
    # NOTE: mutates the input dict in place when a "weekday" entry is present.
    if "weekday" in var:
        # Rebuild the dateutil weekday object from its serialized argument list.
        var["weekday"] = dateutil.relativedelta.weekday(*var["weekday"])
    return dateutil.relativedelta.relativedelta(**var)
def decode_interval(value: int | dict) -> datetime.timedelta | dateutil.relativedelta.relativedelta:
    """Decode a serialized interval: a dict encodes a relativedelta, a number encodes seconds."""
    if not isinstance(value, dict):
        return datetime.timedelta(seconds=value)
    return decode_relativedelta(value)
def decode_run_immediately(value: bool | float) -> bool | datetime.timedelta:
    """Decode run-immediately: a float is a delay in seconds, a bool passes through unchanged."""
    return datetime.timedelta(seconds=value) if isinstance(value, float) else value
def smart_decode_trigger_kwargs(d):
    """
    Slightly clean up kwargs for display or execution.

    Detects one level of BaseSerialization and tries to deserialize the content,
    removing some __type __var ugliness when the value is displayed in UI to the
    user and/or while execution.
    """
    from airflow.serialization.serialized_objects import BaseSerialization

    if isinstance(d, dict) and Encoding.TYPE in d:
        return BaseSerialization.deserialize(d)
    return d
def _decode_asset(var: dict[str, Any]):
    """Rebuild a ``SerializedAsset`` (including its watchers) from its serialized dict."""
    decoded_watchers = []
    for watcher in var.get("watchers", []):
        trigger = {
            "classpath": watcher["trigger"]["classpath"],
            "kwargs": smart_decode_trigger_kwargs(watcher["trigger"]["kwargs"]),
        }
        decoded_watchers.append(SerializedAssetWatcher(name=watcher["name"], trigger=trigger))
    return SerializedAsset(
        name=var["name"],
        uri=var["uri"],
        group=var["group"],
        extra=var["extra"],
        watchers=decoded_watchers,
    )
def decode_asset_like(var: dict[str, Any]) -> SerializedAssetBase:
    """
    Decode a previously serialized asset-like object.

    :meta private:
    """
    typ = var[Encoding.TYPE]
    if Encoding.VAR in var:
        payload = var[Encoding.VAR]
    else:
        # Older/flat form: everything but the type marker is the payload.
        payload = {k: v for k, v in var.items() if k != Encoding.TYPE}
    if typ == DAT.ASSET:
        return _decode_asset(payload)
    if typ == DAT.ASSET_ALL:
        return SerializedAssetAll([decode_asset_like(x) for x in payload["objects"]])
    if typ == DAT.ASSET_ANY:
        return SerializedAssetAny([decode_asset_like(x) for x in payload["objects"]])
    if typ == DAT.ASSET_ALIAS:
        return SerializedAssetAlias(name=payload["name"], group=payload["group"])
    if typ == DAT.ASSET_REF:
        # A name-based ref carries "name"; otherwise it's URI-based.
        if "name" in payload:
            return SerializedAssetNameRef(**payload)
        return SerializedAssetUriRef(**payload)
    raise ValueError(f"deserialization not implemented for DAT {typ!r}")
def decode_deadline_alert(encoded_data: dict):
    """
    Decode a previously serialized deadline alert.

    :meta private:
    """
    from airflow.sdk.serde import deserialize

    payload = encoded_data.get(Encoding.VAR, encoded_data)
    ref_data = payload[DeadlineAlertFields.REFERENCE]
    ref_type = ref_data[SerializedReferenceModels.REFERENCE_TYPE_FIELD]
    ref_cls = SerializedReferenceModels.get_reference_class(ref_type)
    return SerializedDeadlineAlert(
        reference=ref_cls.deserialize_reference(ref_data),
        interval=datetime.timedelta(seconds=payload[DeadlineAlertFields.INTERVAL]),
        callback=deserialize(payload[DeadlineAlertFields.CALLBACK]),
    )
def decode_timetable(var: dict[str, Any]) -> CoreTimetable:
    """
    Decode a previously serialized timetable.

    Most of the deserialization logic is delegated to the actual type, which
    we import from string.

    :meta private:
    """
    importable_string = var[Encoding.TYPE]
    if is_core_timetable_import_path(importable_string):
        timetable_type: type[CoreTimetable] = import_string(importable_string)
    else:
        timetable_type = find_registered_custom_timetable(importable_string)
    return timetable_type.deserialize(var[Encoding.VAR])
def decode_partition_mapper(var: dict[str, Any]) -> PartitionMapper:
    """
    Decode a previously serialized PartitionMapper.

    Most of the deserialization logic is delegated to the actual type, which
    we import from string.

    :meta private:
    """
    if is_core_partition_mapper_import_path(path := var[Encoding.TYPE]):
        mapper_cls = import_string(path)
    else:
        mapper_cls = find_registered_custom_partition_mapper(path)
    return mapper_cls.deserialize(var[Encoding.VAR])
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/serialization/decoders.py",
"license": "Apache License 2.0",
"lines": 156,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/src/airflow/serialization/definitions/assets.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
from typing import TYPE_CHECKING, Any, ClassVar, Literal
import attrs
from airflow.api_fastapi.execution_api.datamodels.asset import AssetProfile
from airflow.serialization.dag_dependency import DagDependency
if TYPE_CHECKING:
    from collections.abc import Callable, Iterable, Iterator, MutableSequence
    from typing_extensions import Self
    from airflow.models.asset import AssetModel

    # Give type checkers the attrs protocol as the base of attrs-decorated classes.
    AttrsInstance = attrs.AttrsInstance
else:
    # At runtime the typing-only base collapses to plain object.
    AttrsInstance = object
@attrs.define(frozen=True)
class SerializedAssetUniqueKey(AttrsInstance):
    """
    Columns to identify an unique asset.

    :meta private:
    """

    name: str
    uri: str

    @classmethod
    def from_asset(cls, asset: SerializedAsset | AssetModel) -> Self:
        """Build a key from any object exposing ``name`` and ``uri``."""
        return cls(name=asset.name, uri=asset.uri)

    @classmethod
    def from_str(cls, key: str) -> Self:
        """Inverse of :meth:`to_str`: rebuild a key from its JSON string form."""
        payload = json.loads(key)
        return cls(**payload)

    def to_str(self) -> str:
        """Serialize the key to a JSON string of its fields."""
        return json.dumps(attrs.asdict(self))

    def asprofile(self) -> AssetProfile:
        """Project the key into an ``AssetProfile`` of type ``Asset``."""
        return AssetProfile(name=self.name, uri=self.uri, type="Asset")
class SerializedAssetBase:
    """
    Protocol for all serialized asset-like objects.

    :meta private:
    """

    def __bool__(self) -> bool:
        # Asset-like objects are always truthy, regardless of content.
        return True

    def as_expression(self) -> Any:
        """
        Serialize the asset into its scheduling expression.

        The return value is stored in DagModel for display purposes. It must be
        JSON-compatible.

        :meta private:
        """
        raise NotImplementedError

    def iter_assets(self) -> Iterator[tuple[SerializedAssetUniqueKey, SerializedAsset]]:
        # Yield (unique key, asset) pairs contained in this object.
        raise NotImplementedError

    def iter_asset_aliases(self) -> Iterator[tuple[str, SerializedAssetAlias]]:
        # Yield (name, alias) pairs contained in this object.
        raise NotImplementedError

    def iter_asset_refs(self) -> Iterator[SerializedAssetRef]:
        # Yield asset references contained in this object.
        raise NotImplementedError

    def iter_dag_dependencies(self, *, source: str, target: str) -> Iterator[DagDependency]:
        """
        Iterate a base asset as dag dependency.

        :meta private:
        """
        raise NotImplementedError
@attrs.define
class SerializedAssetWatcher:
    """Serialized representation of an asset watcher."""

    # Watcher name.
    name: str
    # Serialized trigger payload; built elsewhere as {"classpath": ..., "kwargs": ...}.
    trigger: dict
@attrs.define
class SerializedAsset(SerializedAssetBase):
    """Serialized representation of an asset."""

    name: str
    uri: str
    group: str
    extra: dict[str, Any]
    watchers: MutableSequence[SerializedAssetWatcher]

    def as_expression(self) -> Any:
        """
        Serialize the asset into its scheduling expression.

        :meta private:
        """
        return {"asset": {"uri": self.uri, "name": self.name, "group": self.group}}

    def iter_assets(self) -> Iterator[tuple[SerializedAssetUniqueKey, SerializedAsset]]:
        """Yield this asset keyed by its (name, uri) unique key."""
        yield SerializedAssetUniqueKey.from_asset(self), self

    def iter_asset_aliases(self) -> Iterator[tuple[str, SerializedAssetAlias]]:
        """A concrete asset contains no aliases."""
        return iter(())

    def iter_asset_refs(self) -> Iterator[SerializedAssetRef]:
        """A concrete asset contains no references."""
        return iter(())

    def iter_dag_dependencies(self, *, source: str, target: str) -> Iterator[DagDependency]:
        """
        Iterate an asset as dag dependency.

        :meta private:
        """
        # We can't get asset id at this stage.
        # This will be updated when running SerializedDagModel.get_dag_dependencies
        dependency_id = SerializedAssetUniqueKey.from_asset(self).to_str()
        yield DagDependency(
            source=source or "asset",
            target=target or "asset",
            label=self.name,
            dependency_type="asset",
            dependency_id=dependency_id,
        )

    def asprofile(self) -> AssetProfile:
        """
        Profiles Asset to AssetProfile.

        :meta private:
        """
        return AssetProfile(name=self.name or None, uri=self.uri or None, type="Asset")

    def __hash__(self):
        # Hash a stable JSON rendering of the declared fields so equal assets hash equally.
        field_filter = attrs.filters.include(*attrs.fields_dict(SerializedAsset))
        as_dict = attrs.asdict(self, filter=field_filter)
        return hash(json.dumps(as_dict, sort_keys=True))
class SerializedAssetRef(SerializedAssetBase, AttrsInstance):
    """Serialized representation of an asset reference."""

    # Discriminator used when rendering dag dependencies; set by subclasses.
    _dependency_type: Literal["asset-name-ref", "asset-uri-ref"]

    def as_expression(self) -> Any:
        """Render the reference as its scheduling expression."""
        return {"asset_ref": attrs.asdict(self)}

    def iter_assets(self) -> Iterator[tuple[SerializedAssetUniqueKey, SerializedAsset]]:
        """A reference holds no concrete assets."""
        return iter(())

    def iter_asset_aliases(self) -> Iterator[tuple[str, SerializedAssetAlias]]:
        """A reference holds no aliases."""
        return iter(())

    def iter_asset_refs(self) -> Iterator[SerializedAssetRef]:
        """Yield the reference itself."""
        yield self

    def iter_dag_dependencies(self, *, source: str = "", target: str = "") -> Iterator[DagDependency]:
        """Yield this reference as a single dag dependency."""
        # Subclasses declare exactly one attrs field (the identifier); the
        # one-element unpack below also asserts that invariant.
        [dependency_id] = attrs.astuple(self)
        yield DagDependency(
            source=source or self._dependency_type,
            target=target or self._dependency_type,
            label=dependency_id,
            dependency_type=self._dependency_type,
            dependency_id=dependency_id,
        )
@attrs.define(hash=True)
class SerializedAssetNameRef(SerializedAssetRef):
    """Serialized representation of an asset reference by name."""

    # Name of the referenced asset; the sole attrs field, used as the
    # dependency id in iter_dag_dependencies.
    name: str

    # Unannotated on purpose: a plain class attribute, not an attrs field.
    _dependency_type = "asset-name-ref"
@attrs.define(hash=True)
class SerializedAssetUriRef(SerializedAssetRef):
    """Serialized representation of an asset reference by URI."""

    # URI of the referenced asset; the sole attrs field, used as the
    # dependency id in iter_dag_dependencies.
    uri: str

    # Unannotated on purpose: a plain class attribute, not an attrs field.
    _dependency_type = "asset-uri-ref"
@attrs.define
class SerializedAssetAlias(SerializedAssetBase):
    """Serialized representation of an asset alias."""

    name: str
    group: str

    def as_expression(self) -> Any:
        """
        Serialize the asset alias into its scheduling expression.

        :meta private:
        """
        alias_payload = {"name": self.name, "group": self.group}
        return {"alias": alias_payload}

    def iter_assets(self) -> Iterator[tuple[SerializedAssetUniqueKey, SerializedAsset]]:
        yield from ()

    def iter_asset_aliases(self) -> Iterator[tuple[str, SerializedAssetAlias]]:
        # An alias yields itself, keyed by its own name.
        yield self.name, self

    def iter_asset_refs(self) -> Iterator[SerializedAssetRef]:
        yield from ()

    def iter_dag_dependencies(self, *, source: str = "", target: str = "") -> Iterator[DagDependency]:
        """
        Iterate an asset alias and its resolved assets as dag dependency.

        :meta private:
        """
        yield DagDependency(
            source=source if source else "asset-alias",
            target=target if target else "asset-alias",
            label=self.name,
            dependency_type="asset-alias",
            dependency_id=self.name,
        )
@attrs.define
class SerializedAssetBooleanCondition(SerializedAssetBase):
    """Serialized representation of an asset condition."""

    # Operands of the boolean condition; each is itself asset-like.
    objects: list[SerializedAssetBase]
    # Aggregation applied over operand results (any/all), set by subclasses.
    agg_func: ClassVar[Callable[[Iterable], bool]]

    def iter_assets(self) -> Iterator[tuple[SerializedAssetUniqueKey, SerializedAsset]]:
        for member in self.objects:
            yield from member.iter_assets()

    def iter_asset_aliases(self) -> Iterator[tuple[str, SerializedAssetAlias]]:
        for member in self.objects:
            yield from member.iter_asset_aliases()

    def iter_asset_refs(self) -> Iterator[SerializedAssetRef]:
        for member in self.objects:
            yield from member.iter_asset_refs()

    def iter_dag_dependencies(self, *, source: str, target: str) -> Iterator[DagDependency]:
        """
        Iterate asset, asset aliases and their resolved assets as dag dependency.

        :meta private:
        """
        for member in self.objects:
            yield from member.iter_dag_dependencies(source=source, target=target)
class SerializedAssetAny(SerializedAssetBooleanCondition):
    """Serialized representation of an asset "or" relationship."""

    agg_func = any

    def as_expression(self) -> dict[str, Any]:
        """
        Serialize the asset into its scheduling expression.

        :meta private:
        """
        member_expressions = [member.as_expression() for member in self.objects]
        return {"any": member_expressions}
class SerializedAssetAll(SerializedAssetBooleanCondition):
    """Serialized representation of an asset "and" relationship."""

    agg_func = all

    def __repr__(self) -> str:
        # Bug fix: previously rendered as "AssetAny(...)" — a copy-paste from
        # the "any" condition — which misidentified the condition in logs.
        return f"AssetAll({', '.join(map(str, self.objects))})"

    def as_expression(self) -> Any:
        """
        Serialize the assets into its scheduling expression.

        :meta private:
        """
        return {"all": [o.as_expression() for o in self.objects]}
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/serialization/definitions/assets.py",
"license": "Apache License 2.0",
"lines": 223,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:airflow-core/src/airflow/serialization/encoders.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import contextlib
import datetime
import functools
from typing import TYPE_CHECKING, Any, TypeVar, overload
import attrs
import pendulum
from airflow._shared.module_loading import qualname
from airflow.partition_mappers.base import PartitionMapper as CorePartitionMapper
from airflow.sdk import (
Asset,
AssetAlias,
AssetAll,
AssetAny,
AssetOrTimeSchedule,
CronDataIntervalTimetable,
CronTriggerTimetable,
DailyMapper,
DeltaDataIntervalTimetable,
DeltaTriggerTimetable,
EventsTimetable,
IdentityMapper,
MonthlyMapper,
MultipleCronTriggerTimetable,
PartitionMapper,
QuarterlyMapper,
WeeklyMapper,
YearlyMapper,
)
from airflow.sdk.bases.timetable import BaseTimetable
from airflow.sdk.definitions.asset import AssetRef
from airflow.sdk.definitions.partition_mappers.temporal import HourlyMapper
from airflow.sdk.definitions.timetables.assets import (
AssetTriggeredTimetable,
PartitionedAssetTimetable,
)
from airflow.sdk.definitions.timetables.simple import ContinuousTimetable, NullTimetable, OnceTimetable
from airflow.sdk.definitions.timetables.trigger import CronPartitionTimetable
from airflow.serialization.decoders import decode_deadline_alert
from airflow.serialization.definitions.assets import (
SerializedAsset,
SerializedAssetAlias,
SerializedAssetAll,
SerializedAssetAny,
SerializedAssetBase,
SerializedAssetRef,
)
from airflow.serialization.definitions.deadline import SerializedDeadlineAlert
from airflow.serialization.enums import DagAttributeTypes as DAT, Encoding
from airflow.serialization.helpers import (
find_registered_custom_partition_mapper,
find_registered_custom_timetable,
is_core_partition_mapper_import_path,
is_core_timetable_import_path,
)
from airflow.timetables.base import Timetable as CoreTimetable
from airflow.utils.docs import get_docs_url
if TYPE_CHECKING:
from dateutil.relativedelta import relativedelta
from airflow.sdk.definitions._internal.expandinput import ExpandInput
from airflow.sdk.definitions.asset import BaseAsset
from airflow.sdk.definitions.deadline import DeadlineAlert
from airflow.triggers.base import BaseEventTrigger
T = TypeVar("T")
def encode_expand_input(var: ExpandInput) -> dict[str, Any]:
    """Encode an ``ExpandInput`` into a type-tagged mapping of its serialized value."""
    # Imported locally to avoid a circular import with serialized_objects.
    from airflow.serialization.serialized_objects import BaseSerialization

    return {"type": var.EXPAND_INPUT_TYPE, "value": BaseSerialization.serialize(var.value)}
def encode_relativedelta(var: relativedelta) -> dict[str, Any]:
    """Encode a relativedelta object as a dict of its truthy public fields."""
    encoded = {key: value for key, value in var.__dict__.items() if not key.startswith("_") and value}
    weekday = var.weekday
    if weekday and weekday.n:
        # Every n'th Friday for example
        encoded["weekday"] = [weekday.weekday, weekday.n]
    elif weekday:
        encoded["weekday"] = [weekday.weekday]
    return encoded
def encode_timezone(var: str | pendulum.Timezone | pendulum.FixedTimezone) -> str | int:
    """
    Encode a Pendulum Timezone for serialization.

    Airflow only supports timezone objects that implements Pendulum's Timezone
    interface. We try to keep as much information as possible to make conversion
    round-tripping possible (see ``decode_timezone``). We need to special-case
    UTC; Pendulum implements it as a FixedTimezone (i.e. it gets encoded as
    0 without the special case), but passing 0 into ``pendulum.timezone`` does
    not give us UTC (but ``+00:00``).
    """
    if isinstance(var, str):
        return var
    if isinstance(var, pendulum.FixedTimezone):
        # Special-case UTC so it round-trips as a name, not offset 0.
        return "UTC" if var.offset == 0 else var.offset
    if isinstance(var, pendulum.Timezone):
        return var.name
    raise ValueError(
        f"DAG timezone should be a pendulum.tz.Timezone, not {var!r}. "
        f"See {get_docs_url('timezone.html#time-zone-aware-dags')}"
    )
def encode_interval(interval: datetime.timedelta | relativedelta) -> float | dict:
    """Encode a timedelta as total seconds, or a relativedelta as a dict."""
    if not isinstance(interval, datetime.timedelta):
        return encode_relativedelta(interval)
    return interval.total_seconds()
def encode_run_immediately(value: bool | datetime.timedelta) -> bool | float:
    """Pass booleans through; encode a timedelta delay as total seconds."""
    return value.total_seconds() if isinstance(value, datetime.timedelta) else value
def encode_trigger(trigger: BaseEventTrigger | dict):
    """Encode a trigger (or an already-split trigger dict) as classpath + serialized kwargs."""
    from airflow.serialization.serialized_objects import BaseSerialization

    def _ensure_serialized(d):
        """
        Make sure the kwargs dict is JSON-serializable.

        This is done with BaseSerialization logic. A simple check is added to
        ensure we don't double-serialize, which is possible when a trigger goes
        through multiple serialization layers.
        """
        if isinstance(d, dict) and Encoding.TYPE in d:
            return d
        return BaseSerialization.serialize(d)

    if isinstance(trigger, dict):
        classpath, kwargs = trigger["classpath"], trigger["kwargs"]
    else:
        classpath, kwargs = trigger.serialize()
    serialized_kwargs = {key: _ensure_serialized(value) for key, value in kwargs.items()}
    return {"classpath": classpath, "kwargs": serialized_kwargs}
def encode_asset_like(a: BaseAsset | SerializedAssetBase) -> dict[str, Any]:
    """
    Encode an asset-like object.

    :meta private:
    """
    # Checked in the same order as the original match statement; each branch
    # accepts both the SDK type and its serialized counterpart.
    if isinstance(a, (Asset, SerializedAsset)):
        encoded: dict[str, Any] = {
            "__type": DAT.ASSET,
            "name": a.name,
            "uri": a.uri,
            "group": a.group,
            "extra": a.extra,
        }
        if a.watchers:
            encoded["watchers"] = [
                {"name": w.name, "trigger": encode_trigger(w.trigger)} for w in a.watchers
            ]
        return encoded
    if isinstance(a, (AssetAlias, SerializedAssetAlias)):
        return {"__type": DAT.ASSET_ALIAS, "name": a.name, "group": a.group}
    if isinstance(a, (AssetAll, SerializedAssetAll)):
        return {"__type": DAT.ASSET_ALL, "objects": [encode_asset_like(x) for x in a.objects]}
    if isinstance(a, (AssetAny, SerializedAssetAny)):
        return {"__type": DAT.ASSET_ANY, "objects": [encode_asset_like(x) for x in a.objects]}
    if isinstance(a, (AssetRef, SerializedAssetRef)):
        return {"__type": DAT.ASSET_REF, **attrs.asdict(a)}
    raise ValueError(f"serialization not implemented for {type(a).__name__!r}")
def encode_deadline_alert(d: DeadlineAlert | SerializedDeadlineAlert) -> dict[str, Any]:
    """
    Encode a deadline alert.

    Produces a mapping with the serialized deadline reference, the interval
    in seconds, and the callback serialized through the SDK serde machinery.

    :meta private:
    """
    # Imported locally to avoid importing SDK serde at module load time.
    from airflow.sdk.serde import serialize

    return {
        "reference": d.reference.serialize_reference(),
        "interval": d.interval.total_seconds(),
        "callback": serialize(d.callback),
    }
def encode_deadline_reference(ref) -> dict[str, Any]:
    """
    Encode a deadline reference.

    Delegates entirely to the reference's own ``serialize_reference``.

    :meta private:
    """
    serialized = ref.serialize_reference()
    return serialized
def _get_serialized_timetable_import_path(var: BaseTimetable | CoreTimetable) -> str:
    """Resolve the importable path stored for a timetable's type."""
    var_type = type(var)
    # Find SDK classes.
    builtin_path = _serializer.BUILTIN_TIMETABLES.get(var_type)
    if builtin_path is not None:
        return builtin_path
    # Check Core classes.
    importable_string = qualname(var_type)
    if is_core_timetable_import_path(importable_string):
        return importable_string
    # Find user-registered classes.
    find_registered_custom_timetable(importable_string)  # This raises if not found.
    return importable_string
def encode_timetable(var: BaseTimetable | CoreTimetable) -> dict[str, Any]:
    """
    Encode a timetable instance.

    The result is a two-key mapping: ``Encoding.TYPE`` holds the timetable
    class's importable path and ``Encoding.VAR`` holds the class-specific
    payload produced by ``_Serializer.serialize_timetable``.

    See ``_Serializer.serialize()`` for more implementation detail.

    :meta private:
    """
    importable_string = _get_serialized_timetable_import_path(var)
    return {Encoding.TYPE: importable_string, Encoding.VAR: _serializer.serialize_timetable(var)}
class _Serializer:
    """
    Serialization logic.

    Dispatches serialization of timetables and partition mappers to
    per-type handlers via ``functools.singledispatchmethod``.
    """

    # Maps SDK timetable classes to the Core importable path stored in the
    # serialized form (used on deserialization to locate the class).
    BUILTIN_TIMETABLES: dict[type, str] = {
        AssetOrTimeSchedule: "airflow.timetables.assets.AssetOrTimeSchedule",
        AssetTriggeredTimetable: "airflow.timetables.simple.AssetTriggeredTimetable",
        ContinuousTimetable: "airflow.timetables.simple.ContinuousTimetable",
        CronDataIntervalTimetable: "airflow.timetables.interval.CronDataIntervalTimetable",
        CronTriggerTimetable: "airflow.timetables.trigger.CronTriggerTimetable",
        CronPartitionTimetable: "airflow.timetables.trigger.CronPartitionTimetable",
        DeltaDataIntervalTimetable: "airflow.timetables.interval.DeltaDataIntervalTimetable",
        DeltaTriggerTimetable: "airflow.timetables.trigger.DeltaTriggerTimetable",
        EventsTimetable: "airflow.timetables.events.EventsTimetable",
        MultipleCronTriggerTimetable: "airflow.timetables.trigger.MultipleCronTriggerTimetable",
        NullTimetable: "airflow.timetables.simple.NullTimetable",
        OnceTimetable: "airflow.timetables.simple.OnceTimetable",
        PartitionedAssetTimetable: "airflow.timetables.simple.PartitionedAssetTimetable",
    }

    @functools.singledispatchmethod
    def serialize_timetable(self, timetable: BaseTimetable | CoreTimetable) -> dict[str, Any]:
        """
        Serialize a timetable into a JSON-compatible dict for storage.

        All timetables defined in the SDK should be handled by registered
        single-dispatch variants below.

        This function's body should only be
        called on timetables defined in Core (under ``airflow.timetables``),
        and user-defined custom timetables registered via plugins, which also
        inherit from the Core timetable base class.

        For timetables in Core, serialization work is delegated to the type.
        """
        if not isinstance(timetable, CoreTimetable):
            raise NotImplementedError(f"can not serialize timetable {type(timetable).__name__}")
        return timetable.serialize()

    @serialize_timetable.register(ContinuousTimetable)
    @serialize_timetable.register(NullTimetable)
    @serialize_timetable.register(OnceTimetable)
    def _(self, timetable: ContinuousTimetable | NullTimetable | OnceTimetable) -> dict[str, Any]:
        # These timetables carry no configuration.
        return {}

    @serialize_timetable.register
    def _(self, timetable: AssetTriggeredTimetable) -> dict[str, Any]:
        return {"asset_condition": encode_asset_like(timetable.asset_condition)}

    @serialize_timetable.register
    def _(self, timetable: EventsTimetable) -> dict[str, Any]:
        return {
            "event_dates": [x.isoformat(sep="T") for x in timetable.event_dates],
            "restrict_to_events": timetable.restrict_to_events,
            "description": timetable.description,
        }

    @serialize_timetable.register
    def _(self, timetable: CronDataIntervalTimetable) -> dict[str, Any]:
        return {"expression": timetable.expression, "timezone": encode_timezone(timetable.timezone)}

    @serialize_timetable.register
    def _(self, timetable: DeltaDataIntervalTimetable) -> dict[str, Any]:
        return {"delta": encode_interval(timetable.delta)}

    @serialize_timetable.register
    def _(self, timetable: CronTriggerTimetable) -> dict[str, Any]:
        return {
            "expression": timetable.expression,
            "timezone": encode_timezone(timetable.timezone),
            "interval": encode_interval(timetable.interval),
            "run_immediately": encode_run_immediately(timetable.run_immediately),
        }

    @serialize_timetable.register
    def _(self, timetable: CronPartitionTimetable) -> dict[str, Any]:
        return {
            "expression": timetable.expression,
            "timezone": encode_timezone(timetable.timezone),
            "run_immediately": encode_run_immediately(timetable.run_immediately),
            "run_offset": timetable.run_offset,
            "key_format": timetable.key_format,
        }

    @serialize_timetable.register
    def _(self, timetable: DeltaTriggerTimetable) -> dict[str, Any]:
        return {
            "delta": encode_interval(timetable.delta),
            "interval": encode_interval(timetable.interval),
        }

    @serialize_timetable.register
    def _(self, timetable: MultipleCronTriggerTimetable) -> dict[str, Any]:
        # All timetables share the same timezone, interval, and run_immediately
        # values, so we can just use the first to represent them.
        representative = timetable.timetables[0]
        return {
            "expressions": [t.expression for t in timetable.timetables],
            "timezone": encode_timezone(representative.timezone),
            "interval": encode_interval(representative.interval),
            "run_immediately": encode_run_immediately(representative.run_immediately),
        }

    @serialize_timetable.register
    def _(self, timetable: AssetOrTimeSchedule) -> dict[str, Any]:
        return {
            "asset_condition": encode_asset_like(timetable.asset_condition),
            "timetable": encode_timetable(timetable.timetable),
        }

    @serialize_timetable.register
    def _(self, timetable: CoreTimetable) -> dict[str, Any]:
        # Core timetables know how to serialize themselves.
        return timetable.serialize()

    @serialize_timetable.register
    def _(self, timetable: PartitionedAssetTimetable) -> dict[str, Any]:
        return {
            "asset_condition": encode_asset_like(timetable.asset_condition),
            "default_partition_mapper": encode_partition_mapper(timetable.default_partition_mapper),
            "partition_mapper_config": [
                (encode_asset_like(asset), encode_partition_mapper(partition_mapper))
                for asset, partition_mapper in timetable.partition_mapper_config.items()
            ],
        }

    # Maps SDK partition mapper classes to their Core importable path.
    BUILTIN_PARTITION_MAPPERS: dict[type, str] = {
        IdentityMapper: "airflow.partition_mappers.identity.IdentityMapper",
        HourlyMapper: "airflow.partition_mappers.temporal.HourlyMapper",
        DailyMapper: "airflow.partition_mappers.temporal.DailyMapper",
        WeeklyMapper: "airflow.partition_mappers.temporal.WeeklyMapper",
        MonthlyMapper: "airflow.partition_mappers.temporal.MonthlyMapper",
        QuarterlyMapper: "airflow.partition_mappers.temporal.QuarterlyMapper",
        YearlyMapper: "airflow.partition_mappers.temporal.YearlyMapper",
    }

    @functools.singledispatchmethod
    def serialize_partition_mapper(
        self, partition_mapper: PartitionMapper | CorePartitionMapper
    ) -> dict[str, Any]:
        """
        Serialize a partition mapper into a JSON-compatible dict for storage.

        SDK-defined mappers are handled by the registered variants below;
        Core (and plugin-registered custom) mappers serialize themselves.
        """
        if not isinstance(partition_mapper, CorePartitionMapper):
            # Message fixed: it previously said "timetable" (copy-paste error).
            raise NotImplementedError(
                f"can not serialize partition mapper {type(partition_mapper).__name__}"
            )
        return partition_mapper.serialize()

    @serialize_partition_mapper.register
    def _(self, partition_mapper: IdentityMapper) -> dict[str, Any]:
        # The identity mapper carries no configuration.
        return {}

    @serialize_partition_mapper.register(HourlyMapper)
    @serialize_partition_mapper.register(DailyMapper)
    @serialize_partition_mapper.register(WeeklyMapper)
    @serialize_partition_mapper.register(MonthlyMapper)
    @serialize_partition_mapper.register(QuarterlyMapper)
    @serialize_partition_mapper.register(YearlyMapper)
    def _(
        self,
        partition_mapper: HourlyMapper
        | DailyMapper
        | WeeklyMapper
        | MonthlyMapper
        | QuarterlyMapper
        | YearlyMapper,
    ) -> dict[str, Any]:
        return {
            "input_format": partition_mapper.input_format,
            "output_format": partition_mapper.output_format,
        }
# Shared module-level serializer instance used by the encode_* helpers in this module.
_serializer = _Serializer()
@overload
def coerce_to_core_timetable(obj: BaseTimetable | CoreTimetable) -> CoreTimetable: ...
@overload
def coerce_to_core_timetable(obj: T) -> T: ...
def coerce_to_core_timetable(obj: object) -> object:
    """
    Convert *obj* from an SDK timetable to a Core timetable instance if possible.

    Core timetables and non-timetable values are returned unchanged; SDK
    timetables are round-tripped through encode/decode to obtain the Core
    equivalent.

    :meta private:
    """
    if isinstance(obj, CoreTimetable) or not isinstance(obj, BaseTimetable):
        return obj
    # Imported locally to avoid a circular import with the decoders module.
    from airflow.serialization.decoders import decode_timetable

    return decode_timetable(encode_timetable(obj))
@overload
def ensure_serialized_asset(obj: Asset | SerializedAsset) -> SerializedAsset: ...
@overload
def ensure_serialized_asset(obj: AssetAlias | SerializedAssetAlias) -> SerializedAssetAlias: ...
@overload
def ensure_serialized_asset(obj: BaseAsset | SerializedAssetBase) -> SerializedAssetBase: ...
def ensure_serialized_asset(obj: BaseAsset | SerializedAssetBase) -> SerializedAssetBase:
    """
    Convert *obj* from an SDK asset to a Core asset instance if needed.

    Already-serialized assets are returned unchanged; SDK assets are
    round-tripped through encode/decode.

    :meta private:
    """
    if isinstance(obj, SerializedAssetBase):
        return obj
    # Imported locally to avoid a circular import with the decoders module.
    from airflow.serialization.decoders import decode_asset_like

    return decode_asset_like(encode_asset_like(obj))
def ensure_serialized_deadline_alert(obj: DeadlineAlert | SerializedDeadlineAlert) -> SerializedDeadlineAlert:
    """
    Convert *obj* from an SDK deadline alert to a serialized deadline alert if needed.

    Already-serialized alerts are returned unchanged; SDK alerts are
    round-tripped through encode/decode.

    :meta private:
    """
    if isinstance(obj, SerializedDeadlineAlert):
        return obj
    return decode_deadline_alert(encode_deadline_alert(obj))
def encode_partition_mapper(var: PartitionMapper | CorePartitionMapper) -> dict[str, Any]:
    """
    Encode a PartitionMapper instance.

    This delegates most of the serialization work to the type, so the behavior
    can be completely controlled by a custom subclass.

    :meta private:
    """
    builtin_path = _serializer.BUILTIN_PARTITION_MAPPERS.get(type(var))
    if builtin_path is not None:
        return {
            Encoding.TYPE: builtin_path,
            Encoding.VAR: _serializer.serialize_partition_mapper(var),
        }
    import_path = qualname(var)
    if is_core_partition_mapper_import_path(import_path) is False:
        # Not a Core mapper, so it must be a plugin-registered custom one.
        # This raises if not found.
        find_registered_custom_partition_mapper(import_path)
    return {
        Encoding.TYPE: import_path,
        Encoding.VAR: _serializer.serialize_partition_mapper(var),
    }
| {
"repo_id": "apache/airflow",
"file_path": "airflow-core/src/airflow/serialization/encoders.py",
"license": "Apache License 2.0",
"lines": 402,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
apache/airflow:task-sdk/src/airflow/sdk/bases/timetable.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from airflow.sdk.definitions.asset import BaseAsset
class BaseTimetable:
    """Base class inherited by all user-facing timetables."""

    can_be_scheduled: bool = True
    """
    Whether this timetable can actually schedule runs in an automated manner.

    This defaults to and should generally be *True* (including non periodic
    execution types like *@once* and data triggered tables), but
    ``NullTimetable`` sets this to *False*.
    """

    active_runs_limit: int | None = None
    """
    Maximum active runs that can be active at one time for a DAG.

    This is called during DAG initialization, and the return value is used as
    the DAG's default ``max_active_runs`` if not set on the DAG explicitly. This
    should generally return *None* (no limit), but some timetables may limit
    parallelism, such as ``ContinuousTimetable``.
    """

    # NOTE(review): no reader/writer of asset_condition is visible in this
    # module; presumably populated by asset-driven subclasses — confirm.
    asset_condition: BaseAsset | None = None

    # TODO: AIP-76 just add partition-driven field here to differentiate the behavior

    def validate(self) -> None:
        """
        Validate the timetable is correctly specified.

        Override this method to provide run-time validation raised when a DAG
        is put into a dagbag. The default implementation does nothing.

        :raises: :class:`~airflow.sdk.exceptions.AirflowTimetableInvalid` on validation failure.
        """
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/bases/timetable.py",
"license": "Apache License 2.0",
"lines": 46,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
apache/airflow:task-sdk/src/airflow/sdk/definitions/timetables/_cron.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
import attrs
from croniter import CroniterBadCronError, CroniterBadDateError, croniter
from airflow.sdk.exceptions import AirflowTimetableInvalid
if TYPE_CHECKING:
from pendulum.tz.timezone import FixedTimezone, Timezone
@attrs.define
class CronMixin:
    """Mixin to provide interface to work with croniter."""

    # Cron expression to validate/evaluate.
    expression: str
    # Timezone the expression is interpreted in.
    timezone: str | Timezone | FixedTimezone

    def validate(self) -> None:
        """
        Validate that ``expression`` is a well-formed cron expression.

        :raises AirflowTimetableInvalid: if croniter rejects the expression.
        """
        try:
            croniter(self.expression)
        except (CroniterBadCronError, CroniterBadDateError) as e:
            # Chain explicitly (B904) so the croniter error is preserved as
            # the direct cause rather than an implicit "during handling" context.
            raise AirflowTimetableInvalid(str(e)) from e
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/definitions/timetables/_cron.py",
"license": "Apache License 2.0",
"lines": 33,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
apache/airflow:task-sdk/src/airflow/sdk/definitions/timetables/_delta.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
from typing import TYPE_CHECKING
import attrs
from airflow.sdk.exceptions import AirflowTimetableInvalid
if TYPE_CHECKING:
from dateutil.relativedelta import relativedelta
@attrs.define
class DeltaMixin:
    """Mixin to provide interface to work with timedelta and relativedelta."""

    delta: datetime.timedelta | relativedelta

    def validate(self) -> None:
        """Reject deltas that do not move time strictly forward."""
        reference = datetime.datetime.now()
        if reference + self.delta <= reference:
            raise AirflowTimetableInvalid(f"schedule interval must be positive, not {self.delta!r}")
| {
"repo_id": "apache/airflow",
"file_path": "task-sdk/src/airflow/sdk/definitions/timetables/_delta.py",
"license": "Apache License 2.0",
"lines": 31,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.