sample_id stringlengths 21 196 | text stringlengths 105 936k | metadata dict | category stringclasses 6
values |
|---|---|---|---|
fastapi/fastapi:tests/test_validation_error_context.py | from fastapi import FastAPI, Request, WebSocket
from fastapi.exceptions import (
RequestValidationError,
ResponseValidationError,
WebSocketRequestValidationError,
)
from fastapi.testclient import TestClient
from pydantic import BaseModel
class Item(BaseModel):
id: int
name: str
class ExceptionCapture:
def __init__(self):
self.exception = None
def capture(self, exc):
self.exception = exc
return exc
app = FastAPI()
sub_app = FastAPI()
captured_exception = ExceptionCapture()
app.mount(path="/sub", app=sub_app)
@app.exception_handler(RequestValidationError)
@sub_app.exception_handler(RequestValidationError)
async def request_validation_handler(request: Request, exc: RequestValidationError):
captured_exception.capture(exc)
raise exc
@app.exception_handler(ResponseValidationError)
@sub_app.exception_handler(ResponseValidationError)
async def response_validation_handler(_: Request, exc: ResponseValidationError):
captured_exception.capture(exc)
raise exc
@app.exception_handler(WebSocketRequestValidationError)
@sub_app.exception_handler(WebSocketRequestValidationError)
async def websocket_validation_handler(
websocket: WebSocket, exc: WebSocketRequestValidationError
):
captured_exception.capture(exc)
raise exc
@app.get("/users/{user_id}")
def get_user(user_id: int):
return {"user_id": user_id} # pragma: no cover
@app.get("/items/", response_model=Item)
def get_item():
return {"name": "Widget"}
@sub_app.get("/items/", response_model=Item)
def get_sub_item():
return {"name": "Widget"} # pragma: no cover
@app.websocket("/ws/{item_id}")
async def websocket_endpoint(websocket: WebSocket, item_id: int):
await websocket.accept() # pragma: no cover
await websocket.send_text(f"Item: {item_id}") # pragma: no cover
await websocket.close() # pragma: no cover
@sub_app.websocket("/ws/{item_id}")
async def subapp_websocket_endpoint(websocket: WebSocket, item_id: int):
await websocket.accept() # pragma: no cover
await websocket.send_text(f"Item: {item_id}") # pragma: no cover
await websocket.close() # pragma: no cover
client = TestClient(app)
def test_request_validation_error_includes_endpoint_context():
captured_exception.exception = None
try:
client.get("/users/invalid")
except Exception:
pass
assert captured_exception.exception is not None
error_str = str(captured_exception.exception)
assert "get_user" in error_str
assert "/users/" in error_str
def test_response_validation_error_includes_endpoint_context():
captured_exception.exception = None
try:
client.get("/items/")
except Exception:
pass
assert captured_exception.exception is not None
error_str = str(captured_exception.exception)
assert "get_item" in error_str
assert "/items/" in error_str
def test_websocket_validation_error_includes_endpoint_context():
captured_exception.exception = None
try:
with client.websocket_connect("/ws/invalid"):
pass # pragma: no cover
except Exception:
pass
assert captured_exception.exception is not None
error_str = str(captured_exception.exception)
assert "websocket_endpoint" in error_str
assert "/ws/" in error_str
def test_subapp_request_validation_error_includes_endpoint_context():
captured_exception.exception = None
try:
client.get("/sub/items/")
except Exception:
pass
assert captured_exception.exception is not None
error_str = str(captured_exception.exception)
assert "get_sub_item" in error_str
assert "/sub/items/" in error_str
def test_subapp_websocket_validation_error_includes_endpoint_context():
captured_exception.exception = None
try:
with client.websocket_connect("/sub/ws/invalid"):
pass # pragma: no cover
except Exception:
pass
assert captured_exception.exception is not None
error_str = str(captured_exception.exception)
assert "subapp_websocket_endpoint" in error_str
assert "/sub/ws/" in error_str
def test_validation_error_with_only_path():
errors = [{"type": "missing", "loc": ("body", "name"), "msg": "Field required"}]
exc = RequestValidationError(errors, endpoint_ctx={"path": "GET /api/test"})
error_str = str(exc)
assert "Endpoint: GET /api/test" in error_str
assert 'File "' not in error_str
def test_validation_error_with_no_context():
errors = [{"type": "missing", "loc": ("body", "name"), "msg": "Field required"}]
exc = RequestValidationError(errors, endpoint_ctx={})
error_str = str(exc)
assert "1 validation error:" in error_str
assert "Endpoint" not in error_str
assert 'File "' not in error_str
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_validation_error_context.py",
"license": "MIT License",
"lines": 123,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_security_oauth2_authorization_code_bearer_scopes_openapi_simple.py | # Ref: https://github.com/fastapi/fastapi/issues/14454
from typing import Annotated
from fastapi import Depends, FastAPI, Security
from fastapi.security import OAuth2AuthorizationCodeBearer
from fastapi.testclient import TestClient
from inline_snapshot import snapshot
oauth2_scheme = OAuth2AuthorizationCodeBearer(
authorizationUrl="api/oauth/authorize",
tokenUrl="/api/oauth/token",
scopes={"read": "Read access", "write": "Write access"},
)
async def get_token(token: Annotated[str, Depends(oauth2_scheme)]) -> str:
return token
app = FastAPI(dependencies=[Depends(get_token)])
@app.get("/admin", dependencies=[Security(get_token, scopes=["read", "write"])])
async def read_admin():
return {"message": "Admin Access"}
client = TestClient(app)
def test_read_admin():
response = client.get("/admin", headers={"Authorization": "Bearer faketoken"})
assert response.status_code == 200, response.text
assert response.json() == {"message": "Admin Access"}
def test_openapi_schema():
response = client.get("/openapi.json")
assert response.status_code == 200, response.text
assert response.json() == snapshot(
{
"openapi": "3.1.0",
"info": {"title": "FastAPI", "version": "0.1.0"},
"paths": {
"/admin": {
"get": {
"summary": "Read Admin",
"operationId": "read_admin_admin_get",
"responses": {
"200": {
"description": "Successful Response",
"content": {"application/json": {"schema": {}}},
}
},
"security": [
{"OAuth2AuthorizationCodeBearer": ["read", "write"]}
],
}
}
},
"components": {
"securitySchemes": {
"OAuth2AuthorizationCodeBearer": {
"type": "oauth2",
"flows": {
"authorizationCode": {
"scopes": {
"read": "Read access",
"write": "Write access",
},
"authorizationUrl": "api/oauth/authorize",
"tokenUrl": "/api/oauth/token",
}
},
}
}
},
}
)
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_security_oauth2_authorization_code_bearer_scopes_openapi_simple.py",
"license": "MIT License",
"lines": 65,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_security_oauth2_authorization_code_bearer_scopes_openapi.py | # Ref: https://github.com/fastapi/fastapi/issues/14454
from typing import Annotated
from fastapi import APIRouter, Depends, FastAPI, Security
from fastapi.security import OAuth2AuthorizationCodeBearer
from fastapi.testclient import TestClient
from inline_snapshot import snapshot
oauth2_scheme = OAuth2AuthorizationCodeBearer(
authorizationUrl="authorize",
tokenUrl="token",
auto_error=True,
scopes={"read": "Read access", "write": "Write access"},
)
async def get_token(token: Annotated[str, Depends(oauth2_scheme)]) -> str:
return token
app = FastAPI(dependencies=[Depends(get_token)])
@app.get("/")
async def root():
return {"message": "Hello World"}
@app.get(
"/with-oauth2-scheme",
dependencies=[Security(oauth2_scheme, scopes=["read", "write"])],
)
async def read_with_oauth2_scheme():
return {"message": "Admin Access"}
@app.get(
"/with-get-token", dependencies=[Security(get_token, scopes=["read", "write"])]
)
async def read_with_get_token():
return {"message": "Admin Access"}
router = APIRouter(dependencies=[Security(oauth2_scheme, scopes=["read"])])
@router.get("/items/")
async def read_items(token: str | None = Depends(oauth2_scheme)):
return {"token": token}
@router.post("/items/")
async def create_item(
token: str | None = Security(oauth2_scheme, scopes=["read", "write"]),
):
return {"token": token}
app.include_router(router)
client = TestClient(app)
def test_root():
response = client.get("/", headers={"Authorization": "Bearer testtoken"})
assert response.status_code == 200, response.text
assert response.json() == {"message": "Hello World"}
def test_read_with_oauth2_scheme():
response = client.get(
"/with-oauth2-scheme", headers={"Authorization": "Bearer testtoken"}
)
assert response.status_code == 200, response.text
assert response.json() == {"message": "Admin Access"}
def test_read_with_get_token():
response = client.get(
"/with-get-token", headers={"Authorization": "Bearer testtoken"}
)
assert response.status_code == 200, response.text
assert response.json() == {"message": "Admin Access"}
def test_read_token():
response = client.get("/items/", headers={"Authorization": "Bearer testtoken"})
assert response.status_code == 200, response.text
assert response.json() == {"token": "testtoken"}
def test_create_token():
response = client.post("/items/", headers={"Authorization": "Bearer testtoken"})
assert response.status_code == 200, response.text
assert response.json() == {"token": "testtoken"}
def test_openapi_schema():
response = client.get("/openapi.json")
assert response.status_code == 200, response.text
assert response.json() == snapshot(
{
"openapi": "3.1.0",
"info": {"title": "FastAPI", "version": "0.1.0"},
"paths": {
"/": {
"get": {
"summary": "Root",
"operationId": "root__get",
"responses": {
"200": {
"description": "Successful Response",
"content": {"application/json": {"schema": {}}},
}
},
"security": [{"OAuth2AuthorizationCodeBearer": []}],
}
},
"/with-oauth2-scheme": {
"get": {
"summary": "Read With Oauth2 Scheme",
"operationId": "read_with_oauth2_scheme_with_oauth2_scheme_get",
"responses": {
"200": {
"description": "Successful Response",
"content": {"application/json": {"schema": {}}},
}
},
"security": [
{"OAuth2AuthorizationCodeBearer": ["read", "write"]}
],
}
},
"/with-get-token": {
"get": {
"summary": "Read With Get Token",
"operationId": "read_with_get_token_with_get_token_get",
"responses": {
"200": {
"description": "Successful Response",
"content": {"application/json": {"schema": {}}},
}
},
"security": [
{"OAuth2AuthorizationCodeBearer": ["read", "write"]}
],
}
},
"/items/": {
"get": {
"summary": "Read Items",
"operationId": "read_items_items__get",
"responses": {
"200": {
"description": "Successful Response",
"content": {"application/json": {"schema": {}}},
}
},
"security": [
{"OAuth2AuthorizationCodeBearer": ["read"]},
],
},
"post": {
"summary": "Create Item",
"operationId": "create_item_items__post",
"responses": {
"200": {
"description": "Successful Response",
"content": {"application/json": {"schema": {}}},
}
},
"security": [
{"OAuth2AuthorizationCodeBearer": ["read", "write"]},
],
},
},
},
"components": {
"securitySchemes": {
"OAuth2AuthorizationCodeBearer": {
"type": "oauth2",
"flows": {
"authorizationCode": {
"scopes": {
"read": "Read access",
"write": "Write access",
},
"authorizationUrl": "authorize",
"tokenUrl": "token",
}
},
}
}
},
}
)
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_security_oauth2_authorization_code_bearer_scopes_openapi.py",
"license": "MIT License",
"lines": 163,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_stringified_annotations_simple.py | from __future__ import annotations
from typing import Annotated
from fastapi import Depends, FastAPI, Request
from fastapi.testclient import TestClient
from .utils import needs_py310
class Dep:
def __call__(self, request: Request):
return "test"
@needs_py310
def test_stringified_annotations():
app = FastAPI()
client = TestClient(app)
@app.get("/test/")
def call(test: Annotated[str, Depends(Dep())]):
return {"test": test}
response = client.get("/test")
assert response.status_code == 200
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_stringified_annotations_simple.py",
"license": "MIT License",
"lines": 17,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_dependency_partial.py | from collections.abc import AsyncGenerator, Generator
from functools import partial
from typing import Annotated
import pytest
from fastapi import Depends, FastAPI
from fastapi.testclient import TestClient
app = FastAPI()
def function_dependency(value: str) -> str:
return value
async def async_function_dependency(value: str) -> str:
return value
def gen_dependency(value: str) -> Generator[str, None, None]:
yield value
async def async_gen_dependency(value: str) -> AsyncGenerator[str, None]:
yield value
class CallableDependency:
def __call__(self, value: str) -> str:
return value
class CallableGenDependency:
def __call__(self, value: str) -> Generator[str, None, None]:
yield value
class AsyncCallableDependency:
async def __call__(self, value: str) -> str:
return value
class AsyncCallableGenDependency:
async def __call__(self, value: str) -> AsyncGenerator[str, None]:
yield value
class MethodsDependency:
def synchronous(self, value: str) -> str:
return value
async def asynchronous(self, value: str) -> str:
return value
def synchronous_gen(self, value: str) -> Generator[str, None, None]:
yield value
async def asynchronous_gen(self, value: str) -> AsyncGenerator[str, None]:
yield value
callable_dependency = CallableDependency()
callable_gen_dependency = CallableGenDependency()
async_callable_dependency = AsyncCallableDependency()
async_callable_gen_dependency = AsyncCallableGenDependency()
methods_dependency = MethodsDependency()
@app.get("/partial-function-dependency")
async def get_partial_function_dependency(
value: Annotated[
str, Depends(partial(function_dependency, "partial-function-dependency"))
],
) -> str:
return value
@app.get("/partial-async-function-dependency")
async def get_partial_async_function_dependency(
value: Annotated[
str,
Depends(
partial(async_function_dependency, "partial-async-function-dependency")
),
],
) -> str:
return value
@app.get("/partial-gen-dependency")
async def get_partial_gen_dependency(
value: Annotated[str, Depends(partial(gen_dependency, "partial-gen-dependency"))],
) -> str:
return value
@app.get("/partial-async-gen-dependency")
async def get_partial_async_gen_dependency(
value: Annotated[
str, Depends(partial(async_gen_dependency, "partial-async-gen-dependency"))
],
) -> str:
return value
@app.get("/partial-callable-dependency")
async def get_partial_callable_dependency(
value: Annotated[
str, Depends(partial(callable_dependency, "partial-callable-dependency"))
],
) -> str:
return value
@app.get("/partial-callable-gen-dependency")
async def get_partial_callable_gen_dependency(
value: Annotated[
str,
Depends(partial(callable_gen_dependency, "partial-callable-gen-dependency")),
],
) -> str:
return value
@app.get("/partial-async-callable-dependency")
async def get_partial_async_callable_dependency(
value: Annotated[
str,
Depends(
partial(async_callable_dependency, "partial-async-callable-dependency")
),
],
) -> str:
return value
@app.get("/partial-async-callable-gen-dependency")
async def get_partial_async_callable_gen_dependency(
value: Annotated[
str,
Depends(
partial(
async_callable_gen_dependency, "partial-async-callable-gen-dependency"
)
),
],
) -> str:
return value
@app.get("/partial-synchronous-method-dependency")
async def get_partial_synchronous_method_dependency(
value: Annotated[
str,
Depends(
partial(
methods_dependency.synchronous, "partial-synchronous-method-dependency"
)
),
],
) -> str:
return value
@app.get("/partial-synchronous-method-gen-dependency")
async def get_partial_synchronous_method_gen_dependency(
value: Annotated[
str,
Depends(
partial(
methods_dependency.synchronous_gen,
"partial-synchronous-method-gen-dependency",
)
),
],
) -> str:
return value
@app.get("/partial-asynchronous-method-dependency")
async def get_partial_asynchronous_method_dependency(
value: Annotated[
str,
Depends(
partial(
methods_dependency.asynchronous,
"partial-asynchronous-method-dependency",
)
),
],
) -> str:
return value
@app.get("/partial-asynchronous-method-gen-dependency")
async def get_partial_asynchronous_method_gen_dependency(
value: Annotated[
str,
Depends(
partial(
methods_dependency.asynchronous_gen,
"partial-asynchronous-method-gen-dependency",
)
),
],
) -> str:
return value
client = TestClient(app)
@pytest.mark.parametrize(
"route,value",
[
("/partial-function-dependency", "partial-function-dependency"),
(
"/partial-async-function-dependency",
"partial-async-function-dependency",
),
("/partial-gen-dependency", "partial-gen-dependency"),
("/partial-async-gen-dependency", "partial-async-gen-dependency"),
("/partial-callable-dependency", "partial-callable-dependency"),
("/partial-callable-gen-dependency", "partial-callable-gen-dependency"),
("/partial-async-callable-dependency", "partial-async-callable-dependency"),
(
"/partial-async-callable-gen-dependency",
"partial-async-callable-gen-dependency",
),
(
"/partial-synchronous-method-dependency",
"partial-synchronous-method-dependency",
),
(
"/partial-synchronous-method-gen-dependency",
"partial-synchronous-method-gen-dependency",
),
(
"/partial-asynchronous-method-dependency",
"partial-asynchronous-method-dependency",
),
(
"/partial-asynchronous-method-gen-dependency",
"partial-asynchronous-method-gen-dependency",
),
],
)
def test_dependency_types_with_partial(route: str, value: str) -> None:
response = client.get(route)
assert response.status_code == 200, response.text
assert response.json() == value
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_dependency_partial.py",
"license": "MIT License",
"lines": 198,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/forward_reference_type.py | from pydantic import BaseModel
def forwardref_method(input: "ForwardRefModel") -> "ForwardRefModel":
return ForwardRefModel(x=input.x + 1)
class ForwardRefModel(BaseModel):
x: int = 0
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/forward_reference_type.py",
"license": "MIT License",
"lines": 5,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_wrapped_method_forward_reference.py | import functools
from fastapi import FastAPI
from fastapi.testclient import TestClient
from .forward_reference_type import forwardref_method
def passthrough(f):
@functools.wraps(f)
def method(*args, **kwargs):
return f(*args, **kwargs)
return method
def test_wrapped_method_type_inference():
"""
Regression test ensuring that when a method imported from another module
is decorated with something that sets the __wrapped__ attribute (functools.wraps),
then the types are still processed correctly, including dereferencing of forward
references.
"""
app = FastAPI()
client = TestClient(app)
app.post("/endpoint")(passthrough(forwardref_method))
app.post("/endpoint2")(passthrough(passthrough(forwardref_method)))
with client:
response = client.post("/endpoint", json={"input": {"x": 0}})
response2 = client.post("/endpoint2", json={"input": {"x": 0}})
assert response.json() == response2.json() == {"x": 1}
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_wrapped_method_forward_reference.py",
"license": "MIT License",
"lines": 24,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_dependency_wrapped.py | import inspect
import sys
from collections.abc import AsyncGenerator, Generator
from functools import wraps
import pytest
from fastapi import Depends, FastAPI
from fastapi.concurrency import iterate_in_threadpool, run_in_threadpool
from fastapi.testclient import TestClient
if sys.version_info >= (3, 13): # pragma: no cover
from inspect import iscoroutinefunction
else: # pragma: no cover
from asyncio import iscoroutinefunction
def noop_wrap(func):
@wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper
def noop_wrap_async(func):
if inspect.isgeneratorfunction(func):
@wraps(func)
async def gen_wrapper(*args, **kwargs):
async for item in iterate_in_threadpool(func(*args, **kwargs)):
yield item
return gen_wrapper
elif inspect.isasyncgenfunction(func):
@wraps(func)
async def async_gen_wrapper(*args, **kwargs):
async for item in func(*args, **kwargs):
yield item
return async_gen_wrapper
@wraps(func)
async def wrapper(*args, **kwargs):
if inspect.isroutine(func) and iscoroutinefunction(func):
return await func(*args, **kwargs)
if inspect.isclass(func):
return await run_in_threadpool(func, *args, **kwargs)
dunder_call = getattr(func, "__call__", None) # noqa: B004
if iscoroutinefunction(dunder_call):
return await dunder_call(*args, **kwargs)
return await run_in_threadpool(func, *args, **kwargs)
return wrapper
class ClassInstanceDep:
def __call__(self):
return True
class_instance_dep = ClassInstanceDep()
wrapped_class_instance_dep = noop_wrap(class_instance_dep)
wrapped_class_instance_dep_async_wrapper = noop_wrap_async(class_instance_dep)
class ClassInstanceGenDep:
def __call__(self):
yield True
class_instance_gen_dep = ClassInstanceGenDep()
wrapped_class_instance_gen_dep = noop_wrap(class_instance_gen_dep)
class ClassInstanceWrappedDep:
@noop_wrap
def __call__(self):
return True
class_instance_wrapped_dep = ClassInstanceWrappedDep()
class ClassInstanceWrappedAsyncDep:
@noop_wrap_async
def __call__(self):
return True
class_instance_wrapped_async_dep = ClassInstanceWrappedAsyncDep()
class ClassInstanceWrappedGenDep:
@noop_wrap
def __call__(self):
yield True
class_instance_wrapped_gen_dep = ClassInstanceWrappedGenDep()
class ClassInstanceWrappedAsyncGenDep:
@noop_wrap_async
def __call__(self):
yield True
class_instance_wrapped_async_gen_dep = ClassInstanceWrappedAsyncGenDep()
class ClassDep:
def __init__(self):
self.value = True
wrapped_class_dep = noop_wrap(ClassDep)
wrapped_class_dep_async_wrapper = noop_wrap_async(ClassDep)
class ClassInstanceAsyncDep:
async def __call__(self):
return True
class_instance_async_dep = ClassInstanceAsyncDep()
wrapped_class_instance_async_dep = noop_wrap(class_instance_async_dep)
wrapped_class_instance_async_dep_async_wrapper = noop_wrap_async(
class_instance_async_dep
)
class ClassInstanceAsyncGenDep:
async def __call__(self):
yield True
class_instance_async_gen_dep = ClassInstanceAsyncGenDep()
wrapped_class_instance_async_gen_dep = noop_wrap(class_instance_async_gen_dep)
class ClassInstanceAsyncWrappedDep:
@noop_wrap
async def __call__(self):
return True
class_instance_async_wrapped_dep = ClassInstanceAsyncWrappedDep()
class ClassInstanceAsyncWrappedAsyncDep:
@noop_wrap_async
async def __call__(self):
return True
class_instance_async_wrapped_async_dep = ClassInstanceAsyncWrappedAsyncDep()
class ClassInstanceAsyncWrappedGenDep:
@noop_wrap
async def __call__(self):
yield True
class_instance_async_wrapped_gen_dep = ClassInstanceAsyncWrappedGenDep()
class ClassInstanceAsyncWrappedGenAsyncDep:
@noop_wrap_async
async def __call__(self):
yield True
class_instance_async_wrapped_gen_async_dep = ClassInstanceAsyncWrappedGenAsyncDep()
app = FastAPI()
# Sync wrapper
@noop_wrap
def wrapped_dependency() -> bool:
return True
@noop_wrap
def wrapped_gen_dependency() -> Generator[bool, None, None]:
yield True
@noop_wrap
async def async_wrapped_dependency() -> bool:
return True
@noop_wrap
async def async_wrapped_gen_dependency() -> AsyncGenerator[bool, None]:
yield True
@app.get("/wrapped-dependency/")
async def get_wrapped_dependency(value: bool = Depends(wrapped_dependency)):
return value
@app.get("/wrapped-gen-dependency/")
async def get_wrapped_gen_dependency(value: bool = Depends(wrapped_gen_dependency)):
return value
@app.get("/async-wrapped-dependency/")
async def get_async_wrapped_dependency(value: bool = Depends(async_wrapped_dependency)):
return value
@app.get("/async-wrapped-gen-dependency/")
async def get_async_wrapped_gen_dependency(
value: bool = Depends(async_wrapped_gen_dependency),
):
return value
@app.get("/wrapped-class-instance-dependency/")
async def get_wrapped_class_instance_dependency(
value: bool = Depends(wrapped_class_instance_dep),
):
return value
@app.get("/wrapped-class-instance-async-dependency/")
async def get_wrapped_class_instance_async_dependency(
value: bool = Depends(wrapped_class_instance_async_dep),
):
return value
@app.get("/wrapped-class-instance-gen-dependency/")
async def get_wrapped_class_instance_gen_dependency(
value: bool = Depends(wrapped_class_instance_gen_dep),
):
return value
@app.get("/wrapped-class-instance-async-gen-dependency/")
async def get_wrapped_class_instance_async_gen_dependency(
value: bool = Depends(wrapped_class_instance_async_gen_dep),
):
return value
@app.get("/class-instance-wrapped-dependency/")
async def get_class_instance_wrapped_dependency(
value: bool = Depends(class_instance_wrapped_dep),
):
return value
@app.get("/class-instance-wrapped-async-dependency/")
async def get_class_instance_wrapped_async_dependency(
value: bool = Depends(class_instance_wrapped_async_dep),
):
return value
@app.get("/class-instance-async-wrapped-dependency/")
async def get_class_instance_async_wrapped_dependency(
value: bool = Depends(class_instance_async_wrapped_dep),
):
return value
@app.get("/class-instance-async-wrapped-async-dependency/")
async def get_class_instance_async_wrapped_async_dependency(
value: bool = Depends(class_instance_async_wrapped_async_dep),
):
return value
@app.get("/class-instance-wrapped-gen-dependency/")
async def get_class_instance_wrapped_gen_dependency(
value: bool = Depends(class_instance_wrapped_gen_dep),
):
return value
@app.get("/class-instance-wrapped-async-gen-dependency/")
async def get_class_instance_wrapped_async_gen_dependency(
value: bool = Depends(class_instance_wrapped_async_gen_dep),
):
return value
@app.get("/class-instance-async-wrapped-gen-dependency/")
async def get_class_instance_async_wrapped_gen_dependency(
value: bool = Depends(class_instance_async_wrapped_gen_dep),
):
return value
@app.get("/class-instance-async-wrapped-gen-async-dependency/")
async def get_class_instance_async_wrapped_gen_async_dependency(
value: bool = Depends(class_instance_async_wrapped_gen_async_dep),
):
return value
@app.get("/wrapped-class-dependency/")
async def get_wrapped_class_dependency(value: ClassDep = Depends(wrapped_class_dep)):
return value.value
@app.get("/wrapped-endpoint/")
@noop_wrap
def get_wrapped_endpoint():
return True
@app.get("/async-wrapped-endpoint/")
@noop_wrap
async def get_async_wrapped_endpoint():
return True
# Async wrapper
@noop_wrap_async
def wrapped_dependency_async_wrapper() -> bool:
return True
@noop_wrap_async
def wrapped_gen_dependency_async_wrapper() -> Generator[bool, None, None]:
yield True
@noop_wrap_async
async def async_wrapped_dependency_async_wrapper() -> bool:
return True
@noop_wrap_async
async def async_wrapped_gen_dependency_async_wrapper() -> AsyncGenerator[bool, None]:
yield True
@app.get("/wrapped-dependency-async-wrapper/")
async def get_wrapped_dependency_async_wrapper(
value: bool = Depends(wrapped_dependency_async_wrapper),
):
return value
@app.get("/wrapped-gen-dependency-async-wrapper/")
async def get_wrapped_gen_dependency_async_wrapper(
value: bool = Depends(wrapped_gen_dependency_async_wrapper),
):
return value
@app.get("/async-wrapped-dependency-async-wrapper/")
async def get_async_wrapped_dependency_async_wrapper(
value: bool = Depends(async_wrapped_dependency_async_wrapper),
):
return value
@app.get("/async-wrapped-gen-dependency-async-wrapper/")
async def get_async_wrapped_gen_dependency_async_wrapper(
value: bool = Depends(async_wrapped_gen_dependency_async_wrapper),
):
return value
@app.get("/wrapped-class-instance-dependency-async-wrapper/")
async def get_wrapped_class_instance_dependency_async_wrapper(
value: bool = Depends(wrapped_class_instance_dep_async_wrapper),
):
return value
@app.get("/wrapped-class-instance-async-dependency-async-wrapper/")
async def get_wrapped_class_instance_async_dependency_async_wrapper(
value: bool = Depends(wrapped_class_instance_async_dep_async_wrapper),
):
return value
@app.get("/wrapped-class-dependency-async-wrapper/")
async def get_wrapped_class_dependency_async_wrapper(
value: ClassDep = Depends(wrapped_class_dep_async_wrapper),
):
return value.value
@app.get("/wrapped-endpoint-async-wrapper/")
@noop_wrap_async
def get_wrapped_endpoint_async_wrapper():
return True
@app.get("/async-wrapped-endpoint-async-wrapper/")
@noop_wrap_async
async def get_async_wrapped_endpoint_async_wrapper():
return True
client = TestClient(app)
@pytest.mark.parametrize(
"route",
[
"/wrapped-dependency/",
"/wrapped-gen-dependency/",
"/async-wrapped-dependency/",
"/async-wrapped-gen-dependency/",
"/wrapped-class-instance-dependency/",
"/wrapped-class-instance-async-dependency/",
"/wrapped-class-instance-gen-dependency/",
"/wrapped-class-instance-async-gen-dependency/",
"/class-instance-wrapped-dependency/",
"/class-instance-wrapped-async-dependency/",
"/class-instance-async-wrapped-dependency/",
"/class-instance-async-wrapped-async-dependency/",
"/class-instance-wrapped-gen-dependency/",
"/class-instance-wrapped-async-gen-dependency/",
"/class-instance-async-wrapped-gen-dependency/",
"/class-instance-async-wrapped-gen-async-dependency/",
"/wrapped-class-dependency/",
"/wrapped-endpoint/",
"/async-wrapped-endpoint/",
"/wrapped-dependency-async-wrapper/",
"/wrapped-gen-dependency-async-wrapper/",
"/async-wrapped-dependency-async-wrapper/",
"/async-wrapped-gen-dependency-async-wrapper/",
"/wrapped-class-instance-dependency-async-wrapper/",
"/wrapped-class-instance-async-dependency-async-wrapper/",
"/wrapped-class-dependency-async-wrapper/",
"/wrapped-endpoint-async-wrapper/",
"/async-wrapped-endpoint-async-wrapper/",
],
)
def test_class_dependency(route):
response = client.get(route)
assert response.status_code == 200, response.text
assert response.json() is True
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_dependency_wrapped.py",
"license": "MIT License",
"lines": 303,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_request_param_model_by_alias.py | from dirty_equals import IsPartialDict
from fastapi import Cookie, FastAPI, Header, Query
from fastapi.testclient import TestClient
from pydantic import BaseModel, Field
app = FastAPI()
class Model(BaseModel):
param: str = Field(alias="param_alias")
@app.get("/query")
async def query_model(data: Model = Query()):
return {"param": data.param}
@app.get("/header")
async def header_model(data: Model = Header()):
return {"param": data.param}
@app.get("/cookie")
async def cookie_model(data: Model = Cookie()):
return {"param": data.param}
def test_query_model_with_alias():
client = TestClient(app)
response = client.get("/query", params={"param_alias": "value"})
assert response.status_code == 200, response.text
assert response.json() == {"param": "value"}
def test_header_model_with_alias():
client = TestClient(app)
response = client.get("/header", headers={"param_alias": "value"})
assert response.status_code == 200, response.text
assert response.json() == {"param": "value"}
def test_cookie_model_with_alias():
client = TestClient(app)
client.cookies.set("param_alias", "value")
response = client.get("/cookie")
assert response.status_code == 200, response.text
assert response.json() == {"param": "value"}
def test_query_model_with_alias_by_name():
    """Sending the field name instead of the alias is rejected with 422."""
    test_client = TestClient(app)
    result = test_client.get("/query", params={"param": "value"})
    assert result.status_code == 422, result.text
    body = result.json()
    assert body["detail"][0]["input"] == {"param": "value"}
def test_header_model_with_alias_by_name():
    """A header using the field name (not the alias) fails validation."""
    test_client = TestClient(app)
    result = test_client.get("/header", headers={"param": "value"})
    assert result.status_code == 422, result.text
    body = result.json()
    assert body["detail"][0]["input"] == IsPartialDict({"param": "value"})
def test_cookie_model_with_alias_by_name():
    """A cookie using the field name (not the alias) fails validation."""
    test_client = TestClient(app)
    test_client.cookies.set("param", "value")
    result = test_client.get("/cookie")
    assert result.status_code == 422, result.text
    body = result.json()
    assert body["detail"][0]["input"] == {"param": "value"}
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_request_param_model_by_alias.py",
"license": "MIT License",
"lines": 51,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_optional_file_list.py | from fastapi import FastAPI, File
from fastapi.testclient import TestClient
app = FastAPI()
@app.post("/files")
async def upload_files(files: list[bytes] | None = File(None)):
    """Accept an optional list of uploaded files; report count and sizes."""
    if files is None:
        return {"files_count": 0}
    sizes = [len(content) for content in files]
    return {"files_count": len(sizes), "sizes": sizes}
def test_optional_bytes_list():
    """Two uploads are counted and their sizes reported."""
    test_client = TestClient(app)
    result = test_client.post(
        "/files",
        files=[("files", b"content1"), ("files", b"content2")],
    )
    assert result.status_code == 200
    assert result.json() == {"files_count": 2, "sizes": [8, 8]}
def test_optional_bytes_list_no_files():
    """Omitting the files entirely is allowed and yields a zero count."""
    test_client = TestClient(app)
    result = test_client.post("/files")
    assert result.status_code == 200
    assert result.json() == {"files_count": 0}
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_optional_file_list.py",
"license": "MIT License",
"lines": 21,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_query_cookie_header_model_extra_params.py | from fastapi import Cookie, FastAPI, Header, Query
from fastapi.testclient import TestClient
from pydantic import BaseModel
# Application under test: a model configured with `extra="allow"` collects
# any undeclared query/header/cookie parameters alongside the declared one.
app = FastAPI()
class Model(BaseModel):
    param: str
    # Undeclared request parameters are kept on the model instead of rejected.
    model_config = {"extra": "allow"}
@app.get("/query")
async def query_model_with_extra(data: Model = Query()):
    return data
@app.get("/header")
async def header_model_with_extra(data: Model = Header()):
    return data
@app.get("/cookie")
async def cookies_model_with_extra(data: Model = Cookie()):
    return data
def test_query_pass_extra_list():
    """Repeated extra query params are collected into a list."""
    test_client = TestClient(app)
    response = test_client.get(
        "/query",
        params={
            "param": "123",
            "param2": ["456", "789"],  # repeated values for an undeclared param
        },
    )
    assert response.status_code == 200
    assert response.json() == {"param": "123", "param2": ["456", "789"]}
def test_query_pass_extra_single():
    """A single-valued extra query param stays a scalar string."""
    test_client = TestClient(app)
    response = test_client.get(
        "/query", params={"param": "123", "param2": "456"}
    )
    assert response.status_code == 200
    assert response.json() == {"param": "123", "param2": "456"}
def test_header_pass_extra_list():
    """Repeated extra headers are collected into a list."""
    test_client = TestClient(app)
    response = test_client.get(
        "/header",
        headers=[("param", "123"), ("param2", "456"), ("param2", "789")],
    )
    assert response.status_code == 200
    payload = response.json()
    assert "param2" in payload
    assert payload["param2"] == ["456", "789"]
def test_header_pass_extra_single():
    """A single extra header stays a scalar string."""
    test_client = TestClient(app)
    response = test_client.get(
        "/header", headers=[("param", "123"), ("param2", "456")]
    )
    assert response.status_code == 200
    payload = response.json()
    assert "param2" in payload
    assert payload["param2"] == "456"
def test_cookie_pass_extra_list():
    """Duplicate cookies collapse: only the last value is kept."""
    test_client = TestClient(app)
    test_client.cookies = [
        ("param", "123"),
        ("param2", "456"),
        ("param2", "789"),
    ]
    response = test_client.get("/cookie")
    assert response.status_code == 200
    payload = response.json()
    assert "param2" in payload
    assert payload["param2"] == "789"  # cookies only keep the last value
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_query_cookie_header_model_extra_params.py",
"license": "MIT License",
"lines": 83,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_form_default.py | from typing import Annotated
from fastapi import FastAPI, File, Form
from starlette.testclient import TestClient
# Application under test: optional form fields submitted as empty strings
# should fall back to their None default rather than fail int validation
# (demonstrated by the tests below posting {"age": ""}).
app = FastAPI()
@app.post("/urlencoded")
async def post_url_encoded(age: Annotated[int | None, Form()] = None):
    return age
@app.post("/multipart")
async def post_multi_part(
    age: Annotated[int | None, Form()] = None,
    file: Annotated[bytes | None, File()] = None,
):
    return {"file": file, "age": age}
client = TestClient(app)
def test_form_default_url_encoded():
    """An empty urlencoded form value falls back to the None default."""
    result = client.post("/urlencoded", data={"age": ""})
    assert result.status_code == 200
    assert result.text == "null"
def test_form_default_multi_part():
    """An empty multipart form value falls back to the None default."""
    result = client.post("/multipart", data={"age": ""})
    assert result.status_code == 200
    assert result.json() == {"file": None, "age": None}
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_form_default.py",
"license": "MIT License",
"lines": 22,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_schema_compat_pydantic_v2.py | import pytest
from fastapi import FastAPI
from fastapi.testclient import TestClient
from inline_snapshot import snapshot
from pydantic import BaseModel
from tests.utils import needs_py310
@pytest.fixture(name="client")
def get_client():
from enum import Enum
app = FastAPI()
class PlatformRole(str, Enum):
admin = "admin"
user = "user"
class OtherRole(str, Enum): ...
class User(BaseModel):
username: str
role: PlatformRole | OtherRole
@app.get("/users")
async def get_user() -> User:
return {"username": "alice", "role": "admin"}
client = TestClient(app)
return client
@needs_py310
def test_get(client: TestClient):
    """The union-typed role field serializes as its plain string value."""
    result = client.get("/users")
    assert result.json() == {"username": "alice", "role": "admin"}
@needs_py310
def test_openapi_schema(client: TestClient):
response = client.get("openapi.json")
assert response.json() == snapshot(
{
"openapi": "3.1.0",
"info": {"title": "FastAPI", "version": "0.1.0"},
"paths": {
"/users": {
"get": {
"summary": "Get User",
"operationId": "get_user_users_get",
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {"$ref": "#/components/schemas/User"}
}
},
}
},
}
}
},
"components": {
"schemas": {
"PlatformRole": {
"type": "string",
"enum": ["admin", "user"],
"title": "PlatformRole",
},
"User": {
"properties": {
"username": {"type": "string", "title": "Username"},
"role": {
"anyOf": [
{"$ref": "#/components/schemas/PlatformRole"},
{"enum": [], "title": "OtherRole"},
],
"title": "Role",
},
},
"type": "object",
"required": ["username", "role"],
"title": "User",
},
}
},
}
)
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_schema_compat_pydantic_v2.py",
"license": "MIT License",
"lines": 77,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_security_scopes.py | from typing import Annotated
import pytest
from fastapi import Depends, FastAPI, Security
from fastapi.testclient import TestClient
@pytest.fixture(name="call_counter")
def call_counter_fixture():
return {"count": 0}
@pytest.fixture(name="app")
def app_fixture(call_counter: dict[str, int]):
def get_db():
call_counter["count"] += 1
return f"db_{call_counter['count']}"
def get_user(db: Annotated[str, Depends(get_db)]):
return "user"
app = FastAPI()
@app.get("/")
def endpoint(
db: Annotated[str, Depends(get_db)],
user: Annotated[str, Security(get_user, scopes=["read"])],
):
return {"db": db}
return app
@pytest.fixture(name="client")
def client_fixture(app: FastAPI):
return TestClient(app)
def test_security_scopes_dependency_called_once(
    client: TestClient, call_counter: dict[str, int]
):
    """The shared dependency runs once even when reached via Security()."""
    result = client.get("/")
    assert result.status_code == 200
    assert call_counter["count"] == 1
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_security_scopes.py",
"license": "MIT License",
"lines": 31,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_security_scopes_sub_dependency.py | # Ref: https://github.com/fastapi/fastapi/discussions/6024#discussioncomment-8541913
from typing import Annotated
import pytest
from fastapi import Depends, FastAPI, Security
from fastapi.security import SecurityScopes
from fastapi.testclient import TestClient
@pytest.fixture(name="call_counts")
def call_counts_fixture():
return {
"get_db_session": 0,
"get_current_user": 0,
"get_user_me": 0,
"get_user_items": 0,
}
@pytest.fixture(name="app")
def app_fixture(call_counts: dict[str, int]):
def get_db_session():
call_counts["get_db_session"] += 1
return f"db_session_{call_counts['get_db_session']}"
def get_current_user(
security_scopes: SecurityScopes,
db_session: Annotated[str, Depends(get_db_session)],
):
call_counts["get_current_user"] += 1
return {
"user": f"user_{call_counts['get_current_user']}",
"scopes": security_scopes.scopes,
"db_session": db_session,
}
def get_user_me(
current_user: Annotated[dict, Security(get_current_user, scopes=["me"])],
):
call_counts["get_user_me"] += 1
return {
"user_me": f"user_me_{call_counts['get_user_me']}",
"current_user": current_user,
}
def get_user_items(
user_me: Annotated[dict, Depends(get_user_me)],
):
call_counts["get_user_items"] += 1
return {
"user_items": f"user_items_{call_counts['get_user_items']}",
"user_me": user_me,
}
app = FastAPI()
@app.get("/")
def path_operation(
user_me: Annotated[dict, Depends(get_user_me)],
user_items: Annotated[dict, Security(get_user_items, scopes=["items"])],
):
return {
"user_me": user_me,
"user_items": user_items,
}
return app
@pytest.fixture(name="client")
def client_fixture(app: FastAPI):
return TestClient(app)
def test_security_scopes_sub_dependency_caching(
client: TestClient, call_counts: dict[str, int]
):
response = client.get("/")
assert response.status_code == 200
assert call_counts["get_db_session"] == 1
assert call_counts["get_current_user"] == 2
assert call_counts["get_user_me"] == 2
assert call_counts["get_user_items"] == 1
assert response.json() == {
"user_me": {
"user_me": "user_me_1",
"current_user": {
"user": "user_1",
"scopes": ["me"],
"db_session": "db_session_1",
},
},
"user_items": {
"user_items": "user_items_1",
"user_me": {
"user_me": "user_me_2",
"current_user": {
"user": "user_2",
"scopes": ["items", "me"],
"db_session": "db_session_1",
},
},
},
}
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_security_scopes_sub_dependency.py",
"license": "MIT License",
"lines": 89,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_security_scopes_dont_propagate.py | # Ref: https://github.com/tiangolo/fastapi/issues/5623
from typing import Annotated, Any
from fastapi import FastAPI, Security
from fastapi.security import SecurityScopes
from fastapi.testclient import TestClient
async def security1(scopes: SecurityScopes):
return scopes.scopes
async def security2(scopes: SecurityScopes):
return scopes.scopes
async def dep3(
    dep1: Annotated[list[str], Security(security1, scopes=["scope1"])],
    dep2: Annotated[list[str], Security(security2, scopes=["scope2"])],
):
    # Each sub-dependency should see the parent's scopes plus its own;
    # the sibling's scopes must not leak across (asserted in the test below).
    return {"dep1": dep1, "dep2": dep2}
app = FastAPI()
@app.get("/scopes")
def get_scopes(
dep3: Annotated[dict[str, Any], Security(dep3, scopes=["scope3"])],
):
return dep3
client = TestClient(app)
def test_security_scopes_dont_propagate():
    """Sibling Security scopes stay isolated; only parent scopes are shared."""
    result = client.get("/scopes")
    assert result.status_code == 200
    assert result.json() == {
        "dep1": ["scope3", "scope1"],
        "dep2": ["scope3", "scope2"],
    }
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_security_scopes_dont_propagate.py",
"license": "MIT License",
"lines": 28,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_tutorial/test_authentication_error_status_code/test_tutorial001.py | import importlib
import pytest
from fastapi.testclient import TestClient
from inline_snapshot import snapshot
@pytest.fixture(
name="client",
params=[
"tutorial001_an_py310",
],
)
def get_client(request: pytest.FixtureRequest):
mod = importlib.import_module(
f"docs_src.authentication_error_status_code.{request.param}"
)
client = TestClient(mod.app)
return client
def test_get_me(client: TestClient):
    """A bearer token is accepted and echoed back in the response."""
    result = client.get("/me", headers={"Authorization": "Bearer secrettoken"})
    assert result.status_code == 200
    assert result.json() == {
        "message": "You are authenticated",
        "token": "secrettoken",
    }
def test_get_me_no_credentials(client: TestClient):
    """Missing credentials produce a 403 response."""
    result = client.get("/me")
    assert result.status_code == 403
    assert result.json() == {"detail": "Not authenticated"}
def test_openapi_schema(client: TestClient):
response = client.get("/openapi.json")
assert response.status_code == 200, response.text
assert response.json() == snapshot(
{
"openapi": "3.1.0",
"info": {"title": "FastAPI", "version": "0.1.0"},
"paths": {
"/me": {
"get": {
"summary": "Read Me",
"operationId": "read_me_me_get",
"responses": {
"200": {
"description": "Successful Response",
"content": {"application/json": {"schema": {}}},
}
},
"security": [{"HTTPBearer403": []}],
}
}
},
"components": {
"securitySchemes": {
"HTTPBearer403": {"type": "http", "scheme": "bearer"}
}
},
}
)
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_tutorial/test_authentication_error_status_code/test_tutorial001.py",
"license": "MIT License",
"lines": 56,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_depends_hashable.py | # This is more or less a workaround to make Depends and Security hashable
# as other tools that use them depend on that
# Ref: https://github.com/fastapi/fastapi/pull/14320
from fastapi import Depends, Security
def dep():
pass
def test_depends_hashable():
    """Depends/Security instances hash equally iff their config matches."""
    dep()  # just for coverage
    plain_a = Depends(dep)
    plain_b = Depends(dep)
    func_scoped_a = Depends(dep, scope="function")
    func_scoped_b = Depends(dep, scope="function")
    sec_a = Security(dep)
    sec_b = Security(dep)
    assert hash(plain_a) == hash(plain_b)
    assert hash(sec_a) == hash(sec_b)
    assert hash(plain_a) != hash(func_scoped_a)
    assert hash(func_scoped_a) == hash(func_scoped_b)
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_depends_hashable.py",
"license": "MIT License",
"lines": 18,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_schema_ref_pydantic_v2.py | from typing import Any
import pytest
from fastapi import FastAPI
from fastapi.testclient import TestClient
from inline_snapshot import snapshot
from pydantic import BaseModel, ConfigDict, Field
@pytest.fixture(name="client")
def get_client():
app = FastAPI()
class ModelWithRef(BaseModel):
ref: str = Field(validation_alias="$ref", serialization_alias="$ref")
model_config = ConfigDict(validate_by_alias=True, serialize_by_alias=True)
@app.get("/", response_model=ModelWithRef)
async def read_root() -> Any:
return {"$ref": "some-ref"}
client = TestClient(app)
return client
def test_get(client: TestClient):
    """The "$ref"-aliased field round-trips through the response model."""
    result = client.get("/")
    assert result.json() == {"$ref": "some-ref"}
def test_openapi_schema(client: TestClient):
response = client.get("openapi.json")
assert response.json() == snapshot(
{
"openapi": "3.1.0",
"info": {"title": "FastAPI", "version": "0.1.0"},
"paths": {
"/": {
"get": {
"summary": "Read Root",
"operationId": "read_root__get",
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ModelWithRef"
}
}
},
}
},
}
}
},
"components": {
"schemas": {
"ModelWithRef": {
"properties": {"$ref": {"type": "string", "title": "$Ref"}},
"type": "object",
"required": ["$ref"],
"title": "ModelWithRef",
}
}
},
}
)
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_schema_ref_pydantic_v2.py",
"license": "MIT License",
"lines": 58,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_dependency_yield_scope.py | import json
from typing import Annotated, Any
import pytest
from fastapi import APIRouter, Depends, FastAPI, HTTPException
from fastapi.exceptions import FastAPIError
from fastapi.responses import StreamingResponse
from fastapi.testclient import TestClient
class Session:
def __init__(self) -> None:
self.open = True
def dep_session() -> Any:
    # Yield-style dependency: the session is open while in use and is
    # flagged closed when the dependency's teardown (after the yield) runs.
    s = Session()
    yield s
    s.open = False
def raise_after_yield() -> Any:
    # Teardown raises deliberately; whether the 503 reaches the client
    # depends on the dependency's scope (see the router/app-level tests).
    yield
    raise HTTPException(status_code=503, detail="Exception after yield")
SessionFuncDep = Annotated[Session, Depends(dep_session, scope="function")]
SessionRequestDep = Annotated[Session, Depends(dep_session, scope="request")]
SessionDefaultDep = Annotated[Session, Depends(dep_session)]
class NamedSession:
def __init__(self, name: str = "default") -> None:
self.name = name
self.open = True
def get_named_session(session: SessionRequestDep, session_b: SessionDefaultDep) -> Any:
assert session is session_b
named_session = NamedSession(name="named")
yield named_session, session_b
named_session.open = False
NamedSessionsDep = Annotated[tuple[NamedSession, Session], Depends(get_named_session)]
def get_named_func_session(session: SessionFuncDep) -> Any:
named_session = NamedSession(name="named")
yield named_session, session
named_session.open = False
def get_named_regular_func_session(session: SessionFuncDep) -> Any:
named_session = NamedSession(name="named")
return named_session, session
BrokenSessionsDep = Annotated[
tuple[NamedSession, Session], Depends(get_named_func_session)
]
NamedSessionsFuncDep = Annotated[
tuple[NamedSession, Session], Depends(get_named_func_session, scope="function")
]
RegularSessionsDep = Annotated[
tuple[NamedSession, Session], Depends(get_named_regular_func_session)
]
app = FastAPI()
router = APIRouter()
@router.get("/")
def get_index():
return {"status": "ok"}
@app.get("/function-scope")
def function_scope(session: SessionFuncDep) -> Any:
def iter_data():
yield json.dumps({"is_open": session.open})
return StreamingResponse(iter_data())
@app.get("/request-scope")
def request_scope(session: SessionRequestDep) -> Any:
def iter_data():
yield json.dumps({"is_open": session.open})
return StreamingResponse(iter_data())
@app.get("/two-scopes")
def get_stream_session(
function_session: SessionFuncDep, request_session: SessionRequestDep
) -> Any:
def iter_data():
yield json.dumps(
{"func_is_open": function_session.open, "req_is_open": request_session.open}
)
return StreamingResponse(iter_data())
@app.get("/sub")
def get_sub(sessions: NamedSessionsDep) -> Any:
def iter_data():
yield json.dumps(
{"named_session_open": sessions[0].open, "session_open": sessions[1].open}
)
return StreamingResponse(iter_data())
@app.get("/named-function-scope")
def get_named_function_scope(sessions: NamedSessionsFuncDep) -> Any:
def iter_data():
yield json.dumps(
{"named_session_open": sessions[0].open, "session_open": sessions[1].open}
)
return StreamingResponse(iter_data())
@app.get("/regular-function-scope")
def get_regular_function_scope(sessions: RegularSessionsDep) -> Any:
def iter_data():
yield json.dumps(
{"named_session_open": sessions[0].open, "session_open": sessions[1].open}
)
return StreamingResponse(iter_data())
app.include_router(
prefix="/router-scope-function",
router=router,
dependencies=[Depends(raise_after_yield, scope="function")],
)
app.include_router(
prefix="/router-scope-request",
router=router,
dependencies=[Depends(raise_after_yield, scope="request")],
)
client = TestClient(app)
def test_function_scope() -> None:
    """Function-scoped sessions are already closed while streaming."""
    result = client.get("/function-scope")
    assert result.status_code == 200
    assert result.json()["is_open"] is False
def test_request_scope() -> None:
    """Request-scoped sessions stay open while the response streams."""
    result = client.get("/request-scope")
    assert result.status_code == 200
    assert result.json()["is_open"] is True
def test_two_scopes() -> None:
    """Function and request scopes coexist in one endpoint with distinct lifetimes."""
    result = client.get("/two-scopes")
    assert result.status_code == 200
    payload = result.json()
    assert payload["func_is_open"] is False
    assert payload["req_is_open"] is True
def test_sub() -> None:
response = client.get("/sub")
assert response.status_code == 200
data = response.json()
assert data["named_session_open"] is True
assert data["session_open"] is True
def test_broken_scope() -> None:
with pytest.raises(
FastAPIError,
match='The dependency "get_named_func_session" has a scope of "request", it cannot depend on dependencies with scope "function"',
):
@app.get("/broken-scope")
def get_broken(sessions: BrokenSessionsDep) -> Any: # pragma: no cover
pass
def test_named_function_scope() -> None:
response = client.get("/named-function-scope")
assert response.status_code == 200
data = response.json()
assert data["named_session_open"] is False
assert data["session_open"] is False
def test_regular_function_scope() -> None:
response = client.get("/regular-function-scope")
assert response.status_code == 200
data = response.json()
assert data["named_session_open"] is True
assert data["session_open"] is False
def test_router_level_dep_scope_function() -> None:
response = client.get("/router-scope-function/")
assert response.status_code == 503
assert response.json() == {"detail": "Exception after yield"}
def test_router_level_dep_scope_request() -> None:
with TestClient(app, raise_server_exceptions=False) as client:
response = client.get("/router-scope-request/")
assert response.status_code == 200
assert response.json() == {"status": "ok"}
def test_app_level_dep_scope_function() -> None:
app = FastAPI(dependencies=[Depends(raise_after_yield, scope="function")])
@app.get("/app-scope-function")
def get_app_scope_function():
return {"status": "ok"}
with TestClient(app) as client:
response = client.get("/app-scope-function")
assert response.status_code == 503
assert response.json() == {"detail": "Exception after yield"}
def test_app_level_dep_scope_request() -> None:
app = FastAPI(dependencies=[Depends(raise_after_yield, scope="request")])
@app.get("/app-scope-request")
def get_app_scope_request():
return {"status": "ok"}
with TestClient(app, raise_server_exceptions=False) as client:
response = client.get("/app-scope-request")
assert response.status_code == 200
assert response.json() == {"status": "ok"}
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_dependency_yield_scope.py",
"license": "MIT License",
"lines": 171,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_dependency_yield_scope_websockets.py | from contextvars import ContextVar
from typing import Annotated, Any
import pytest
from fastapi import Depends, FastAPI, WebSocket
from fastapi.exceptions import FastAPIError
from fastapi.testclient import TestClient
global_context: ContextVar[dict[str, Any]] = ContextVar("global_context", default={}) # noqa: B039
class Session:
def __init__(self) -> None:
self.open = True
async def dep_session() -> Any:
    # Yield-style dependency; teardown records the closure in the shared
    # ContextVar so tests can observe it after the websocket disconnects.
    s = Session()
    yield s
    s.open = False
    global_state = global_context.get()
    global_state["session_closed"] = True
SessionFuncDep = Annotated[Session, Depends(dep_session, scope="function")]
SessionRequestDep = Annotated[Session, Depends(dep_session, scope="request")]
SessionDefaultDep = Annotated[Session, Depends(dep_session)]
class NamedSession:
def __init__(self, name: str = "default") -> None:
self.name = name
self.open = True
def get_named_session(session: SessionRequestDep, session_b: SessionDefaultDep) -> Any:
assert session is session_b
named_session = NamedSession(name="named")
yield named_session, session_b
named_session.open = False
global_state = global_context.get()
global_state["named_session_closed"] = True
NamedSessionsDep = Annotated[tuple[NamedSession, Session], Depends(get_named_session)]
def get_named_func_session(session: SessionFuncDep) -> Any:
named_session = NamedSession(name="named")
yield named_session, session
named_session.open = False
global_state = global_context.get()
global_state["named_func_session_closed"] = True
def get_named_regular_func_session(session: SessionFuncDep) -> Any:
named_session = NamedSession(name="named")
return named_session, session
BrokenSessionsDep = Annotated[
tuple[NamedSession, Session], Depends(get_named_func_session)
]
NamedSessionsFuncDep = Annotated[
tuple[NamedSession, Session], Depends(get_named_func_session, scope="function")
]
RegularSessionsDep = Annotated[
tuple[NamedSession, Session], Depends(get_named_regular_func_session)
]
app = FastAPI()
@app.websocket("/function-scope")
async def function_scope(websocket: WebSocket, session: SessionFuncDep) -> Any:
await websocket.accept()
await websocket.send_json({"is_open": session.open})
@app.websocket("/request-scope")
async def request_scope(websocket: WebSocket, session: SessionRequestDep) -> Any:
await websocket.accept()
await websocket.send_json({"is_open": session.open})
@app.websocket("/two-scopes")
async def get_stream_session(
websocket: WebSocket,
function_session: SessionFuncDep,
request_session: SessionRequestDep,
) -> Any:
await websocket.accept()
await websocket.send_json(
{"func_is_open": function_session.open, "req_is_open": request_session.open}
)
@app.websocket("/sub")
async def get_sub(websocket: WebSocket, sessions: NamedSessionsDep) -> Any:
await websocket.accept()
await websocket.send_json(
{"named_session_open": sessions[0].open, "session_open": sessions[1].open}
)
@app.websocket("/named-function-scope")
async def get_named_function_scope(
websocket: WebSocket, sessions: NamedSessionsFuncDep
) -> Any:
await websocket.accept()
await websocket.send_json(
{"named_session_open": sessions[0].open, "session_open": sessions[1].open}
)
@app.websocket("/regular-function-scope")
async def get_regular_function_scope(
websocket: WebSocket, sessions: RegularSessionsDep
) -> Any:
await websocket.accept()
await websocket.send_json(
{"named_session_open": sessions[0].open, "session_open": sessions[1].open}
)
client = TestClient(app)
def test_function_scope() -> None:
global_context.set({})
global_state = global_context.get()
with client.websocket_connect("/function-scope") as websocket:
data = websocket.receive_json()
assert data["is_open"] is True
assert global_state["session_closed"] is True
def test_request_scope() -> None:
global_context.set({})
global_state = global_context.get()
with client.websocket_connect("/request-scope") as websocket:
data = websocket.receive_json()
assert data["is_open"] is True
assert global_state["session_closed"] is True
def test_two_scopes() -> None:
global_context.set({})
global_state = global_context.get()
with client.websocket_connect("/two-scopes") as websocket:
data = websocket.receive_json()
assert data["func_is_open"] is True
assert data["req_is_open"] is True
assert global_state["session_closed"] is True
def test_sub() -> None:
global_context.set({})
global_state = global_context.get()
with client.websocket_connect("/sub") as websocket:
data = websocket.receive_json()
assert data["named_session_open"] is True
assert data["session_open"] is True
assert global_state["session_closed"] is True
assert global_state["named_session_closed"] is True
def test_broken_scope() -> None:
with pytest.raises(
FastAPIError,
match='The dependency "get_named_func_session" has a scope of "request", it cannot depend on dependencies with scope "function"',
):
@app.websocket("/broken-scope")
async def get_broken(
websocket: WebSocket, sessions: BrokenSessionsDep
) -> Any: # pragma: no cover
pass
def test_named_function_scope() -> None:
global_context.set({})
global_state = global_context.get()
with client.websocket_connect("/named-function-scope") as websocket:
data = websocket.receive_json()
assert data["named_session_open"] is True
assert data["session_open"] is True
assert global_state["session_closed"] is True
assert global_state["named_func_session_closed"] is True
def test_regular_function_scope() -> None:
global_context.set({})
global_state = global_context.get()
with client.websocket_connect("/regular-function-scope") as websocket:
data = websocket.receive_json()
assert data["named_session_open"] is True
assert data["session_open"] is True
assert global_state["session_closed"] is True
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_dependency_yield_scope_websockets.py",
"license": "MIT License",
"lines": 149,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_tutorial/test_dependencies/test_tutorial008e.py | import importlib
import pytest
from fastapi.testclient import TestClient
@pytest.fixture(
name="client",
params=[
pytest.param("tutorial008e_py310"),
pytest.param("tutorial008e_an_py310"),
],
)
def get_client(request: pytest.FixtureRequest):
mod = importlib.import_module(f"docs_src.dependencies.{request.param}")
client = TestClient(mod.app)
return client
def test_get_users_me(client: TestClient):
    """The tutorial endpoint resolves the current user to "Rick"."""
    result = client.get("/users/me")
    assert result.status_code == 200, result.text
    assert result.json() == "Rick"
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_tutorial/test_dependencies/test_tutorial008e.py",
"license": "MIT License",
"lines": 18,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_top_level_security_scheme_in_openapi.py | # Test security scheme at the top level, including OpenAPI
# Ref: https://github.com/fastapi/fastapi/discussions/14263
# Ref: https://github.com/fastapi/fastapi/issues/14271
from fastapi import Depends, FastAPI
from fastapi.security import HTTPBearer
from fastapi.testclient import TestClient
from inline_snapshot import snapshot
app = FastAPI()
bearer_scheme = HTTPBearer()
@app.get("/", dependencies=[Depends(bearer_scheme)])
async def get_root():
return {"message": "Hello, World!"}
client = TestClient(app)
def test_get_root():
    """With a bearer token the endpoint responds normally."""
    result = client.get("/", headers={"Authorization": "Bearer token"})
    assert result.status_code == 200, result.text
    assert result.json() == {"message": "Hello, World!"}
def test_get_root_no_token():
    """Without credentials the security scheme rejects the request."""
    result = client.get("/")
    assert result.status_code == 401, result.text
    assert result.json() == {"detail": "Not authenticated"}
def test_openapi_schema():
response = client.get("/openapi.json")
assert response.status_code == 200, response.text
assert response.json() == snapshot(
{
"openapi": "3.1.0",
"info": {"title": "FastAPI", "version": "0.1.0"},
"paths": {
"/": {
"get": {
"summary": "Get Root",
"operationId": "get_root__get",
"responses": {
"200": {
"description": "Successful Response",
"content": {"application/json": {"schema": {}}},
}
},
"security": [{"HTTPBearer": []}],
}
}
},
"components": {
"securitySchemes": {"HTTPBearer": {"type": "http", "scheme": "bearer"}}
},
}
)
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_top_level_security_scheme_in_openapi.py",
"license": "MIT License",
"lines": 48,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_dependency_paramless.py | from typing import Annotated
from fastapi import FastAPI, HTTPException, Security
from fastapi.security import (
OAuth2PasswordBearer,
SecurityScopes,
)
from fastapi.testclient import TestClient
app = FastAPI()
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")
def process_auth(
credentials: Annotated[str | None, Security(oauth2_scheme)],
security_scopes: SecurityScopes,
):
# This is an incorrect way of using it, this is not checking if the scopes are
# provided by the token, only if the endpoint is requesting them, but the test
# here is just to check if FastAPI is indeed registering and passing the scopes
# correctly when using Security with parameterless dependencies.
if "a" not in security_scopes.scopes or "b" not in security_scopes.scopes:
raise HTTPException(detail="a or b not in scopes", status_code=401)
return {"token": credentials, "scopes": security_scopes.scopes}
@app.get("/get-credentials")
def get_credentials(
credentials: Annotated[dict, Security(process_auth, scopes=["a", "b"])],
):
return credentials
@app.get(
"/parameterless-with-scopes",
dependencies=[Security(process_auth, scopes=["a", "b"])],
)
def get_parameterless_with_scopes():
return {"status": "ok"}
@app.get(
"/parameterless-without-scopes",
dependencies=[Security(process_auth)],
)
def get_parameterless_without_scopes():
return {"status": "ok"}
client = TestClient(app)
def test_get_credentials():
response = client.get("/get-credentials", headers={"authorization": "Bearer token"})
assert response.status_code == 200, response.text
assert response.json() == {"token": "token", "scopes": ["a", "b"]}
def test_parameterless_with_scopes():
response = client.get(
"/parameterless-with-scopes", headers={"authorization": "Bearer token"}
)
assert response.status_code == 200, response.text
assert response.json() == {"status": "ok"}
def test_parameterless_without_scopes():
response = client.get(
"/parameterless-without-scopes", headers={"authorization": "Bearer token"}
)
assert response.status_code == 401, response.text
assert response.json() == {"detail": "a or b not in scopes"}
def test_call_get_parameterless_without_scopes_for_coverage():
assert get_parameterless_without_scopes() == {"status": "ok"}
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_dependency_paramless.py",
"license": "MIT License",
"lines": 56,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_no_schema_split.py | # Test with parts from, and to verify the report in:
# https://github.com/fastapi/fastapi/discussions/14177
# Made an issue in:
# https://github.com/fastapi/fastapi/issues/14247
from enum import Enum
from fastapi import FastAPI
from fastapi.testclient import TestClient
from inline_snapshot import snapshot
from pydantic import BaseModel, Field
class MessageEventType(str, Enum):
alpha = "alpha"
beta = "beta"
class MessageEvent(BaseModel):
event_type: MessageEventType = Field(default=MessageEventType.alpha)
output: str
class MessageOutput(BaseModel):
body: str = ""
events: list[MessageEvent] = []
class Message(BaseModel):
input: str
output: MessageOutput
app = FastAPI(title="Minimal FastAPI App", version="1.0.0")
@app.post("/messages", response_model=Message)
async def create_message(input_message: str) -> Message:
return Message(
input=input_message,
output=MessageOutput(body=f"Processed: {input_message}"),
)
client = TestClient(app)
def test_create_message():
response = client.post("/messages", params={"input_message": "Hello"})
assert response.status_code == 200, response.text
assert response.json() == {
"input": "Hello",
"output": {"body": "Processed: Hello", "events": []},
}
def test_openapi_schema():
response = client.get("/openapi.json")
assert response.status_code == 200, response.text
assert response.json() == snapshot(
{
"openapi": "3.1.0",
"info": {"title": "Minimal FastAPI App", "version": "1.0.0"},
"paths": {
"/messages": {
"post": {
"summary": "Create Message",
"operationId": "create_message_messages_post",
"parameters": [
{
"name": "input_message",
"in": "query",
"required": True,
"schema": {"type": "string", "title": "Input Message"},
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Message"
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
}
}
},
"components": {
"schemas": {
"HTTPValidationError": {
"properties": {
"detail": {
"items": {
"$ref": "#/components/schemas/ValidationError"
},
"type": "array",
"title": "Detail",
}
},
"type": "object",
"title": "HTTPValidationError",
},
"Message": {
"properties": {
"input": {"type": "string", "title": "Input"},
"output": {"$ref": "#/components/schemas/MessageOutput"},
},
"type": "object",
"required": ["input", "output"],
"title": "Message",
},
"MessageEvent": {
"properties": {
"event_type": {
"$ref": "#/components/schemas/MessageEventType",
"default": "alpha",
},
"output": {"type": "string", "title": "Output"},
},
"type": "object",
"required": ["output"],
"title": "MessageEvent",
},
"MessageEventType": {
"type": "string",
"enum": ["alpha", "beta"],
"title": "MessageEventType",
},
"MessageOutput": {
"properties": {
"body": {"type": "string", "title": "Body", "default": ""},
"events": {
"items": {"$ref": "#/components/schemas/MessageEvent"},
"type": "array",
"title": "Events",
"default": [],
},
},
"type": "object",
"title": "MessageOutput",
},
"ValidationError": {
"properties": {
"ctx": {"title": "Context", "type": "object"},
"input": {"title": "Input"},
"loc": {
"items": {
"anyOf": [{"type": "string"}, {"type": "integer"}]
},
"type": "array",
"title": "Location",
},
"msg": {"type": "string", "title": "Message"},
"type": {"type": "string", "title": "Error Type"},
},
"type": "object",
"required": ["loc", "msg", "type"],
"title": "ValidationError",
},
}
},
}
)
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_no_schema_split.py",
"license": "MIT License",
"lines": 157,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:docs_src/pydantic_v1_in_v2/tutorial001_an_py310.py | from pydantic.v1 import BaseModel
class Item(BaseModel):
name: str
description: str | None = None
size: float
| {
"repo_id": "fastapi/fastapi",
"file_path": "docs_src/pydantic_v1_in_v2/tutorial001_an_py310.py",
"license": "MIT License",
"lines": 5,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
fastapi/fastapi:docs_src/pydantic_v1_in_v2/tutorial002_an_py310.py | from fastapi import FastAPI
from pydantic.v1 import BaseModel
class Item(BaseModel):
name: str
description: str | None = None
size: float
app = FastAPI()
@app.post("/items/")
async def create_item(item: Item) -> Item:
return item
| {
"repo_id": "fastapi/fastapi",
"file_path": "docs_src/pydantic_v1_in_v2/tutorial002_an_py310.py",
"license": "MIT License",
"lines": 10,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
fastapi/fastapi:docs_src/pydantic_v1_in_v2/tutorial003_an_py310.py | from fastapi import FastAPI
from pydantic import BaseModel as BaseModelV2
from pydantic.v1 import BaseModel
class Item(BaseModel):
name: str
description: str | None = None
size: float
class ItemV2(BaseModelV2):
name: str
description: str | None = None
size: float
app = FastAPI()
@app.post("/items/", response_model=ItemV2)
async def create_item(item: Item):
return item
| {
"repo_id": "fastapi/fastapi",
"file_path": "docs_src/pydantic_v1_in_v2/tutorial003_an_py310.py",
"license": "MIT License",
"lines": 15,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
fastapi/fastapi:docs_src/pydantic_v1_in_v2/tutorial004_an_py310.py | from typing import Annotated
from fastapi import FastAPI
from fastapi.temp_pydantic_v1_params import Body
from pydantic.v1 import BaseModel
class Item(BaseModel):
name: str
description: str | None = None
size: float
app = FastAPI()
@app.post("/items/")
async def create_item(item: Annotated[Item, Body(embed=True)]) -> Item:
return item
| {
"repo_id": "fastapi/fastapi",
"file_path": "docs_src/pydantic_v1_in_v2/tutorial004_an_py310.py",
"license": "MIT License",
"lines": 12,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
fastapi/fastapi:tests/test_union_body_discriminator.py | from typing import Annotated, Any, Literal
from fastapi import FastAPI
from fastapi.testclient import TestClient
from inline_snapshot import snapshot
from pydantic import BaseModel, Field
def test_discriminator_pydantic_v2() -> None:
from pydantic import Tag
app = FastAPI()
class FirstItem(BaseModel):
value: Literal["first"]
price: int
class OtherItem(BaseModel):
value: Literal["other"]
price: float
Item = Annotated[
Annotated[FirstItem, Tag("first")] | Annotated[OtherItem, Tag("other")],
Field(discriminator="value"),
]
@app.post("/items/")
def save_union_body_discriminator(
item: Item, q: Annotated[str, Field(description="Query string")]
) -> dict[str, Any]:
return {"item": item}
client = TestClient(app)
response = client.post("/items/?q=first", json={"value": "first", "price": 100})
assert response.status_code == 200, response.text
assert response.json() == {"item": {"value": "first", "price": 100}}
response = client.post("/items/?q=other", json={"value": "other", "price": 100.5})
assert response.status_code == 200, response.text
assert response.json() == {"item": {"value": "other", "price": 100.5}}
response = client.get("/openapi.json")
assert response.status_code == 200, response.text
assert response.json() == snapshot(
{
"openapi": "3.1.0",
"info": {"title": "FastAPI", "version": "0.1.0"},
"paths": {
"/items/": {
"post": {
"summary": "Save Union Body Discriminator",
"operationId": "save_union_body_discriminator_items__post",
"parameters": [
{
"name": "q",
"in": "query",
"required": True,
"schema": {
"type": "string",
"description": "Query string",
"title": "Q",
},
}
],
"requestBody": {
"required": True,
"content": {
"application/json": {
"schema": {
"oneOf": [
{"$ref": "#/components/schemas/FirstItem"},
{"$ref": "#/components/schemas/OtherItem"},
],
"discriminator": {
"propertyName": "value",
"mapping": {
"first": "#/components/schemas/FirstItem",
"other": "#/components/schemas/OtherItem",
},
},
"title": "Item",
}
}
},
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"type": "object",
"additionalProperties": True,
"title": "Response Save Union Body Discriminator Items Post",
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
}
}
},
"components": {
"schemas": {
"FirstItem": {
"properties": {
"value": {
"type": "string",
"const": "first",
"title": "Value",
},
"price": {"type": "integer", "title": "Price"},
},
"type": "object",
"required": ["value", "price"],
"title": "FirstItem",
},
"HTTPValidationError": {
"properties": {
"detail": {
"items": {
"$ref": "#/components/schemas/ValidationError"
},
"type": "array",
"title": "Detail",
}
},
"type": "object",
"title": "HTTPValidationError",
},
"OtherItem": {
"properties": {
"value": {
"type": "string",
"const": "other",
"title": "Value",
},
"price": {"type": "number", "title": "Price"},
},
"type": "object",
"required": ["value", "price"],
"title": "OtherItem",
},
"ValidationError": {
"properties": {
"ctx": {"title": "Context", "type": "object"},
"input": {"title": "Input"},
"loc": {
"items": {
"anyOf": [{"type": "string"}, {"type": "integer"}]
},
"type": "array",
"title": "Location",
},
"msg": {"type": "string", "title": "Message"},
"type": {"type": "string", "title": "Error Type"},
},
"type": "object",
"required": ["loc", "msg", "type"],
"title": "ValidationError",
},
}
},
}
)
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_union_body_discriminator.py",
"license": "MIT License",
"lines": 164,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:docs_src/dependencies/tutorial013_an_py310.py | import time
from typing import Annotated
from fastapi import Depends, FastAPI, HTTPException
from fastapi.responses import StreamingResponse
from sqlmodel import Field, Session, SQLModel, create_engine
engine = create_engine("postgresql+psycopg://postgres:postgres@localhost/db")
class User(SQLModel, table=True):
id: int | None = Field(default=None, primary_key=True)
name: str
app = FastAPI()
def get_session():
with Session(engine) as session:
yield session
def get_user(user_id: int, session: Annotated[Session, Depends(get_session)]):
user = session.get(User, user_id)
if not user:
raise HTTPException(status_code=403, detail="Not authorized")
def generate_stream(query: str):
for ch in query:
yield ch
time.sleep(0.1)
@app.get("/generate", dependencies=[Depends(get_user)])
def generate(query: str):
return StreamingResponse(content=generate_stream(query))
| {
"repo_id": "fastapi/fastapi",
"file_path": "docs_src/dependencies/tutorial013_an_py310.py",
"license": "MIT License",
"lines": 24,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
fastapi/fastapi:docs_src/dependencies/tutorial014_an_py310.py | import time
from typing import Annotated
from fastapi import Depends, FastAPI, HTTPException
from fastapi.responses import StreamingResponse
from sqlmodel import Field, Session, SQLModel, create_engine
engine = create_engine("postgresql+psycopg://postgres:postgres@localhost/db")
class User(SQLModel, table=True):
id: int | None = Field(default=None, primary_key=True)
name: str
app = FastAPI()
def get_session():
with Session(engine) as session:
yield session
def get_user(user_id: int, session: Annotated[Session, Depends(get_session)]):
user = session.get(User, user_id)
if not user:
raise HTTPException(status_code=403, detail="Not authorized")
session.close()
def generate_stream(query: str):
for ch in query:
yield ch
time.sleep(0.1)
@app.get("/generate", dependencies=[Depends(get_user)])
def generate(query: str):
return StreamingResponse(content=generate_stream(query))
| {
"repo_id": "fastapi/fastapi",
"file_path": "docs_src/dependencies/tutorial014_an_py310.py",
"license": "MIT License",
"lines": 25,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
fastapi/fastapi:tests/test_dependency_after_yield_raise.py | from typing import Annotated, Any
import pytest
from fastapi import Depends, FastAPI, HTTPException
from fastapi.testclient import TestClient
class CustomError(Exception):
pass
def catching_dep() -> Any:
try:
yield "s"
except CustomError as err:
raise HTTPException(status_code=418, detail="Session error") from err
def broken_dep() -> Any:
yield "s"
raise ValueError("Broken after yield")
app = FastAPI()
@app.get("/catching")
def catching(d: Annotated[str, Depends(catching_dep)]) -> Any:
raise CustomError("Simulated error during streaming")
@app.get("/broken")
def broken(d: Annotated[str, Depends(broken_dep)]) -> Any:
return {"message": "all good?"}
client = TestClient(app)
def test_catching():
response = client.get("/catching")
assert response.status_code == 418
assert response.json() == {"detail": "Session error"}
def test_broken_raise():
with pytest.raises(ValueError, match="Broken after yield"):
client.get("/broken")
def test_broken_no_raise():
"""
When a dependency with yield raises after the yield (not in an except), the
response is already "successfully" sent back to the client, but there's still
an error in the server afterwards, an exception is raised and captured or shown
in the server logs.
"""
with TestClient(app, raise_server_exceptions=False) as client:
response = client.get("/broken")
assert response.status_code == 200
assert response.json() == {"message": "all good?"}
def test_broken_return_finishes():
client = TestClient(app, raise_server_exceptions=False)
response = client.get("/broken")
assert response.status_code == 200
assert response.json() == {"message": "all good?"}
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_dependency_after_yield_raise.py",
"license": "MIT License",
"lines": 45,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_dependency_after_yield_streaming.py | from collections.abc import Generator
from contextlib import contextmanager
from typing import Annotated, Any
import pytest
from fastapi import Depends, FastAPI
from fastapi.responses import StreamingResponse
from fastapi.testclient import TestClient
class Session:
def __init__(self) -> None:
self.data = ["foo", "bar", "baz"]
self.open = True
def __iter__(self) -> Generator[str, None, None]:
for item in self.data:
if self.open:
yield item
else:
raise ValueError("Session closed")
@contextmanager
def acquire_session() -> Generator[Session, None, None]:
session = Session()
try:
yield session
finally:
session.open = False
def dep_session() -> Any:
with acquire_session() as s:
yield s
def broken_dep_session() -> Any:
with acquire_session() as s:
s.open = False
yield s
SessionDep = Annotated[Session, Depends(dep_session)]
BrokenSessionDep = Annotated[Session, Depends(broken_dep_session)]
app = FastAPI()
@app.get("/data")
def get_data(session: SessionDep) -> Any:
data = list(session)
return data
@app.get("/stream-simple")
def get_stream_simple(session: SessionDep) -> Any:
def iter_data():
yield from ["x", "y", "z"]
return StreamingResponse(iter_data())
@app.get("/stream-session")
def get_stream_session(session: SessionDep) -> Any:
def iter_data():
yield from session
return StreamingResponse(iter_data())
@app.get("/broken-session-data")
def get_broken_session_data(session: BrokenSessionDep) -> Any:
return list(session)
@app.get("/broken-session-stream")
def get_broken_session_stream(session: BrokenSessionDep) -> Any:
def iter_data():
yield from session
return StreamingResponse(iter_data())
client = TestClient(app)
def test_regular_no_stream():
response = client.get("/data")
assert response.json() == ["foo", "bar", "baz"]
def test_stream_simple():
response = client.get("/stream-simple")
assert response.text == "xyz"
def test_stream_session():
response = client.get("/stream-session")
assert response.text == "foobarbaz"
def test_broken_session_data():
with pytest.raises(ValueError, match="Session closed"):
client.get("/broken-session-data")
def test_broken_session_data_no_raise():
client = TestClient(app, raise_server_exceptions=False)
response = client.get("/broken-session-data")
assert response.status_code == 500
assert response.text == "Internal Server Error"
def test_broken_session_stream_raise():
# Can raise ValueError on Pydantic v2 and ExceptionGroup on Pydantic v1
with pytest.raises((ValueError, Exception)):
client.get("/broken-session-stream")
def test_broken_session_stream_no_raise():
"""
When a dependency with yield raises after the streaming response already started
the 200 status code is already sent, but there's still an error in the server
afterwards, an exception is raised and captured or shown in the server logs.
"""
with TestClient(app, raise_server_exceptions=False) as client:
response = client.get("/broken-session-stream")
assert response.status_code == 200
assert response.text == ""
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_dependency_after_yield_streaming.py",
"license": "MIT License",
"lines": 88,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_dependency_after_yield_websockets.py | from collections.abc import Generator
from contextlib import contextmanager
from typing import Annotated, Any
import pytest
from fastapi import Depends, FastAPI, WebSocket
from fastapi.testclient import TestClient
class Session:
def __init__(self) -> None:
self.data = ["foo", "bar", "baz"]
self.open = True
def __iter__(self) -> Generator[str, None, None]:
for item in self.data:
if self.open:
yield item
else:
raise ValueError("Session closed")
@contextmanager
def acquire_session() -> Generator[Session, None, None]:
session = Session()
try:
yield session
finally:
session.open = False
def dep_session() -> Any:
with acquire_session() as s:
yield s
def broken_dep_session() -> Any:
with acquire_session() as s:
s.open = False
yield s
SessionDep = Annotated[Session, Depends(dep_session)]
BrokenSessionDep = Annotated[Session, Depends(broken_dep_session)]
app = FastAPI()
@app.websocket("/ws")
async def websocket_endpoint(websocket: WebSocket, session: SessionDep):
await websocket.accept()
for item in session:
await websocket.send_text(f"{item}")
@app.websocket("/ws-broken")
async def websocket_endpoint_broken(websocket: WebSocket, session: BrokenSessionDep):
await websocket.accept()
for item in session:
await websocket.send_text(f"{item}") # pragma no cover
client = TestClient(app)
def test_websocket_dependency_after_yield():
with client.websocket_connect("/ws") as websocket:
data = websocket.receive_text()
assert data == "foo"
data = websocket.receive_text()
assert data == "bar"
data = websocket.receive_text()
assert data == "baz"
def test_websocket_dependency_after_yield_broken():
with pytest.raises(ValueError, match="Session closed"):
with client.websocket_connect("/ws-broken"):
pass # pragma no cover
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_dependency_after_yield_websockets.py",
"license": "MIT License",
"lines": 56,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_file_and_form_order_issue_9116.py | """
Regression test, Error 422 if Form is declared before File
See https://github.com/tiangolo/fastapi/discussions/9116
"""
from pathlib import Path
from typing import Annotated
import pytest
from fastapi import FastAPI, File, Form
from fastapi.testclient import TestClient
app = FastAPI()
@app.post("/file_before_form")
def file_before_form(
file: bytes = File(),
city: str = Form(),
):
return {"file_content": file, "city": city}
@app.post("/file_after_form")
def file_after_form(
city: str = Form(),
file: bytes = File(),
):
return {"file_content": file, "city": city}
@app.post("/file_list_before_form")
def file_list_before_form(
files: Annotated[list[bytes], File()],
city: Annotated[str, Form()],
):
return {"file_contents": files, "city": city}
@app.post("/file_list_after_form")
def file_list_after_form(
city: Annotated[str, Form()],
files: Annotated[list[bytes], File()],
):
return {"file_contents": files, "city": city}
client = TestClient(app)
@pytest.fixture
def tmp_file_1(tmp_path: Path) -> Path:
f = tmp_path / "example1.txt"
f.write_text("foo")
return f
@pytest.fixture
def tmp_file_2(tmp_path: Path) -> Path:
f = tmp_path / "example2.txt"
f.write_text("bar")
return f
@pytest.mark.parametrize("endpoint_path", ("/file_before_form", "/file_after_form"))
def test_file_form_order(endpoint_path: str, tmp_file_1: Path):
response = client.post(
url=endpoint_path,
data={"city": "Thimphou"},
files={"file": (tmp_file_1.name, tmp_file_1.read_bytes())},
)
assert response.status_code == 200, response.text
assert response.json() == {"file_content": "foo", "city": "Thimphou"}
@pytest.mark.parametrize(
"endpoint_path", ("/file_list_before_form", "/file_list_after_form")
)
def test_file_list_form_order(endpoint_path: str, tmp_file_1: Path, tmp_file_2: Path):
response = client.post(
url=endpoint_path,
data={"city": "Thimphou"},
files=(
("files", (tmp_file_1.name, tmp_file_1.read_bytes())),
("files", (tmp_file_2.name, tmp_file_2.read_bytes())),
),
)
assert response.status_code == 200, response.text
assert response.json() == {"file_contents": ["foo", "bar"], "city": "Thimphou"}
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_file_and_form_order_issue_9116.py",
"license": "MIT License",
"lines": 68,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_response_model_default_factory.py | from fastapi import FastAPI
from fastapi.testclient import TestClient
from pydantic import BaseModel, Field
app = FastAPI()
class ResponseModel(BaseModel):
code: int = 200
message: str = Field(default_factory=lambda: "Successful operation.")
@app.get(
"/response_model_has_default_factory_return_dict",
response_model=ResponseModel,
)
async def response_model_has_default_factory_return_dict():
return {"code": 200}
@app.get(
"/response_model_has_default_factory_return_model",
response_model=ResponseModel,
)
async def response_model_has_default_factory_return_model():
return ResponseModel()
client = TestClient(app)
def test_response_model_has_default_factory_return_dict():
response = client.get("/response_model_has_default_factory_return_dict")
assert response.status_code == 200, response.text
assert response.json()["code"] == 200
assert response.json()["message"] == "Successful operation."
def test_response_model_has_default_factory_return_model():
response = client.get("/response_model_has_default_factory_return_model")
assert response.status_code == 200, response.text
assert response.json()["code"] == 200
assert response.json()["message"] == "Successful operation."
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_response_model_default_factory.py",
"license": "MIT License",
"lines": 30,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_return_none_stringified_annotations.py | import http
from fastapi import FastAPI
from fastapi.testclient import TestClient
def test_no_content():
app = FastAPI()
@app.get("/no-content", status_code=http.HTTPStatus.NO_CONTENT)
def return_no_content() -> "None":
return
client = TestClient(app)
response = client.get("/no-content")
assert response.status_code == http.HTTPStatus.NO_CONTENT, response.text
assert not response.content
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_return_none_stringified_annotations.py",
"license": "MIT License",
"lines": 12,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_get_model_definitions_formfeed_escape.py | import pytest
from fastapi import FastAPI
from fastapi.testclient import TestClient
from inline_snapshot import snapshot
@pytest.fixture(name="client")
def client_fixture() -> TestClient:
from pydantic import BaseModel
class Address(BaseModel):
"""
This is a public description of an Address
\f
You can't see this part of the docstring, it's private!
"""
line_1: str
city: str
state_province: str
class Facility(BaseModel):
id: str
address: Address
app = FastAPI()
@app.get("/facilities/{facility_id}")
def get_facility(facility_id: str) -> Facility:
return Facility(
id=facility_id,
address=Address(line_1="123 Main St", city="Anytown", state_province="CA"),
)
client = TestClient(app)
return client
def test_get(client: TestClient):
response = client.get("/facilities/42")
assert response.status_code == 200, response.text
assert response.json() == {
"id": "42",
"address": {
"line_1": "123 Main St",
"city": "Anytown",
"state_province": "CA",
},
}
def test_openapi_schema(client: TestClient):
"""
Sanity check to ensure our app's openapi schema renders as we expect
"""
response = client.get("/openapi.json")
assert response.status_code == 200, response.text
assert response.json() == snapshot(
{
"components": {
"schemas": {
"Address": {
# NOTE: the description of this model shows only the public-facing text, before the `\f` in docstring
"description": "This is a public description of an Address\n",
"properties": {
"city": {"title": "City", "type": "string"},
"line_1": {"title": "Line 1", "type": "string"},
"state_province": {
"title": "State Province",
"type": "string",
},
},
"required": ["line_1", "city", "state_province"],
"title": "Address",
"type": "object",
},
"Facility": {
"properties": {
"address": {"$ref": "#/components/schemas/Address"},
"id": {"title": "Id", "type": "string"},
},
"required": ["id", "address"],
"title": "Facility",
"type": "object",
},
"HTTPValidationError": {
"properties": {
"detail": {
"items": {
"$ref": "#/components/schemas/ValidationError"
},
"title": "Detail",
"type": "array",
}
},
"title": "HTTPValidationError",
"type": "object",
},
"ValidationError": {
"properties": {
"ctx": {"title": "Context", "type": "object"},
"input": {"title": "Input"},
"loc": {
"items": {
"anyOf": [{"type": "string"}, {"type": "integer"}]
},
"title": "Location",
"type": "array",
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
"required": ["loc", "msg", "type"],
"title": "ValidationError",
"type": "object",
},
}
},
"info": {"title": "FastAPI", "version": "0.1.0"},
"openapi": "3.1.0",
"paths": {
"/facilities/{facility_id}": {
"get": {
"operationId": "get_facility_facilities__facility_id__get",
"parameters": [
{
"in": "path",
"name": "facility_id",
"required": True,
"schema": {"title": "Facility Id", "type": "string"},
}
],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Facility"
}
}
},
"description": "Successful Response",
},
"422": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
"description": "Validation Error",
},
},
"summary": "Get Facility",
}
}
},
}
)
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_get_model_definitions_formfeed_escape.py",
"license": "MIT License",
"lines": 148,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_openapi_schema_type.py | import pytest
from fastapi.openapi.models import Schema, SchemaType
@pytest.mark.parametrize(
"type_value",
[
"array",
["string", "null"],
None,
],
)
def test_allowed_schema_type(
type_value: SchemaType | list[SchemaType] | None,
) -> None:
"""Test that Schema accepts SchemaType, List[SchemaType] and None for type field."""
schema = Schema(type=type_value)
assert schema.type == type_value
def test_invalid_type_value() -> None:
"""Test that Schema raises ValueError for invalid type values."""
with pytest.raises(ValueError, match="2 validation errors for Schema"):
Schema(type=True) # type: ignore[arg-type]
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_openapi_schema_type.py",
"license": "MIT License",
"lines": 20,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_tutorial/test_behind_a_proxy/test_tutorial001_01.py | from fastapi.testclient import TestClient
from docs_src.behind_a_proxy.tutorial001_01_py310 import app
client = TestClient(
app,
base_url="https://example.com",
follow_redirects=False,
)
def test_redirect() -> None:
response = client.get("/items")
assert response.status_code == 307
assert response.headers["location"] == "https://example.com/items/"
def test_no_redirect() -> None:
response = client.get("/items/")
assert response.status_code == 200
assert response.json() == ["plumbus", "portal gun"]
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_tutorial/test_behind_a_proxy/test_tutorial001_01.py",
"license": "MIT License",
"lines": 15,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_union_forms.py | from typing import Annotated
from fastapi import FastAPI, Form
from fastapi.testclient import TestClient
from inline_snapshot import snapshot
from pydantic import BaseModel
app = FastAPI()
# Form payload variant for an individual user.
# No docstring on purpose: pydantic would surface it as the model description
# in the OpenAPI schema asserted by test_openapi_schema.
class UserForm(BaseModel):
    name: str
    email: str
# Form payload variant for a company.
# No docstring on purpose: pydantic would surface it as the model description
# in the OpenAPI schema asserted by test_openapi_schema.
class CompanyForm(BaseModel):
    company_name: str
    industry: str
# Accepts either form shape; the union is validated against each member.
# No docstring: it would become the operation description in the OpenAPI output.
@app.post("/form-union/")
def post_union_form(data: Annotated[UserForm | CompanyForm, Form()]):
    return {"received": data}
client = TestClient(app)
def test_post_user_form():
    """Form data matching UserForm is accepted and echoed back."""
    payload = {"name": "John Doe", "email": "john@example.com"}
    response = client.post("/form-union/", data=payload)
    assert response.status_code == 200, response.text
    assert response.json() == {"received": payload}
def test_post_company_form():
    """Form data matching CompanyForm is accepted and echoed back."""
    payload = {"company_name": "Tech Corp", "industry": "Technology"}
    response = client.post("/form-union/", data=payload)
    assert response.status_code == 200, response.text
    assert response.json() == {"received": payload}
def test_invalid_form_data():
    """Mixing fields from both models satisfies neither union member."""
    response = client.post(
        "/form-union/",
        data={"name": "John", "company_name": "Tech Corp"},
    )
    assert response.status_code == 422, response.text
def test_empty_form():
    """An empty body fails validation for every union member."""
    response = client.post("/form-union/")
    assert response.status_code == 422, response.text
def test_openapi_schema():
    """The union body appears as an anyOf of both form models in OpenAPI."""
    response = client.get("/openapi.json")
    assert response.status_code == 200, response.text
    # inline_snapshot: the entire expected document is pinned byte-for-byte.
    assert response.json() == snapshot(
        {
            "openapi": "3.1.0",
            "info": {"title": "FastAPI", "version": "0.1.0"},
            "paths": {
                "/form-union/": {
                    "post": {
                        "summary": "Post Union Form",
                        "operationId": "post_union_form_form_union__post",
                        "requestBody": {
                            "content": {
                                "application/x-www-form-urlencoded": {
                                    "schema": {
                                        "anyOf": [
                                            {"$ref": "#/components/schemas/UserForm"},
                                            {
                                                "$ref": "#/components/schemas/CompanyForm"
                                            },
                                        ],
                                        "title": "Data",
                                    }
                                }
                            },
                            "required": True,
                        },
                        "responses": {
                            "200": {
                                "description": "Successful Response",
                                "content": {"application/json": {"schema": {}}},
                            },
                            "422": {
                                "description": "Validation Error",
                                "content": {
                                    "application/json": {
                                        "schema": {
                                            "$ref": "#/components/schemas/HTTPValidationError"
                                        }
                                    }
                                },
                            },
                        },
                    }
                }
            },
            "components": {
                "schemas": {
                    "CompanyForm": {
                        "properties": {
                            "company_name": {"type": "string", "title": "Company Name"},
                            "industry": {"type": "string", "title": "Industry"},
                        },
                        "type": "object",
                        "required": ["company_name", "industry"],
                        "title": "CompanyForm",
                    },
                    "HTTPValidationError": {
                        "properties": {
                            "detail": {
                                "items": {
                                    "$ref": "#/components/schemas/ValidationError"
                                },
                                "type": "array",
                                "title": "Detail",
                            }
                        },
                        "type": "object",
                        "title": "HTTPValidationError",
                    },
                    "UserForm": {
                        "properties": {
                            "name": {"type": "string", "title": "Name"},
                            "email": {"type": "string", "title": "Email"},
                        },
                        "type": "object",
                        "required": ["name", "email"],
                        "title": "UserForm",
                    },
                    "ValidationError": {
                        "properties": {
                            "ctx": {"title": "Context", "type": "object"},
                            "input": {"title": "Input"},
                            "loc": {
                                "items": {
                                    "anyOf": [{"type": "string"}, {"type": "integer"}]
                                },
                                "type": "array",
                                "title": "Location",
                            },
                            "msg": {"type": "string", "title": "Message"},
                            "type": {"type": "string", "title": "Error Type"},
                        },
                        "type": "object",
                        "required": ["loc", "msg", "type"],
                        "title": "ValidationError",
                    },
                }
            },
        }
    )
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_union_forms.py",
"license": "MIT License",
"lines": 143,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fastapi/fastapi:tests/test_openapi_model_description_trim_on_formfeed.py | from fastapi import FastAPI
from fastapi.testclient import TestClient
from pydantic import BaseModel
app = FastAPI()
# The docstring below is the fixture under test: everything from the \f
# (form feed) character onward must be trimmed from the OpenAPI description.
# Do not edit it -- test_openapi asserts the exact trimmed value.
class MyModel(BaseModel):
    """
    A model with a form feed character in the title.
    \f
    Text after form feed character.
    """
# Endpoint exists only so MyModel is referenced and lands in the schema;
# it is never called by the test (hence no cover).
@app.get("/foo")
def foo(v: MyModel):  # pragma: no cover
    pass
client = TestClient(app)
def test_openapi():
    """Text after the form feed is stripped from the model description."""
    resp = client.get("/openapi.json")
    assert resp.status_code == 200, resp.text
    description = resp.json()["components"]["schemas"]["MyModel"]["description"]
    assert description == "A model with a form feed character in the title.\n"
| {
"repo_id": "fastapi/fastapi",
"file_path": "tests/test_openapi_model_description_trim_on_formfeed.py",
"license": "MIT License",
"lines": 21,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
fishaudio/fish-speech:fish_speech/content_sequence.py | from dataclasses import dataclass, field
from typing import List, Literal, Union
import numpy as np
import torch
from fish_speech.tokenizer import (
IM_END_TOKEN,
MODALITY_TOKENS,
FishTokenizer,
)
def restore_ndarray(obj, to_tensor: bool = False):
    """Rebuild a numpy array from its serialized dict form.

    A dict carrying the ``"__ndarray__"`` marker is decoded from its raw
    buffer/dtype/shape triple. When ``to_tensor`` is set, any resulting (or
    already materialized) ndarray is converted to a torch tensor; the buffer
    is copied first because ``np.frombuffer`` yields a read-only view.
    Any other object passes through unchanged.
    """
    is_packed = isinstance(obj, dict) and "__ndarray__" in obj
    if is_packed:
        buf, dtype, shape = obj["data"], obj["dtype"], obj["shape"]
        obj = np.frombuffer(buf, dtype=dtype).reshape(shape)
    if to_tensor and isinstance(obj, np.ndarray):
        obj = torch.from_numpy(obj.copy())
    return obj
@dataclass
class BasePart:
    """Common base for all content parts.

    ``type`` is overwritten by each subclass in its ``__post_init__``;
    ``cal_loss`` marks whether the part contributes to the training loss.
    """

    type: Literal["text", "vq", "audio"] | None = None
    cal_loss: bool = False
@dataclass(kw_only=True)
class VQPart(BasePart):
    """A part holding discrete VQ codes."""

    # Plain class attribute (unannotated, so not a dataclass field);
    # __post_init__ sets the instance attribute as well.
    type = "vq"
    # Codebook indices; may arrive as a serialized ndarray dict and is
    # restored to a torch tensor on construction.
    codes: torch.Tensor

    def __post_init__(self: "VQPart"):
        self.type = "vq"
        self.codes = restore_ndarray(self.codes, to_tensor=True)
@dataclass(kw_only=True)
class TextPart(BasePart):
    """A text part, supplied either as a raw string or pre-tokenized ids."""

    type = "text"
    text: str | None = None
    tokens: list[int] | None = None

    def __post_init__(self: "TextPart"):
        self.type = "text"
        # At least one of the two representations must be given.
        if self.text is None and self.tokens is None:
            raise ValueError("Either text or tokens must be provided")
@dataclass(kw_only=True)
class AudioPart(BasePart):
    """A part holding continuous audio features."""

    type = "audio"
    # Feature tensor; may arrive as a serialized ndarray dict and is
    # restored to a torch tensor on construction.
    features: torch.Tensor

    def __post_init__(self: "AudioPart"):
        self.type = "audio"
        self.features = restore_ndarray(self.features, to_tensor=True)
@dataclass(kw_only=True)
class EncodedMessage:
    """Tensor bundle produced by ContentSequence.encode, ready for the model.

    ``kw_only=True`` is what allows the non-default fields (``vq_parts``,
    ``audio_parts``) to appear after defaulted ones.
    """

    tokens: torch.Tensor  # input token ids
    labels: torch.Tensor  # next-token targets (-100 = ignored in loss)
    vq_mask_tokens: torch.Tensor | None = None  # True where tokens are VQ slots
    vq_mask_labels: torch.Tensor | None = None  # True where labels are VQ slots
    vq_parts: list[torch.Tensor]  # raw codebook tensors, one per VQ part
    vq_require_losses: torch.Tensor | None = None  # per-VQ-part cal_loss flags
    audio_parts: list[torch.Tensor]  # raw audio feature tensors
    audio_masks: torch.Tensor | None = None  # True where tokens are audio slots
    metadata: dict | None = None
@dataclass
class ContentSequence:
    """
    Flexible sequence of content parts that supports interleaved multimodal format.
    Example format: <|interleave|><|speaker:1|> TEXT AUDIO <|im_end|><|speaker:2|> TEXT AUDIO <|im_end|>
    """

    parts: list[BasePart] = field(default_factory=list)
    modality: Literal["text", "voice", "interleave"] | None = None
    metadata: dict | None = None

    # NOTE: @dataclass does not replace an explicitly defined __init__, so
    # this custom constructor is the one actually used.
    def __init__(
        self: "ContentSequence",
        parts: list[BasePart | dict] | None = None,
        modality: Literal["text", "voice", "interleave"] | None = None,
        metadata: dict | None = None,
    ):
        self.modality = modality
        self.metadata = metadata or {}
        # Re-hydrate dict-serialized parts into their dataclass types.
        fixed_parts = []
        for part in parts or []:
            if isinstance(part, dict):
                if part["type"] == "vq":
                    part = VQPart(**part)
                elif part["type"] == "audio":
                    part = AudioPart(**part)
                elif part["type"] == "text":
                    part = TextPart(**part)
                else:
                    raise ValueError(f"Unsupported part type: {part['type']}")
            fixed_parts.append(part)
        self.parts = fixed_parts
        # If modality is specified, add it at the beginning if it's not already there
        if self.modality and not (
            len(self.parts) > 0
            and isinstance(self.parts[0], dict) is False
            and isinstance(self.parts[0], TextPart)
            and self.parts[0].text is not None
            and self.parts[0].text.startswith(MODALITY_TOKENS[self.modality])
        ):
            modality_token = MODALITY_TOKENS[self.modality]
            self.parts.insert(0, TextPart(text=modality_token))

    def append(
        self: "ContentSequence",
        part_or_parts: Union[BasePart, List[BasePart]],
        add_end: bool = False,
        speaker: Union[str, int] | None = None,
    ):
        """
        Append a part or list of parts to the sequence.
        Args:
            part_or_parts: A single part or list of parts to add
            add_end: Whether to add the IM_END_TOKEN after these parts
            speaker: Optional speaker identifier (name or ID) to add before the parts
        """
        # Convert single part to list
        parts_to_add = (
            [part_or_parts] if not isinstance(part_or_parts, list) else part_or_parts
        )
        # Add speaker token if specified
        if speaker is not None:
            speaker_token = f"<|speaker:{speaker}|>"
            self.parts.append(TextPart(text=speaker_token))
        # Add all the parts
        self.parts.extend(parts_to_add)
        # Add end token if requested; it inherits the loss flag of the part
        # preceding it so the end token is trained alongside that content.
        if add_end:
            self.parts.append(
                TextPart(text=IM_END_TOKEN, cal_loss=self.parts[-1].cal_loss)
            )

    def encode(
        self: "ContentSequence",
        tokenizer: FishTokenizer,
        add_shift: bool = True,
        ignore_loss_tokens: list[str] = [],
    ) -> EncodedMessage:
        """
        Encode the sequence parts into tokens for the model.
        Args:
            tokenizer: The tokenizer to use
            add_shift: Whether to shift tokens for next-token prediction
            ignore_loss_tokens: List of token strings to ignore when calculating loss
        Returns:
            EncodedMessage with tensors ready for the model
        """
        all_tokens = []
        all_labels = []
        # Multi-modal elements
        vq_parts = []
        vq_masks = []
        vq_require_losses = []
        audio_parts = []
        audio_masks = []
        ignore_loss_token_ids = [tokenizer.get_token_id(i) for i in ignore_loss_tokens]
        for part in self.parts:
            if isinstance(part, TextPart):
                # Tokenize lazily unless pre-tokenized ids were provided.
                if part.tokens is None:
                    assert part.text is not None
                    tokens = tokenizer.encode(part.text)
                else:
                    tokens = part.tokens
                tokens = torch.tensor(tokens, dtype=torch.int)
            elif isinstance(part, VQPart):
                # Each code in codebook 0 maps to a placeholder semantic token.
                curr_codes = part.codes.clone().to(torch.int)
                tokens = torch.tensor(
                    [
                        tokenizer.semantic_id_to_token_id[int(i.item())]
                        for i in curr_codes[0].int()
                    ],
                    dtype=torch.int,
                )
                vq_parts.append(curr_codes)
                vq_require_losses.append(part.cal_loss)
            else:
                # NOTE(review): AudioPart falls through to this error, which
                # makes the AudioPart mask branch below unreachable — confirm
                # whether AudioPart encoding is intentionally unsupported here.
                raise ValueError(f"Unsupported part type: {type(part)}")
            all_tokens.append(tokens)
            # Set masks for different part types
            if isinstance(part, VQPart):
                vq_masks.append(torch.ones_like(tokens, dtype=torch.bool))
                audio_masks.append(torch.zeros_like(tokens, dtype=torch.bool))
            elif isinstance(part, AudioPart):
                vq_masks.append(torch.zeros_like(tokens, dtype=torch.bool))
                audio_mask = torch.ones_like(tokens, dtype=torch.bool)
                audio_mask[0] = False  # Skip start token
                audio_mask[-1] = False  # Skip end token
                audio_masks.append(audio_mask)
            else:
                vq_masks.append(torch.zeros_like(tokens, dtype=torch.bool))
                audio_masks.append(torch.zeros_like(tokens, dtype=torch.bool))
            # Set labels based on whether we want to calculate loss for this part
            if part.cal_loss and not isinstance(part, AudioPart):
                all_labels.append(tokens.clone())
            else:
                all_labels.append(torch.full_like(tokens, -100))
        # Concatenate all tensors
        tokens = torch.cat(all_tokens, dim=0)
        labels = torch.cat(all_labels, dim=0)
        vq_masks = torch.cat(vq_masks, dim=0)
        audio_masks = torch.cat(audio_masks, dim=0)
        vq_require_losses = torch.tensor(vq_require_losses, dtype=torch.bool)
        # Apply shift if needed for next-token prediction: inputs drop the
        # last position, labels drop the first.
        vq_mask_tokens = vq_masks
        vq_mask_labels = vq_masks
        if add_shift:
            tokens = tokens[:-1]
            labels = labels[1:]
            vq_masks = vq_masks[:-1]
            vq_mask_tokens = vq_mask_tokens[:-1]
            vq_mask_labels = vq_mask_labels[1:]
            audio_masks = audio_masks[:-1]
        # Ignore specified tokens
        for i in ignore_loss_token_ids:
            assert i != -100 and i is not None
            labels[labels == i] = -100
        assert tokens.dtype in [
            torch.int,
            torch.long,
        ], f"Invalid dtype: {tokens.dtype}"
        return EncodedMessage(
            tokens=tokens,
            labels=labels,
            vq_parts=vq_parts,
            vq_mask_tokens=vq_mask_tokens,
            vq_mask_labels=vq_mask_labels,
            vq_require_losses=vq_require_losses,
            audio_parts=audio_parts,
            audio_masks=audio_masks,
            metadata=self.metadata,
        )

    def encode_for_inference(
        self: "ContentSequence",
        tokenizer: FishTokenizer,
        num_codebooks: int,
    ) -> tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
        """Encode without shifting and lay codes out as a (num_codebooks + 1, T)
        grid: row 0 holds the token ids, rows 1.. hold the raw codebook values.

        NOTE(review): despite the annotation, the audio mask/part returns may
        be None when there are no VQ or audio parts.
        """
        encoded = self.encode(tokenizer, add_shift=False)
        tokens = encoded.tokens
        values = torch.zeros((num_codebooks + 1, len(tokens)), dtype=torch.int)
        values[0] = tokens
        if (encoded.vq_parts is None or len(encoded.vq_parts) == 0) and (
            encoded.audio_parts is None or len(encoded.audio_parts) == 0
        ):
            return values, None, None
        audio_parts = audio_masks = None
        if encoded.vq_parts is not None and len(encoded.vq_parts) > 0:
            vq_parts = encoded.vq_parts
            vq_parts = torch.cat(vq_parts, dim=1)
            # Replace placeholder positions in row 0 with absolute semantic ids.
            values[0, encoded.vq_mask_tokens] = (
                vq_parts[0] + tokenizer.semantic_begin_id
            )
            values[1:, encoded.vq_mask_tokens] = vq_parts
        if encoded.audio_parts is not None and len(encoded.audio_parts) > 0:
            audio_parts = torch.cat(encoded.audio_parts, dim=0)
            audio_masks = encoded.audio_masks[None, :]
        return values, audio_masks, audio_parts

    def visualize(
        self: "ContentSequence",
        tokenizer: FishTokenizer,
        ignore_loss_tokens: list[str] = [],
        merge_semantic_tokens: bool = False,
    ):
        """
        Visualize the encoded sequence with color-coded tokens.
        Blue/cyan tokens contribute to loss, green tokens do not.
        """
        encoded = self.encode(
            tokenizer, add_shift=False, ignore_loss_tokens=ignore_loss_tokens
        )
        # Colors for alternating tokens
        colors = {
            "blue": "\033[94m",  # Light blue
            "cyan": "\033[96m",  # Cyan
            "green": "\033[92m",  # Light green
            "dark_green": "\033[32m",  # Dark green
        }
        blue_idx = 0
        green_idx = 0

        def print_in_blue(x):
            # Alternate the two blue shades so adjacent tokens stay readable.
            nonlocal blue_idx
            color = colors["blue"] if blue_idx % 2 == 0 else colors["cyan"]
            print(f"{color}{x}\033[0m", end="")
            blue_idx += 1

        def print_in_green(x):
            nonlocal green_idx
            color = colors["green"] if green_idx % 2 == 0 else colors["dark_green"]
            print(f"{color}{x}\033[0m", end="")
            green_idx += 1

        def print_semantic_token(x, count):
            # Collapse a run of semantic tokens into a single "[<|semantic|>xN]".
            val = f"[<|semantic|>x{count}]"
            if x == -100:
                print_in_green(val)
            else:
                print_in_blue(val)

        count_semantic_tokens = 0
        semantic_label = None
        for tok, lab in zip(encoded.tokens, encoded.labels):
            token_id = int(tok.item())
            if merge_semantic_tokens:
                # Accumulate consecutive semantic tokens with the same label.
                if (
                    tokenizer.semantic_begin_id <= token_id <= tokenizer.semantic_end_id
                    and (semantic_label is None or semantic_label == lab)
                ):
                    count_semantic_tokens += 1
                    semantic_label = lab
                    continue
                elif count_semantic_tokens > 0:
                    print_semantic_token(semantic_label, count_semantic_tokens)
                    count_semantic_tokens = 0
                    semantic_label = None
            val = tokenizer.decode([int(tok.item())])
            if lab == -100:
                print_in_green(val)
            else:
                print_in_blue(val)
        # Flush a trailing run of merged semantic tokens.
        if merge_semantic_tokens and count_semantic_tokens > 0:
            print_semantic_token(semantic_label, count_semantic_tokens)
        print()
| {
"repo_id": "fishaudio/fish-speech",
"file_path": "fish_speech/content_sequence.py",
"license": "Apache License 2.0",
"lines": 307,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
fishaudio/fish-speech:fish_speech/models/dac/modded_dac.py | import math
import typing as tp
from dataclasses import dataclass
from typing import List, Optional, Union
import hydra
import librosa
import numpy as np
import soundfile as sf
import torch
from audiotools import AudioSignal
from audiotools.ml import BaseModel
from dac.model.base import CodecMixin
from dac.nn.layers import Snake1d, WNConv1d, WNConvTranspose1d
from omegaconf import OmegaConf
from torch import Tensor, nn
from torch.nn import functional as F
from torch.nn.utils.parametrizations import weight_norm
from torch.nn.utils.parametrize import remove_parametrizations
@dataclass
class VQResult:
    """Output bundle of the vector-quantization stage."""

    z: torch.Tensor  # quantized latent
    codes: torch.Tensor  # discrete codebook indices
    latents: torch.Tensor  # pre-quantization latents
    codebook_loss: torch.Tensor
    commitment_loss: torch.Tensor
    # Optional latent used for semantic distillation, when enabled.
    semantic_distill_z: torch.Tensor | None = None
def find_multiple(n: int, k: int) -> int:
    """Return the smallest multiple of ``k`` that is >= ``n``."""
    remainder = n % k
    if remainder:
        return n + (k - remainder)
    return n
@dataclass
class ModelArgs:
    """Hyperparameters for the Transformer used inside this codec."""

    block_size: int = 2048  # maximum sequence length (also sizes RoPE table)
    n_layer: int = 8
    n_head: int = 8
    dim: int = 512
    intermediate_size: int = 1536
    n_local_heads: int = -1  # -1 means "same as n_head"
    head_dim: int = 64
    rope_base: float = 10000
    norm_eps: float = 1e-5
    dropout_rate: float = 0.1
    attn_dropout_rate: float = 0.1
    channels_first: bool = True  # to be compatible with conv1d input/output
    pos_embed_type: str = "rope"  # can be "rope" or "conformer"
    max_relative_position: int = 128  # for conformer-style relative position embedding

    def __post_init__(self):
        # Default grouped KV heads to full multi-head attention.
        if self.n_local_heads == -1:
            self.n_local_heads = self.n_head
        # SwiGLU sizing heuristic, rounded up to a multiple of 256.
        if self.intermediate_size is None:
            hidden_dim = 4 * self.dim
            n_hidden = int(2 * hidden_dim / 3)
            self.intermediate_size = find_multiple(n_hidden, 256)
        assert self.pos_embed_type in [
            "rope",
            "conformer",
        ], "pos_embed_type must be either 'rope' or 'conformer'"
class KVCache(nn.Module):
    """Pre-allocated key/value cache for incremental decoding.

    Buffers have shape (batch, heads, max_seq_length, head_dim); ``update``
    writes new entries at ``input_pos`` and returns views truncated to the
    highest position written so far.
    """

    def __init__(
        self, max_batch_size, max_seq_length, n_heads, head_dim, dtype=torch.bfloat16
    ):
        super().__init__()
        cache_shape = (max_batch_size, n_heads, max_seq_length, head_dim)
        self.register_buffer("k_cache", torch.zeros(cache_shape, dtype=dtype))
        self.register_buffer("v_cache", torch.zeros(cache_shape, dtype=dtype))

    def update(self, input_pos, k_val, v_val):
        # input_pos: [S], k_val: [B, H, S, D]
        assert input_pos.shape[0] == k_val.shape[2]
        k_out = self.k_cache
        v_out = self.v_cache
        # In-place scatter into the cache buffers at the given positions.
        k_out[:, :, input_pos] = k_val
        v_out[:, :, input_pos] = v_val
        return (
            k_out[:, :, : input_pos.max() + 1, :],
            v_out[:, :, : input_pos.max() + 1, :],
        )

    def clear_cache(self, prompt_len):
        # Zero everything past the prompt so stale generations can't leak in.
        self.k_cache[:, :, prompt_len:, :].fill_(0)
        self.v_cache[:, :, prompt_len:, :].fill_(0)
class Transformer(nn.Module):
    """Stack of pre-norm TransformerBlocks with optional RoPE and KV caching."""

    def __init__(self, config: ModelArgs) -> None:
        super().__init__()
        self.config = config
        self.layers = nn.ModuleList(
            TransformerBlock(config) for _ in range(config.n_layer)
        )
        self.norm = RMSNorm(config.dim, eps=config.norm_eps)
        # Only compute RoPE frequencies if using RoPE
        if config.pos_embed_type == "rope":
            freqs_cis = precompute_freqs_cis(
                self.config.block_size, self.config.head_dim, self.config.rope_base
            )
            self.register_buffer("freqs_cis", freqs_cis)
        else:
            self.register_buffer("freqs_cis", None)
        # Full lower-triangular mask; sliced per forward call.
        causal_mask = torch.tril(
            torch.ones(self.config.block_size, self.config.block_size, dtype=torch.bool)
        )
        self.register_buffer("causal_mask", causal_mask)
        self.max_batch_size = -1
        self.max_seq_length = -1
        self.use_kv_cache = False

    def setup_caches(self, max_batch_size, max_seq_length):
        """
        This method will only be called during inference when using KV cache.
        """
        head_dim = self.config.dim // self.config.n_head
        # Round the cache length up to a multiple of 8.
        max_seq_length = find_multiple(max_seq_length, 8)
        self.max_seq_length = max_seq_length
        self.max_batch_size = max_batch_size
        dtype = self.norm.weight.dtype
        device = self.norm.weight.device
        for b in self.layers:
            b.attention.kv_cache = KVCache(
                max_batch_size,
                max_seq_length,
                self.config.n_local_heads,
                head_dim,
                dtype,
            ).to(device)
        self.use_kv_cache = True

    def forward(
        self,
        x: Tensor,
        input_pos: Optional[Tensor] = None,
        mask: Optional[Tensor] = None,
    ) -> Tensor:
        if self.config.pos_embed_type == "rope":
            assert (
                self.freqs_cis is not None
            ), "RoPE frequencies must be initialized for RoPE positional embedding"
            freqs_cis = self.freqs_cis[input_pos]
        else:
            freqs_cis = None
        if mask is None:  # in case of non-causal model
            if not self.training and self.use_kv_cache:
                # Keys span positions 0..input_pos.max() when reading from the
                # KV cache, so only truncate the key axis.
                mask = self.causal_mask[None, None, input_pos]
                mask = mask[..., : input_pos.max() + 1]
            else:
                # Without a cache, queries and keys cover the same positions.
                mask = self.causal_mask[None, None, input_pos]
                mask = mask[..., input_pos]
        for i, layer in enumerate(self.layers):
            x = layer(x, input_pos, freqs_cis, mask)
        x = self.norm(x)
        return x
class TransformerBlock(nn.Module):
    """Pre-norm attention + feed-forward block with LayerScale residuals."""

    def __init__(self, config: ModelArgs) -> None:
        super().__init__()
        self.attention = Attention(config)
        self.feed_forward = FeedForward(config)
        self.ffn_norm = RMSNorm(config.dim, eps=config.norm_eps)
        self.attention_norm = RMSNorm(config.dim, eps=config.norm_eps)
        # Learnable per-channel residual scaling for each sub-layer.
        self.attention_layer_scale = LayerScale(config.dim, inplace=True)
        self.ffn_layer_scale = LayerScale(config.dim, inplace=True)

    def forward(
        self,
        x: Tensor,
        input_pos: Tensor,
        freqs_cis: Tensor,
        mask: Tensor,
    ) -> Tensor:
        # x + scale(attn(norm(x))), then h + scale(ffn(norm(h))).
        h = x + self.attention_layer_scale(
            self.attention(self.attention_norm(x), freqs_cis, mask, input_pos)
        )
        out = h + self.ffn_layer_scale(self.feed_forward(self.ffn_norm(h)))
        return out
class Attention(nn.Module):
    """Multi-head attention supporting RoPE or conformer-style relative
    position embeddings, grouped KV heads, and an optional KV cache."""

    def __init__(self, config: ModelArgs):
        super().__init__()
        assert config.dim % config.n_head == 0
        # Fused projection for Q plus (possibly grouped) K and V.
        total_head_dim = (config.n_head + 2 * config.n_local_heads) * config.head_dim
        # key, query, value projections for all heads, but in a batch
        self.wqkv = nn.Linear(config.dim, total_head_dim, bias=False)
        self.wo = nn.Linear(config.head_dim * config.n_head, config.dim, bias=False)
        self.kv_cache = None
        self.n_head = config.n_head
        self.head_dim = config.head_dim
        self.n_local_heads = config.n_local_heads
        self.dim = config.dim
        self.attn_dropout_rate = config.attn_dropout_rate
        self.pos_embed_type = config.pos_embed_type
        # Add relative position embedding for conformer-style
        if self.pos_embed_type == "conformer":
            self.max_relative_position = config.max_relative_position
            num_pos_embeddings = 2 * config.max_relative_position + 1
            self.rel_pos_embeddings = nn.Parameter(
                torch.zeros(num_pos_embeddings, self.head_dim)
            )
            nn.init.normal_(self.rel_pos_embeddings, mean=0.0, std=0.02)

    def _compute_conformer_pos_scores(self, q: Tensor, seqlen: int) -> Tensor:
        # q: [B, H, S, D]
        # Returns: [B, H, S, S]
        positions = torch.arange(seqlen, device=q.device)
        relative_positions = positions.unsqueeze(1) - positions.unsqueeze(0)  # [S, S]
        # Clamp offsets into the learned embedding table's range.
        relative_positions = torch.clamp(
            relative_positions + self.max_relative_position,
            0,
            2 * self.max_relative_position,
        )
        rel_embeddings = self.rel_pos_embeddings[relative_positions]  # [S, S, D]
        # Compute attention scores with relative position embeddings
        q = q.transpose(1, 2)  # [B, S, H, D]
        rel_logits = torch.matmul(q, rel_embeddings.transpose(-2, -1))  # [B, S, H, S]
        rel_logits = rel_logits.transpose(1, 2)  # [B, H, S, S]
        return rel_logits

    def forward(
        self,
        x: Tensor,
        freqs_cis: Tensor,
        mask: Tensor,
        input_pos: Optional[Tensor] = None,
    ) -> Tensor:
        bsz, seqlen, _ = x.shape
        # NOTE(review): the split below assumes q, k, v all have
        # n_local_heads * head_dim columns, while wqkv was sized for
        # n_head + 2 * n_local_heads — consistent only when
        # n_head == n_local_heads. Confirm before enabling GQA.
        kv_size = self.n_local_heads * self.head_dim
        q, k, v = self.wqkv(x).split([kv_size, kv_size, kv_size], dim=-1)
        context_seqlen = seqlen
        q = q.view(bsz, seqlen, self.n_head, self.head_dim)
        k = k.view(bsz, context_seqlen, self.n_local_heads, self.head_dim)
        v = v.view(bsz, context_seqlen, self.n_local_heads, self.head_dim)
        if self.pos_embed_type == "rope":
            q = apply_rotary_emb(q, freqs_cis)
            k = apply_rotary_emb(k, freqs_cis)
        q, k, v = map(lambda x: x.transpose(1, 2), (q, k, v))
        if self.kv_cache is not None:
            k, v = self.kv_cache.update(input_pos, k, v)
        # Broadcast grouped KV heads up to the query head count.
        k = k.repeat_interleave(self.n_head // self.n_local_heads, dim=1)
        v = v.repeat_interleave(self.n_head // self.n_local_heads, dim=1)
        if self.pos_embed_type == "conformer":
            # Compute attention scores
            scale = 1.0 / math.sqrt(self.head_dim)
            scores = torch.matmul(q, k.transpose(-2, -1)) * scale
            # Add relative position embeddings for conformer-style
            rel_scores = self._compute_conformer_pos_scores(q, seqlen)
            scores = scores + rel_scores
            # Apply attention
            if mask is not None:
                scores = scores.masked_fill(~mask, float("-inf"))
            attn = F.softmax(scores, dim=-1)
            if self.attn_dropout_rate > 0 and self.training:
                attn = F.dropout(attn, p=self.attn_dropout_rate)
            y = torch.matmul(attn, v)
        else:
            # RoPE path: use the fused SDPA kernel.
            y = F.scaled_dot_product_attention(
                q,
                k,
                v,
                dropout_p=self.attn_dropout_rate if self.training else 0.0,
                attn_mask=mask,
            )
            # is_causal=True)
        y = (
            y.transpose(1, 2)
            .contiguous()
            .view(bsz, seqlen, self.head_dim * self.n_head)
        )
        y = self.wo(y)
        return y
class FeedForward(nn.Module):
    """Gated feed-forward block: w2(dropout(silu(w1 x) * w3 x))."""

    def __init__(self, config: ModelArgs) -> None:
        super().__init__()
        self.w1 = nn.Linear(config.dim, config.intermediate_size, bias=False)
        self.w3 = nn.Linear(config.dim, config.intermediate_size, bias=False)
        self.w2 = nn.Linear(config.intermediate_size, config.dim, bias=False)
        self.dropout = nn.Dropout(config.dropout_rate)

    def forward(self, x: Tensor) -> Tensor:
        gate = F.silu(self.w1(x))
        hidden = gate * self.w3(x)
        return self.w2(self.dropout(hidden))
class RMSNorm(nn.Module):
    """Root-mean-square layer normalization (no mean subtraction)."""

    def __init__(self, dim: int, eps: float = 1e-5):
        super().__init__()
        self.eps = eps
        self.weight = nn.Parameter(torch.ones(dim))

    def _norm(self, x):
        # Divide by the RMS of the last dimension.
        mean_sq = torch.mean(x * x, dim=-1, keepdim=True)
        return x * torch.rsqrt(mean_sq + self.eps)

    def forward(self, x: Tensor) -> Tensor:
        # Normalize in float32 for stability, then cast back to the input dtype.
        normalized = self._norm(x.float()).type_as(x)
        return normalized * self.weight
class LayerScale(nn.Module):
    """Multiply the input by a learnable per-channel scale ``gamma``."""

    def __init__(
        self,
        dim: int,
        init_values: Union[float, Tensor] = 1e-2,
        inplace: bool = False,
    ) -> None:
        super().__init__()
        self.inplace = inplace
        self.gamma = nn.Parameter(init_values * torch.ones(dim))

    def forward(self, x: Tensor) -> Tensor:
        # In-place multiply saves memory inside residual branches.
        if self.inplace:
            return x.mul_(self.gamma)
        return x * self.gamma
class WindowLimitedTransformer(Transformer):
    """
    Transformer with window limited attention, causal.

    Wraps :class:`Transformer` with optional input/output projections and
    builds a boolean attention mask that can restrict each position to a
    fixed-size look-back window.
    """

    def __init__(
        self,
        config: ModelArgs,
        input_dim: int = 512,
        window_size: Optional[int] = None,
        causal: bool = True,
        look_ahead_conv: nn.Module = None,
    ):
        super().__init__(config)
        self.window_size = window_size
        self.causal = causal
        self.channels_first = config.channels_first
        self.look_ahead_conv = (
            look_ahead_conv if look_ahead_conv is not None else nn.Identity()
        )
        # Project in/out only when the transformer width differs from the I/O dim.
        self.input_proj = (
            nn.Linear(input_dim, config.dim)
            if input_dim != config.dim
            else nn.Identity()
        )
        self.output_proj = (
            nn.Linear(config.dim, input_dim)
            if input_dim != config.dim
            else nn.Identity()
        )

    def make_window_limited_mask(
        self,
        max_length: int,
        x_lens: Optional[Tensor] = None,
    ) -> Tensor:
        """
        Make mask to form window limited attention.

        Each query position may attend only to the ``window_size`` most recent
        key positions (inclusive), subject to causality. Returns a boolean
        mask of shape (1, 1, T, T) where True means "may attend".
        """
        if self.causal:
            mask = torch.tril(torch.ones(max_length, max_length))
            row_indices = torch.arange(max_length).view(-1, 1)
            window_size = self.window_size or max_length
            # First key position each row is still allowed to see.
            valid_range = (row_indices - window_size + 1).clamp(min=0)
            column_indices = torch.arange(max_length)
            mask = (column_indices >= valid_range) & mask.bool()
        else:
            raise NotImplementedError
        mask = mask.bool()[None, None]
        return mask

    def make_mask(
        self,
        max_length: int,
        x_lens: Optional[Tensor] = None,
    ) -> Tensor:
        """
        Make ordinary mask if window size is not specified.

        Returns a boolean mask of shape (1, 1, T, T), or (B, 1, T, T) with
        padded key positions masked out per sample when ``x_lens`` is given.

        BUG FIX: the previous implementation expanded the mask with
        ``[None, None]`` twice (yielding a 6-D tensor), indexed the 4-D mask
        with 2-D indices (``mask[:x_len, i]``), and crashed when ``x_lens``
        was None. It now expands once per batch and disallows attending to
        positions at or beyond each sample's valid length.
        """
        if self.causal:
            mask = torch.tril(torch.ones(max_length, max_length))
        else:
            mask = torch.ones(max_length, max_length)
        mask = mask.bool()[None, None]  # (1, 1, T, T)
        if x_lens is not None:
            mask = mask.expand(len(x_lens), 1, max_length, max_length).clone()
            for i, x_len in enumerate(x_lens):
                # Keys past the valid length of sample i are padding.
                mask[i, :, :, x_len:] = False
        return mask

    def forward(
        self,
        x: Tensor,
        x_lens: Optional[Tensor] = None,
    ) -> Tensor:
        if self.channels_first:
            x = x.transpose(1, 2)
        x = self.input_proj(x)  # (B, T, D)
        x = self.look_ahead_conv(x)
        input_pos = torch.arange(x.shape[1], device=x.device)
        # construct mask to form window limited attention
        max_length = x.shape[1]
        if self.window_size is not None:
            mask = self.make_window_limited_mask(max_length, x_lens)
        else:
            mask = self.make_mask(max_length, x_lens)
        mask = mask.to(x.device)
        x = super().forward(x, input_pos, mask)
        x = self.output_proj(x)  # (B, T, D)
        if self.channels_first:
            x = x.transpose(1, 2)
        return x
def precompute_freqs_cis(
    seq_len: int, n_elem: int, base: int = 10000, dtype: torch.dtype = torch.bfloat16
) -> Tensor:
    """Precompute the rotary-embedding table.

    Returns a tensor of shape (seq_len, n_elem // 2, 2) holding the real and
    imaginary parts of e^{i * position * freq} for every (position, frequency)
    pair, cast to ``dtype``.
    """
    half = n_elem // 2
    inv_freq = 1.0 / (base ** (torch.arange(0, n_elem, 2)[:half].float() / n_elem))
    positions = torch.arange(seq_len, device=inv_freq.device)
    angles = torch.outer(positions, inv_freq)
    unit_complex = torch.polar(torch.ones_like(angles), angles)
    table = torch.stack([unit_complex.real, unit_complex.imag], dim=-1)
    return table.to(dtype=dtype)
def apply_rotary_emb(x: Tensor, freqs_cis: Tensor) -> Tensor:
    """Apply rotary position embedding to ``x``.

    ``x`` is (B, S, H, D); ``freqs_cis`` is (S, D // 2, 2) with cos/sin pairs
    in the last dimension. Adjacent channel pairs are rotated as complex
    numbers; the computation runs in float32 and the result is cast back to
    the input dtype.
    """
    pairs = x.float().reshape(*x.shape[:-1], -1, 2)
    rot = freqs_cis.view(1, pairs.size(1), 1, pairs.size(3), 2)
    cos, sin = rot[..., 0], rot[..., 1]
    real, imag = pairs[..., 0], pairs[..., 1]
    rotated = torch.stack(
        [real * cos - imag * sin, imag * cos + real * sin],
        -1,
    )
    return rotated.flatten(3).type_as(x)
def init_weights(m):
    """Initialize Conv1d layers: truncated-normal weights, zero bias.

    Intended for use with ``module.apply(init_weights)``. Layers built with
    ``bias=False`` are handled gracefully (the original version raised a
    TypeError on them because ``m.bias`` is None).
    """
    if isinstance(m, nn.Conv1d):
        nn.init.trunc_normal_(m.weight, std=0.02)
        if m.bias is not None:
            nn.init.constant_(m.bias, 0)
def unpad1d(x: torch.Tensor, paddings: tp.Tuple[int, int]):
    """Remove (left, right) padding from the last dimension of ``x``."""
    left, right = paddings
    assert left >= 0 and right >= 0, (left, right)
    assert (left + right) <= x.shape[-1]
    stop = x.shape[-1] - right
    return x[..., left:stop]
def get_extra_padding_for_conv1d(
    x: torch.Tensor, kernel_size: int, stride: int, padding_total: int = 0
) -> int:
    """Extra right-padding needed so the final conv frame covers the input.

    See `pad_for_conv1d`.
    """
    length = x.shape[-1]
    frames = (length - kernel_size + padding_total) / stride + 1
    full_frames = math.ceil(frames)
    target_length = (full_frames - 1) * stride + kernel_size - padding_total
    return target_length - length
def pad1d(
    x: torch.Tensor,
    paddings: tp.Tuple[int, int],
    mode: str = "zeros",
    value: float = 0.0,
):
    """Tiny wrapper around F.pad, just to allow for reflect padding on small input.
    If this is the case, we insert extra 0 padding to the right
    before the reflection happen.
    """
    length = x.shape[-1]
    left, right = paddings
    assert left >= 0 and right >= 0, (left, right)
    if mode != "reflect":
        return F.pad(x, paddings, mode, value)
    # Reflect padding requires the signal to be longer than the pad amount;
    # zero-extend on the right first, then trim the surplus afterwards.
    biggest = max(left, right)
    extra = biggest - length + 1 if length <= biggest else 0
    if extra:
        x = F.pad(x, (0, extra))
    padded = F.pad(x, paddings, mode, value)
    keep = padded.shape[-1] - extra
    return padded[..., :keep]
class CausalConvNet(nn.Module):
def __init__(
self,
in_channels,
out_channels,
kernel_size,
dilation=1,
stride=1,
groups=1,
padding=None,
):
super(CausalConvNet, self).__init__()
self.conv = nn.Conv1d(
in_channels,
out_channels,
kernel_size,
stride=stride,
dilation=dilation,
groups=groups,
)
self.stride = stride
self.kernel_size = (kernel_size - 1) * dilation + 1
self.dilation = dilation
self.padding = self.kernel_size - self.stride
def forward(self, x):
pad = self.padding
extra_padding = get_extra_padding_for_conv1d(
x, self.kernel_size, self.stride, pad
)
x = pad1d(x, (pad, extra_padding), mode="constant", value=0)
return self.conv(x).contiguous()
def weight_norm(self, name="weight", dim=0):
self.conv = weight_norm(self.conv, name=name, dim=dim)
return self
def remove_weight_norm(self):
self.conv = remove_parametrizations(self.conv)
return self
class CausalTransConvNet(nn.Module):
def __init__(
self, in_channels, out_channels, kernel_size, dilation=1, stride=1, padding=None
):
super(CausalTransConvNet, self).__init__()
self.conv = nn.ConvTranspose1d(
in_channels, out_channels, kernel_size, stride=stride, dilation=dilation
)
self.stride = stride
self.kernel_size = kernel_size
def forward(self, x):
x = self.conv(x)
pad = self.kernel_size - self.stride
padding_right = math.ceil(pad)
padding_left = pad - padding_right
x = unpad1d(x, (padding_left, padding_right))
return x.contiguous()
def weight_norm(self, name="weight", dim=0):
self.conv = weight_norm(self.conv, name=name, dim=dim)
return self
def remove_weight_norm(self):
self.conv = remove_parametrizations(self.conv)
return self
def CausalWNConv1d(*args, **kwargs):
    """Build a weight-normalized causal Conv1d."""
    module = CausalConvNet(*args, **kwargs)
    return module.weight_norm()
def CausalWNConvTranspose1d(*args, **kwargs):
    """Build a weight-normalized causal ConvTranspose1d."""
    module = CausalTransConvNet(*args, **kwargs)
    return module.weight_norm()
class ResidualUnit(nn.Module):
    """Dilated residual block: Snake -> 7x1 conv -> Snake -> 1x1 conv + skip.

    If the block's output is shorter than its input, the skip connection is
    trimmed to match: right-trimmed in causal mode, center-trimmed otherwise.
    """

    def __init__(self, dim: int = 16, dilation: int = 1, causal: bool = False):
        super().__init__()
        conv_class = CausalWNConv1d if causal else WNConv1d
        # "Same" padding for a dilated kernel of size 7.
        pad = ((7 - 1) * dilation) // 2
        self.block = nn.Sequential(
            Snake1d(dim),
            conv_class(dim, dim, kernel_size=7, dilation=dilation, padding=pad),
            Snake1d(dim),
            conv_class(dim, dim, kernel_size=1),
        )
        self.causal = causal

    def forward(self, x):
        y = self.block(x)
        pad = x.shape[-1] - y.shape[-1]
        if pad > 0:
            if self.causal:
                x = x[..., :-pad]
            else:
                x = x[..., pad // 2 : -pad // 2]
        return x + y
class EncoderBlock(nn.Module):
    """Encoder stage: three dilated residual units, a strided downsampling
    conv that doubles channels, and an optional windowed transformer."""

    def __init__(
        self,
        dim: int = 16,
        stride: int = 1,
        causal: bool = False,
        n_t_layer: int = 0,
        transformer_general_config=None,
    ):
        super().__init__()
        conv_class = CausalWNConv1d if causal else WNConv1d
        # NOTE(review): transformer_general_config is called with overrides,
        # so it is expected to be a callable (e.g. functools.partial over
        # ModelArgs) rather than a ModelArgs instance — confirm at call sites.
        transformer_module = (
            nn.Identity()
            if n_t_layer == 0
            else (
                WindowLimitedTransformer(
                    causal=causal,
                    input_dim=dim,
                    window_size=512,
                    config=transformer_general_config(
                        n_layer=n_t_layer,
                        n_head=dim // 64,
                        dim=dim,
                        intermediate_size=dim * 3,
                    ),
                )
            )
        )
        self.block = nn.Sequential(
            ResidualUnit(dim // 2, dilation=1, causal=causal),
            ResidualUnit(dim // 2, dilation=3, causal=causal),
            ResidualUnit(dim // 2, dilation=9, causal=causal),
            Snake1d(dim // 2),
            conv_class(
                dim // 2,
                dim,
                kernel_size=2 * stride,
                stride=stride,
                padding=math.ceil(stride / 2),
            ),
            transformer_module,
        )

    def forward(self, x):
        return self.block(x)
class Encoder(nn.Module):
    """Waveform encoder: a stem conv, a chain of downsampling EncoderBlocks
    (channels double per stage), and a final projection to the latent dim."""

    def __init__(
        self,
        d_model: int = 64,
        strides: list = [2, 4, 8, 8],
        d_latent: int = 64,
        n_transformer_layers: list = [0, 0, 4, 4],
        transformer_general_config: ModelArgs = None,
        causal: bool = False,
    ):
        super().__init__()
        conv_class = CausalWNConv1d if causal else WNConv1d
        # Create first convolution
        self.block = [conv_class(1, d_model, kernel_size=7, padding=3)]
        # Create EncoderBlocks that double channels as they downsample by `stride`
        for stride, n_t_layer in zip(strides, n_transformer_layers):
            d_model *= 2
            self.block += [
                EncoderBlock(
                    d_model,
                    stride=stride,
                    causal=causal,
                    n_t_layer=n_t_layer,
                    transformer_general_config=transformer_general_config,
                )
            ]
        # Create last convolution
        self.block += [
            Snake1d(d_model),
            conv_class(d_model, d_latent, kernel_size=3, padding=1),
        ]
        # Wrap block into nn.Sequential
        self.block = nn.Sequential(*self.block)
        self.enc_dim = d_model

    def forward(self, x):
        return self.block(x)
class DecoderBlock(nn.Module):
    """Upsampling decoder stage: Snake, a transposed conv upsampling by `stride`
    (input_dim -> output_dim channels), then three dilated ResidualUnits.

    NOTE(review): the WindowLimitedTransformer built below is never registered —
    its slot in the nn.Sequential is commented out — so for n_t_layer > 0 it is
    constructed and immediately discarded.
    """

    def __init__(
        self,
        input_dim: int = 16,
        output_dim: int = 8,
        stride: int = 1,
        causal: bool = False,
        n_t_layer: int = 0,
        transformer_general_config=None,
    ):
        super().__init__()
        conv_trans_class = CausalWNConvTranspose1d if causal else WNConvTranspose1d
        transformer_module = (
            nn.Identity()
            if n_t_layer == 0
            else (
                WindowLimitedTransformer(
                    causal=causal,
                    input_dim=input_dim,
                    window_size=None,
                    config=transformer_general_config(
                        n_layer=n_t_layer,
                        n_head=input_dim // 64,
                        dim=input_dim,
                        intermediate_size=input_dim * 3,
                    ),
                )
            )
        )
        self.block = nn.Sequential(
            # transformer_module,
            Snake1d(input_dim),
            conv_trans_class(
                input_dim,
                output_dim,
                kernel_size=2 * stride,
                stride=stride,
                padding=math.ceil(stride / 2),
            ),
            ResidualUnit(output_dim, dilation=1, causal=causal),
            ResidualUnit(output_dim, dilation=3, causal=causal),
            ResidualUnit(output_dim, dilation=9, causal=causal),
        )

    def forward(self, x):
        return self.block(x)
class Decoder(nn.Module):
    """Latent-to-waveform decoder: initial conv, DecoderBlocks halving channels
    while upsampling by each rate, then Snake + conv + tanh output.

    NOTE(review): `output_dim` is bound inside the loop, so `rates` must be
    non-empty or the final layers below raise NameError.
    """

    def __init__(
        self,
        input_channel,
        channels,
        rates,
        d_out: int = 1,
        causal: bool = False,
        n_transformer_layers: list = [0, 0, 0, 0],
        transformer_general_config=None,
    ):
        super().__init__()
        conv_class = CausalWNConv1d if causal else WNConv1d
        # Add first conv layer
        layers = [conv_class(input_channel, channels, kernel_size=7, padding=3)]
        # Add upsampling + MRF blocks
        for i, (stride, n_t_layer) in enumerate(zip(rates, n_transformer_layers)):
            input_dim = channels // 2**i
            output_dim = channels // 2 ** (i + 1)
            layers += [
                DecoderBlock(
                    input_dim,
                    output_dim,
                    stride,
                    causal=causal,
                    n_t_layer=n_t_layer,
                    transformer_general_config=transformer_general_config,
                )
            ]
        # Add final conv layer
        layers += [
            Snake1d(output_dim),
            conv_class(output_dim, d_out, kernel_size=7, padding=3),
            nn.Tanh(),
        ]
        self.model = nn.Sequential(*layers)

    def forward(self, x):
        return self.model(x)
class DAC(BaseModel, CodecMixin):
    """DAC-style audio codec: Encoder -> residual vector quantizer -> Decoder.

    Fixes versus the previous revision of `forward`:
    - `encode` was called as `self.encode(audio_data, n_quantizers, ...)`, which
      bound `n_quantizers` to the `audio_lengths` parameter; it is now passed by
      keyword.
    - `decode` was called without its required `feature_lengths` argument (a
      guaranteed TypeError); the code lengths returned by `encode` are now used.
    """

    def __init__(
        self,
        encoder_dim: int = 64,
        encoder_rates: List[int] = [2, 4, 8, 8],
        latent_dim: int = None,
        decoder_dim: int = 1536,
        decoder_rates: List[int] = [8, 8, 4, 2],
        quantizer: torch.nn.Module = None,
        sample_rate: int = 44100,
        causal: bool = True,
        encoder_transformer_layers: List[int] = [0, 0, 0, 0],
        decoder_transformer_layers: List[int] = [0, 0, 0, 0],
        transformer_general_config=None,
    ):
        super().__init__()
        self.encoder_dim = encoder_dim
        self.encoder_rates = encoder_rates
        self.decoder_dim = decoder_dim
        self.decoder_rates = decoder_rates
        self.sample_rate = sample_rate
        if latent_dim is None:
            # Default latent width: channels double at every encoder stage.
            latent_dim = encoder_dim * (2 ** len(encoder_rates))
        self.latent_dim = latent_dim
        # Total temporal downsampling factor of the encoder.
        self.hop_length = np.prod(encoder_rates)
        self.encoder = Encoder(
            encoder_dim,
            encoder_rates,
            latent_dim,
            causal=causal,
            n_transformer_layers=encoder_transformer_layers,
            transformer_general_config=transformer_general_config,
        )
        self.quantizer = quantizer
        self.decoder = Decoder(
            latent_dim,
            decoder_dim,
            decoder_rates,
            causal=causal,
            n_transformer_layers=decoder_transformer_layers,
            transformer_general_config=transformer_general_config,
        )
        self.sample_rate = sample_rate
        self.apply(init_weights)
        self.delay = self.get_delay()
        # One quantizer frame spans 4 encoder hops (used for padding and lengths).
        self.frame_length = self.hop_length * 4

    def preprocess(self, audio_data, sample_rate):
        """Validate the sample rate and right-pad audio to a hop_length multiple."""
        if sample_rate is None:
            sample_rate = self.sample_rate
        assert sample_rate == self.sample_rate
        length = audio_data.shape[-1]
        right_pad = math.ceil(length / self.hop_length) * self.hop_length - length
        audio_data = nn.functional.pad(audio_data, (0, right_pad))
        return audio_data

    def encode(
        self,
        audio_data: torch.Tensor,
        audio_lengths: torch.Tensor = None,
        n_quantizers: int = None,
        **kwargs,
    ):
        """Encode audio into quantized codebook indices.

        Parameters
        ----------
        audio_data : Tensor[B x T] or Tensor[B x 1 x T]
            Audio to encode; right-padded to a multiple of `frame_length`.
        audio_lengths : Tensor, optional
            Valid samples per batch item; defaults to the padded length.
        n_quantizers : int, optional
            Number of quantizers to use; None means all.

        Returns
        -------
        (indices, indices_lens)
            Codebook indices and the per-item number of code frames.
        """
        # pad to multiple of self.frame_length
        if audio_data.ndim == 2:
            audio_data = audio_data.unsqueeze(1)
        length = audio_data.shape[-1]
        right_pad = math.ceil(length / self.frame_length) * self.frame_length - length
        audio_data = nn.functional.pad(audio_data, (0, right_pad))
        if audio_lengths is None:
            audio_lengths = torch.LongTensor([length + right_pad]).to(audio_data.device)
        z = self.encoder(audio_data)
        vq_results = self.quantizer(z, n_quantizers, **kwargs)
        indices = vq_results.codes
        indices_lens = torch.ceil(audio_lengths / self.frame_length).long()
        return indices, indices_lens

    def decode(self, indices: torch.Tensor, feature_lengths):
        """Decode codebook indices; returns (audio, audio_lengths)."""
        if indices.ndim == 2:
            indices = indices[None]
        z = self.quantizer.decode(indices)
        audio_lengths = feature_lengths * self.frame_length
        return self.decoder(z), audio_lengths

    def forward(
        self,
        audio_data: torch.Tensor,
        template: torch.Tensor = None,
        mask: torch.Tensor = None,
        sample_rate: int = None,
        n_quantizers: int = None,
        **kwargs,
    ):
        """Round-trip: encode `audio_data` to codes and decode back to audio.

        `template` and `mask` are accepted for interface compatibility but unused
        here. Returns (reconstructed_audio[..., :input_length],
        (indices, indices_lens)).
        """
        length = audio_data.shape[-1]
        audio_data = self.preprocess(audio_data, sample_rate)
        # Keyword arg is essential: positionally, n_quantizers would land in the
        # `audio_lengths` slot of encode().
        indices, indices_lens = self.encode(audio_data, n_quantizers=n_quantizers, **kwargs)
        # decode() needs the code lengths to recover output audio lengths.
        audio, _ = self.decode(indices, indices_lens)
        return audio[..., :length], (indices, indices_lens)
| {
"repo_id": "fishaudio/fish-speech",
"file_path": "fish_speech/models/dac/modded_dac.py",
"license": "Apache License 2.0",
"lines": 844,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
fishaudio/fish-speech:fish_speech/models/dac/rvq.py | import math
import typing as tp
from dataclasses import dataclass
import torch
import torch.nn as nn
import torch.nn.functional as F
from dac.nn.quantize import ResidualVectorQuantize
from torch.nn.utils.parametrizations import weight_norm
from torch.nn.utils.parametrize import remove_parametrizations
def unpad1d(x: torch.Tensor, paddings: tp.Tuple[int, int]):
    """Strip (left, right) padding samples from the last axis of a 1d signal."""
    left, right = paddings
    assert left >= 0 and right >= 0, (left, right)
    assert left + right <= x.shape[-1]
    return x[..., left : x.shape[-1] - right]
def get_extra_padding_for_conv1d(
    x: torch.Tensor, kernel_size: int, stride: int, padding_total: int = 0
) -> int:
    """Extra right padding needed so the final conv frame fully covers the signal."""
    length = x.shape[-1]
    frames = (length - kernel_size + padding_total) / stride + 1
    target_length = (math.ceil(frames) - 1) * stride + (kernel_size - padding_total)
    return target_length - length
def pad1d(
    x: torch.Tensor,
    paddings: tp.Tuple[int, int],
    mode: str = "zeros",
    value: float = 0.0,
):
    """Pad the last axis of x, tolerating reflect padding on short inputs.

    F.pad's reflect mode needs the input longer than the pad amount; when it is
    not, the signal is zero-extended on the right first and the surplus trimmed
    off the result afterwards.
    """
    length = x.shape[-1]
    left, right = paddings
    assert left >= 0 and right >= 0, (left, right)
    if mode != "reflect":
        return F.pad(x, paddings, mode, value)
    extra = max(left, right) - length + 1
    if extra < 0:
        extra = 0
    if extra:
        x = F.pad(x, (0, extra))
    padded = F.pad(x, paddings, mode, value)
    return padded[..., : padded.shape[-1] - extra]
class CausalConvNet(nn.Module):
def __init__(
self,
in_channels,
out_channels,
kernel_size,
dilation=1,
stride=1,
groups=1,
padding=None,
):
super(CausalConvNet, self).__init__()
self.conv = nn.Conv1d(
in_channels,
out_channels,
kernel_size,
stride=stride,
dilation=dilation,
groups=groups,
)
self.stride = stride
self.kernel_size = (kernel_size - 1) * dilation + 1
self.dilation = dilation
self.padding = self.kernel_size - self.stride
def forward(self, x):
pad = self.padding
extra_padding = get_extra_padding_for_conv1d(
x, self.kernel_size, self.stride, pad
)
x = pad1d(x, (pad, extra_padding), mode="constant", value=0)
return self.conv(x).contiguous()
def weight_norm(self, name="weight", dim=0):
self.conv = weight_norm(self.conv, name=name, dim=dim)
return self
def remove_weight_norm(self):
self.conv = remove_parametrizations(self.conv)
return self
class CausalTransConvNet(nn.Module):
def __init__(
self, in_channels, out_channels, kernel_size, dilation=1, stride=1, padding=None
):
super(CausalTransConvNet, self).__init__()
self.conv = nn.ConvTranspose1d(
in_channels, out_channels, kernel_size, stride=stride, dilation=dilation
)
self.stride = stride
self.kernel_size = kernel_size
def forward(self, x):
x = self.conv(x)
pad = self.kernel_size - self.stride
padding_right = math.ceil(pad)
padding_left = pad - padding_right
x = unpad1d(x, (padding_left, padding_right))
return x.contiguous()
def weight_norm(self, name="weight", dim=0):
self.conv = weight_norm(self.conv, name=name, dim=dim)
return self
def remove_weight_norm(self):
self.conv = remove_parametrizations(self.conv)
return self
# ConvNeXt Block copied from https://github.com/fishaudio/fish-diffusion/blob/main/fish_diffusion/modules/convnext.py
class ConvNeXtBlock(nn.Module):
    r"""ConvNeXt Block. There are two equivalent implementations:
    (1) DwConv -> LayerNorm (channels_first) -> 1x1 Conv -> GELU -> 1x1 Conv; all in (N, C, H, W)
    (2) DwConv -> Permute to (N, H, W, C); LayerNorm (channels_last) -> Linear -> GELU -> Linear; Permute back
    We use (2) as we find it slightly faster in PyTorch

    This variant operates on 1d sequences (N, C, L) with a causal depthwise conv.
    NOTE(review): `drop_path` is documented below but not implemented here.

    Args:
        dim (int): Number of input channels.
        drop_path (float): Stochastic depth rate. Default: 0.0
        layer_scale_init_value (float): Init value for Layer Scale. Default: 1e-6.
        mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4.0.
        kernel_size (int): Kernel size for depthwise conv. Default: 7.
        dilation (int): Dilation for depthwise conv. Default: 1.
    """  # noqa: E501

    def __init__(
        self,
        dim: int,
        layer_scale_init_value: float = 1e-6,
        mlp_ratio: float = 4.0,
        kernel_size: int = 7,
        dilation: int = 1,
    ):
        super().__init__()
        convnet_type = CausalConvNet
        self.dwconv = convnet_type(
            dim,
            dim,
            kernel_size=kernel_size,
            # padding=int(dilation * (kernel_size - 1) / 2),
            groups=dim,
            dilation=dilation,
        )  # depthwise conv
        self.norm = nn.LayerNorm(dim, eps=1e-6)
        self.pwconv1 = nn.Linear(
            dim, int(mlp_ratio * dim)
        )  # pointwise/1x1 convs, implemented with linear layers
        self.act = nn.GELU()
        self.pwconv2 = nn.Linear(int(mlp_ratio * dim), dim)
        # Learnable per-channel layer scale; disabled when init value <= 0.
        self.gamma = (
            nn.Parameter(layer_scale_init_value * torch.ones((dim)), requires_grad=True)
            if layer_scale_init_value > 0
            else None
        )

    def forward(self, x, apply_residual: bool = True):
        """Run the block on (N, C, L) input; `apply_residual=False` skips the skip-add."""
        input = x
        x = self.dwconv(x)
        x = x.permute(0, 2, 1)  # (N, C, L) -> (N, L, C)
        x = self.norm(x)
        x = self.pwconv1(x)
        x = self.act(x)
        x = self.pwconv2(x)
        if self.gamma is not None:
            x = self.gamma * x
        x = x.permute(0, 2, 1)  # (N, L, C) -> (N, C, L)
        if apply_residual:
            x = input + x
        return x
@dataclass
class VQResult:
    """Bundle of quantizer outputs returned by DownsampleResidualVectorQuantize.forward."""

    z: torch.Tensor  # quantized representation, (B, D, T)
    codes: torch.Tensor  # codebook indices; semantic codebook first along dim 1
    latents: torch.Tensor  # projected latents (semantic first along dim 1)
    codebook_loss: torch.Tensor
    commitment_loss: torch.Tensor
    semantic_distill_z: torch.Tensor | None = None  # unused here (training-only distill target)
class DownsampleResidualVectorQuantize(nn.Module):
    """Residual VQ applied to a temporally-downsampled latent.

    The input (B, D, T) is downsampled by prod(downsample_factor) via strided
    causal convs, quantized by a single-codebook "semantic" RVQ plus an
    n-codebook residual RVQ, then upsampled back and padded/cropped to T.

    Fix: `_init_weights` now skips bias initialization for modules built with
    bias=False (whose `bias` attribute is None) instead of crashing.
    """

    def __init__(
        self,
        input_dim: int = 1024,
        n_codebooks: int = 9,
        codebook_dim: int = 8,
        quantizer_dropout: float = 0.5,
        codebook_size: int = 1024,
        semantic_codebook_size: int = 4096,
        downsample_factor: tuple[int] = (2, 2),
        downsample_dims: tuple[int] | None = None,
        pre_module: nn.Module | None = None,
        post_module: nn.Module | None = None,
        semantic_predictor_module: nn.Module | None = None,
    ):
        super().__init__()
        if downsample_dims is None:
            downsample_dims = [input_dim for _ in range(len(downsample_factor))]
        all_dims = (input_dim,) + tuple(downsample_dims)
        self.semantic_quantizer = ResidualVectorQuantize(
            input_dim=input_dim,
            n_codebooks=1,
            codebook_size=semantic_codebook_size,
            codebook_dim=codebook_dim,
            quantizer_dropout=0.0,  # semantic codebook is never dropped
        )
        self.quantizer = ResidualVectorQuantize(
            input_dim=input_dim,
            n_codebooks=n_codebooks,
            codebook_size=codebook_size,
            codebook_dim=codebook_dim,
            quantizer_dropout=quantizer_dropout,
        )
        self.downsample_factor = downsample_factor
        self.downsample_dims = downsample_dims
        convnet_type = CausalConvNet
        transconvnet_type = CausalTransConvNet
        # Each stage: strided causal conv (downsample by `factor`) + ConvNeXt block.
        self.downsample = nn.Sequential(
            *[
                nn.Sequential(
                    convnet_type(
                        all_dims[idx],
                        all_dims[idx + 1],
                        kernel_size=factor,
                        stride=factor,
                    ),
                    ConvNeXtBlock(dim=all_dims[idx + 1]),
                )
                for idx, factor in enumerate(downsample_factor)
            ]
        )
        # Mirror of `downsample`, applied in reverse stage order.
        self.upsample = nn.Sequential(
            *[
                nn.Sequential(
                    transconvnet_type(
                        all_dims[idx + 1],
                        all_dims[idx],
                        kernel_size=factor,
                        stride=factor,
                    ),
                    ConvNeXtBlock(dim=all_dims[idx]),
                )
                for idx, factor in reversed(list(enumerate(downsample_factor)))
            ]
        )
        self.apply(self._init_weights)
        self.pre_module = (
            pre_module if pre_module is not None else nn.Identity()
        )  # leave for transformer, LSTM or Mamba or something else
        self.post_module = post_module if post_module is not None else nn.Identity()
        self.semantic_predictor_module = (
            semantic_predictor_module
            if semantic_predictor_module is not None
            else nn.Identity()
        )

    def _init_weights(self, m):
        """Truncated-normal init for conv/linear weights; zero biases where present."""
        if isinstance(m, (nn.Conv1d, nn.Linear)):
            nn.init.trunc_normal_(m.weight, std=0.02)
            # bias is None for modules created with bias=False; constant_ would raise.
            if m.bias is not None:
                nn.init.constant_(m.bias, 0)

    def forward(
        self, z, n_quantizers: int = None, semantic_len: torch.Tensor = None, **kwargs
    ):
        """Quantize z (B, D, T); returns a VQResult whose z matches T exactly."""
        original_shape = z.shape
        if semantic_len is None:
            semantic_len = torch.LongTensor([z.shape[-1]])
        z = self.downsample(z)
        z = self.pre_module(z)  # B, T, D
        # Quantize with the single semantic codebook first...
        (
            semantic_z,
            semantic_codes,
            semantic_latents,
            semantic_commitment_loss,
            semantic_codebook_loss,
        ) = self.semantic_quantizer(z)
        # ...then quantize what it missed with the main RVQ stack.
        residual_z = z - semantic_z
        residual_z, codes, latents, commitment_loss, codebook_loss = self.quantizer(
            residual_z, n_quantizers=n_quantizers
        )
        z = semantic_z + residual_z
        commitment_loss = commitment_loss + semantic_commitment_loss
        codebook_loss = codebook_loss + semantic_codebook_loss
        # Semantic codebook occupies index 0 along the codebook axis.
        codes = torch.cat([semantic_codes, codes], dim=1)
        latents = torch.cat([semantic_latents, latents], dim=1)
        z = self.post_module(z)
        z = self.upsample(z)
        # z: (B, D, T)
        # semantic distillation (disabled here since only used in training)
        # semantic_distill_z = self.semantic_predictor_module(semantic_z, semantic_len).mT  # wav2vec target is B, T, D
        # Pad (on the left) or crop (from the left) so z matches the input length.
        diff = original_shape[-1] - z.shape[-1]
        right = 0
        left = abs(diff) - right
        if diff > 0:
            z = F.pad(z, (left, right))
        elif diff < 0:
            z = z[..., left:]
        results = VQResult(
            z=z,
            codes=codes,
            latents=latents,
            commitment_loss=commitment_loss,
            codebook_loss=codebook_loss,
        )
        return results

    def decode(self, indices: torch.Tensor):
        """Reconstruct the upsampled latent from codebook indices.

        `indices` carries the semantic code in channel 0 and the residual codes
        after it; out-of-range codes are clamped to each codebook's valid range.
        """
        new_indices = torch.zeros_like(indices)
        new_indices[:, 0] = torch.clamp(
            indices[:, 0], max=self.semantic_quantizer.codebook_size - 1
        )
        new_indices[:, 1:] = torch.clamp(
            indices[:, 1:], max=self.quantizer.codebook_size - 1
        )
        z_q_semantic = self.semantic_quantizer.from_codes(new_indices[:, :1])[0]
        z_q_residual = self.quantizer.from_codes(new_indices[:, 1:])[0]
        z_q = z_q_semantic + z_q_residual
        z_q = self.post_module(z_q)
        z_q = self.upsample(z_q)
        return z_q
if __name__ == "__main__":
    # Smoke test: quantize a random latent and check streaming (prefix) consistency.
    rvq = DownsampleResidualVectorQuantize(
        input_dim=512,
        n_codebooks=8,
        codebook_dim=8,
        codebook_size=1024,
        quantizer_dropout=0.5,
        downsample_factor=[2, 2],
    )
    rvq.eval()
    x = torch.randn(2, 512, 442)
    result = rvq(x)
    print(rvq)
    print(result.latents.shape, result.codes.shape, result.z.shape)
    # y = rvq.from_codes(result.codes)
    # print(y[0].shape)
    # y = rvq.from_latents(
    # A causal model must produce identical outputs on a prefix of the input.
    result1 = rvq(x[:, :, :40])
    print(result1.latents.shape, result1.codes.shape, result1.z.shape)
    assert torch.allclose(result.z[:, :, :40], result1.z, atol=1e-8)
    print("Success")
| {
"repo_id": "fishaudio/fish-speech",
"file_path": "fish_speech/models/dac/rvq.py",
"license": "Apache License 2.0",
"lines": 346,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
frappe/erpnext:erpnext/patches/v16_0/update_order_qty_and_requested_qty_based_on_mr_and_po.py | import frappe
from frappe.query_builder import DocType
from frappe.query_builder.functions import Sum
def execute():
    """Backfill Sales Order Item ordered_qty / requested_qty from submitted
    Purchase Order Items and Material Request Items linked to sales order items.

    Fix: `.where(X.sales_order_item.isnotnull() & X.docstatus == 1)` parsed as
    `(isnotnull() & docstatus) == 1` because Python's `&` binds tighter than
    `==`; the docstatus comparison is now explicitly parenthesized.
    """
    PurchaseOrderItem = DocType("Purchase Order Item")
    MaterialRequestItem = DocType("Material Request Item")

    poi_query = (
        frappe.qb.from_(PurchaseOrderItem)
        .select(PurchaseOrderItem.sales_order_item, Sum(PurchaseOrderItem.stock_qty))
        .where(
            PurchaseOrderItem.sales_order_item.isnotnull()
            & (PurchaseOrderItem.docstatus == 1)
        )
        .groupby(PurchaseOrderItem.sales_order_item)
    )
    mri_query = (
        frappe.qb.from_(MaterialRequestItem)
        .select(MaterialRequestItem.sales_order_item, Sum(MaterialRequestItem.stock_qty))
        .where(
            MaterialRequestItem.sales_order_item.isnotnull()
            & (MaterialRequestItem.docstatus == 1)
        )
        .groupby(MaterialRequestItem.sales_order_item)
    )

    poi_data = poi_query.run()
    mri_data = mri_query.run()

    # MR-based updates zero ordered_qty; PO-based updates run second and win
    # for rows present in both maps.
    updates_against_poi = {row[0]: {"ordered_qty": row[1]} for row in poi_data}
    updates_against_mri = {row[0]: {"requested_qty": row[1], "ordered_qty": 0} for row in mri_data}

    frappe.db.auto_commit_on_many_writes = 1
    frappe.db.bulk_update("Sales Order Item", updates_against_mri)
    frappe.db.bulk_update("Sales Order Item", updates_against_poi)
    frappe.db.auto_commit_on_many_writes = 0
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/update_order_qty_and_requested_qty_based_on_mr_and_po.py",
"license": "GNU General Public License v3.0",
"lines": 26,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/accounts/doctype/payment_reference/payment_reference.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class PaymentReference(Document):
    """Child-table doctype (note the parent/parentfield/parenttype fields below);
    declares typed fields only — no custom server-side behavior."""

    # begin: auto-generated types
    # This code is auto-generated. Do not modify anything in this block.

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from frappe.types import DF

        amount: DF.Currency
        description: DF.SmallText | None
        due_date: DF.Date | None
        parent: DF.Data
        parentfield: DF.Data
        parenttype: DF.Data
        payment_schedule: DF.Link | None
        payment_term: DF.Link | None
    # end: auto-generated types

    pass
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/accounts/doctype/payment_reference/payment_reference.py",
"license": "GNU General Public License v3.0",
"lines": 20,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
frappe/erpnext:erpnext/patches/v16_0/complete_onboarding_steps_for_older_sites.py | import frappe
from frappe.utils import date_diff, getdate, today
def execute():
    """Complete pending onboarding steps on older sites.

    Sites older than 15 days (judged by the first Company's creation date)
    get every pending step completed; younger sites only complete steps whose
    reference records already exist.

    Fix: bail out when no Company exists — the previous code indexed
    `company_creation[0]` and raised IndexError on sites without a Company.
    """
    steps = frappe.get_all(
        "Onboarding Step",
        filters={"is_complete": 0},
        fields=["name", "action", "reference_document"],
    )
    if not steps:
        return

    company_creation = frappe.get_all("Company", fields=["creation"], order_by="creation asc", limit=1)
    if not company_creation:
        return

    days_diff = date_diff(getdate(today()), getdate(company_creation[0].creation))
    if days_diff > 15:
        complete_all_onboarding_steps(steps)
    else:
        complete_onboarding_steps_if_record_exists(steps)
def complete_all_onboarding_steps(steps):
    """Mark every given onboarding step as complete (without touching modified)."""
    for entry in steps:
        frappe.db.set_value("Onboarding Step", entry.name, "is_complete", 1, update_modified=False)
def complete_onboarding_steps_if_record_exists(steps):
    """Mark a "Create Entry" step complete when a record of its reference doctype exists."""
    for entry in steps:
        if entry.action != "Create Entry":
            continue
        if not entry.reference_document:
            continue
        if not frappe.get_all(entry.reference_document, limit=1):
            continue
        frappe.db.set_value("Onboarding Step", entry.name, "is_complete", 1, update_modified=False)
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/complete_onboarding_steps_for_older_sites.py",
"license": "GNU General Public License v3.0",
"lines": 27,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/patches/v15_0/add_bank_transaction_as_journal_entry_reference.py | import frappe
def execute():
    """Append Bank Transaction in custom reference_type options."""
    new_reference_type = "Bank Transaction"

    setter_names = frappe.get_all(
        "Property Setter",
        filters={
            "doc_type": "Journal Entry Account",
            "field_name": "reference_type",
            "property": "options",
        },
        pluck="name",
    )

    for setter_name in setter_names:
        current = frappe.db.get_value("Property Setter", setter_name, "value") or ""
        stripped = [part.strip() for part in current.split("\n")]
        # Preserve a single leading blank (for the empty select option) but drop spurious trailing blanks
        options = stripped[:1]
        options.extend(part for part in stripped[1:] if part)
        if new_reference_type in options:
            continue
        options.append(new_reference_type)
        frappe.db.set_value(
            "Property Setter",
            setter_name,
            "value",
            "\n".join(options),
        )
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v15_0/add_bank_transaction_as_journal_entry_reference.py",
"license": "GNU General Public License v3.0",
"lines": 27,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/patches/v16_0/add_portal_redirects.py | import frappe
def execute():
    """Restrict the Address portal item to the Customer role and add route redirects."""
    address_item = {"route": "/addresses", "reference_doctype": "Address"}
    if frappe.db.exists("Portal Menu Item", address_item):
        doc = frappe.get_doc("Portal Menu Item", address_item)
        if doc:
            doc.role = "Customer"
            doc.save()

    website_settings = frappe.get_single("Website Settings")
    for source, target in (("addresses", "address/list"), ("projects", "project")):
        website_settings.append("route_redirects", {"source": source, "target": target})
    website_settings.save()
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/add_portal_redirects.py",
"license": "GNU General Public License v3.0",
"lines": 11,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/patches/v15_0/create_accounting_dimensions_in_advance_taxes_and_charges.py | from erpnext.accounts.doctype.accounting_dimension.accounting_dimension import (
create_accounting_dimensions_for_doctype,
)
def execute():
    """Create accounting dimension fields on the Advance Taxes and Charges doctype."""
    create_accounting_dimensions_for_doctype(doctype="Advance Taxes and Charges")
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v15_0/create_accounting_dimensions_in_advance_taxes_and_charges.py",
"license": "GNU General Public License v3.0",
"lines": 5,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/patches/v15_0/delete_quotation_lost_record_detail.py | import frappe
from frappe.query_builder import DocType
def execute():
    """Delete Quotation Lost Reason Detail rows whose parent Quotation no longer exists."""
    detail = DocType("Quotation Lost Reason Detail")
    quotation = DocType("Quotation")

    existing_quotations = frappe.qb.from_(quotation).select(quotation.name)
    frappe.qb.from_(detail).delete().where(detail.parent.notin(existing_quotations)).run()
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v15_0/delete_quotation_lost_record_detail.py",
"license": "GNU General Public License v3.0",
"lines": 8,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/patches/v15_0/replace_http_with_https_in_sales_partner.py | import frappe
from frappe import qb
from pypika.functions import Replace
def execute():
    """Rewrite Sales Partner website URLs from http:// to https://."""
    partner = frappe.qb.DocType("Sales Partner")
    query = (
        qb.update(partner)
        .set(partner.partner_website, Replace(partner.partner_website, "http://", "https://"))
        .where(partner.partner_website.rlike("^http://.*"))
    )
    query.run()
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v15_0/replace_http_with_https_in_sales_partner.py",
"license": "GNU General Public License v3.0",
"lines": 8,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/patches/v16_0/add_accounting_dimensions_to_journal_template_accounts.py | from erpnext.accounts.doctype.accounting_dimension.accounting_dimension import (
get_dimensions,
make_dimension_in_accounting_doctypes,
)
def execute():
    """Add each configured accounting dimension to Journal Entry Template Account."""
    dimension_data = get_dimensions()
    if not dimension_data:
        return
    for dimension in dimension_data[0]:
        make_dimension_in_accounting_doctypes(dimension, ["Journal Entry Template Account"])
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/add_accounting_dimensions_to_journal_template_accounts.py",
"license": "GNU General Public License v3.0",
"lines": 9,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/patches/v16_0/migrate_asset_type_checkboxes_to_select.py | import frappe
from frappe.query_builder import Case
def execute():
    """Collapse the three asset-type checkboxes into the asset_type select field."""
    asset = frappe.qb.DocType("Asset")
    asset_type_expr = (
        Case()
        .when(asset.is_existing_asset == 1, "Existing Asset")
        .when(asset.is_composite_asset == 1, "Composite Asset")
        .when(asset.is_composite_component == 1, "Composite Component")
        .else_("")
    )
    frappe.qb.update(asset).set(asset.asset_type, asset_type_expr).run()
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/migrate_asset_type_checkboxes_to_select.py",
"license": "GNU General Public License v3.0",
"lines": 12,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/patches/v16_0/migrate_transaction_deletion_task_flags_to_status.py | import frappe
def execute():
    """
    Migrate Transaction Deletion Record boolean task flags to status Select fields.

    Old boolean fields map to new fields with a `_status` suffix:
    1/"1"/True -> "Completed"; anything else (0, "0", False, None, "") -> "Pending".

    Improvement: the previous per-record, per-field `frappe.db.get_value` calls
    (6 queries per record) are replaced by one `get_all` fetching all flags.
    """
    if not frappe.db.table_exists("tabTransaction Deletion Record"):
        return

    # Field mapping: old boolean field name -> new status field name
    field_mapping = {
        "delete_bin_data": "delete_bin_data_status",
        "delete_leads_and_addresses": "delete_leads_and_addresses_status",
        "reset_company_default_values": "reset_company_default_values_status",
        "clear_notifications": "clear_notifications_status",
        "initialize_doctypes_table": "initialize_doctypes_table_status",
        "delete_transactions": "delete_transactions_status",
    }

    records = frappe.db.get_all(
        "Transaction Deletion Record",
        fields=["name", *field_mapping],
    )
    for record in records or []:
        updates = {
            new_field: "Completed" if record.get(old_field) in (1, "1", True) else "Pending"
            for old_field, new_field in field_mapping.items()
        }
        if updates:
            frappe.db.set_value("Transaction Deletion Record", record.name, updates, update_modified=False)
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/migrate_transaction_deletion_task_flags_to_status.py",
"license": "GNU General Public License v3.0",
"lines": 34,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/patches/v16_0/set_ordered_qty_in_quotation_item.py | import frappe
def execute():
    """Backfill Quotation Item.ordered_qty from submitted Sales Order Items."""
    rows = frappe.get_all(
        "Sales Order Item",
        filters={"quotation_item": ["is", "set"], "docstatus": 1},
        fields=["quotation_item", {"SUM": "stock_qty", "as": "ordered_qty"}],
        group_by="quotation_item",
    )
    if not rows:
        return

    updates = {row.quotation_item: {"ordered_qty": row.ordered_qty} for row in rows}
    frappe.db.auto_commit_on_many_writes = 1
    frappe.db.bulk_update("Quotation Item", updates)
    frappe.db.auto_commit_on_many_writes = 0
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/set_ordered_qty_in_quotation_item.py",
"license": "GNU General Public License v3.0",
"lines": 14,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/patches/v16_0/update_company_custom_field_in_bin.py | import frappe
def execute():
    """Backfill Bin.company from each Bin's Warehouse where it is missing."""
    frappe.reload_doc("stock", "doctype", "bin")
    # NOTE(review): `UPDATE ... INNER JOIN` is MySQL/MariaDB syntax; this would
    # not run on a Postgres-backed site — confirm the supported backends.
    frappe.db.sql(
        """
        UPDATE `tabBin` b
        INNER JOIN `tabWarehouse` w ON b.warehouse = w.name
        SET b.company = w.company
        WHERE b.company IS NULL OR b.company = ''
        """
    )
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/update_company_custom_field_in_bin.py",
"license": "GNU General Public License v3.0",
"lines": 11,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
frappe/erpnext:erpnext/setup/doctype/transaction_deletion_record_to_delete/transaction_deletion_record_to_delete.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class TransactionDeletionRecordToDelete(Document):
	"""Child table row (has parent/parentfield/parenttype) listing one doctype
	queued for deletion — presumably attached to Transaction Deletion Record;
	confirm against the parent doctype JSON. No custom behavior."""

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		child_doctypes: DF.SmallText | None
		company_field: DF.Data | None
		deleted: DF.Check
		doctype_name: DF.Link | None
		document_count: DF.Int
		parent: DF.Data
		parentfield: DF.Data
		parenttype: DF.Data
	# end: auto-generated types

	pass
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/setup/doctype/transaction_deletion_record_to_delete/transaction_deletion_record_to_delete.py",
"license": "GNU General Public License v3.0",
"lines": 20,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
frappe/erpnext:erpnext/stock/report/negative_batch_report/negative_batch_report.py | # Copyright (c) 2026, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
from frappe import _
from frappe.utils import add_to_date, flt, today
from erpnext.stock.report.stock_ledger.stock_ledger import execute as stock_ledger_execute
def execute(filters: dict | None = None):
	"""Return columns and data for the report.

	Main entry point, invoked by the framework whenever the report is
	refreshed or a filter changes; *filters* arrives as a dictionary.
	"""
	return get_columns(), get_data(filters)
def get_columns() -> list[dict]:
	"""Return the static column definitions for the report."""

	def _column(label, fieldname, fieldtype, width, options=None):
		# Build one column spec; `options` only applies to Link-style columns.
		spec = {
			"label": _(label),
			"fieldname": fieldname,
			"fieldtype": fieldtype,
			"width": width,
		}
		if options:
			spec["options"] = options
		return spec

	return [
		_column("Posting Datetime", "posting_date", "Datetime", 160),
		_column("Batch No", "batch_no", "Link", 120, options="Batch"),
		_column("Item Code", "item_code", "Link", 150, options="Item"),
		_column("Warehouse", "warehouse", "Link", 160, options="Warehouse"),
		_column("Previous Qty", "previous_qty", "Float", 130),
		_column("Transaction Qty", "actual_qty", "Float", 130),
		_column("Qty After Transaction", "qty_after_transaction", "Float", 180),
		_column("Document Type", "voucher_type", "Data", 130),
		_column("Document No", "voucher_no", "Dynamic Link", 130, options="voucher_type"),
	]
def get_data(filters) -> list[dict]:
	"""Collect stock-ledger rows where a batch's running qty went negative.

	Runs the Stock Ledger report once per (company, batch) combination over
	the last 12 years and keeps every row whose qty-after-transaction is
	below zero, together with the running qty before that transaction.
	"""
	batches = get_batches(filters)
	companies = get_companies(filters)

	batch_negative_data = []
	# frappe.db.get_default returns the stored default as a string (or None);
	# flt() needs an integer precision, so cast explicitly before use.
	flt_precision = int(frappe.db.get_default("float_precision") or 2)

	for company in companies:
		for batch in batches:
			_c, data = stock_ledger_execute(
				frappe._dict(
					{
						"company": company,
						"batch_no": batch,
						"from_date": add_to_date(today(), years=-12),
						"to_date": today(),
						"segregate_serial_batch_bundle": 1,
						"warehouse": filters.get("warehouse"),
						"valuation_field_type": "Currency",
					}
				)
			)

			previous_qty = 0
			for row in data:
				if flt(row.get("qty_after_transaction"), flt_precision) < 0:
					batch_negative_data.append(
						{
							"posting_date": row.get("date"),
							"batch_no": row.get("batch_no"),
							"item_code": row.get("item_code"),
							"item_name": row.get("item_name"),
							"warehouse": row.get("warehouse"),
							"actual_qty": row.get("actual_qty"),
							"qty_after_transaction": row.get("qty_after_transaction"),
							"previous_qty": previous_qty,
							"voucher_type": row.get("voucher_type"),
							"voucher_no": row.get("voucher_no"),
						}
					)

				# Track the running balance so the next negative row can report it.
				previous_qty = row.get("qty_after_transaction")

	return batch_negative_data
def get_batches(filters):
	"""Return names of all Batches, optionally restricted to one item."""
	conditions = {}
	if item_code := filters.get("item_code"):
		conditions["item"] = item_code
	return frappe.get_all("Batch", pluck="name", filters=conditions)
def get_companies(filters):
	"""Return names of all Companies, optionally restricted to one company."""
	conditions = {}
	if company := filters.get("company"):
		conditions["name"] = company
	return frappe.get_all("Company", pluck="name", filters=conditions)
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/stock/report/negative_batch_report/negative_batch_report.py",
"license": "GNU General Public License v3.0",
"lines": 125,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
frappe/erpnext:erpnext/accounts/doctype/account_category/account_category.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import json
import os
import frappe
from frappe import _
from frappe.model.document import Document, bulk_insert
DOCTYPE = "Account Category"
class AccountCategory(Document):
	"""A named account category; renames propagate into Financial Report Row formulas."""

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		account_category_name: DF.Data
		description: DF.SmallText | None
	# end: auto-generated types

	def after_rename(self, old_name, new_name, merge):
		"""Keep Financial Report Row calculation formulas in sync after a rename.

		Finds every row whose formula text mentions the old category name and
		rewrites direct `account_category` references to the new name.
		"""
		from erpnext.accounts.doctype.financial_report_template.financial_report_engine import (
			FormulaFieldUpdater,
		)

		# get all template rows with this account category being used
		row = frappe.qb.DocType("Financial Report Row")
		rows = frappe._dict(
			frappe.qb.from_(row)
			.select(row.name, row.calculation_formula)
			.where(row.calculation_formula.like(f"%{old_name}%"))
			.run()
		)
		if not rows:
			return

		# Update formulas with new name ("like"/"not like" comparisons are
		# explicitly excluded from rewriting).
		updater = FormulaFieldUpdater(
			field_name="account_category",
			value_mapping={old_name: new_name},
			exclude_operators=["like", "not like"],
		)
		updated_formulas = updater.update_in_rows(rows)
		if updated_formulas:
			frappe.msgprint(
				_("Updated {0} Financial Report Row(s) with new category name").format(len(updated_formulas))
			)
def import_account_categories(template_path: str):
	"""Load `account_categories.json` from *template_path* and create the records.

	Silently does nothing when the file is absent.
	"""
	source_file = os.path.join(template_path, "account_categories.json")
	if not os.path.exists(source_file):
		return

	with open(source_file) as fp:
		records = json.load(fp, object_hook=frappe._dict)

	create_account_categories(records)
def create_account_categories(categories: list[dict]):
	"""Bulk-insert Account Category documents that do not exist yet.

	Entries without an `account_category_name`, or whose name already exists
	(in the database or earlier in *categories*), are skipped.
	"""
	if not categories:
		return

	known_names = set(frappe.get_all(DOCTYPE, pluck="name"))
	docs_to_insert = []

	for entry in categories:
		name = entry.get("account_category_name")
		if not name or name in known_names:
			continue

		docs_to_insert.append(
			frappe.get_doc(
				{
					**entry,
					"doctype": DOCTYPE,
					"name": name,
				}
			)
		)
		# Track the name so duplicates within the input are also skipped.
		known_names.add(name)

	if docs_to_insert:
		bulk_insert(DOCTYPE, docs_to_insert)
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/accounts/doctype/account_category/account_category.py",
"license": "GNU General Public License v3.0",
"lines": 69,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
frappe/erpnext:erpnext/accounts/doctype/account_category/test_account_category.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
# import frappe
from frappe.tests import IntegrationTestCase
# On IntegrationTestCase, the doctype test records and all
# link-field test record dependencies are recursively loaded
# Use these module variables to add/remove to/from that list
EXTRA_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
IGNORE_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
class IntegrationTestAccountCategory(IntegrationTestCase):
	"""
	Integration tests for AccountCategory.
	Use this class for testing interactions between multiple components.
	"""

	# Scaffold only — no test methods defined yet.
	pass
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/accounts/doctype/account_category/test_account_category.py",
"license": "GNU General Public License v3.0",
"lines": 15,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
frappe/erpnext:erpnext/accounts/doctype/bank_transaction/test_bank_transaction_fees.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
import frappe
from frappe.tests import UnitTestCase
class TestBankTransactionFees(UnitTestCase):
	"""Unit tests for the included/excluded fee handling of Bank Transaction."""

	@staticmethod
	def _new_bt(**values):
		"""Build an unsaved Bank Transaction with the given field values."""
		bt = frappe.new_doc("Bank Transaction")
		for fieldname, value in values.items():
			setattr(bt, fieldname, value)
		return bt

	def _assert_amounts(self, bt, *, deposit, withdrawal, included_fee, excluded_fee):
		"""Assert all four monetary fields of *bt* in one call."""
		self.assertEqual(bt.deposit, deposit)
		self.assertEqual(bt.withdrawal, withdrawal)
		self.assertEqual(bt.included_fee, included_fee)
		self.assertEqual(bt.excluded_fee, excluded_fee)

	def test_included_fee_throws(self):
		"""A fee that's part of a withdrawal cannot be bigger than the
		withdrawal itself."""
		bt = self._new_bt(withdrawal=100, included_fee=101)
		self.assertRaises(frappe.ValidationError, bt.validate_included_fee)

	def test_included_fee_allows_equal(self):
		"""A fee that's part of a withdrawal may be equal to the withdrawal
		amount (only the fee was deducted from the account)."""
		self._new_bt(withdrawal=100, included_fee=100).validate_included_fee()

	def test_included_fee_allows_for_deposit(self):
		"""For deposits, a fee may be recorded separately without limiting the
		received amount."""
		self._new_bt(deposit=10, included_fee=999).validate_included_fee()

	def test_excluded_fee_noop_when_zero(self):
		"""When there is no excluded fee to apply, the amounts should remain
		unchanged."""
		bt = self._new_bt(deposit=100, withdrawal=0, included_fee=5, excluded_fee=0)
		bt.handle_excluded_fee()
		self._assert_amounts(bt, deposit=100, withdrawal=0, included_fee=5, excluded_fee=0)

	def test_excluded_fee_throws_when_exceeds_deposit(self):
		"""A fee deducted from an incoming payment must not exceed the incoming
		amount (else it would be a withdrawal, a conversion we don't support)."""
		bt = self._new_bt(deposit=10, excluded_fee=11)
		self.assertRaises(frappe.ValidationError, bt.handle_excluded_fee)

	def test_excluded_fee_throws_when_both_deposit_and_withdrawal_are_set(self):
		"""A transaction must be either incoming or outgoing when applying a
		fee, not both."""
		bt = self._new_bt(deposit=10, withdrawal=10, excluded_fee=1)
		self.assertRaises(frappe.ValidationError, bt.handle_excluded_fee)

	def test_excluded_fee_deducts_from_deposit(self):
		"""When a fee is deducted from an incoming payment, the net received
		amount decreases and the fee is tracked as included."""
		bt = self._new_bt(deposit=100, withdrawal=0, included_fee=2, excluded_fee=5)
		bt.handle_excluded_fee()
		self._assert_amounts(bt, deposit=95, withdrawal=0, included_fee=7, excluded_fee=0)

	def test_excluded_fee_can_reduce_an_incoming_payment_to_zero(self):
		"""A separately-deducted fee may reduce an incoming payment to zero,
		while still tracking the fee."""
		bt = self._new_bt(deposit=5, withdrawal=0, included_fee=0, excluded_fee=5)
		bt.handle_excluded_fee()
		self._assert_amounts(bt, deposit=0, withdrawal=0, included_fee=5, excluded_fee=0)

	def test_excluded_fee_increases_outgoing_payment(self):
		"""When a separately-deducted fee is provided for an outgoing payment,
		the total money leaving increases and the fee is tracked."""
		bt = self._new_bt(deposit=0, withdrawal=100, included_fee=2, excluded_fee=5)
		bt.handle_excluded_fee()
		self._assert_amounts(bt, deposit=0, withdrawal=105, included_fee=7, excluded_fee=0)

	def test_excluded_fee_turns_zero_amount_into_withdrawal(self):
		"""If only an excluded fee is provided, it should be treated as an
		outgoing payment and the fee is then tracked as included."""
		bt = self._new_bt(deposit=0, withdrawal=0, included_fee=0, excluded_fee=5)
		bt.handle_excluded_fee()
		self._assert_amounts(bt, deposit=0, withdrawal=5, included_fee=5, excluded_fee=0)
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/accounts/doctype/bank_transaction/test_bank_transaction_fees.py",
"license": "GNU General Public License v3.0",
"lines": 106,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
frappe/erpnext:erpnext/accounts/doctype/financial_report_row/financial_report_row.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class FinancialReportRow(Document):
	"""Child table row (has parent/parentfield/parenttype) describing one line of
	a financial report template — iterated as `template.rows` by the report
	engine. No custom behavior; fields come from the doctype JSON."""

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		advanced_filtering: DF.Check
		balance_type: DF.Literal[
			"", "Opening Balance", "Closing Balance", "Period Movement (Debits - Credits)"
		]
		bold_text: DF.Check
		calculation_formula: DF.Code | None
		color: DF.Color | None
		data_source: DF.Literal[
			"",
			"Account Data",
			"Calculated Amount",
			"Custom API",
			"Blank Line",
			"Column Break",
			"Section Break",
		]
		display_name: DF.Data | None
		fieldtype: DF.Literal["", "Currency", "Float", "Int", "Percent"]
		hidden_calculation: DF.Check
		hide_when_empty: DF.Check
		include_in_charts: DF.Check
		indentation_level: DF.Int
		italic_text: DF.Check
		parent: DF.Data
		parentfield: DF.Data
		parenttype: DF.Data
		reference_code: DF.Data | None
		reverse_sign: DF.Check
	# end: auto-generated types

	pass
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/accounts/doctype/financial_report_row/financial_report_row.py",
"license": "GNU General Public License v3.0",
"lines": 40,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
frappe/erpnext:erpnext/accounts/doctype/financial_report_template/financial_report_engine.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import ast
import json
import math
from abc import ABC, abstractmethod
from dataclasses import dataclass, field
from functools import reduce
from typing import Any, Union
import frappe
from frappe import _
from frappe.database.operator_map import OPERATOR_MAP
from frappe.query_builder import Case
from frappe.query_builder.functions import Sum
from frappe.utils import cstr, date_diff, flt, getdate
from pypika.terms import Bracket, LiteralValue
from erpnext import get_company_currency
from erpnext.accounts.doctype.accounting_dimension.accounting_dimension import (
get_accounting_dimensions,
get_dimension_with_children,
)
from erpnext.accounts.doctype.financial_report_row.financial_report_row import FinancialReportRow
from erpnext.accounts.doctype.financial_report_template.financial_report_template import (
FinancialReportTemplate,
)
from erpnext.accounts.doctype.financial_report_template.financial_report_validation import (
AccountFilterValidator,
CalculationFormulaValidator,
DependencyValidator,
)
from erpnext.accounts.report.financial_statements import (
get_columns,
get_cost_centers_with_children,
get_period_list,
)
from erpnext.accounts.utils import get_children, get_currency_precision
# ============================================================================
# DATA MODELS
# ============================================================================
@dataclass
class PeriodValue:
	"""Financial figures for a single reporting period."""

	period_key: str
	opening: float = 0.0
	closing: float = 0.0
	movement: float = 0.0

	def get_value(self, balance_type: str) -> float:
		"""Return the figure matching *balance_type*; 0.0 for unknown labels."""
		by_type = {
			"Opening Balance": self.opening,
			"Closing Balance": self.closing,
			"Period Movement (Debits - Credits)": self.movement,
		}
		return by_type.get(balance_type, 0.0)

	def copy(self):
		"""Return an independent copy of this period value."""
		return PeriodValue(self.period_key, self.opening, self.closing, self.movement)
@dataclass
class AccountData:
	"""Balances of one account across every reporting period."""

	account: str  # docname
	account_name: str = ""  # human-readable account name
	account_number: str = ""
	period_values: dict[str, PeriodValue] = field(default_factory=dict)

	def add_period(self, period_value: PeriodValue) -> None:
		"""Store (or replace) the figures for one period, keyed by its period_key."""
		self.period_values[period_value.period_key] = period_value

	def get_period(self, period_key: str) -> PeriodValue | None:
		"""Return the stored figures for *period_key*, if any."""
		return self.period_values.get(period_key)

	def get_values_by_type(self, balance_type: str) -> list[float]:
		"""Figures of the requested balance type, in insertion order."""
		return [entry.get_value(balance_type) for entry in self.period_values.values()]

	def get_ordered_values(self, period_keys: list[str], balance_type: str) -> list[float]:
		"""Figures in the order of *period_keys*; missing periods yield 0.0."""
		ordered = []
		for key in period_keys:
			entry = self.period_values.get(key)
			ordered.append(entry.get_value(balance_type) if entry is not None else 0.0)
		return ordered

	def has_periods(self) -> bool:
		"""Whether any period figures have been stored."""
		return bool(self.period_values)

	def accumulate_values(self) -> None:
		"""Fold each period's opening balance into its movement.

		Closing balances are already cumulative, so only movement changes.
		"""
		for entry in self.period_values.values():
			entry.movement += entry.opening

	def unaccumulate_values(self) -> None:
		"""Strip each period's opening balance out of its closing balance.

		Movements are per-period by construction, so only closing changes.
		"""
		for entry in self.period_values.values():
			entry.closing -= entry.opening

	def copy(self):
		"""Deep-copy this object, including every PeriodValue."""
		duplicate = AccountData(self.account, self.account_name, self.account_number)
		duplicate.period_values = {key: value.copy() for key, value in self.period_values.items()}
		return duplicate

	def reverse_values(self) -> None:
		"""Negate every stored figure in place; zeros stay 0.0 (avoids -0.0)."""
		for entry in self.period_values.values():
			entry.opening = -entry.opening if entry.opening else 0.0
			entry.closing = -entry.closing if entry.closing else 0.0
			entry.movement = -entry.movement if entry.movement else 0.0
@dataclass
class RowData:
	"""Represents a processed template row with calculated values"""

	row: FinancialReportRow  # the template row this result belongs to
	values: list[float] = field(default_factory=list)  # one computed value per period
	account_details: dict[str, AccountData] | None = None  # per-account data backing this row, if collected
	is_detail_row: bool = False  # presumably True for generated account-breakdown rows — confirm in row processor
	parent_reference: str | None = None  # presumably the summary row's reference_code for detail rows — confirm
@dataclass
class SegmentData:
	"""A column segment: its rows plus display metadata."""

	rows: list[RowData] = field(default_factory=list)
	label: str = ""
	index: int = 0

	@property
	def id(self) -> str:
		"""Position-derived identifier, e.g. ``seg_0``."""
		return "seg_{}".format(self.index)
@dataclass
class SectionData:
	"""A horizontal report section composed of one or more column segments."""

	segments: list[SegmentData]
	label: str = ""
	index: int = 0

	@property
	def id(self) -> str:
		"""Position-derived identifier, e.g. ``section_0``."""
		return "section_{}".format(self.index)
@dataclass
class ReportContext:
	"""Context object that flows through the pipeline"""

	template: FinancialReportTemplate  # the template being rendered
	filters: dict[str, Any]  # report filters (attribute access elsewhere suggests a frappe._dict — confirm)
	period_list: list[dict] = field(default_factory=list)  # one dict per reporting period
	processed_rows: list[RowData] = field(default_factory=list)
	column_segments: list[list[RowData]] = field(default_factory=list)
	account_data: dict[str, AccountData] = field(default_factory=dict)  # keyed by account docname
	raw_data: dict[str, Any] = field(default_factory=dict)  # scratch space shared across pipeline stages
	show_detailed: bool = False  # True when filters request an "Account Breakdown"
	currency: str | None = None  # company currency used for display

	def get_result(self) -> tuple[list[dict], list[dict], None, dict]:
		"""Get final formatted columns and data.

		Returns a 4-tuple (columns, data, None, chart); the third slot is
		always None (presumably the report "message" slot — confirm).
		"""
		return (
			self.raw_data.get("columns", []),
			self.raw_data.get("formatted_data", []),
			None,
			self.raw_data.get("chart", {}),
		)
@dataclass
class FormattingRule:
	"""Rule for applying display formatting to report rows."""

	condition: callable  # predicate deciding whether this rule applies to a row
	format_properties: Union[dict[str, Any], callable]  # noqa: UP007

	def applies_to(self, row_data: RowData) -> bool:
		"""Return True when *row_data* satisfies this rule's predicate."""
		return self.condition(row_data)

	def get_properties(self, row_data: RowData) -> dict[str, Any]:
		"""Resolve the format properties, invoking them when dynamic."""
		props = self.format_properties
		return props(row_data) if callable(props) else props
# ============================================================================
# REPORT ENGINE
# ============================================================================
class FinancialReportEngine:
	"""Pipeline orchestrator for Custom Financial Reports: validate filters →
	build context → collect data → calculate rows → format → apply view →
	build chart."""

	def execute(self, filters: dict[str, Any]) -> tuple[list[dict], list[dict], None, dict]:
		"""Execute the complete report generation.

		Returns the 4-tuple produced by ReportContext.get_result().
		"""
		self._validate_filters(filters)

		# Initialize context
		context = self._initialize_context(filters)

		# Execute
		self.collect_financial_data(context)
		self.process_calculations(context)
		self.format_report_data(context)
		self.apply_view_transformation(context)

		# Chart
		self.generate_chart_data(context)

		return context.get_result()

	def _validate_filters(self, filters: dict[str, Any]) -> None:
		"""Throw on missing mandatory filters; warn about unsupported ones."""
		required_filters = ["report_template", "period_start_date", "period_end_date"]
		for filter_key in required_filters:
			if not filters.get(filter_key):
				frappe.throw(_("Missing required filter: {0}").format(filter_key))

		if filters.get("presentation_currency"):
			frappe.msgprint(_("Currency filters are currently unsupported in Custom Financial Report."))

		# Margin view is dependent on first row being an income account. Hence not supported.
		# Way to implement this would be using calculated rows with formulas.
		supported_views = ("Report", "Growth")
		if (view := filters.get("selected_view")) and view not in supported_views:
			frappe.msgprint(_("{0} view is currently unsupported in Custom Financial Report.").format(view))

	def _initialize_context(self, filters: dict[str, Any]) -> ReportContext:
		"""Load the template, build the period list and assemble the context."""
		template_name = filters.get("report_template")
		template = frappe.get_doc("Financial Report Template", template_name)

		if not template:
			frappe.throw(_("Financial Report Template {0} not found").format(template_name))

		if template.disabled:
			frappe.throw(_("Financial Report Template {0} is disabled").format(template_name))

		# Generate periods
		# NOTE(review): attribute access below assumes `filters` is a frappe._dict — confirm.
		period_list = get_period_list(
			filters.from_fiscal_year,
			filters.to_fiscal_year,
			filters.period_start_date,
			filters.period_end_date,
			filters.filter_based_on,
			filters.periodicity,
			company=filters.company,
		)

		# Support both old and new field names for backward compatibility
		show_detailed = filters.get("show_account_details") == "Account Breakdown"

		context = ReportContext(
			template=template,
			filters=filters,
			period_list=period_list,
			show_detailed=show_detailed,
			# TODO: Enhance this to support report currencies
			# after fixing which exchange rate to use for P&L
			currency=get_company_currency(filters.company),
		)

		# Add period_keys to context
		context.raw_data["period_keys"] = [p["key"] for p in period_list]

		return context

	def collect_financial_data(self, context: ReportContext) -> ReportContext:
		"""Fetch balances for every 'Account Data' template row into the context."""
		collector = DataCollector(context.filters, context.period_list)

		for row in context.template.rows:
			if row.data_source == "Account Data":
				collector.add_account_request(row)

		all_data = collector.collect_all_data()
		context.account_data = all_data["account_data"]
		context.raw_data.update(all_data)

		return context

	def process_calculations(self, context: ReportContext) -> ReportContext:
		"""Evaluate every template row into context.processed_rows."""
		processor = RowProcessor(context)
		context.processed_rows = processor.process_all_rows()
		return context

	def format_report_data(self, context: ReportContext) -> ReportContext:
		"""Convert processed rows into display columns and data in raw_data."""
		formatter = DataFormatter(context)
		formatted_data, columns = formatter.format_for_display()
		context.raw_data["formatted_data"] = formatted_data
		context.raw_data["columns"] = columns
		return context

	def apply_view_transformation(self, context: ReportContext) -> ReportContext:
		"""Apply the selected view; only 'Growth' requires a transform."""
		if context.filters.get("selected_view") == "Growth":
			transformer = GrowthViewTransformer(context)
			transformer.transform()
		# Default is "Report" view - no transformation needed
		return context

	def generate_chart_data(self, context: ReportContext) -> ReportContext:
		"""Populate chart data on the context (mutates and returns it)."""
		generator = ChartDataGenerator(context)
		generator.generate()
		return context
# ============================================================================
# DATA COLLECTION
# ============================================================================
class DataCollector:
	"""Data collector that fetches all data in optimized queries"""

	def __init__(self, filters: dict[str, Any], periods: list[dict]):
		self.filters = filters
		self.periods = periods
		self.company = filters.get("company")
		self.account_requests = []  # one entry per queued "Account Data" template row
		self.query_builder = FinancialQueryBuilder(filters, periods)
		# Fieldnames available on the Account doctype
		# (NOTE(review): presumably consumed by filter parsing — confirm).
		self.account_fields = {field.fieldname for field in frappe.get_meta("Account").fields}

	def add_account_request(self, row):
		"""Queue one template row's account query for batched collection."""
		self.account_requests.append(
			{
				"row": row,
				"accounts": self._parse_account_filter(self.company, row),
				"balance_type": row.balance_type,
				"reference_code": row.reference_code,
				"reverse_sign": row.reverse_sign,
			}
		)

	def collect_all_data(self) -> dict[str, Any]:
		"""Fetch balances for every queued request in one pass.

		Returns a dict with:
		- account_data: {account: AccountData} raw balances
		- summary: {reference_code: [total per period]}
		- account_details: {reference_code: {account: AccountData}}
		"""
		if not self.account_requests:
			return {"account_data": {}, "summary": {}, "account_details": {}}

		# Get all accounts
		all_accounts = []
		for request in self.account_requests:
			all_accounts.extend(request["accounts"])

		if not all_accounts:
			return {"account_data": {}, "summary": {}, "account_details": {}}

		# Fetch balance data for all accounts
		account_data = self.query_builder.fetch_account_balances(all_accounts)

		# Calculate summaries for each request
		summary = {}
		account_details = {}
		period_keys = [p["key"] for p in self.periods]

		for request in self.account_requests:
			ref_code = request["reference_code"]
			if not ref_code:
				# Rows without a reference code cannot be addressed by formulas.
				continue

			balance_type = request["balance_type"]
			accounts = request["accounts"]

			total_values = [0.0] * len(self.periods)
			request_account_details = {}

			for account in accounts:
				account_name = account.name
				if account_name not in account_data:
					continue

				# Copy before sign-reversal so other requests sharing the
				# same account keep the original values.
				account_obj: AccountData = account_data[account_name].copy()
				if request["reverse_sign"]:
					account_obj.reverse_values()

				account_values = account_obj.get_ordered_values(period_keys, balance_type)

				# Add to totals
				for i, value in enumerate(account_values):
					total_values[i] += value

				# Store for detailed view
				request_account_details[account_name] = account_obj

			summary[ref_code] = total_values
			account_details[ref_code] = request_account_details

		return {"account_data": account_data, "summary": summary, "account_details": account_details}

	@staticmethod
	def _parse_account_filter(company, report_row) -> list[dict]:
		"""
		Find accounts matching filter criteria.

		Example:
		- Input: '["account_type", "=", "Cash"]'
		- Output: [{"name": "Cash - COMP", "account_name": "Cash", "account_number": "1001"}]
		"""
		filter_parser = FilterExpressionParser()
		account = frappe.qb.DocType("Account")

		# Only enabled leaf accounts take part in reports.
		query = (
			frappe.qb.from_(account)
			.select(account.name, account.account_name, account.account_number)
			.where(account.disabled == 0)
			.where(account.is_group == 0)
		)

		if company:
			query = query.where(account.company == company)

		where_condition = filter_parser.build_condition(report_row, account)
		if where_condition is None:
			# No usable filter expression on the row → no accounts.
			return []
		query = query.where(where_condition)

		query = query.orderby(account.name)
		return query.run(as_dict=True)

	@staticmethod
	def get_filtered_accounts(company: str, account_rows: list) -> list[str]:
		"""Return docnames of enabled leaf accounts matching *account_rows* filters."""
		filter_parser = FilterExpressionParser()
		account = frappe.qb.DocType("Account")

		query = (
			frappe.qb.from_(account)
			.select(account.name)
			.distinct()
			.where(account.disabled == 0)
			.where(account.is_group == 0)
			.orderby(account.name)
		)

		if company:
			query = query.where(account.company == company)

		if conditions := filter_parser.build_conditions(account_rows, account):
			query = query.where(conditions)

		return query.run(pluck=True)
class FinancialQueryBuilder:
"""Centralized query builder for financial data"""
def __init__(self, filters: dict[str, Any], periods: list[dict]):
	"""Keep filters/periods and prepare the per-account metadata cache."""
	self.filters = filters
	self.periods = periods
	self.company = filters.get("company")
	self.account_meta = {}  # {name: {account_name, account_number}}
def fetch_account_balances(self, accounts: list[dict]) -> dict[str, AccountData]:
	"""
	Fetch account balances for all periods with optimization.

	Steps: get opening balances → fetch GL entries → calculate running totals

	- accounts: list of accounts with details
	```
	{
		"name": "Cash - COMP",
		"account_name": "Cash",
		"account_number": "1001",
	}
	```

	Returns:
		dict: {account: AccountData}
	"""
	# De-duplicate: the same account may appear in several requests.
	account_names = list({acc.name for acc in accounts})
	# NOTE: do not change accounts list as it is used in caller function
	self.account_meta = {
		acc.name: {"account_name": acc.account_name, "account_number": acc.account_number}
		for acc in accounts
	}

	balances_data = self._get_opening_balances(account_names)
	gl_data = self._get_gl_movements(account_names)

	self._calculate_running_balances(balances_data, gl_data)
	self._handle_balance_accumulation(balances_data)

	return balances_data
def _get_opening_balances(self, accounts: list[str]) -> dict[str, dict[str, dict[str, float]]]:
	"""
	Return opening balances for *all accounts* defaulting to zero.

	Prefers the latest submitted Period Closing Voucher before the report
	start; falls back to summing GL entries when closing balances are
	disabled, absent, or net to zero.
	"""
	if frappe.get_single_value("Accounts Settings", "ignore_account_closing_balance"):
		return self._get_opening_balances_from_gl(accounts)

	first_period_start = getdate(self.periods[0]["from_date"])

	last_closing_voucher = frappe.db.get_all(
		"Period Closing Voucher",
		filters={
			"docstatus": 1,
			"company": self.company,
			"period_end_date": ("<", first_period_start),
		},
		fields=["period_end_date", "name"],
		order_by="period_end_date desc",
		limit=1,
	)

	if last_closing_voucher:
		closing_voucher = last_closing_voucher[0]
		closing_data = self._get_closing_balances(accounts, closing_voucher.name)
		# NOTE(review): a snapshot whose balances sum to exactly zero is treated
		# as unusable and recomputed from GL — a legitimately net-zero book
		# would also take the slower GL path.
		if sum(closing_data.values()) != 0.0:
			return self._rebase_closing_balances(closing_data, closing_voucher.period_end_date)

	return self._get_opening_balances_from_gl(accounts)
def _get_closing_balances(self, account_names: list[str], closing_voucher: str) -> dict[str, float]:
	"""Debit-minus-credit closing balance per account from the given Period
	Closing Voucher; accounts without a stored row default to 0.0."""
	closing_balances = {account: 0.0 for account in account_names}

	acb_table = frappe.qb.DocType("Account Closing Balance")
	query = (
		frappe.qb.from_(acb_table)
		.select(
			acb_table.account,
			(acb_table.debit - acb_table.credit).as_("balance"),
		)
		.where(acb_table.company == self.company)
		.where(acb_table.account.isin(account_names))
		.where(acb_table.period_closing_voucher == closing_voucher)
	)
	query = self._apply_standard_filters(query, acb_table, "Account Closing Balance")
	results = self._execute_with_permissions(query, "Account Closing Balance")

	for row in results:
		closing_balances[row["account"]] = row["balance"]

	return closing_balances
def _rebase_closing_balances(
	self, closing_data: dict[str, float], closing_date: str
) -> dict[str, dict[str, dict[str, float]]]:
	"""Turn closing balances as of *closing_date* into opening balances of the
	first report period, adding GL movements for any gap between the dates."""
	balances_data = {}
	first_period_key = self.periods[0]["key"]

	report_start = getdate(self.periods[0]["from_date"])
	closing_end = getdate(closing_date)
	# More than one day between snapshot and report start → GL entries may
	# exist in between and must be folded into the opening balance.
	has_gap = date_diff(report_start, closing_end) > 1

	gap_movements = {}
	if has_gap:
		gap_movements = self._get_gap_movements(list(closing_data.keys()), closing_date, report_start)

	for account, closing_balance in closing_data.items():
		gap_movement = gap_movements.get(account, 0.0)
		opening_balance = closing_balance + gap_movement

		account_data = AccountData(account=account, **self._get_account_meta(account))
		# Only the first period's opening is seeded here; the running-balance
		# pass fills in the rest.
		account_data.add_period(PeriodValue(first_period_key, opening_balance, 0, 0))
		balances_data[account] = account_data

	return balances_data
def _get_opening_balances_from_gl(self, accounts: list[str]) -> dict:
	"""Derive opening balances purely from GL entries (no closing snapshot)."""
	# Simulate zero closing balances
	zero_closing_balances = {account: 0.0 for account in accounts}

	# Use a very early date so the entire GL history before the report start
	# counts as "gap" movement.
	earliest_date = "1900-01-01"

	return self._rebase_closing_balances(zero_closing_balances, earliest_date)
def _get_gap_movements(self, account_names: list[str], from_date: str, to_date: str) -> dict[str, float]:
	"""Net debit-minus-credit movement per account strictly between
	*from_date* and *to_date* (both endpoints excluded)."""
	gl_table = frappe.qb.DocType("GL Entry")
	query = (
		frappe.qb.from_(gl_table)
		.select(gl_table.account, Sum(gl_table.debit - gl_table.credit).as_("movement"))
		.where(gl_table.company == self.company)
		.where(gl_table.is_cancelled == 0)
		.where(gl_table.account.isin(account_names))
		.where(gl_table.posting_date > from_date)
		.where(gl_table.posting_date < to_date)
		.groupby(gl_table.account)
	)
	query = self._apply_standard_filters(query, gl_table)
	results = self._execute_with_permissions(query, "GL Entry")
	# Guard against NULL sums coming back from the database.
	return {row["account"]: row["movement"] or 0.0 for row in results}
def _get_gl_movements(self, account_names: list[str]) -> list[dict]:
    """Fetch per-account net movement (debit - credit) from the GL, pivoted
    into one column per reporting period (keyed by the period's ``key``)."""
    gl_table = frappe.qb.DocType("GL Entry")
    query = (
        frappe.qb.from_(gl_table)
        .select(gl_table.account)
        .where(gl_table.company == self.company)
        .where(gl_table.is_cancelled == 0)
        .where(gl_table.account.isin(account_names))
        .where(gl_table.posting_date >= self.periods[0]["from_date"])
        .groupby(gl_table.account)
    )
    # Opening entries are excluded by default; an Accounts Settings flag
    # lets reports include them
    if not frappe.get_single_value("Accounts Settings", "ignore_is_opening_check_for_reporting"):
        query = query.where(gl_table.is_opening == "No")
    # Add period-specific columns:
    # SUM(CASE WHEN posting_date in period THEN debit - credit ELSE 0 END)
    for period in self.periods:
        period_condition = (
            Case()
            .when(
                (gl_table.posting_date >= period["from_date"])
                & (gl_table.posting_date <= period["to_date"]),
                gl_table.debit - gl_table.credit,
            )
            .else_(0)
        )
        query = query.select(Sum(period_condition).as_(period["key"]))
    query = self._apply_standard_filters(query, gl_table)
    return self._execute_with_permissions(query, "GL Entry")
def _calculate_running_balances(self, balances_data: dict, gl_data: list[dict]) -> dict:
    """Fold per-period GL movements into running opening/closing balances.

    Mutates *balances_data* in place (adding AccountData entries for accounts
    that only appear in the GL) and returns it — the method was annotated
    ``-> dict`` but previously returned None implicitly.
    """
    gl_dict = {row["account"]: row for row in gl_data}
    accounts = set(balances_data.keys()) | set(gl_dict.keys())
    for account in accounts:
        if account not in balances_data:
            balances_data[account] = AccountData(account=account, **self._get_account_meta(account))
        account_data: AccountData = balances_data[account]
        gl_movement = gl_dict.get(account, {})
        # Seed the running balance from the rebased opening of the first
        # period when one exists; otherwise start from zero.
        if account_data.has_periods():
            first_period = account_data.get_period(self.periods[0]["key"])
            current_balance = first_period.get_value("Opening Balance") if first_period else 0.0
        else:
            current_balance = 0.0
        for period in self.periods:
            period_key = period["key"]
            movement = gl_movement.get(period_key, 0.0)
            closing_balance = current_balance + movement
            account_data.add_period(PeriodValue(period_key, current_balance, closing_balance, movement))
            # next period opens where this one closed
            current_balance = closing_balance
    return balances_data
def _handle_balance_accumulation(self, balances_data):
for account_data in balances_data.values():
account_data: AccountData
accumulated_values = self.filters.get("accumulated_values")
if accumulated_values is None:
# respect user setting if not in filters
# closing = accumulated
# movement = unaccumulated
continue
# for legacy reports
elif accumulated_values:
account_data.accumulate_values()
else:
account_data.unaccumulate_values()
def _apply_standard_filters(self, query, table, doctype: str = "GL Entry"):
    """Apply report-level filters (closing entries, project, cost center,
    finance book, accounting dimensions) to *query*.

    NOTE(review): mutates ``self.filters`` in place — cost centers and tree
    dimensions are expanded to include children and written back.
    """
    if self.filters.get("ignore_closing_entries"):
        if doctype == "GL Entry":
            query = query.where(table.voucher_type != "Period Closing Voucher")
        else:
            # non-GL tables flag closing entries with a dedicated column
            query = query.where(table.is_period_closing_voucher_entry == 0)
    if self.filters.get("project"):
        projects = self.filters.get("project")
        if isinstance(projects, str):
            projects = [projects]
        query = query.where(table.project.isin(projects))
    if self.filters.get("cost_center"):
        # expand to include all child cost centers
        self.filters.cost_center = get_cost_centers_with_children(self.filters.cost_center)
        query = query.where(table.cost_center.isin(self.filters.cost_center))
    finance_book = self.filters.get("finance_book")
    if self.filters.get("include_default_book_entries"):
        default_book = frappe.get_cached_value("Company", self.filters.company, "default_finance_book")
        # a finance-book filter that conflicts with the company default is an error
        if finance_book and default_book and cstr(finance_book) != cstr(default_book):
            frappe.throw(
                _("To use a different finance book, please uncheck 'Include Default FB Entries'")
            )
        query = query.where(
            (table.finance_book.isin([cstr(finance_book), cstr(default_book), ""]))
            | (table.finance_book.isnull())
        )
    else:
        query = query.where(
            (table.finance_book.isin([cstr(finance_book), ""])) | (table.finance_book.isnull())
        )
    # user-defined accounting dimensions act as extra filter columns
    dimensions = get_accounting_dimensions(as_list=False)
    for dimension in dimensions:
        if self.filters.get(dimension.fieldname):
            if frappe.get_cached_value("DocType", dimension.document_type, "is_tree"):
                # tree dimensions match the selected node plus all children
                self.filters[dimension.fieldname] = get_dimension_with_children(
                    dimension.document_type, self.filters.get(dimension.fieldname)
                )
            query = query.where(table[dimension.fieldname].isin(self.filters.get(dimension.fieldname)))
    return query
def _execute_with_permissions(self, query, doctype):
    """Run *query* with the current user's permission (match) conditions
    for *doctype* applied, returning rows as dicts."""
    from frappe.desk.reportview import build_match_conditions

    conditions = build_match_conditions(doctype)
    restricted = query.where(Bracket(LiteralValue(conditions))) if conditions else query
    return restricted.run(as_dict=True)
def _get_account_meta(self, account: str) -> dict[str, Any]:
return self.account_meta.get(account, {})
class FilterExpressionParser:
    """Direct filter expression to SQL condition builder"""

    def __init__(self):
        self.validator = AccountFilterValidator()

    def build_conditions(self, report_rows, table):
        """OR together the filter conditions of all *report_rows*.

        Returns None when no row yields a usable condition — previously an
        empty/invalid row set crashed, since ``reduce`` over an empty
        sequence raises TypeError.
        """
        conditions = []
        for row in report_rows or []:
            condition = self.build_condition(row, table)
            if condition is not None:
                conditions.append(condition)
        if not conditions:
            return None
        # ensure brackets in or condition
        return reduce(lambda a, b: (a) | (b), conditions)

    def build_condition(self, report_row, table):
        """
        Build SQL condition directly from filter formula.
        Supports:
        1. Simple condition: ["field", "operator", "value"]
           Example: ["account_type", "=", "Income"]
        2. Complex logical conditions:
           {"and": [condition1, condition2, ...]}  # All conditions must be true
           {"or": [condition1, condition2, ...]}   # Any condition can be true
           Example:
           {
               "and": [
                   ["account_type", "=", "Income"],
                   {"or": [
                       ["category", "=", "Direct Income"],
                       ["category", "=", "Indirect Income"]
                   ]}
               ]
           }
        Returns:
            SQL condition object or None if invalid
        """
        filter_formula = report_row.calculation_formula
        if not filter_formula:
            return None
        errors = self.validator.validate(report_row)
        if not errors.is_valid:
            error_messages = [str(issue) for issue in errors.issues]
            frappe.log_error(f"Filter validation errors found:\n{'<br><br>'.join(error_messages)}")
            return None
        try:
            # literal_eval only accepts Python literals — no code execution
            parsed = ast.literal_eval(filter_formula)
            return self._build_from_parsed(parsed, table)
        except (ValueError, SyntaxError) as e:
            frappe.log_error(f"Invalid filter formula syntax: {filter_formula} - {e}")
            return None
        except Exception as e:
            frappe.log_error(f"Failed to build condition from formula: {filter_formula} - {e}")
            return None

    def _build_from_parsed(self, parsed, table):
        """Dispatch on node shape: dict -> logical group, list -> triple."""
        if isinstance(parsed, dict):
            return self._build_logical_condition(parsed, table)
        if isinstance(parsed, list):
            return self._build_simple_condition(parsed, table)
        return None

    def _build_simple_condition(self, condition_list: list, table):
        """Build a condition from a ``[field, operator, value]`` triple."""
        field_name, operator, value = condition_list
        if value is None:
            return None
        field = getattr(table, field_name, None)
        operator_fn = OPERATOR_MAP.get(operator.casefold())
        # Unknown column or operator: skip the condition instead of raising
        # a TypeError on the call below.
        if field is None or operator_fn is None:
            return None
        # Auto-wrap bare LIKE patterns so they match substrings; only string
        # values can carry a pattern.
        if "like" in operator.casefold() and isinstance(value, str) and "%" not in value:
            value = f"%{value}%"
        return operator_fn(field, value)

    def _build_logical_condition(self, condition_dict: dict, table):
        """Build SQL condition from logical {"and/or": [...]} format"""
        logical_op = next(iter(condition_dict.keys())).lower()
        sub_conditions = condition_dict.get(logical_op)
        # recursive
        built_conditions = []
        for sub_condition in sub_conditions:
            condition = self._build_from_parsed(sub_condition, table)
            if condition is not None:
                built_conditions.append(condition)
        if not built_conditions:
            return None
        if len(built_conditions) == 1:
            return built_conditions[0]
        # combine
        if logical_op == "and":
            return reduce(lambda a, b: a & b, built_conditions)
        else:  # logical_op == "or"
            return reduce(lambda a, b: a | b, built_conditions)
class FormulaFieldExtractor:
"""Extract field values from filter formulas without SQL execution"""
def __init__(self, field_name: str, exclude_operators: list[str] | None = None):
"""
Initialize field extractor.
Args:
field_name: The field to extract values for (e.g., "account_category")
exclude_operators: List of operators to exclude (e.g., ["like"])
"""
self.field_name = field_name
self.exclude_operators = [op.lower() for op in (exclude_operators or [])]
def extract_from_rows(self, rows: list) -> set:
values = set()
for row in rows:
if not hasattr(row, "calculation_formula") or not row.calculation_formula:
continue
try:
parsed = ast.literal_eval(row.calculation_formula)
self._extract_recursive(parsed, values)
except (ValueError, SyntaxError):
continue # Skip rows with invalid formulas
return values
def _extract_recursive(self, parsed, values: set):
if isinstance(parsed, list) and len(parsed) == 3:
# Simple condition: ["field", "operator", "value"]
field, operator, value = parsed
if field == self.field_name and operator.lower() not in self.exclude_operators:
if isinstance(value, str):
values.add(value)
elif isinstance(value, list):
# Handle "in" operator with list of values
values.update(v for v in value if isinstance(v, str))
elif isinstance(parsed, dict):
# Logical condition: {"and/or": [...]}
for sub_conditions in parsed.values():
if isinstance(sub_conditions, list):
for sub_condition in sub_conditions:
self._extract_recursive(sub_condition, values)
class FormulaFieldUpdater:
    """Update field values in filter formulas"""

    def __init__(
        self, field_name: str, value_mapping: dict[str, str], exclude_operators: list[str] | None = None
    ):
        """
        Initialize field updater.
        Args:
            field_name: The field to update values for (e.g., "account_category")
            value_mapping: Mapping of old values to new values (e.g., {"Old Name": "New Name"})
            exclude_operators: List of operators to exclude from updates (e.g., ["like", "not like"])
        """
        self.field_name = field_name
        self.value_mapping = value_mapping
        self.exclude_operators = [op.lower() for op in (exclude_operators or [])]

    def update_in_rows(self, rows: dict[str, str]) -> dict[str, dict[str, str]]:
        """Rewrite formulas and persist the changed ones.

        Args:
            rows: Mapping of Financial Report Row name -> calculation_formula
                (it is iterated with ``.items()``, so it must be a dict — the
                previous ``list`` annotation was wrong).
        Returns:
            Mapping of row name -> {"calculation_formula": new_formula} for
            rows that actually changed; these are bulk-updated in the DB.
        """
        updated_rows = {}
        for row_name, formula in rows.items():
            if not formula:
                continue
            try:
                parsed = ast.literal_eval(formula)
                updated = self._update_recursive(parsed)
                if updated != parsed:
                    # NOTE(review): written back as JSON while being read with
                    # ast.literal_eval — fine for the str/list values handled
                    # here, but would break on True/False/None literals.
                    updated_formula = json.dumps(updated)
                    updated_rows[row_name] = {"calculation_formula": updated_formula}
            except (ValueError, SyntaxError):
                continue  # Skip rows with invalid formulas
        if updated_rows:
            frappe.db.bulk_update("Financial Report Row", updated_rows, update_modified=False)
        return updated_rows

    def _update_recursive(self, parsed):
        """Return a copy of *parsed* with matching field values remapped."""
        if isinstance(parsed, list) and len(parsed) == 3:
            # Simple condition: ["field", "operator", "value"]
            field, operator, value = parsed
            if field == self.field_name and operator.lower() not in self.exclude_operators:
                updated_value = self._update_value(value)
                return [field, operator, updated_value]
            return parsed
        elif isinstance(parsed, dict):
            # Logical condition: {"and/or": [...]}
            updated_dict = {}
            for key, sub_conditions in parsed.items():
                updated_conditions = [
                    self._update_recursive(sub_condition) for sub_condition in sub_conditions
                ]
                updated_dict[key] = updated_conditions
            return updated_dict
        return parsed

    def _update_value(self, value):
        """Map a scalar, or each string member of a list, through value_mapping."""
        if isinstance(value, str):
            return self.value_mapping.get(value, value)
        elif isinstance(value, list):
            # Handle "in" operator with list of values
            return [self.value_mapping.get(v, v) if isinstance(v, str) else v for v in value]
        return value
@frappe.whitelist()
def get_filtered_accounts(company: str, account_rows: str | list):
    """Whitelisted endpoint: resolve the accounts matched by template rows."""
    frappe.has_permission("Financial Report Template", ptype="read", throw=True)
    # HTTP callers send the rows as a JSON string; internal callers pass a list
    rows = (
        json.loads(account_rows, object_hook=frappe._dict)
        if isinstance(account_rows, str)
        else account_rows
    )
    return DataCollector.get_filtered_accounts(company, rows)
@frappe.whitelist()
def get_children_accounts(
    doctype: str,
    parent: str,
    company: str,
    filtered_accounts: list[str] | str | None = None,
    missed: bool = False,
    is_root: bool = False,
    include_disabled: bool = False,
):
    """
    Get children accounts based on the provided filters to view in tree.
    Args:
        doctype: DocType to check read permission against and fetch children for.
        parent: The parent account to get children for.
        company: The company to filter accounts by.
        filtered_accounts: Accounts matched by the template's filters, as a
            list or a JSON-encoded string (the old docs called this
            ``account_rows``, which is not a parameter of this function).
        missed:
            - If True, only missed by filters accounts will be included.
            - If False, only filtered accounts will be included.
        is_root: Whether the parent is a root account.
        include_disabled: Whether to include disabled accounts.
    Example:
        ```python
        [
            {
                value: "Current Liabilities - WP",
                expandable: 1,
                root_type: "Liability",
                account_currency: "USD",
                parent: "Source of Funds (Liabilities) - WP",
            },
            {
                value: "Non-Current Liabilities - WP",
                expandable: 1,
                root_type: "Liability",
                account_currency: "USD",
                parent: "Source of Funds (Liabilities) - WP",
            },
        ]
        ```
    """
    frappe.has_permission(doctype, ptype="read", throw=True)
    children_accounts = get_children(
        doctype, parent, company, is_root=is_root, include_disabled=include_disabled
    )
    if not children_accounts:
        return []
    if isinstance(filtered_accounts, str):
        filtered_accounts = frappe.parse_json(filtered_accounts)
    if not filtered_accounts:
        # no filter result: in "missed" mode everything counts as missed
        return children_accounts if missed else []
    valid_accounts = []
    for account in children_accounts:
        # group nodes are always kept so the tree stays navigable
        if account.expandable:
            valid_accounts.append(account)
            continue
        is_in_filtered = account.value in filtered_accounts
        # XOR of missed/membership: keep hits in normal mode, misses in missed mode
        if (missed and not is_in_filtered) or (not missed and is_in_filtered):
            valid_accounts.append(account)
    return valid_accounts
# ============================================================================
# PROCESS CALCULATIONS
# ============================================================================
class RowProcessor:
    """
    Processes individual rows of the financial report template.
    Handles dependency resolution and calculation order.
    """

    def __init__(self, context: ReportContext):
        self.context = context
        self.period_list = context.period_list
        self.row_values = {}  # reference_code -> computed values, consumed by formulas
        self.dependency_resolver = DependencyResolver(context.template)

    def process_all_rows(self) -> list[RowData]:
        """Process rows in dependency order, then restore template order."""
        summary = self.context.raw_data.get("summary", {})
        details = self.context.raw_data.get("account_details", {})
        processed = [
            self._process_single_row(row, summary, details)
            for row in self.dependency_resolver.get_processing_order()
        ]
        # Display order follows the template's idx, not calculation order.
        processed.sort(key=lambda row_data: getattr(row_data.row, "idx", 0) or 0)
        return processed

    def _process_single_row(self, row, account_summary: dict, account_details: dict) -> RowData:
        """Dispatch a row to the handler matching its data source."""
        source = row.data_source
        if source == "Account Data":
            return self._process_account_row(row, account_summary, account_details)
        if source == "Custom API":
            return self._process_api_row(row)
        if source == "Calculated Amount":
            return self._process_formula_row(row)
        if source == "Blank Line":
            return self._process_blank_row(row)
        if source == "Column Break":
            return self._process_column_break_row(row)
        if source == "Section Break":
            return self._process_section_break_row(row)
        return RowData(row=row, values=[0.0] * len(self.period_list))

    def _process_account_row(self, row, account_summary: dict, account_details: dict) -> RowData:
        """Look up precomputed account values and details by reference code."""
        code = row.reference_code
        values = account_summary.get(code, [0.0] * len(self.period_list))
        if code:
            self.row_values[code] = values
        return RowData(row=row, values=values, account_details=account_details.get(code, {}))

    def _process_api_row(self, row) -> RowData:
        """Call the whitelisted method named in calculation_formula."""
        method_path = row.calculation_formula
        # TODO
        try:
            values = frappe.call(
                method_path, filters=self.context.filters, periods=self.period_list, row=row
            )
            if row.reverse_sign:
                values = [-value for value in values]
            # TODO: add support for server script
            # use form_dict to pass input in server script
        except Exception as exc:
            frappe.log_error(f"Custom API Error: {method_path} - {exc!s}")
            values = [0.0] * len(self.period_list)
        if row.reference_code:
            self.row_values[row.reference_code] = values
        return RowData(row=row, values=values)

    def _process_formula_row(self, row) -> RowData:
        """Evaluate a calculated row against already-computed row values."""
        values = FormulaCalculator(self.row_values, self.period_list).evaluate_formula(row)
        if row.reference_code:
            self.row_values[row.reference_code] = values
        return RowData(row=row, values=values)

    def _process_blank_row(self, row) -> RowData:
        """Blank spacer line: empty strings for every period."""
        return RowData(row=row, values=[""] * len(self.period_list))

    def _process_column_break_row(self, row) -> RowData:
        """Layout marker only; carries no values."""
        return RowData(row=row, values=[])

    def _process_section_break_row(self, row) -> RowData:
        """Layout marker only; carries no values."""
        return RowData(row=row, values=[])
class DependencyResolver:
    """Orders template rows so formula rows run after their dependencies."""

    def __init__(self, template):
        self.template: FinancialReportTemplate = template
        self.rows = template.rows
        self.row_map = {row.reference_code: row for row in self.rows if row.reference_code}
        self.dependencies = {}
        self._validate_dependencies()

    def _validate_dependencies(self):
        """Validate dependencies using the validation framework and cache
        the dependency graph it discovered."""
        validator = DependencyValidator(self.template)
        result = validator.validate()
        result.notify_user()
        self.dependencies = validator.dependencies

    def get_processing_order(self) -> list:
        """Return rows grouped API -> account -> formula (topologically
        sorted) -> everything else."""
        # rows by type
        api_rows = []
        account_rows = []
        formula_rows = []
        other_rows = []
        for row in self.rows:
            if row.data_source == "Custom API":
                api_rows.append(row)
            elif row.data_source == "Account Data":
                account_rows.append(row)
            elif row.data_source == "Calculated Amount":
                formula_rows.append(row)
            else:
                other_rows.append(row)
        ordered_rows = api_rows + account_rows
        # sort formula rows so dependents come after their dependencies
        if formula_rows:
            ordered_rows.extend(self._topological_sort(formula_rows))
        ordered_rows.extend(other_rows)
        return ordered_rows

    def _topological_sort(self, formula_rows: list) -> list:
        """Kahn's algorithm over the formula-row dependency graph.

        Rows that never reach in-degree 0 (cycles) or that have no reference
        code are appended at the end in their original order instead of
        being dropped.
        """
        formula_row_map = {row.reference_code: row for row in formula_rows if row.reference_code}
        adj_list = {code: [] for code in formula_row_map}
        in_degree = {code: 0 for code in formula_row_map}
        # Build edges dep -> dependent and count incoming edges
        for code in formula_row_map:
            for dep in self.dependencies.get(code, []):
                if dep in formula_row_map:  # only edges within formula rows
                    adj_list[dep].append(code)
                    in_degree[code] += 1
        # An advancing head index replaces list.pop(0), which is O(n) per
        # call and made the whole sort quadratic.
        queue = [code for code, degree in in_degree.items() if degree == 0]
        head = 0
        result = []
        while head < len(queue):
            current = queue[head]
            head += 1
            result.append(formula_row_map[current])
            # Reduce in-degree of every dependent
            for neighbor in adj_list[current]:
                in_degree[neighbor] -= 1
                if in_degree[neighbor] == 0:
                    queue.append(neighbor)
        # Add any remaining formula rows (cycles, missing codes)
        result_set = set(result)
        for row in formula_rows:
            if row not in result_set:
                result.append(row)
        return result
class FormulaCalculator:
    """Evaluates calculated-amount formulas against previously computed rows."""

    def __init__(self, row_data: dict[str, list[float]], period_list: list[dict]):
        self.row_data = row_data
        self.period_list = period_list
        self.precision = get_currency_precision()
        self.validator = CalculationFormulaValidator(set(row_data.keys()))
        # whitelisted math helpers available inside formulas
        self.math_functions = {
            "abs": abs,
            "round": round,
            "min": min,
            "max": max,
            "sum": sum,
            "sqrt": math.sqrt,
            "pow": math.pow,
            "ceil": math.ceil,
            "floor": math.floor,
        }

    def evaluate_formula(self, report_row: dict[str, Any]) -> list[float]:
        """Validate the row's formula, then evaluate it once per period."""
        validation_result = self.validator.validate(report_row)
        formula = report_row.calculation_formula
        sign = -1 if report_row.reverse_sign else 1
        if validation_result.issues:
            # TODO: Throw?
            messages = "<br><br>".join(issue.message for issue in validation_result.issues)
            frappe.log_error(f"Formula validation errors found:\n{messages}")
            return [0.0] * len(self.period_list)
        return [
            self._evaluate_for_period(formula, index, sign)
            for index in range(len(self.period_list))
        ]

    def _evaluate_for_period(self, formula: str, period_index: int, negation_factor: int) -> float:
        """Safely evaluate *formula* for one period; errors log and yield 0.0."""
        # TODO: consistent error handling
        try:
            namespace = self._build_context(period_index)
            outcome = frappe.safe_eval(formula, namespace)
            return flt(outcome * negation_factor, self.precision)
        except ZeroDivisionError:
            frappe.log_error(f"Division by zero in formula: {formula}")
            return 0.0
        except Exception as e:
            frappe.log_error(f"Formula evaluation error: {formula} - {e!s}")
            return 0.0

    def _build_context(self, period_index: int) -> dict[str, Any]:
        """Names visible to the formula: this period's row values plus the
        math helpers; missing/None values default to 0.0."""
        namespace = {
            code: (values[period_index] or 0.0) if period_index < len(values) else 0.0
            for code, values in self.row_data.items()
        }
        namespace.update(self.math_functions)
        return namespace
# ============================================================================
# DATA FORMATTING
# ============================================================================
class DataFormatter:
    """Turns processed rows into display-ready row dicts plus column
    definitions, delegating layout to a single- or multi-segment formatter."""

    def __init__(self, context: ReportContext):
        self.context = context
        self.formatting_engine = FormattingEngine()
        self.organizer = SegmentOrganizer(context.processed_rows)
        # Multi-segment reports prefix every fieldname with the segment id;
        # single-segment reports keep the plain base columns.
        if self.organizer.is_single_segment:
            self.formatter = SingleSegmentFormatter(context, self.formatting_engine)
        else:
            self.formatter = MultiSegmentFormatter(context, self.formatting_engine)
        # Detail expansion mutates segment rows, so it must run after the
        # organizer has built its sections/segments.
        if context.show_detailed:
            self._expand_segments_with_details()

    def format_for_display(self) -> tuple[list[dict], list[dict]]:
        """Return (rows, columns) ready for the report view."""
        formatted_data = self._format_rows()
        columns = self._generate_columns()
        return formatted_data, columns

    def _format_rows(self) -> list[dict]:
        """Format every section row-by-row, attaching segment metadata."""
        formatted_data = []
        for section in self.organizer.sections:
            for row_index in range(self.organizer.max_rows(section)):
                formatted_row = self.formatter.format_row(section.segments, row_index)
                if formatted_row:  # Always include rows that were formatted
                    # Add metadata
                    formatted_row["_segment_info"] = {
                        "total_segments": len(section.segments),
                        "period_keys": [p["key"] for p in self.context.period_list],  # Add period keys
                    }
                    formatted_data.append(formatted_row)
        return formatted_data

    def _generate_columns(self) -> list[dict]:
        # Base columns are built once; the formatter clones them per segment
        # for multi-segment layouts.
        base_columns = get_columns(
            self.context.filters.get("periodicity"),
            self.context.period_list,
            self.context.filters.get("accumulated_values") in (1, None),
            self.context.filters.get("company"),
        )
        return self.formatter.get_columns(self.organizer.section_with_max_segments.segments, base_columns)

    def _expand_segments_with_details(self):
        """Insert per-account detail rows after each row carrying
        account_details (show-detailed mode)."""
        for section in self.organizer.sections:
            for segment in section.segments:
                expanded_rows = []
                for row_data in segment.rows:
                    expanded_rows.append(row_data)
                    if row_data.account_details:
                        detail_rows = DetailRowBuilder(self.context.filters, row_data).build()
                        expanded_rows.extend(detail_rows)
                segment.rows = expanded_rows
class FormattingEngine:
    """Manages formatting rules and application"""

    def __init__(self):
        self.initialize_rules()

    def initialize_rules(self):
        # Each rule pairs a predicate over RowData with display properties to
        # merge in when it matches; format_properties may be a dict or a
        # callable returning one.
        self.rules = [
            FormattingRule(
                condition=lambda rd: getattr(rd.row, "bold_text", False), format_properties={"bold": True}
            ),
            FormattingRule(
                condition=lambda rd: getattr(rd.row, "italic_text", False), format_properties={"italic": True}
            ),
            FormattingRule(
                condition=lambda rd: rd.is_detail_row, format_properties={"is_detail": True, "prefix": "• "}
            ),
            FormattingRule(
                condition=lambda rd: getattr(rd.row, "warn_if_negative", False),
                format_properties={"warn_if_negative": True},
            ),
            FormattingRule(
                condition=lambda rd: getattr(rd.row, "data_source", "") == "Blank Line",
                format_properties={"is_blank_line": True},
            ),
            FormattingRule(
                condition=lambda rd: getattr(rd.row, "fieldtype", ""),
                format_properties=lambda rd: {"fieldtype": getattr(rd.row, "fieldtype", "").strip()},
            ),
            FormattingRule(
                condition=lambda rd: getattr(rd.row, "color", ""),
                format_properties=lambda rd: {"color": getattr(rd.row, "color", "").strip()},
            ),
            FormattingRule(
                condition=lambda rd: getattr(rd.row, "data_source", "") == "Account Data",
                format_properties=lambda rd: {
                    "account_filters": getattr(rd.row, "calculation_formula", "").strip()
                },
            ),
        ]

    def get_formatting(self, row_data: RowData) -> dict[str, Any]:
        """Merge the properties of every rule that applies to *row_data*;
        later rules win on key collisions."""
        formatting = {}
        for rule in self.rules:
            if rule.applies_to(row_data):
                properties = rule.get_properties(row_data)
                formatting.update(properties)
        return formatting
class SegmentOrganizer:
    """Handles segment organization by `Column Break`, `Section Break` and metadata extraction"""

    def __init__(self, processed_rows: list[RowData]):
        self.sections = self._organize_into_sections(processed_rows)
        # ensure same segment length across sections
        max_segments = self.max_segments
        for section in self.sections:
            if len(section.segments) >= max_segments:
                continue
            # Pad with empty segments
            empty_segments = [SegmentData(index=i) for i in range(len(section.segments), max_segments)]
            section.segments.extend(empty_segments)

    def _organize_into_sections(self, rows: list[RowData]) -> list[SectionData]:
        """Split visible rows into sections at each `Section Break` marker;
        the break row's display_name labels the following section."""
        sections = []
        current_section_rows = []
        section_index = 0
        section_label = ""
        for row_data in rows:
            if not self._should_show_row(row_data):
                continue
            if row_data.row.data_source == "Section Break":
                # Process current section if we have rows
                if current_section_rows:
                    section_segments = self._organize_into_segments(current_section_rows, section_label)
                    sections.append(
                        SectionData(segments=section_segments, label=section_label, index=section_index)
                    )
                    section_index += 1
                    current_section_rows = []
                # Label for the next section
                section_label = getattr(row_data.row, "display_name", "") or ""
            else:
                current_section_rows.append(row_data)
        # Add final section
        if current_section_rows or not sections:
            section_segments = self._organize_into_segments(current_section_rows, section_label)
            sections.append(SectionData(segments=section_segments, label=section_label, index=section_index))
        return sections

    def _organize_into_segments(self, rows: list[RowData], section_label: str) -> list[SegmentData]:
        """Split a section's rows into segments at each `Column Break`; a
        labelled section gets a bold header row in its first segment and a
        blank spacer in the rest so the segments stay vertically aligned."""
        segments = []
        current_rows = []
        segment_index = 0
        segment_label = ""
        section_header = None
        if section_label:
            section_header = RowData(
                row=frappe._dict(
                    {
                        "data_source": "Blank Line",
                        "display_name": section_label,
                        "bold_text": True,
                    }
                )
            )
        for row_data in rows:
            if row_data.row.data_source == "Column Break":
                # Save current segment
                if section_header and current_rows:
                    current_rows.insert(0, section_header)
                    # subsequent segments get a plain spacer instead
                    section_header = RowData(row=frappe._dict({"data_source": "Blank Line"}))
                if current_rows:
                    segments.append(SegmentData(rows=current_rows, label=segment_label, index=segment_index))
                    segment_index += 1
                    current_rows = []
                # Label for the next segment
                segment_label = getattr(row_data.row, "display_name", "") or ""
            else:
                current_rows.append(row_data)
        # Add final segment
        if section_header and current_rows:
            current_rows.insert(0, section_header)
        if current_rows or not segments:
            segments.append(SegmentData(rows=current_rows, label=segment_label, index=segment_index))
        return segments

    @property
    def is_single_segment(self) -> bool:
        return self.max_segments == 1

    def max_rows(self, section: SectionData) -> int:
        """Longest segment length within *section* (0 when it has none)."""
        return max(len(seg.rows) for seg in section.segments) if section.segments else 0

    @property
    def max_segments(self) -> int:
        # NOTE(review): was annotated `-> bool`; this returns a count.
        return max(len(s.segments) for s in self.sections)

    @property
    def section_with_max_segments(self) -> SectionData:
        return max(self.sections, key=lambda s: len(s.segments))

    def _should_show_row(self, row_data: RowData) -> bool:
        """Visibility: blank lines always show; hidden calculations never;
        hide_when_empty rows only when some value is significant (> 0.01)."""
        row = row_data.row
        # Always show blank lines
        if row.data_source == "Blank Line":
            return True
        if getattr(row, "hidden_calculation", False):
            return False
        if getattr(row, "hide_when_empty", False):
            significant_values = [
                val for val in row_data.values if isinstance(val, int | float) and abs(flt(val)) > 0.01
            ]
            return len(significant_values) > 0
        return True
class RowFormatterBase(ABC):
    """Shared value-extraction logic for the segment formatters."""

    def __init__(self, context: ReportContext, formatting_engine: FormattingEngine):
        self.context = context
        self.period_list = context.period_list
        self.formatting_engine = formatting_engine

    @abstractmethod
    def format_row(self, segments: list[SegmentData], row_index: int) -> dict[str, Any]:
        pass

    @abstractmethod
    def get_columns(self, segments: list[SegmentData], base_columns: list[dict]) -> list[dict]:
        pass

    def _get_values(self, row_data: RowData) -> dict[str, Any]:
        """Build the flat field dict for one row: identity fields, one value
        per period key, and a grand total over the periods."""

        def _get_row_data(key: str, default: Any = "") -> Any:
            return getattr(row_data.row, key, default) or default

        def _get_filter_value(key: str, default: Any = "") -> Any:
            # NOTE(review): attribute access here vs dict .get() below —
            # works because filters is presumably a frappe._dict; confirm.
            return getattr(self.context.filters, key, default) or default

        child_accounts = []
        if row_data.account_details:
            child_accounts = list(row_data.account_details.keys())
        display_name = _get_row_data("display_name", "")
        values = {
            "account": _get_row_data("account", "") or display_name,
            "account_name": display_name,
            "acc_name": _get_row_data("account_name", ""),
            "acc_number": _get_row_data("account_number", ""),
            "child_accounts": child_accounts,
            "currency": self.context.currency or "",
            "indent": _get_row_data("indentation_level", 0),
            "period_start_date": _get_filter_value("period_start_date", ""),
            "period_end_date": _get_filter_value("period_end_date", ""),
            "total": 0,
        }
        for i, period in enumerate(self.period_list):
            period_value = self._get_period_value(row_data, i)
            values[period["key"]] = period_value
            # totals are only meaningful for non-accumulated values
            if self.context.filters.get("accumulated_values") == 0:
                values["total"] += flt(period_value)
        # avg for percent
        if self.context.filters.get("accumulated_values") == 0 and row_data.row.fieldtype == "Percent":
            values["total"] = values["total"] / len(self.period_list)
        return values

    def _get_period_value(self, row_data: RowData, period_index: int) -> Any:
        """Value for the period at *period_index*, or "" past the end."""
        if period_index < len(row_data.values):
            return row_data.values[period_index]
        return ""
class SingleSegmentFormatter(RowFormatterBase):
    """Formatter for reports that have a single column segment."""

    def format_row(self, segments: list[SegmentData], row_index: int) -> dict[str, Any]:
        """Format one row of the only segment; {} when out of range."""
        if not segments or row_index >= len(segments[0].rows):
            return {}
        row_data = segments[0].rows[row_index]
        # values first, display formatting overrides on key collisions
        return {
            **self._get_values(row_data),
            **self.formatting_engine.get_formatting(row_data),
        }

    def get_columns(self, segments: list[SegmentData], base_columns: list[dict]) -> list[dict]:
        """Left-align the account column; everything else stays as built."""
        for column in base_columns:
            if column["fieldname"] == "account":
                column["align"] = "left"
        return base_columns
class MultiSegmentFormatter(RowFormatterBase):
    """Formatter for side-by-side segments: every fieldname is prefixed with
    the segment id (``{segment.id}_{fieldname}``)."""

    def format_row(self, segments: list[SegmentData], row_index: int) -> dict[str, Any]:
        """Merge the row at *row_index* from every segment into one dict."""
        formatted = {"segment_values": {}}
        for segment in segments:
            if row_index < len(segment.rows):
                self._add_segment_data(formatted, segment.rows[row_index], segment)
            else:
                # this segment is shorter than its siblings
                self._add_empty_segment(formatted, segment)
        return formatted

    def get_columns(self, segments: list[SegmentData], base_columns: list[dict]) -> list[dict]:
        """Duplicate the base columns once per segment with prefixed names."""
        columns = []
        # TODO: Refactor
        for segment in segments:
            for col in base_columns:
                new_col = col.copy()
                new_col["fieldname"] = f"{segment.id}_{col['fieldname']}"
                if col["fieldname"] == "account":
                    new_col["label"] = segment.label or f"Account (Segment {segment.index + 1})"
                    new_col["align"] = "left"
                if segment.label and col["fieldname"] in [p["key"] for p in self.period_list]:
                    new_col["label"] = f"{segment.label} - {col['label']}"
                columns.append(new_col)
        return columns

    def _add_segment_data(self, formatted: dict, row_data: RowData, segment: SegmentData):
        """Write the row's values under prefixed keys plus segment_values."""
        segment_values = self._get_values(row_data)
        for key, value in segment_values.items():
            formatted[f"{segment.id}_{key}"] = value
        formatting = self.formatting_engine.get_formatting(row_data)
        segment_values.update(formatting)
        formatted["segment_values"][segment.id] = segment_values

    def _add_empty_segment(self, formatted: dict, segment: SegmentData):
        """Fill a shorter segment's cells with blanks."""
        # BUG FIX: this placeholder was written as "account_{id}" while the
        # columns and _add_segment_data both use the "{id}_account" prefix
        # convention, so empty segments left the real account column unset.
        formatted[f"{segment.id}_account"] = ""
        for period in self.period_list:
            formatted[f"{segment.id}_{period['key']}"] = ""
        formatted["segment_values"][segment.id] = {"is_blank_line": True}
class DetailRowBuilder:
    """Expands an account row into one indented detail row per child account."""

    def __init__(self, filters: dict, parent_row_data: RowData):
        self.filters = filters
        self.parent_row_data = parent_row_data

    def build(self) -> list[RowData]:
        """Create detail RowData entries for every child account of the parent."""
        details = self.parent_row_data.account_details
        if not details:
            return []
        parent_row = self.parent_row_data.row
        # the parent row decides which balance series the details display
        balance_type = getattr(parent_row, "balance_type", "Closing Balance")
        return [
            RowData(
                row=self._create_detail_row_object(account_data, parent_row),
                values=account_data.get_values_by_type(balance_type),
                is_detail_row=True,
                parent_reference=parent_row.reference_code,
            )
            for account_data in details.values()
        ]

    def _create_detail_row_object(self, account_data: AccountData, parent_row):
        """Build a lightweight object that mimics a template row, inheriting
        display traits from the parent and indenting one level deeper."""
        name = account_data.account_name or ""
        number = account_data.account_number or ""
        # "1000 - Cash" when a number exists, otherwise just the name
        label = f"{_(number)} - {_(name)}" if number else _(name)
        return type(
            "DetailRow",
            (),
            {
                "account": account_data.account,
                "display_name": label,
                "account_name": name,
                "account_number": number,
                "data_source": "Account Detail",
                "indentation_level": getattr(parent_row, "indentation_level", 0) + 1,
                "fieldtype": getattr(parent_row, "fieldtype", None),
                "bold_text": False,
                "italic_text": True,
                "reverse_sign": getattr(parent_row, "reverse_sign", False),
                "warn_if_negative": getattr(parent_row, "warn_if_negative", False),
                "hide_when_empty": getattr(parent_row, "hide_when_empty", False),
                "hidden_calculation": False,
            },
        )()
class ChartDataGenerator:
    """Builds a chart configuration from processed report rows.

    Rows flagged ``include_in_charts`` become datasets; the resulting chart
    config is stored in ``context.raw_data["chart"]`` and also returned.
    """

    def __init__(self, context: ReportContext):
        self.context = context
        self.processed_rows = context.processed_rows
        self.period_list = context.period_list
        self.filters = context.filters
        self.currency = context.currency

    def generate(self) -> dict[str, Any]:
        """Assemble the chart config; returns an empty dict when there is
        nothing to chart, otherwise the chart dict (also stored in raw_data)."""
        excluded_sources = ["Blank Line", "Column Break", "Section Break"]
        chart_rows = [
            row
            for row in self.processed_rows
            if getattr(row.row, "include_in_charts", False)
            and row.row.data_source not in excluded_sources
        ]
        if not chart_rows:
            return {}
        labels = [p.get("label") for p in self.period_list]
        datasets = []
        for row_data in chart_rows:
            display_name = getattr(row_data.row, "display_name", "")
            # Pad with zeros when a row has fewer values than periods.
            values = [
                flt(row_data.values[i], 2) if i < len(row_data.values) else 0.0
                for i in range(len(self.period_list))
            ]
            # only non-zero values: skip all-zero series to keep the chart readable
            if any(v != 0 for v in values):
                datasets.append({"name": display_name, "values": values})
        if not datasets:
            return {}
        # chart config: accumulated multi-period data reads better as a line chart
        if not self.filters.get("accumulated_values") or len(labels) <= 1:
            chart_type = "bar"
        else:
            chart_type = "line"
        chart = {
            "data": {"labels": labels, "datasets": datasets},
            "type": chart_type,
            "fieldtype": "Currency",
            "options": "currency",
            "currency": self.currency,
        }
        self.context.raw_data["chart"] = chart
        # Bug fix: previously the success path implicitly returned None despite
        # the declared dict return type; return the stored chart config as well.
        return chart
class GrowthViewTransformer:
    """Rewrites formatted rows in place so that each period column shows
    percentage growth relative to the previous period; the first period
    keeps its absolute value."""

    def __init__(self, context: ReportContext):
        self.context = context
        self.formatted_rows = context.raw_data.get("formatted_data", [])
        self.period_list = context.period_list

    def transform(self) -> None:
        """Replace period values with period-over-period growth percentages."""
        period_keys = [p["key"] for p in self.period_list]
        for row in self.formatted_rows:
            if row.get("is_blank_line"):
                continue
            updates = {}
            for idx, key in enumerate(period_keys):
                if idx == 0:
                    # No predecessor for the first period; keep the raw value.
                    updates[key] = row[key]
                else:
                    updates[key] = self._calculate_growth(row[period_keys[idx - 1]], row[key])
            row.update(updates)

    def _calculate_growth(self, previous_value: float, current_value: float) -> float | None:
        """Growth percentage from previous to current; None when current is missing."""
        if current_value is None:
            return None
        if previous_value == 0:
            # Convention: growth from zero is 100% when positive, otherwise 0%.
            return 100.0 if current_value > 0 else 0.0
        return flt(((current_value - previous_value) / abs(previous_value)) * 100, 2)
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/accounts/doctype/financial_report_template/financial_report_engine.py",
"license": "GNU General Public License v3.0",
"lines": 1410,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
frappe/erpnext:erpnext/accounts/doctype/financial_report_template/financial_report_template.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import os
import shutil
import frappe
from frappe.model.document import Document
from erpnext.accounts.doctype.account_category.account_category import import_account_categories
from erpnext.accounts.doctype.financial_report_template.financial_report_validation import TemplateValidator
class FinancialReportTemplate(Document):
    """Doctype controller for Financial Report Template.

    Validates row definitions on save and, in developer mode, exports the
    template (plus referenced account categories) as JSON fixtures under the
    owning module so they can be synced into other sites.
    """

    # begin: auto-generated types
    # This code is auto-generated. Do not modify anything in this block.

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from frappe.types import DF

        from erpnext.accounts.doctype.financial_report_row.financial_report_row import FinancialReportRow

        disabled: DF.Check
        module: DF.Link | None
        report_type: DF.Literal[
            "", "Profit and Loss Statement", "Balance Sheet", "Cash Flow", "Custom Financial Statement"
        ]
        rows: DF.Table[FinancialReportRow]
        template_name: DF.Data
    # end: auto-generated types

    def validate(self):
        # Run all template- and row-level validations; notify_user() shows
        # warnings via msgprint and raises (frappe.throw) on errors.
        validator = TemplateValidator(self)
        result = validator.validate()
        result.notify_user()

    def on_update(self):
        # Keep the on-disk fixture in sync with the saved document.
        self._export_template()

    def on_trash(self):
        # Remove the exported fixture directory when the template is deleted.
        self._delete_template()

    def _export_template(self):
        """Export this template as a JSON fixture into its module (no-op when
        the template is not assigned to a module)."""
        from frappe.modules.utils import export_module_json

        if not self.module:
            return
        export_module_json(self, True, self.module)
        self._export_account_categories()

    def _delete_template(self):
        """Delete the exported fixture directory (developer mode only)."""
        if not self.module or not frappe.conf.developer_mode:
            return
        module_path = frappe.get_module_path(self.module)
        dir_path = os.path.join(module_path, "financial_report_template", frappe.scrub(self.name))
        shutil.rmtree(dir_path, ignore_errors=True)

    def _export_account_categories(self):
        """Export Account Categories referenced by this template's rows into
        ``account_categories.json`` next to the template fixtures.

        Skipped outside developer mode and during fixture import, to avoid
        re-exporting what is currently being imported.
        """
        import json

        from erpnext.accounts.doctype.financial_report_template.financial_report_engine import (
            FormulaFieldExtractor,
        )

        if not self.module or not frappe.conf.developer_mode or frappe.flags.in_import:
            return
        # Extract category from rows ("like" filters are excluded because they
        # match patterns, not concrete category names)
        extractor = FormulaFieldExtractor(
            field_name="account_category", exclude_operators=["like", "not like"]
        )
        account_data_rows = [row for row in self.rows if row.data_source == "Account Data"]
        category_names = extractor.extract_from_rows(account_data_rows)
        if not category_names:
            return
        # Get path
        module_path = frappe.get_module_path(self.module)
        categories_file = os.path.join(module_path, "financial_report_template", "account_categories.json")
        # Load existing categories so categories exported by other templates
        # in the same module are preserved
        existing_categories = {}
        if os.path.exists(categories_file):
            try:
                with open(categories_file) as f:
                    existing_data = json.load(f)
                    existing_categories = {cat["account_category_name"]: cat for cat in existing_data}
            except (json.JSONDecodeError, KeyError):
                pass  # Create new file
        # Fetch categories from database
        if category_names:
            db_categories = frappe.get_all(
                "Account Category",
                filters={"account_category_name": ["in", list(category_names)]},
                fields=["account_category_name", "description"],
            )
            for cat in db_categories:
                existing_categories[cat["account_category_name"]] = cat
        # Sort by category name for stable, diff-friendly output
        sorted_categories = sorted(existing_categories.values(), key=lambda x: x["account_category_name"])
        # Write to file
        os.makedirs(os.path.dirname(categories_file), exist_ok=True)
        with open(categories_file, "w") as f:
            json.dump(sorted_categories, f, indent=2)
def sync_financial_report_templates(chart_of_accounts=None, existing_company=None):
    """Import financial report template fixtures from all installed apps.

    Skipped entirely when the chart of accounts is being created for an
    existing company (templates are likely already present). A chart of
    accounts may opt out of the default ERPNext templates, in which case only
    non-ERPNext apps are synced.
    """
    from erpnext.accounts.doctype.account.chart_of_accounts.chart_of_accounts import get_chart

    if existing_company:
        return

    skip_default_templates = False
    if chart_of_accounts:
        coa = get_chart(chart_of_accounts)
        skip_default_templates = bool(coa.get("disable_default_financial_report_template", False))

    for app in frappe.get_installed_apps():
        if skip_default_templates and app == "erpnext":
            continue
        _sync_templates_for(app)
def _sync_templates_for(app_name):
    """Import Financial Report Template fixtures shipped by ``app_name``.

    Scans every module of the app for a ``financial_report_template``
    directory, imports its account categories, then inserts any template JSON
    files whose templates do not yet exist in the database.
    """
    templates = []
    for module_name in frappe.local.app_modules.get(app_name) or []:
        module_path = frappe.get_module_path(module_name)
        template_path = os.path.join(module_path, "financial_report_template")
        if not os.path.isdir(template_path):
            continue
        import_account_categories(template_path)
        for template_dir in os.listdir(template_path):
            json_file = os.path.join(template_path, template_dir, f"{template_dir}.json")
            if os.path.isfile(json_file):
                templates.append(json_file)
    if not templates:
        return
    # ensure files are not exported while we are importing them
    frappe.flags.in_import = True
    try:
        for template_path in templates:
            with open(template_path) as f:
                template_data = frappe._dict(frappe.parse_json(f.read()))
            template_name = template_data.get("name")
            if not frappe.db.exists("Financial Report Template", template_name):
                doc = frappe.get_doc(template_data)
                doc.flags.ignore_mandatory = True
                doc.flags.ignore_permissions = True
                doc.flags.ignore_validate = True
                doc.insert()
    finally:
        # Bug fix: always clear the flag, even if an insert fails, so later
        # saves in this process are not silently treated as imports.
        frappe.flags.in_import = False
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/accounts/doctype/financial_report_template/financial_report_template.py",
"license": "GNU General Public License v3.0",
"lines": 130,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
frappe/erpnext:erpnext/accounts/doctype/financial_report_template/financial_report_validation.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import json
import re
from abc import ABC, abstractmethod
from dataclasses import dataclass, field
from typing import Any
import frappe
from frappe import _
from frappe.database.operator_map import OPERATOR_MAP
@dataclass
class ValidationIssue:
"""Represents a single validation issue"""
message: str
row_idx: int | None = None
field: str | None = None
details: dict[str, Any] = None
def __post_init__(self):
if self.details is None:
self.details = {}
def __str__(self) -> str:
prefix = f"Row {self.row_idx}: " if self.row_idx else ""
field_info = f"[{self.field}] " if self.field else ""
message = f"{prefix}{field_info}{self.message}"
return _(message)
@dataclass
class ValidationResult:
    """Accumulates validation errors and warnings for a template."""

    issues: list[ValidationIssue] = field(default_factory=list)
    warnings: list[ValidationIssue] = field(default_factory=list)

    @property
    def is_valid(self) -> bool:
        """True when no blocking errors were recorded."""
        return not self.issues

    @property
    def has_warnings(self) -> bool:
        return bool(self.warnings)

    @property
    def error_count(self) -> int:
        return len(self.issues)

    @property
    def warning_count(self) -> int:
        return len(self.warnings)

    def merge(self, other: "ValidationResult") -> "ValidationResult":
        """Absorb another result's errors and warnings; returns self for chaining."""
        self.issues += other.issues
        self.warnings += other.warnings
        return self

    def add_error(self, issue: ValidationIssue) -> None:
        """Add a critical error that prevents functionality"""
        self.issues.append(issue)

    def add_warning(self, issue: ValidationIssue) -> None:
        """Add a warning for recommendatory validation"""
        self.warnings.append(issue)

    def notify_user(self) -> None:
        """Show warnings via msgprint; raise via frappe.throw on errors."""
        warning_text = "<br><br>".join(str(w) for w in self.warnings)
        error_text = "<br><br>".join(str(e) for e in self.issues)
        if warning_text:
            frappe.msgprint(warning_text, title=_("Warnings"), indicator="orange")
        if error_text:
            frappe.throw(error_text, title=_("Errors"))
class TemplateValidator:
    """Main validator that orchestrates all validations"""

    def __init__(self, template):
        self.template = template
        # Template-level validators run once; the formula validator runs per row.
        self.validators = [
            TemplateStructureValidator(),
            DependencyValidator(template),
        ]
        self.formula_validator = FormulaValidator(template)

    def validate(self) -> ValidationResult:
        """Run template-level then row-level validations; return merged result."""
        result = ValidationResult([])
        for validator in self.validators:
            result.merge(validator.validate(self.template))
        # Valid Account fieldnames, used to vet account-filter expressions.
        account_fields = {field.fieldname for field in frappe.get_meta("Account").fields}
        for row in self.template.rows:
            result.merge(self.formula_validator.validate(row, account_fields))
        return result
class Validator(ABC):
    """Abstract base for validators that report into a ValidationResult."""

    @abstractmethod
    def validate(self, context: Any) -> ValidationResult:
        """Validate *context* and return the collected issues."""
class TemplateStructureValidator(Validator):
    """Checks reference-code hygiene and per-row required fields."""

    def validate(self, template) -> ValidationResult:
        combined = ValidationResult()
        combined.merge(self._validate_reference_codes(template))
        combined.merge(self._validate_required_fields(template))
        return combined

    def _validate_reference_codes(self, template) -> ValidationResult:
        """Reference codes must be well-formed and unique across rows."""
        result = ValidationResult()
        seen_codes = set()
        for row in template.rows:
            if not row.reference_code:
                continue
            ref_code = row.reference_code.strip()
            # Must start with a letter; then letters/digits/underscore/hyphen.
            if not re.match(r"^[A-Za-z][A-Za-z0-9_-]*$", ref_code):
                result.add_error(
                    ValidationIssue(
                        message=f"Invalid line reference format: '{ref_code}'. Must start with letter and contain only letters, numbers, underscores, and hyphens",
                        row_idx=row.idx,
                    )
                )
            if ref_code in seen_codes:
                result.add_error(
                    ValidationIssue(
                        message=f"Duplicate line reference: '{ref_code}'",
                        row_idx=row.idx,
                    )
                )
            seen_codes.add(ref_code)
        return result

    def _validate_required_fields(self, template) -> ValidationResult:
        """Balance type and formula are mandatory for data-bearing rows."""
        result = ValidationResult()
        for row in template.rows:
            if row.data_source == "Account Data" and not row.balance_type:
                result.add_error(
                    ValidationIssue(
                        message="Balance Type is required for Account Data",
                        row_idx=row.idx,
                    )
                )
            needs_formula = row.data_source in ["Account Data", "Calculated Amount", "Custom API"]
            if needs_formula and not row.calculation_formula:
                result.add_error(
                    ValidationIssue(
                        message=f"Formula is required for {row.data_source}",
                        row_idx=row.idx,
                    )
                )
        return result
class DependencyValidator(Validator):
    """Validates dependencies between calculated rows: detects circular
    references and references to codes not defined in the template."""

    def __init__(self, template):
        self.template = template
        # reference_code -> list of reference codes its formula depends on
        self.dependencies = self._build_dependency_graph()

    def validate(self, context=None) -> ValidationResult:
        """Run both dependency checks; *context* is unused (template is bound)."""
        result = ValidationResult()
        result.merge(self._validate_circular_dependencies())
        result.merge(self._validate_missing_dependencies())
        return result

    def _build_dependency_graph(self) -> dict[str, list[str]]:
        """Map each Calculated Amount row's code to the codes its formula uses."""
        graph = {}
        available_codes = {row.reference_code for row in self.template.rows if row.reference_code}
        for row in self.template.rows:
            if row.reference_code and row.data_source == "Calculated Amount" and row.calculation_formula:
                deps = extract_reference_codes_from_formula(row.calculation_formula, list(available_codes))
                if deps:
                    graph[row.reference_code] = deps
        return graph

    def _validate_circular_dependencies(self) -> ValidationResult:
        """
        Efficient cycle detection using DFS (Depth-First Search) with three-color algorithm:
        - WHITE (0): unvisited node
        - GRAY (1): currently being processed (on recursion stack)
        - BLACK (2): fully processed
        Example cycle detection:
        A → B → C → A (cycle detected when A is GRAY and visited again)
        """
        result = ValidationResult()
        WHITE, GRAY, BLACK = 0, 1, 2
        colors = {node: WHITE for node in self.dependencies}

        def dfs(node, path):
            if node not in colors:
                return  # External dependency
            if colors[node] == GRAY:
                # Found cycle
                cycle_start = path.index(node)
                cycle = [*path[cycle_start:], node]
                result.add_error(
                    ValidationIssue(
                        message=f"Circular dependency detected: {' → '.join(cycle)}",
                    )
                )
                return
            if colors[node] == BLACK:
                return  # Already processed
            colors[node] = GRAY
            path.append(node)
            # NOTE(review): each branch recurses on a copy of *path*, so the
            # same cycle may be reported once per path that reaches it —
            # confirm duplicate error messages are acceptable here.
            for neighbor in self.dependencies.get(node, []):
                dfs(neighbor, path.copy())
            colors[node] = BLACK

        for node in self.dependencies:
            if colors[node] == WHITE:
                dfs(node, [])
        return result

    def _validate_missing_dependencies(self) -> ValidationResult:
        """Report formulas referencing codes not defined in the template."""
        available = {row.reference_code for row in self.template.rows if row.reference_code}
        result = ValidationResult()
        # NOTE(review): deps were extracted against the template's own codes in
        # _build_dependency_graph, so `undefined` looks always empty here —
        # verify this check is actually reachable.
        for ref_code, deps in self.dependencies.items():
            undefined = [d for d in deps if d not in available]
            if undefined:
                row_idx = self._get_row_idx(ref_code)
                result.add_error(
                    ValidationIssue(
                        message=f"Line References undefined in Formula: {', '.join(undefined)}",
                        row_idx=row_idx,
                    )
                )
        return result

    def _get_row_idx(self, reference_code: str) -> int | None:
        # Linear scan; templates are small, so this is acceptable.
        for row in self.template.rows:
            if row.reference_code == reference_code:
                return row.idx
        return None
class CalculationFormulaValidator(Validator):
    """Validates calculation formulas used in Calculated Amount rows"""

    def __init__(self, reference_codes: set[str]):
        # All reference codes defined in the template; used to detect
        # self-references and undefined codes in formulas.
        self.reference_codes = reference_codes

    def validate(self, row) -> ValidationResult:
        """Validate calculation formula for a single row"""
        result = ValidationResult()
        if row.data_source != "Calculated Amount":
            return result
        if not row.calculation_formula:
            result.add_error(
                ValidationIssue(
                    message="Formula is required for Calculated Amount",
                    row_idx=row.idx,
                    field="Formula",
                )
            )
            return result
        # NOTE: the trimmed formula is written back onto the row in place.
        formula = self._preprocess_formula(row.calculation_formula)
        row.calculation_formula = formula
        # Check parentheses; unbalanced parentheses make the remaining
        # checks meaningless, so stop early.
        if not self._are_parentheses_balanced(formula):
            result.add_error(
                ValidationIssue(
                    message="Formula has unbalanced parentheses",
                    row_idx=row.idx,
                )
            )
            return result
        # Check self-reference
        available_codes = list(self.reference_codes)
        refs = extract_reference_codes_from_formula(formula, available_codes)
        if row.reference_code and row.reference_code in refs:
            result.add_error(
                ValidationIssue(
                    message=f"Formula references itself ('{row.reference_code}')",
                    row_idx=row.idx,
                )
            )
        # Check undefined references
        undefined = set(refs) - set(available_codes)
        if undefined:
            result.add_error(
                ValidationIssue(
                    message=f"Formula references undefined codes: {', '.join(undefined)}",
                    row_idx=row.idx,
                )
            )
        # Try to evaluate with dummy values
        eval_error = self._test_formula_evaluation(formula, available_codes)
        if eval_error:
            result.add_error(
                ValidationIssue(
                    message=f"Formula evaluation error: {eval_error}",
                    row_idx=row.idx,
                )
            )
        return result

    def _preprocess_formula(self, formula: str) -> str:
        """Return the whitespace-trimmed formula; empty string for non-strings."""
        if not formula or not isinstance(formula, str):
            return ""
        return formula.strip()

    @staticmethod
    def _are_parentheses_balanced(formula: str) -> bool:
        # Count-based check only; does not catch mis-ordered pairs like ")(."
        return formula.count("(") == formula.count(")")

    def _test_formula_evaluation(self, formula: str, available_codes: list[str]) -> str | None:
        """Dry-run the formula with every code bound to 1.0; return an error
        message string on failure, or None when it evaluates to a number."""
        try:
            context = {code: 1.0 for code in available_codes}
            # Whitelisted math helpers available inside formulas.
            context.update(
                {
                    "abs": abs,
                    "round": round,
                    "min": min,
                    "max": max,
                    "sum": sum,
                    "sqrt": lambda x: x**0.5,
                    "pow": pow,
                    "ceil": lambda x: int(x) + (1 if x % 1 else 0),
                    "floor": lambda x: int(x),
                }
            )
            result = frappe.safe_eval(formula, eval_globals=None, eval_locals=context)
            if not isinstance(result, (int, float)):  # noqa: UP038
                return f"Formula must return a numeric value, got {type(result).__name__}"
            return None
        except Exception as e:
            # Any evaluation failure is reported back as the error message.
            return str(e)
class AccountFilterValidator(Validator):
    """Validates account filter expressions used in Account Data rows"""

    def __init__(self, account_fields: set | None = None):
        # Default to the Account doctype's valid columns when not supplied.
        self.account_fields = account_fields or set(frappe.get_meta("Account")._valid_columns)

    def validate(self, row) -> ValidationResult:
        """Parse and structurally validate the row's JSON account filter."""
        result = ValidationResult()
        if row.data_source != "Account Data":
            return result
        if not row.calculation_formula:
            result.add_error(
                ValidationIssue(
                    message="Account filter is required for Account Data",
                    row_idx=row.idx,
                    field="Formula",
                )
            )
            return result
        try:
            filter_config = json.loads(row.calculation_formula)
        except json.JSONDecodeError as e:
            result.add_error(
                ValidationIssue(
                    message=f"Invalid JSON format: {e!s}",
                    row_idx=row.idx,
                    field="Account Filter",
                )
            )
            return result
        problem = self._validate_filter_structure(filter_config, self.account_fields)
        if problem:
            result.add_error(
                ValidationIssue(
                    message=problem,
                    row_idx=row.idx,
                    field="Account Filter",
                )
            )
        return result

    def _validate_filter_structure(self, filter_config, account_fields: set) -> str | None:
        """Recursively check a filter tree; return an error message or None."""
        if isinstance(filter_config, list):
            # Leaf condition: [field, operator, value]
            if len(filter_config) != 3:
                return "Filter must be [field, operator, value]"
            field, operator, value = filter_config
            if not isinstance(field, str) or not isinstance(operator, str):
                return "Field and operator must be strings"
            if field not in account_fields:
                return f"Field '{field}' is not a valid account field"
            if operator.casefold() not in OPERATOR_MAP:
                return f"Invalid operator '{operator}'"
            if operator in ["in", "not in"] and not isinstance(value, list):
                return f"Operator '{operator}' requires a list value"
            return None
        if isinstance(filter_config, dict):
            # Logical node: {"and"/"or": [sub-conditions, ...]}
            if len(filter_config) != 1:
                return "Logical condition must have exactly one operator"
            key = next(iter(filter_config.keys()))
            if key.lower() not in ["and", "or"]:
                return "Logical operators must be 'and' or 'or'"
            conditions = filter_config[key]
            if not isinstance(conditions, list) or len(conditions) < 1:
                return "Logical conditions need at least 1 sub-condition"
            for condition in conditions:
                error = self._validate_filter_structure(condition, account_fields)
                if error:
                    return error
            return None
        return "Filter must be a list or dict"
class FormulaValidator(Validator):
    """Dispatches per-row formula validation based on the row's data source."""

    def __init__(self, template):
        self.template = template
        reference_codes = {row.reference_code for row in template.rows if row.reference_code}
        self.calculation_validator = CalculationFormulaValidator(reference_codes)
        self.account_filter_validator = AccountFilterValidator()

    def validate(self, row, account_fields: set) -> ValidationResult:
        """Validate one row's formula; empty result when nothing applies."""
        result = ValidationResult()
        if not row.calculation_formula:
            return result
        source = row.data_source
        if source == "Calculated Amount":
            return self.calculation_validator.validate(row)
        if source == "Account Data":
            if account_fields:
                # Prefer the caller-provided field set over the cached one.
                self.account_filter_validator.account_fields = account_fields
            return self.account_filter_validator.validate(row)
        if source == "Custom API":
            result.merge(self._validate_custom_api(row))
        return result

    def _validate_custom_api(self, row) -> ValidationResult:
        """Check that the dotted API path is well-formed and resolvable."""
        result = ValidationResult()
        api_path = row.calculation_formula
        if "." not in api_path:
            result.add_error(
                ValidationIssue(
                    message="Custom API path should be in format: app.module.method",
                    row_idx=row.idx,
                    field="Formula",
                )
            )
            return result
        try:
            module_path, method_name = api_path.rsplit(".", 1)
            module = frappe.get_module(module_path)
            if not hasattr(module, method_name):
                result.add_error(
                    ValidationIssue(
                        message=f"Method '{method_name}' not found in module '{module_path}' (might be environment-specific)",
                        row_idx=row.idx,
                        field="Formula",
                    )
                )
        except Exception as e:
            result.add_error(
                ValidationIssue(
                    message=f"Could not validate API path: {e!s}",
                    row_idx=row.idx,
                    field="Formula",
                )
            )
        return result
def extract_reference_codes_from_formula(formula: str, available_codes: list[str]) -> list[str]:
    """Return the codes from *available_codes* that occur in *formula*.

    Matching is whole-word (so e.g. 'A1' does not match inside 'A10') and the
    result preserves the ordering of *available_codes*.
    """
    return [
        code
        for code in available_codes
        if re.search(r"\b" + re.escape(code) + r"\b", formula)
    ]
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/accounts/doctype/financial_report_template/financial_report_validation.py",
"license": "GNU General Public License v3.0",
"lines": 423,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
frappe/erpnext:erpnext/accounts/doctype/financial_report_template/test_financial_report_engine.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
from frappe.utils import flt
from erpnext.accounts.doctype.financial_report_template.financial_report_engine import (
DependencyResolver,
FilterExpressionParser,
FinancialQueryBuilder,
FormulaCalculator,
)
from erpnext.accounts.doctype.financial_report_template.test_financial_report_template import (
FinancialReportTemplateTestCase,
)
from erpnext.accounts.doctype.journal_entry.test_journal_entry import make_journal_entry
from erpnext.accounts.utils import get_currency_precision, get_fiscal_year
# On IntegrationTestCase, the doctype test records and all
# link-field test record dependencies are recursively loaded
# Use these module variables to add/remove to/from that list
EXTRA_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
IGNORE_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
class TestDependencyResolver(FinancialReportTemplateTestCase):
"""Test cases for DependencyResolver class"""
# 1. BASIC FUNCTIONALITY
    def test_resolve_basic_processing_order(self):
        """Account Data rows must be ordered before Calculated Amount rows."""
        resolver = DependencyResolver(self.test_template)
        order = resolver.get_processing_order()
        # Should process account rows before formula rows
        account_indices = [i for i, row in enumerate(order) if row.data_source == "Account Data"]
        formula_indices = [i for i, row in enumerate(order) if row.data_source == "Calculated Amount"]
        self.assertTrue(all(ai < fi for ai in account_indices for fi in formula_indices))
    def test_resolve_simple_dependency(self):
        """A formula row is ordered after the single account row it references."""
        # Create test rows with dependencies
        test_rows = [
            {
                "reference_code": "A001",
                "display_name": "Base Account",
                "data_source": "Account Data",
                "balance_type": "Closing Balance",
                "calculation_formula": '["account_type", "=", "Income"]',
            },
            {
                "reference_code": "B001",
                "display_name": "Calculated Row",
                "data_source": "Calculated Amount",
                "calculation_formula": "A001 * 2",
            },
        ]
        test_template = FinancialReportTemplateTestCase.create_test_template_with_rows(test_rows)
        resolver = DependencyResolver(test_template)
        # Check dependencies were correctly identified
        self.assertIn("B001", resolver.dependencies)
        self.assertEqual(resolver.dependencies["B001"], ["A001"])
        # Check processing order
        order = resolver.get_processing_order()
        a001_index = next(i for i, row in enumerate(order) if row.reference_code == "A001")
        b001_index = next(i for i, row in enumerate(order) if row.reference_code == "B001")
        self.assertLess(a001_index, b001_index, "A001 should be processed before B001")
# 2. DEPENDENCY PATTERNS
    def test_resolve_multiple_dependencies(self):
        """Rows with multiple and chained dependencies resolve topologically."""
        test_rows = [
            {
                "reference_code": "INC001",
                "display_name": "Income",
                "data_source": "Account Data",
                "balance_type": "Closing Balance",
                "calculation_formula": '["root_type", "=", "Income"]',
            },
            {
                "reference_code": "EXP001",
                "display_name": "Expenses",
                "data_source": "Account Data",
                "balance_type": "Closing Balance",
                "calculation_formula": '["root_type", "=", "Expense"]',
            },
            {
                "reference_code": "GROSS001",
                "display_name": "Gross Profit",
                "data_source": "Calculated Amount",
                "calculation_formula": "INC001 - EXP001",
            },
            {
                "reference_code": "MARGIN001",
                "display_name": "Profit Margin",
                "data_source": "Calculated Amount",
                "calculation_formula": "GROSS001 / INC001 * 100",
            },
        ]
        test_template = FinancialReportTemplateTestCase.create_test_template_with_rows(test_rows)
        resolver = DependencyResolver(test_template)
        # Check dependencies
        self.assertEqual(set(resolver.dependencies["GROSS001"]), {"INC001", "EXP001"})
        self.assertEqual(set(resolver.dependencies["MARGIN001"]), {"GROSS001", "INC001"})
        # Check processing order
        order = resolver.get_processing_order()
        positions = {row.reference_code: i for i, row in enumerate(order) if row.reference_code}
        # Account rows should come before formula rows
        self.assertLess(positions["INC001"], positions["GROSS001"])
        self.assertLess(positions["EXP001"], positions["GROSS001"])
        # GROSS001 should come before MARGIN001 (which depends on it)
        self.assertLess(positions["GROSS001"], positions["MARGIN001"])
    def test_resolve_chain_dependencies(self):
        """Test dependency resolution with chain of dependencies (A -> B -> C -> D)"""
        # Each row's formula references only the previous row in the chain.
        test_rows = [
            {
                "reference_code": "A001",
                "display_name": "Base",
                "data_source": "Account Data",
                "balance_type": "Closing Balance",
                "calculation_formula": '["account_type", "=", "Income"]',
            },
            {
                "reference_code": "B001",
                "display_name": "Level 1",
                "data_source": "Calculated Amount",
                "calculation_formula": "A001 + 100",
            },
            {
                "reference_code": "C001",
                "display_name": "Level 2",
                "data_source": "Calculated Amount",
                "calculation_formula": "B001 * 1.2",
            },
            {
                "reference_code": "D001",
                "display_name": "Level 3",
                "data_source": "Calculated Amount",
                "calculation_formula": "C001 - 50",
            },
        ]
        test_template = FinancialReportTemplateTestCase.create_test_template_with_rows(test_rows)
        resolver = DependencyResolver(test_template)
        order = resolver.get_processing_order()
        positions = {row.reference_code: i for i, row in enumerate(order) if row.reference_code}
        # Verify chain order
        self.assertLess(positions["A001"], positions["B001"])
        self.assertLess(positions["B001"], positions["C001"])
        self.assertLess(positions["C001"], positions["D001"])
    def test_resolve_diamond_dependency_pattern(self):
        """Test Diamond Dependency Pattern - A → B, A → C, and both B,C → D"""
        test_rows = [
            {
                "reference_code": "A001",
                "display_name": "Base Data",
                "data_source": "Account Data",
                "balance_type": "Closing Balance",
                "calculation_formula": '["account_type", "=", "Income"]',
            },
            {
                "reference_code": "B001",
                "display_name": "Branch B",
                "data_source": "Calculated Amount",
                "calculation_formula": "A001 * 0.6",  # B depends on A
            },
            {
                "reference_code": "C001",
                "display_name": "Branch C",
                "data_source": "Calculated Amount",
                "calculation_formula": "A001 * 0.4",  # C depends on A
            },
            {
                "reference_code": "D001",
                "display_name": "Final Result",
                "data_source": "Calculated Amount",
                "calculation_formula": "B001 + C001",  # D depends on both B and C
            },
        ]
        test_template = FinancialReportTemplateTestCase.create_test_template_with_rows(test_rows)
        resolver = DependencyResolver(test_template)
        order = resolver.get_processing_order()
        positions = {row.reference_code: i for i, row in enumerate(order)}
        # A should be processed first
        self.assertLess(positions["A001"], positions["B001"])
        self.assertLess(positions["A001"], positions["C001"])
        self.assertLess(positions["A001"], positions["D001"])
        # Both B and C should be processed before D
        self.assertLess(positions["B001"], positions["D001"])
        self.assertLess(positions["C001"], positions["D001"])
        # Verify D has correct dependencies
        self.assertEqual(set(resolver.dependencies["D001"]), {"B001", "C001"})
    def test_resolve_independent_formula_row_groups(self):
        """Two disjoint dependency chains each keep internal order and stay
        independent of one another."""
        test_rows = [
            # Chain 1: A → B → C
            {
                "reference_code": "A001",
                "display_name": "Chain 1 Base",
                "data_source": "Account Data",
                "balance_type": "Closing Balance",
                "calculation_formula": '["account_type", "=", "Asset"]',
            },
            {
                "reference_code": "B001",
                "display_name": "Chain 1 Level 2",
                "data_source": "Calculated Amount",
                "calculation_formula": "A001 * 1.1",
            },
            {
                "reference_code": "C001",
                "display_name": "Chain 1 Final",
                "data_source": "Calculated Amount",
                "calculation_formula": "B001 + 100",
            },
            # Chain 2: X → Y → Z (independent)
            {
                "reference_code": "X001",
                "display_name": "Chain 2 Base",
                "data_source": "Account Data",
                "balance_type": "Closing Balance",
                "calculation_formula": '["account_type", "=", "Liability"]',
            },
            {
                "reference_code": "Y001",
                "display_name": "Chain 2 Level 2",
                "data_source": "Calculated Amount",
                "calculation_formula": "X001 * 0.9",
            },
            {
                "reference_code": "Z001",
                "display_name": "Chain 2 Final",
                "data_source": "Calculated Amount",
                "calculation_formula": "Y001 - 50",
            },
        ]
        test_template = FinancialReportTemplateTestCase.create_test_template_with_rows(test_rows)
        resolver = DependencyResolver(test_template)
        order = resolver.get_processing_order()
        positions = {row.reference_code: i for i, row in enumerate(order)}
        # Verify Chain 1 order
        self.assertLess(positions["A001"], positions["B001"])
        self.assertLess(positions["B001"], positions["C001"])
        # Verify Chain 2 order
        self.assertLess(positions["X001"], positions["Y001"])
        self.assertLess(positions["Y001"], positions["Z001"])
        # Verify chains are independent (no cross-dependencies)
        chain1_codes = {"A001", "B001", "C001"}
        chain2_codes = {"X001", "Y001", "Z001"}
        for code in chain1_codes:
            if code in resolver.dependencies:
                deps = set(resolver.dependencies[code])
                self.assertFalse(deps.intersection(chain2_codes), f"{code} should not depend on chain 2")
        for code in chain2_codes:
            if code in resolver.dependencies:
                deps = set(resolver.dependencies[code])
                self.assertFalse(deps.intersection(chain1_codes), f"{code} should not depend on chain 1")
# 3. DATA SOURCE PROCESSING
def test_resolve_mixed_data_sources(self):
test_rows = [
{
"reference_code": "CALC001",
"display_name": "Calculated",
"data_source": "Calculated Amount",
"calculation_formula": "ACC001 + 100",
},
{
"reference_code": None,
"display_name": "Spacing",
"data_source": "Blank Line",
},
{
"reference_code": "ACC001",
"display_name": "Account",
"data_source": "Account Data",
"balance_type": "Closing Balance",
"calculation_formula": '["account_type", "=", "Income"]',
},
{
"reference_code": None,
"display_name": "Custom",
"data_source": "Custom API",
},
]
test_template = FinancialReportTemplateTestCase.create_test_template_with_rows(test_rows)
resolver = DependencyResolver(test_template)
order = resolver.get_processing_order()
# Find positions
positions = {}
for i, row in enumerate(order):
if row.reference_code:
positions[row.reference_code] = i
else:
positions[f"{row.data_source}_{i}"] = i
# Account data should come before calculated
self.assertLess(positions["ACC001"], positions["CALC001"])
# All rows should be present
self.assertEqual(len(order), 4)
def test_resolve_api_to_formula_dependencies(self):
test_rows = [
{
"reference_code": "API001",
"display_name": "Custom API Result",
"data_source": "Custom API",
},
{
"reference_code": "ACC001",
"display_name": "Account Data",
"data_source": "Account Data",
"balance_type": "Closing Balance",
"calculation_formula": '["account_type", "=", "Income"]',
},
{
"reference_code": "CALC001",
"display_name": "Calculated Result",
"data_source": "Calculated Amount",
"calculation_formula": "API001 + ACC001",
},
]
test_template = FinancialReportTemplateTestCase.create_test_template_with_rows(test_rows)
resolver = DependencyResolver(test_template)
order = resolver.get_processing_order()
positions = {row.reference_code: i for i, row in enumerate(order)}
# API001 should be processed before CALC001
self.assertLess(positions["API001"], positions["CALC001"])
# ACC001 should be processed before CALC001
self.assertLess(positions["ACC001"], positions["CALC001"])
# API001 should be processed before ACC001 (API rows come first)
self.assertLess(positions["API001"], positions["ACC001"])
def test_resolve_cross_datasource_dependencies(self):
test_rows = [
{
"reference_code": "API001",
"display_name": "API Data",
"data_source": "Custom API",
},
{
"reference_code": "ACC001",
"display_name": "Account Total",
"data_source": "Account Data",
"balance_type": "Closing Balance",
"calculation_formula": '["account_type", "=", "Income"]',
},
{
"reference_code": "MIXED001",
"display_name": "Mixed Calculation",
"data_source": "Calculated Amount",
"calculation_formula": "(API001 + ACC001) * 0.5",
},
{
"reference_code": "FINAL001",
"display_name": "Final Result",
"data_source": "Calculated Amount",
"calculation_formula": "MIXED001 + API001",
},
]
test_template = FinancialReportTemplateTestCase.create_test_template_with_rows(test_rows)
resolver = DependencyResolver(test_template)
order = resolver.get_processing_order()
positions = {row.reference_code: i for i, row in enumerate(order)}
# API rows should be processed first
self.assertLess(positions["API001"], positions["ACC001"])
self.assertLess(positions["API001"], positions["MIXED001"])
# Account data should be processed before formula rows
self.assertLess(positions["ACC001"], positions["MIXED001"])
# Mixed calculation should be processed before final result
self.assertLess(positions["MIXED001"], positions["FINAL001"])
# Verify dependencies
self.assertEqual(set(resolver.dependencies["MIXED001"]), {"API001", "ACC001"})
self.assertEqual(set(resolver.dependencies["FINAL001"]), {"MIXED001", "API001"})
# 4. FORMULA PARSING
def test_extract_from_complex_formulas(self):
test_rows = [
{
"reference_code": "INCOME",
"display_name": "Total Income",
"data_source": "Account Data",
"balance_type": "Closing Balance",
"calculation_formula": '["root_type", "=", "Income"]',
},
{
"reference_code": "EXPENSE",
"display_name": "Total Expense",
"data_source": "Account Data",
"balance_type": "Closing Balance",
"calculation_formula": '["root_type", "=", "Expense"]',
},
{
"reference_code": "TAX_RATE",
"display_name": "Tax Rate",
"data_source": "Account Data",
"balance_type": "Closing Balance",
"calculation_formula": '["account_name", "like", "Tax"]',
},
{
"reference_code": "NET_RESULT",
"display_name": "Net Result",
"data_source": "Calculated Amount",
"calculation_formula": "(INCOME - EXPENSE) * (1 - TAX_RATE / 100)",
},
]
test_template = FinancialReportTemplateTestCase.create_test_template_with_rows(test_rows)
resolver = DependencyResolver(test_template)
# Should correctly identify all three dependencies in complex formula
net_deps = resolver.dependencies.get("NET_RESULT", [])
self.assertEqual(set(net_deps), {"INCOME", "EXPENSE", "TAX_RATE"})
def test_extract_references_with_math_functions(self):
test_rows = [
{
"reference_code": "INCOME",
"display_name": "Total Income",
"data_source": "Account Data",
"balance_type": "Closing Balance",
"calculation_formula": '["root_type", "=", "Income"]',
},
{
"reference_code": "EXPENSE",
"display_name": "Total Expense",
"data_source": "Account Data",
"balance_type": "Closing Balance",
"calculation_formula": '["root_type", "=", "Expense"]',
},
{
"reference_code": "TAX",
"display_name": "Tax Amount",
"data_source": "Account Data",
"balance_type": "Closing Balance",
"calculation_formula": '["account_name", "like", "Tax"]',
},
{
"reference_code": "MATH_TEST1",
"display_name": "Mathematical Test 1",
"data_source": "Calculated Amount",
"calculation_formula": "max(INCOME, EXPENSE) + min(TAX, 0)",
},
{
"reference_code": "MATH_TEST2",
"display_name": "Mathematical Test 2",
"data_source": "Calculated Amount",
"calculation_formula": "abs(INCOME - EXPENSE) + round(TAX, 2)",
},
{
"reference_code": "MATH_TEST3",
"display_name": "Mathematical Test 3",
"data_source": "Calculated Amount",
"calculation_formula": "sqrt(pow(INCOME, 2) + pow(EXPENSE, 2))",
},
]
test_template = FinancialReportTemplateTestCase.create_test_template_with_rows(test_rows)
resolver = DependencyResolver(test_template)
# MATH_TEST1 should correctly identify dependencies despite max/min functions
self.assertEqual(set(resolver.dependencies["MATH_TEST1"]), {"INCOME", "EXPENSE", "TAX"})
# MATH_TEST2 should correctly identify dependencies despite abs/round functions
self.assertEqual(set(resolver.dependencies["MATH_TEST2"]), {"INCOME", "EXPENSE", "TAX"})
# MATH_TEST3 should correctly identify dependencies despite sqrt/pow functions
self.assertEqual(set(resolver.dependencies["MATH_TEST3"]), {"INCOME", "EXPENSE"})
def test_extract_accurate_reference_matching(self):
test_rows = [
{
"reference_code": "INC001",
"display_name": "Income Base",
"data_source": "Account Data",
"calculation_formula": '["account_type", "=", "Income"]',
"balance_type": "Closing Balance",
},
{
"reference_code": "INC002",
"display_name": "Income Secondary",
"data_source": "Account Data",
"calculation_formula": '["account_type", "=", "Income"]',
"balance_type": "Closing Balance",
},
{
"reference_code": "INC001_2023", # Should not match INC001
"display_name": "Income 2023",
"data_source": "Account Data",
"calculation_formula": '["account_type", "=", "Income"]',
"balance_type": "Closing Balance",
},
{
"reference_code": "TEST1",
"display_name": "Test Formula 1",
"data_source": "Calculated Amount",
"calculation_formula": "2 * INC001", # Should correctly extract INC001
},
{
"reference_code": "TEST2",
"display_name": "Test Formula 2",
"data_source": "Calculated Amount",
"calculation_formula": "INC001 + INC002", # Word boundaries require separation
},
{
"reference_code": "TEST3",
"display_name": "Test Formula 3",
"data_source": "Calculated Amount",
"calculation_formula": "INC001_2023 + INC001", # Should match both correctly
},
{
"reference_code": "TEST4",
"display_name": "Test Formula 4",
"data_source": "Calculated Amount",
"calculation_formula": "INC001_2023*INC001", # No space separation but different tokens
},
]
test_template = FinancialReportTemplateTestCase.create_test_template_with_rows(test_rows)
resolver = DependencyResolver(test_template)
# TEST1 should only depend on INC001
self.assertEqual(resolver.dependencies["TEST1"], ["INC001"])
# TEST2 should match both INC001 and INC002 (separated by space and +)
self.assertEqual(set(resolver.dependencies["TEST2"]), {"INC001", "INC002"})
# TEST3 should depend on both INC001_2023 and INC001
self.assertEqual(set(resolver.dependencies["TEST3"]), {"INC001_2023", "INC001"})
# TEST4 should depend on both INC001_2023 and INC001 (separated by *)
self.assertEqual(set(resolver.dependencies["TEST4"]), {"INC001_2023", "INC001"})
def test_prevent_partial_reference_matches(self):
test_rows = [
{
"reference_code": "INC001",
"display_name": "Income",
"data_source": "Account Data",
"calculation_formula": '["account_type", "=", "Income"]',
"balance_type": "Closing Balance",
},
{
"reference_code": "INC001_ADJ", # Contains INC001 but shouldn't match
"display_name": "Income Adjustment",
"data_source": "Account Data",
"calculation_formula": '["account_type", "=", "Income"]',
"balance_type": "Closing Balance",
},
{
"reference_code": "RESULT",
"display_name": "Result",
"data_source": "Calculated Amount",
"calculation_formula": "INC001 + 500", # Should only match INC001, not INC001_ADJ
},
]
test_template = FinancialReportTemplateTestCase.create_test_template_with_rows(test_rows)
resolver = DependencyResolver(test_template)
# RESULT should only depend on INC001, not INC001_ADJ
self.assertEqual(resolver.dependencies["RESULT"], ["INC001"])
# Processing order should work correctly
order = resolver.get_processing_order()
positions = {row.reference_code: i for i, row in enumerate(order)}
self.assertLess(positions["INC001"], positions["RESULT"])
# INC001_ADJ can be processed in any order relative to RESULT since there's no dependency
self.assertIn("INC001_ADJ", positions)
# 5. EDGE CASES
def test_resolve_rows_without_dependencies(self):
test_rows = [
{
"reference_code": "A001",
"display_name": "Account Row",
"data_source": "Account Data",
"balance_type": "Closing Balance",
"calculation_formula": '["account_type", "=", "Income"]',
},
{
"reference_code": "B001",
"display_name": "Static Value",
"data_source": "Calculated Amount",
"calculation_formula": "1000 + 500", # No reference codes
},
]
test_template = FinancialReportTemplateTestCase.create_test_template_with_rows(test_rows)
resolver = DependencyResolver(test_template)
# B001 should have no dependencies
self.assertEqual(resolver.dependencies.get("B001", []), [])
# Should still process correctly
order = resolver.get_processing_order()
self.assertEqual(len(order), 2)
def test_handle_empty_reference_codes(self):
test_rows = [
{
"reference_code": "VALID001",
"display_name": "Valid Row",
"data_source": "Account Data",
"balance_type": "Closing Balance",
"calculation_formula": '["account_type", "=", "Income"]',
},
{
"reference_code": "", # Empty string
"display_name": "Empty Reference",
"data_source": "Account Data",
"balance_type": "Closing Balance",
"calculation_formula": '["account_type", "=", "Asset"]',
},
{
"reference_code": " ", # Whitespace only
"display_name": "Whitespace Reference",
"data_source": "Account Data",
"balance_type": "Closing Balance",
"calculation_formula": '["account_type", "=", "Liability"]',
},
{
"reference_code": None, # None value
"display_name": "None Reference",
"data_source": "Account Data",
"balance_type": "Closing Balance",
"calculation_formula": '["account_type", "=", "Expense"]',
},
{
"reference_code": "CALC001",
"display_name": "Calculated Row",
"data_source": "Calculated Amount",
"calculation_formula": "VALID001 * 2", # Should only depend on VALID001
},
]
test_template = FinancialReportTemplateTestCase.create_test_template_with_rows(test_rows)
resolver = DependencyResolver(test_template)
# Should not break dependency resolution
order = resolver.get_processing_order()
self.assertEqual(len(order), 5) # All rows should be present
# CALC001 should only depend on VALID001
self.assertEqual(resolver.dependencies.get("CALC001", []), ["VALID001"])
# Verify processing order
positions = {
row.reference_code: i
for i, row in enumerate(order)
if row.reference_code and row.reference_code.strip()
}
self.assertLess(positions["VALID001"], positions["CALC001"])
def test_resolve_include_orphaned_nodes(self):
test_rows = [
{
"reference_code": "USED001",
"display_name": "Used Row",
"data_source": "Account Data",
"balance_type": "Closing Balance",
"calculation_formula": '["account_type", "=", "Income"]',
},
{
"reference_code": "ORPHAN001",
"display_name": "Orphaned Row 1",
"data_source": "Account Data",
"balance_type": "Closing Balance",
"calculation_formula": '["account_type", "=", "Asset"]',
},
{
"reference_code": "ORPHAN002",
"display_name": "Orphaned Row 2",
"data_source": "Account Data",
"balance_type": "Closing Balance",
"calculation_formula": '["account_type", "=", "Liability"]',
},
{
"reference_code": "DEPENDENT",
"display_name": "Dependent Row",
"data_source": "Calculated Amount",
"calculation_formula": "USED001 * 2", # Only uses USED001
},
]
test_template = FinancialReportTemplateTestCase.create_test_template_with_rows(test_rows)
resolver = DependencyResolver(test_template)
order = resolver.get_processing_order()
# All rows should be included in processing order
self.assertEqual(len(order), 4)
positions = {row.reference_code: i for i, row in enumerate(order) if row.reference_code}
# USED001 should be processed before DEPENDENT
self.assertLess(positions["USED001"], positions["DEPENDENT"])
# Orphaned rows should be included but have no dependencies
self.assertIn("ORPHAN001", positions)
self.assertIn("ORPHAN002", positions)
# Orphaned rows should have no dependencies recorded
self.assertEqual(resolver.dependencies.get("ORPHAN001", []), [])
self.assertEqual(resolver.dependencies.get("ORPHAN002", []), [])
def test_handle_valid_missing_references(self):
test_rows = [
{
"reference_code": "A001",
"display_name": "Row A",
"data_source": "Account Data",
"balance_type": "Closing Balance",
"calculation_formula": '["account_type", "=", "Asset"]',
},
{
"reference_code": "B001",
"display_name": "Row B",
"data_source": "Calculated Amount",
"calculation_formula": "A001 * 2", # Valid reference
},
]
# This should work without errors
test_template = FinancialReportTemplateTestCase.create_test_template_with_rows(test_rows)
resolver = DependencyResolver(test_template)
# Basic test - ensure it doesn't crash
processing_order = resolver.get_processing_order()
self.assertEqual(len(processing_order), 2)
# 6. ERROR DETECTION
def test_detect_circular_dependency(self):
"""Test detection of circular dependency (A -> B -> C -> A)"""
test_rows = [
{
"reference_code": "A001",
"display_name": "Row A",
"data_source": "Calculated Amount",
"calculation_formula": "C001 + 100", # A depends on C
},
{
"reference_code": "B001",
"display_name": "Row B",
"data_source": "Calculated Amount",
"calculation_formula": "A001 + 200", # B depends on A
},
{
"reference_code": "C001",
"display_name": "Row C",
"data_source": "Calculated Amount",
"calculation_formula": "B001 * 1.5", # C depends on B -> creates cycle
},
]
# Should raise ValidationError for circular dependency
test_template = FinancialReportTemplateTestCase.create_test_template_with_rows(test_rows)
with self.assertRaises(frappe.ValidationError):
DependencyResolver(test_template)
class TestFormulaCalculator(FinancialReportTemplateTestCase):
"""Test cases for FormulaCalculator class"""
def _create_mock_report_row(self, formula: str, reference_code: str = "TEST_ROW"):
class MockReportRow:
def __init__(self, formula, ref_code):
self.calculation_formula = formula
self.reference_code = ref_code
self.data_source = "Calculated Amount"
self.idx = 1
self.reverse_sign = 0
return MockReportRow(formula, reference_code)
# 1. FOUNDATION TESTS
def test_evaluate_basic_operations(self):
# Mock row data with different scenarios
row_data = {
"INC001": [1000.0, 1200.0, 1500.0],
"EXP001": [800.0, 900.0, 1100.0],
"TAX001": [50.0, 60.0, 75.0],
"ZERO_VAL": [0.0, 0.0, 0.0],
"NEG_VAL": [-100.0, -200.0, -150.0],
}
period_list = [
{"key": "2023_q1", "from_date": "2023-01-01", "to_date": "2023-03-31"},
{"key": "2023_q2", "from_date": "2023-04-01", "to_date": "2023-06-30"},
{"key": "2023_q3", "from_date": "2023-07-01", "to_date": "2023-09-30"},
]
calculator = FormulaCalculator(row_data, period_list)
result = calculator.evaluate_formula(self._create_mock_report_row("INC001 - EXP001"))
expected = [200.0, 300.0, 400.0] # [1000-800, 1200-900, 1500-1100]
self.assertEqual(result, expected)
result = calculator.evaluate_formula(self._create_mock_report_row("INC001 * 2"))
expected = [2000.0, 2400.0, 3000.0]
self.assertEqual(result, expected)
result = calculator.evaluate_formula(self._create_mock_report_row("INC001 / 10"))
expected = [100.0, 120.0, 150.0]
self.assertEqual(result, expected)
result = calculator.evaluate_formula(self._create_mock_report_row("(INC001 - EXP001) * 0.8"))
expected = [160.0, 240.0, 320.0] # [(1000-800)*0.8, (1200-900)*0.8, (1500-1100)*0.8]
self.assertEqual(result, expected)
result = calculator.evaluate_formula(self._create_mock_report_row("abs(NEG_VAL)"))
expected = [100.0, 200.0, 150.0]
self.assertEqual(result, expected)
result = calculator.evaluate_formula(self._create_mock_report_row("max(INC001, EXP001)"))
expected = [1000.0, 1200.0, 1500.0] # INC001 is always larger
self.assertEqual(result, expected)
result = calculator.evaluate_formula(self._create_mock_report_row("min(INC001, EXP001)"))
expected = [800.0, 900.0, 1100.0] # EXP001 is always smaller
self.assertEqual(result, expected)
def test_handle_division_by_zero(self):
row_data = {
"NUMERATOR": [100.0, 200.0, 300.0],
"ZERO_VAL": [0.0, 0.0, 0.0],
}
period_list = [
{"key": "2023_q1", "from_date": "2023-01-01", "to_date": "2023-03-31"},
{"key": "2023_q2", "from_date": "2023-04-01", "to_date": "2023-06-30"},
{"key": "2023_q3", "from_date": "2023-07-01", "to_date": "2023-09-30"},
]
calculator = FormulaCalculator(row_data, period_list)
result = calculator.evaluate_formula(self._create_mock_report_row("NUMERATOR / ZERO_VAL"))
expected = [0.0, 0.0, 0.0]
self.assertEqual(result, expected)
# 2. DATA HANDLING TESTS
def test_handle_missing_values(self):
row_data = {
"SHORT_DATA": [100.0, 200.0], # Only 2 periods instead of 3
"NORMAL_DATA": [50.0, 60.0, 70.0],
}
period_list = [
{"key": "2023_q1", "from_date": "2023-01-01", "to_date": "2023-03-31"},
{"key": "2023_q2", "from_date": "2023-04-01", "to_date": "2023-06-30"},
{"key": "2023_q3", "from_date": "2023-07-01", "to_date": "2023-09-30"},
]
calculator = FormulaCalculator(row_data, period_list)
result = calculator.evaluate_formula(self._create_mock_report_row("SHORT_DATA + NORMAL_DATA"))
expected = [150.0, 260.0, 70.0] # [100+50, 200+60, 0+70]
self.assertEqual(result, expected)
# Empty row_data
empty_calculator = FormulaCalculator({}, period_list)
result = empty_calculator.evaluate_formula(self._create_mock_report_row("MISSING_CODE * 2"))
expected = [0.0, 0.0, 0.0]
self.assertEqual(result, expected)
# None values
row_data_with_none = {
"WITH_NONE": [100.0, None, 300.0],
"NORMAL": [10.0, 20.0, 30.0],
}
none_calculator = FormulaCalculator(row_data_with_none, period_list)
result = none_calculator.evaluate_formula(self._create_mock_report_row("WITH_NONE + NORMAL"))
expected = [110.0, 20.0, 330.0] # [100+10, 0+20, 300+30]
self.assertEqual(result, expected)
# Zero periods
zero_period_calculator = FormulaCalculator({"TEST": [100.0]}, [])
result = zero_period_calculator.evaluate_formula(self._create_mock_report_row("TEST * 2"))
expected = [] # No periods means no results
self.assertEqual(result, expected)
def test_handle_invalid_reference_codes(self):
"""Test formula calculator handles invalid reference codes"""
row_data = {
"VALID_CODE": [100.0, 200.0, 300.0],
"123_INVALID": [50.0, 60.0, 70.0], # Starts with number - invalid identifier
"VALID-DASH": [25.0, 30.0, 35.0], # Contains dash - invalid identifier
}
period_list = [
{"key": "2023_q1", "from_date": "2023-01-01", "to_date": "2023-03-31"},
{"key": "2023_q2", "from_date": "2023-04-01", "to_date": "2023-06-30"},
{"key": "2023_q3", "from_date": "2023-07-01", "to_date": "2023-09-30"},
]
calculator = FormulaCalculator(row_data, period_list)
# Test with valid reference code
result = calculator.evaluate_formula(self._create_mock_report_row("VALID_CODE * 2"))
expected = [200.0, 400.0, 600.0]
self.assertEqual(result, expected)
# Test with invalid reference code - should return 0.0 (code won't be in context)
result = calculator.evaluate_formula(self._create_mock_report_row("INVALID_CODE * 2"))
expected = [0.0, 0.0, 0.0]
self.assertEqual(result, expected)
# Test reference code case sensitivity
result = calculator.evaluate_formula(
self._create_mock_report_row("valid_code * 2")
) # lowercase version
expected = [0.0, 0.0, 0.0] # Should fail since codes are case-sensitive
self.assertEqual(result, expected)
def test_handle_mismatched_period_data_lengths(self):
"""Test scenarios with mismatched period data"""
# Test when row_data has more values than periods
row_data_extra = {
"EXTRA_DATA": [100.0, 200.0, 300.0, 400.0, 500.0], # 5 values
}
period_list_short = [
{"key": "2023_q1", "from_date": "2023-01-01", "to_date": "2023-03-31"},
{"key": "2023_q2", "from_date": "2023-04-01", "to_date": "2023-06-30"},
] # Only 2 periods
calculator_extra = FormulaCalculator(row_data_extra, period_list_short)
result = calculator_extra.evaluate_formula(self._create_mock_report_row("EXTRA_DATA * 2"))
expected = [200.0, 400.0] # Only processes first 2 values
self.assertEqual(result, expected)
# Test when all row data arrays have different lengths
row_data_mixed = {
"SHORT": [100.0], # 1 value
"MEDIUM": [200.0, 300.0], # 2 values
"LONG": [400.0, 500.0, 600.0], # 3 values
}
period_list_three = [
{"key": "2023_q1", "from_date": "2023-01-01", "to_date": "2023-03-31"},
{"key": "2023_q2", "from_date": "2023-04-01", "to_date": "2023-06-30"},
{"key": "2023_q3", "from_date": "2023-07-01", "to_date": "2023-09-30"},
]
calculator_mixed = FormulaCalculator(row_data_mixed, period_list_three)
result = calculator_mixed.evaluate_formula(self._create_mock_report_row("SHORT + MEDIUM + LONG"))
# Period 0: 100 + 200 + 400 = 700
# Period 1: 0 + 300 + 500 = 800
# Period 2: 0 + 0 + 600 = 600
expected = [700.0, 800.0, 600.0]
self.assertEqual(result, expected)
# 3. COMPLEX EXPRESSIONS
def test_evaluate_complex_expressions(self):
row_data = {
"REVENUE": [10000.0, 12000.0, 15000.0],
"COST": [6000.0, 7200.0, 9000.0],
"TAX_RATE": [0.25, 0.25, 0.30], # 25%, 25%, 30%
}
period_list = [
{"key": "2023_q1", "from_date": "2023-01-01", "to_date": "2023-03-31"},
{"key": "2023_q2", "from_date": "2023-04-01", "to_date": "2023-06-30"},
{"key": "2023_q3", "from_date": "2023-07-01", "to_date": "2023-09-30"},
]
calculator = FormulaCalculator(row_data, period_list)
result = calculator.evaluate_formula(
self._create_mock_report_row("(REVENUE - COST) * (1 - TAX_RATE)")
)
expected = [
(10000 - 6000) * (1 - 0.25),
(12000 - 7200) * (1 - 0.25),
(15000 - 9000) * (1 - 0.30),
]
self.assertEqual(result, expected)
result = calculator.evaluate_formula(self._create_mock_report_row("round(REVENUE / COST, 2)"))
expected = [
round(10000 / 6000, 2),
round(12000 / 7200, 2),
round(15000 / 9000, 2),
]
self.assertEqual(result, expected)
result = calculator.evaluate_formula(
self._create_mock_report_row("REVENUE + COST * TAX_RATE - 100")
) # Tests PEMDAS order
expected = [
10000 + 6000 * 0.25 - 100,
12000 + 7200 * 0.25 - 100,
15000 + 9000 * 0.30 - 100,
]
self.assertEqual(result, expected)
result = calculator.evaluate_formula(
self._create_mock_report_row("((REVENUE + COST) * (TAX_RATE + 0.1)) / 2")
)
expected = [
((10000 + 6000) * (0.25 + 0.1)) / 2,
((12000 + 7200) * (0.25 + 0.1)) / 2,
((15000 + 9000) * (0.30 + 0.1)) / 2,
]
self.assertEqual(result, expected)
result = calculator.evaluate_formula(self._create_mock_report_row("REVENUE * 2.5 + 100"))
expected = [
10000 * 2.5 + 100,
12000 * 2.5 + 100,
15000 * 2.5 + 100,
]
self.assertEqual(result, expected)
def test_evaluate_nested_function_combinations(self):
row_data = {
"BASE": [4.0],
"POSITIVE": [16.0], # Use positive number for sqrt
"DECIMAL": [2.7],
}
period_list = [{"key": "2023_q1", "from_date": "2023-01-01", "to_date": "2023-03-31"}]
calculator = FormulaCalculator(row_data, period_list)
result = calculator.evaluate_formula(self._create_mock_report_row("round(sqrt(POSITIVE), 2)"))
expected = round((16.0**0.5), 2) # round(sqrt(16), 2) = round(4.0, 2) = 4.0
self.assertEqual(result[0], expected)
result = calculator.evaluate_formula(
self._create_mock_report_row("max(POSITIVE, min(BASE, DECIMAL))")
)
expected = max(16.0, min(4.0, 2.7)) # max(16.0, 2.7) = 16.0
self.assertEqual(result[0], expected)
result = calculator.evaluate_formula(
self._create_mock_report_row("pow(max(BASE, 2), min(DECIMAL, 3))")
)
expected = pow(max(4.0, 2), min(2.7, 3)) # pow(4.0, 2.7)
self.assertAlmostEqual(result[0], expected, places=2)
# 4. FINANCIAL DOMAIN
def test_calculate_financial_use_cases(self):
row_data = {
"REVENUE_Q1": [1000000.0],
"REVENUE_Q2": [1200000.0],
"EXPENSES": [800000.0],
"BUDGET_VARIANCE": [-50000.0],
"ACTUAL_COSTS": [123456.78],
"GROWTH_RATE": [1.15], # 15% growth
"YEARS": [5.0],
}
period_list = [{"key": "2023_q1", "from_date": "2023-01-01", "to_date": "2023-03-31"}]
calculator = FormulaCalculator(row_data, period_list)
# Best quarterly performance
result = calculator.evaluate_formula(self._create_mock_report_row("max(REVENUE_Q1, REVENUE_Q2)"))
self.assertEqual(result[0], 1200000.0)
# Absolute variance (remove negative sign for reporting)
result = calculator.evaluate_formula(self._create_mock_report_row("abs(BUDGET_VARIANCE)"))
self.assertEqual(result[0], 50000.0)
# Rounded reporting figures
result = calculator.evaluate_formula(self._create_mock_report_row("round(ACTUAL_COSTS)"))
self.assertEqual(result[0], 123457.0) # Rounded to nearest whole number
# Conservative estimates
result = calculator.evaluate_formula(self._create_mock_report_row("floor(ACTUAL_COSTS / 1000)"))
self.assertEqual(result[0], 123.0) # Conservative thousands
# Compound growth calculations
result = calculator.evaluate_formula(self._create_mock_report_row("pow(GROWTH_RATE, YEARS)"))
expected = flt(1.15**5, get_currency_precision())
self.assertEqual(result[0], expected)
# Profit calculation with rounding
result = calculator.evaluate_formula(
self._create_mock_report_row("round((REVENUE_Q1 - EXPENSES) / REVENUE_Q1 * 100)")
)
self.assertEqual(result[0], 20.0) # 20% profit margin
def test_calculate_common_financial_patterns(self):
"""Test patterns commonly used in financial calculations"""
row_data = {
"ACTUAL": [100000.0],
"BUDGET": [80000.0],
"PREVIOUS_YEAR": [90000.0],
"LOWER_BOUND": [50000.0],
"UPPER_BOUND": [150000.0],
}
period_list = [{"key": "2023_q1", "from_date": "2023-01-01", "to_date": "2023-03-31"}]
calculator = FormulaCalculator(row_data, period_list)
result = calculator.evaluate_formula(
self._create_mock_report_row("(ACTUAL - BUDGET) / (BUDGET + 0.0001) * 100")
)
expected = (100000.0 - 80000.0) / (80000.0 + 0.0001) * 100
self.assertAlmostEqual(result[0], expected, places=2)
# conditional logic simulation: max(0, ACTUAL - BUDGET) (similar to IF positive)
result = calculator.evaluate_formula(self._create_mock_report_row("max(0, ACTUAL - BUDGET)"))
expected = max(0, 100000.0 - 80000.0) # 20000.0
self.assertEqual(result[0], expected)
# clamping patterns: min(max(ACTUAL, LOWER_BOUND), UPPER_BOUND)
result = calculator.evaluate_formula(
self._create_mock_report_row("min(max(ACTUAL, LOWER_BOUND), UPPER_BOUND)")
)
expected = min(max(100000.0, 50000.0), 150000.0) # min(100000.0, 150000.0) = 100000.0
self.assertEqual(result[0], expected)
# year-over-year growth calculation
result = calculator.evaluate_formula(
self._create_mock_report_row("(ACTUAL - PREVIOUS_YEAR) / PREVIOUS_YEAR * 100")
)
expected = (100000.0 - 90000.0) / 90000.0 * 100
self.assertAlmostEqual(result[0], expected, places=2)
# 5. EDGE CASES
def test_handle_error_cases(self):
"""Test formula calculator error handling for various edge cases"""
row_data = {
"NORMAL": [100.0, 200.0, 300.0],
}
period_list = [
{"key": "2023_q1", "from_date": "2023-01-01", "to_date": "2023-03-31"},
{"key": "2023_q2", "from_date": "2023-04-01", "to_date": "2023-06-30"},
{"key": "2023_q3", "from_date": "2023-07-01", "to_date": "2023-09-30"},
]
calculator = FormulaCalculator(row_data, period_list)
# Test invalid syntax - should return 0.0 for all periods
result = calculator.evaluate_formula(self._create_mock_report_row("NORMAL + +")) # Invalid syntax
expected = [0.0, 0.0, 0.0]
self.assertEqual(result, expected)
# Test undefined variable - should return 0.0 for all periods
result = calculator.evaluate_formula(self._create_mock_report_row("UNDEFINED_VAR * 2"))
expected = [0.0, 0.0, 0.0]
self.assertEqual(result, expected)
# Test empty formula - should return 0.0 for all periods
result = calculator.evaluate_formula(self._create_mock_report_row(""))
expected = [0.0, 0.0, 0.0]
self.assertEqual(result, expected)
# Test whitespace and formatting tolerance
result = calculator.evaluate_formula(
self._create_mock_report_row(" NORMAL + 100 ")
) # Extra spaces
expected = [200.0, 300.0, 400.0]
self.assertEqual(result, expected)
# Test extremely long formulas
long_formula = "NORMAL + " + " + ".join(["10"] * 100) # Very long formula
result = calculator.evaluate_formula(self._create_mock_report_row(long_formula))
expected = [1100.0, 1200.0, 1300.0] # 100 + (100 * 10) = 1100 added to each value
self.assertEqual(result, expected)
# Test Unicode characters in formula (should fail gracefully)
result = calculator.evaluate_formula(
self._create_mock_report_row("NORMAL + ∞")
) # Unicode infinity symbol
expected = [0.0, 0.0, 0.0]
self.assertEqual(result, expected)
def test_evaluate_math_function_edge_cases(self):
"""Test edge cases for mathematical functions"""
row_data = {
"ZERO": [0.0],
"SMALL_DECIMAL": [0.0001],
}
period_list = [{"key": "2023_q1", "from_date": "2023-01-01", "to_date": "2023-03-31"}]
calculator = FormulaCalculator(row_data, period_list)
# Test sqrt with zero values
result = calculator.evaluate_formula(self._create_mock_report_row("sqrt(ZERO)"))
self.assertEqual(result[0], 0.0)
# Test very small numbers precision
result = calculator.evaluate_formula(self._create_mock_report_row("SMALL_DECIMAL * SMALL_DECIMAL"))
expected = 0.0001 * 0.0001
# Depends on currency precision
self.assertTrue(result[0] == 0.0 or abs(result[0] - expected) < 1e-6)
# 6. OTHER
def test_prevent_security_vulnerabilities(self):
row_data = {"TEST_VAL": [100.0]}
period_list = [{"key": "2023_q1", "from_date": "2023-01-01", "to_date": "2023-03-31"}]
calculator = FormulaCalculator(row_data, period_list)
# Test that potentially harmful expressions are safely handled
# These should all return 0.0 due to safe evaluation failures
harmful_expressions = [
"__import__('os').system('ls')", # Import attempts
"eval('1+1')", # Nested eval attempts
"exec('print(1)')", # Exec attempts
"open('/etc/passwd')", # File operations
"globals()", # Global namespace access
"locals()", # Local namespace access
]
for expr in harmful_expressions:
with self.subTest(expression=expr):
result = calculator.evaluate_formula(self._create_mock_report_row(expr))
self.assertEqual(result, [0.0], f"Harmful expression '{expr}' should return [0.0]")
# Only safe mathematical operations work
safe_expressions = [
"TEST_VAL + 50",
"abs(TEST_VAL - 200)",
"min(TEST_VAL, 50)",
"max(TEST_VAL, 150)",
"round(TEST_VAL / 3, 2)",
]
for expr in safe_expressions:
with self.subTest(expression=expr):
result = calculator.evaluate_formula(self._create_mock_report_row(expr))
self.assertNotEqual(result, [0.0], f"Safe expression '{expr}' should not return [0.0]")
self.assertIsInstance(result[0], float, f"Safe expression '{expr}' should return a float")
def test_build_context_validation(self):
	"""_build_context exposes per-period row values and the math-function whitelist."""
	row_data = {
		"TEST1": [100.0, 200.0, 300.0],
		"TEST2": [10.0, 20.0, 30.0],
	}
	period_list = [
		{"key": "2023_q1", "from_date": "2023-01-01", "to_date": "2023-03-31"},
		{"key": "2023_q2", "from_date": "2023-04-01", "to_date": "2023-06-30"},
		{"key": "2023_q3", "from_date": "2023-07-01", "to_date": "2023-09-30"},
	]
	calculator = FormulaCalculator(row_data, period_list)
	# Each period's context must carry that period's values.
	expected_values = [(100.0, 10.0), (200.0, 20.0), (300.0, 30.0)]
	contexts = [calculator._build_context(period_idx) for period_idx in range(3)]
	for context, (test1_value, test2_value) in zip(contexts, expected_values):
		self.assertEqual(context["TEST1"], test1_value)
		self.assertEqual(context["TEST2"], test2_value)
	# All whitelisted math functions must be present and callable.
	for func_name in ("abs", "round", "min", "max", "sum", "sqrt", "pow", "ceil", "floor"):
		self.assertIn(func_name, contexts[0])
		self.assertTrue(callable(contexts[0][func_name]))
class TestFilterExpressionParser(FinancialReportTemplateTestCase):
	"""Test cases for FilterExpressionParser class.

	Consistency fix: every test now builds the Account table via
	``frappe.qb.DocType`` (as ``test_build_condition_accepts_document_instance``
	already did) instead of re-importing ``frappe.query_builder.DocType``
	inside each method.
	"""

	def _create_mock_report_row(self, formula: str, reference_code: str = "TEST_ROW"):
		"""Return a minimal stand-in for a Financial Report Row child document."""

		class MockReportRow:
			def __init__(self, formula, ref_code):
				self.calculation_formula = formula
				self.reference_code = ref_code
				self.data_source = "Account Data"
				self.idx = 1
				self.reverse_sign = 0

		return MockReportRow(formula, reference_code)

	# 1. BASIC PARSING
	def test_parse_simple_equality_condition(self):
		"""A single ["field", "=", value] triple builds a usable condition."""
		parser = FilterExpressionParser()
		account_table = frappe.qb.DocType("Account")
		simple_formula = '["account_type", "=", "Income"]'
		mock_row = self._create_mock_report_row(simple_formula)
		condition = parser.build_condition(mock_row, account_table)
		self.assertIsNotNone(condition)
		# Verify the condition contains the expected field and value
		condition_str = str(condition)
		self.assertIn("account_type", condition_str)
		self.assertIn("Income", condition_str)

	def test_parse_logical_and_or_conditions(self):
		"""`and` / `or` wrappers combine sub-conditions with the right operator."""
		parser = FilterExpressionParser()
		account_table = frappe.qb.DocType("Account")
		# Test AND condition
		and_formula = """{"and": [["account_type", "=", "Income"], ["is_group", "=", 0]]}"""
		mock_row_and = self._create_mock_report_row(and_formula)
		condition = parser.build_condition(mock_row_and, account_table)
		self.assertIsNotNone(condition)
		condition_str = str(condition)
		self.assertIn("account_type", condition_str)
		self.assertIn("is_group", condition_str)
		self.assertIn("AND", condition_str)
		# Test OR condition
		or_formula = """{"or": [["root_type", "=", "Asset"], ["root_type", "=", "Liability"]]}"""
		mock_row_or = self._create_mock_report_row(or_formula)
		condition = parser.build_condition(mock_row_or, account_table)
		self.assertIsNotNone(condition)
		condition_str = str(condition)
		self.assertIn("root_type", condition_str)
		self.assertIn("Asset", condition_str)
		self.assertIn("Liability", condition_str)
		self.assertIn("OR", condition_str)

	# 2. OPERATOR SUPPORT
	def test_parse_valid_operators(self):
		"""Every supported comparison operator yields a non-None condition."""
		parser = FilterExpressionParser()
		account_table = frappe.qb.DocType("Account")
		test_cases = [
			('["account_name", "!=", "Cash"]', "!="),
			('["account_number", "like", "1000"]', "like"),
			('["account_type", "in", ["Income", "Expense"]]', "in"),
			('["account_type", "not in", ["Asset", "Liability"]]', "not in"),
			('["account_name", "not like", "Expense"]', "not like"),
			('["account_number", ">=", 1000]', ">="),
			('["account_number", ">", 0]', ">"),
			('["account_number", "<=", 5000]', "<="),
			('["account_number", "<", 100]', "<"),
			('["is_group", "=", 0]', "="),
		]
		for formula, expected_op in test_cases:
			mock_row = self._create_mock_report_row(formula)
			condition = parser.build_condition(mock_row, account_table)
			self.assertIsNotNone(condition, f"Failed to build condition for operator {expected_op}")

	def test_build_logical_condition_with_reduce(self):
		"""N sub-conditions are folded together with N-1 logical operators."""
		parser = FilterExpressionParser()
		account_table = frappe.qb.DocType("Account")
		# Test AND logic with multiple conditions
		and_formula = '{"and": [["account_type", "=", "Income"], ["is_group", "=", 0], ["disabled", "=", 0]]}'
		mock_row_and = self._create_mock_report_row(and_formula)
		condition = parser.build_condition(mock_row_and, account_table)
		self.assertIsNotNone(condition)
		condition_str = str(condition)
		self.assertEqual(condition_str.count("AND"), 2)
		# Test OR logic with multiple conditions
		or_formula = '{"or": [["root_type", "=", "Asset"], ["root_type", "=", "Liability"], ["root_type", "=", "Income"]]}'
		mock_row_or = self._create_mock_report_row(or_formula)
		condition = parser.build_condition(mock_row_or, account_table)
		self.assertIsNotNone(condition)
		condition_str = str(condition)
		self.assertEqual(condition_str.count("OR"), 2)

	def test_operator_value_compatibility(self):
		"""List values work with "in"; numeric values work with comparison operators."""
		parser = FilterExpressionParser()
		account_table = frappe.qb.DocType("Account")
		# Test "in" operator with list value - should work
		in_formula = '["account_type", "in", ["Income", "Expense"]]'
		mock_row_in = self._create_mock_report_row(in_formula)
		condition = parser.build_condition(mock_row_in, account_table)
		self.assertIsNotNone(condition)  # Should work with list
		# Test numeric operators with proper values
		numeric_formulas = [
			'["tax_rate", ">", 10.0]',
			'["tax_rate", ">=", 0]',
			'["tax_rate", "<", 50.0]',
			'["tax_rate", "<=", 100.0]',
		]
		for formula in numeric_formulas:
			mock_row = self._create_mock_report_row(formula)
			condition = parser.build_condition(mock_row, account_table)
			self.assertIsNotNone(condition)

	# 3. COMPLEX STRUCTURES
	def test_parse_complex_nested_filters(self):
		"""Test complex nested filter expressions"""
		parser = FilterExpressionParser()
		account_table = frappe.qb.DocType("Account")
		# Complex nested condition: ((Income OR Expense) AND NOT Other) AND is_group=0
		complex_formula = """{
			"and": [
				{
					"and": [
						{
							"or": [
								["root_type", "=", "Income"],
								["root_type", "=", "Expense"]
							]
						},
						["account_category", "!=", "Other Income"]
					]
				},
				["is_group", "=", 0]
			]
		}"""
		mock_row_complex = self._create_mock_report_row(complex_formula)
		condition = parser.build_condition(mock_row_complex, account_table)
		self.assertIsNotNone(condition)
		condition_str = str(condition)
		self.assertIn("root_type", condition_str)
		self.assertIn("account_category", condition_str)
		self.assertIn("is_group", condition_str)
		self.assertIn("AND", condition_str)
		self.assertIn("OR", condition_str)

	def test_parse_deeply_nested_conditions(self):
		"""Triple nesting (AND > OR > AND) parses into one combined condition."""
		parser = FilterExpressionParser()
		account_table = frappe.qb.DocType("Account")
		# Triple nesting: AND containing OR containing AND
		deep_nested = """{
			"and": [
				{
					"or": [
						{
							"and": [
								["account_type", "=", "Income Account"],
								["is_group", "=", 0]
							]
						},
						["root_type", "=", "Asset"]
					]
				},
				["disabled", "=", 0]
			]
		}"""
		mock_row_deep = self._create_mock_report_row(deep_nested)
		condition = parser.build_condition(mock_row_deep, account_table)
		self.assertIsNotNone(condition)
		condition_str = str(condition)
		self.assertIn("account_type", condition_str)
		self.assertIn("root_type", condition_str)
		self.assertIn("disabled", condition_str)
		self.assertIn("AND", condition_str)
		self.assertIn("OR", condition_str)

	# 4. VALUE TYPES
	def test_parse_different_value_types(self):
		"""Test different value types in conditions"""
		parser = FilterExpressionParser()
		account_table = frappe.qb.DocType("Account")
		test_cases = [
			'["tax_rate", ">=", 10.50]',  # Float
			'["is_group", "=", 1]',  # Integer
			'["account_name", "=", ""]',  # Empty string
			'["account_type", "in", ["Income Account", "Expense Account"]]',  # List value
		]
		for formula in test_cases:
			mock_row = self._create_mock_report_row(formula)
			condition = parser.build_condition(mock_row, account_table)
			self.assertIsNotNone(condition, f"Failed to build condition for {formula}")

	# 5. EDGE CASES
	def test_parse_special_characters_in_values(self):
		"""Test special characters in filter values"""
		parser = FilterExpressionParser()
		account_table = frappe.qb.DocType("Account")
		test_cases = [
			('["account_name", "=", "John\'s Account"]', "apostrophe"),
			('["account_number", "like", "%100%"]', "wildcards"),
			('["account_name", "=", "Test & Development"]', "ampersand"),
		]
		for formula, _case_type in test_cases:
			mock_row = self._create_mock_report_row(formula)
			condition = parser.build_condition(mock_row, account_table)
			self.assertIsNotNone(condition, f"Failed to build condition for {_case_type} case")

	def test_parse_logical_operator_edge_cases(self):
		"""Test edge cases for logical operators"""
		parser = FilterExpressionParser()
		account_table = frappe.qb.DocType("Account")
		# Test empty conditions list - should return None
		empty_and = '{"and": []}'
		mock_row_empty = self._create_mock_report_row(empty_and)
		condition = parser.build_condition(mock_row_empty, account_table)
		self.assertIsNone(condition)
		# Test single condition in logical operator
		single_condition = '{"and": [["account_type", "=", "Bank"]]}'
		mock_row_single = self._create_mock_report_row(single_condition)
		condition = parser.build_condition(mock_row_single, account_table)
		self.assertIsNotNone(condition)
		# Test case sensitivity - should be invalid
		wrong_case = '{"AND": [["account_type", "=", "Bank"]]}'
		mock_row_wrong = self._create_mock_report_row(wrong_case)
		condition = parser.build_condition(mock_row_wrong, account_table)
		self.assertIsNone(condition)  # Should return None due to invalid logical operator

	def test_build_condition_accepts_document_instance(self):
		"""build_condition accepts both a real document and a plain frappe._dict."""
		parser = FilterExpressionParser()
		account_table = frappe.qb.DocType("Account")
		row_obj = frappe._dict(
			{
				"doctype": "Financial Report Row",
				"reference_code": "DOCROW1",
				"display_name": "Doc Row",
				"data_source": "Account Data",
				"balance_type": "Closing Balance",
				"calculation_formula": '["account_type", "=", "Income"]',
			}
		)
		# Unsaved child doc is sufficient for validation
		row_doc = frappe.get_doc(row_obj)
		cond = parser.build_condition(row_doc, account_table)
		self.assertIsNotNone(cond)
		# Also accepts plain frappe._dict object
		cond = parser.build_condition(row_obj, account_table)
		self.assertIsNotNone(cond)

	# 6. ERROR HANDLING
	def test_parse_invalid_filter_expressions(self):
		"""Test handling of invalid filter expressions"""
		parser = FilterExpressionParser()
		account_table = frappe.qb.DocType("Account")
		# Test malformed expressions - all should return None
		invalid_expressions = [
			'["incomplete"]',  # Missing operator and value
			'{"invalid": "structure"}',  # Wrong structure
			"not_a_list_or_dict",  # Invalid format
			'["field", "=", "value", "extra"]',  # Too many elements - actually might work due to slicing
			'["field"]',  # Single element
			'["field", "="]',  # Missing value - actually gets handled as empty value
			'{"AND": [["field", "=", "value"]]}',  # Wrong case
			'{"and": [["field", "=", "value"]], "or": [["field2", "=", "value2"]]}',  # Multiple keys
			'{"xor": [["field", "=", "value"]]}',  # Invalid logical operator
			'{"and": "not_a_list"}',  # Non-list value for logical operator
			"not even close to valid syntax",  # Unparseable string
		]
		for expr in invalid_expressions:
			mock_row = self._create_mock_report_row(expr)
			condition = parser.build_condition(mock_row, account_table)
			self.assertIsNone(condition, f"Expression {expr} should be invalid and return None")

	def test_parse_malformed_logical_conditions(self):
		"""Test malformed logical conditions"""
		parser = FilterExpressionParser()
		account_table = frappe.qb.DocType("Account")
		malformed_expressions = [
			'{"and": [["field", "=", "value"]], "or": [["field2", "=", "value2"]]}',  # Multiple keys
			'{"xor": [["field", "=", "value"]]}',  # Invalid logical operator
			'{"and": "not_a_list"}',  # Non-list value for logical operator
		]
		for expr in malformed_expressions:
			mock_row = self._create_mock_report_row(expr)
			condition = parser.build_condition(mock_row, account_table)
			self.assertIsNone(condition, f"Malformed expression {expr} should return None")
		# Test mixed types in conditions - should return None due to validation failure
		mixed_types = '{"and": [["account_type", "=", "Bank"], "string", 123]}'
		mock_row_mixed = self._create_mock_report_row(mixed_types)
		condition = parser.build_condition(mock_row_mixed, account_table)
		# Should return None because invalid sub-conditions cause validation to fail
		self.assertIsNone(condition)

	def test_handle_exception_robustness(self):
		"""Test exception handling for various inputs"""
		parser = FilterExpressionParser()
		account_table = frappe.qb.DocType("Account")
		problematic_inputs = [
			"not even close to valid syntax",  # Unparseable string
			'{"field": "value"}',  # JSON-like but not proper format
		]
		for test_input in problematic_inputs:
			mock_row = self._create_mock_report_row(test_input)
			condition = parser.build_condition(mock_row, account_table)
			self.assertIsNone(condition, f"Input {test_input} should result in None")

	# 7. BUILD CONDITIONS
	def test_build_condition_field_validation(self):
		"""Test field validation behavior"""
		parser = FilterExpressionParser()
		account_table = frappe.qb.DocType("Account")
		# Test with existing field - should work
		valid_formula = '["account_name", "=", "test"]'
		mock_row_valid = self._create_mock_report_row(valid_formula)
		condition = parser.build_condition(mock_row_valid, account_table)
		self.assertIsNotNone(condition)
		# Test with invalid formula - should return None
		invalid_formula = "invalid formula"
		mock_row_invalid = self._create_mock_report_row(invalid_formula)
		condition = parser.build_condition(mock_row_invalid, account_table)
		self.assertIsNone(condition)
class TestFinancialQueryBuilder(FinancialReportTemplateTestCase):
	"""Integration tests for FinancialQueryBuilder.fetch_account_balances.

	These tests create and submit real Journal Entries (and, in one case, a
	Period Closing Voucher) against "_Test Company", so each test cancels its
	vouchers in a finally block to leave the test site clean.
	"""

	def test_fetch_balances_with_journal_entries(self):
		"""Monthly movements, opening/closing carry-forward, and a zero-movement period."""
		company = "_Test Company"
		cash_account = "_Test Cash - _TC"
		bank_account = "_Test Bank - _TC"
		# Create journal entries in different periods
		# October: Transfer 1000 from Bank to Cash
		jv_oct = make_journal_entry(
			account1=cash_account,
			account2=bank_account,
			amount=1000,
			posting_date="2024-10-15",
			company=company,
			submit=True,
		)
		# November: Transfer 500 from Bank to Cash
		jv_nov = make_journal_entry(
			account1=cash_account,
			account2=bank_account,
			amount=500,
			posting_date="2024-11-20",
			company=company,
			submit=True,
		)
		# December: No transactions (test zero movement period)
		try:
			# Set up filters and periods for Q4 2024
			filters = {
				"company": company,
				"from_fiscal_year": "2024",
				"to_fiscal_year": "2024",
				"period_start_date": "2024-10-01",
				"period_end_date": "2024-12-31",
				"filter_based_on": "Date Range",
				"periodicity": "Monthly",
			}
			periods = [
				{"key": "2024_oct", "from_date": "2024-10-01", "to_date": "2024-10-31"},
				{"key": "2024_nov", "from_date": "2024-11-01", "to_date": "2024-11-30"},
				{"key": "2024_dec", "from_date": "2024-12-01", "to_date": "2024-12-31"},
			]
			query_builder = FinancialQueryBuilder(filters, periods)
			# Create account objects as expected by fetch_account_balances
			accounts = [
				frappe._dict({"name": cash_account, "account_name": "Cash", "account_number": "1001"}),
				frappe._dict({"name": bank_account, "account_name": "Bank", "account_number": "1002"}),
			]
			# Fetch balances using the full workflow
			balances_data = query_builder.fetch_account_balances(accounts)
			# Verify Cash account balances
			cash_data = balances_data.get(cash_account)
			self.assertIsNotNone(cash_data, "Cash account should exist in results")
			# October: movement = +1000 (debit)
			oct_cash = cash_data.get_period("2024_oct")
			self.assertIsNotNone(oct_cash, "October period should exist for cash")
			self.assertEqual(oct_cash.movement, 1000.0, "October cash movement should be 1000")
			# November: movement = +500
			nov_cash = cash_data.get_period("2024_nov")
			self.assertIsNotNone(nov_cash, "November period should exist for cash")
			self.assertEqual(nov_cash.movement, 500.0, "November cash movement should be 500")
			self.assertEqual(
				nov_cash.opening, oct_cash.closing, "November opening should equal October closing"
			)
			# December: movement = 0 (no transactions)
			dec_cash = cash_data.get_period("2024_dec")
			self.assertIsNotNone(dec_cash, "December period should exist for cash")
			self.assertEqual(dec_cash.movement, 0.0, "December cash movement should be 0")
			self.assertEqual(
				dec_cash.closing,
				nov_cash.closing,
				"December closing should equal November closing when no movement",
			)
			# Verify Bank account balances (opposite direction)
			bank_data = balances_data.get(bank_account)
			self.assertIsNotNone(bank_data, "Bank account should exist in results")
			oct_bank = bank_data.get_period("2024_oct")
			self.assertEqual(oct_bank.movement, -1000.0, "October bank movement should be -1000")
			nov_bank = bank_data.get_period("2024_nov")
			self.assertEqual(nov_bank.movement, -500.0, "November bank movement should be -500")
		finally:
			# Clean up: cancel journal entries
			jv_nov.cancel()
			jv_oct.cancel()

	def test_opening_balance_from_previous_period_closing(self):
		"""Opening balances are seeded from the prior year's Period Closing Voucher.

		Books 5000 into Cash in FY2023, closes the year via a PCV (legacy
		controller forced on), then checks that 2024 periods open with that
		balance and carry it forward month over month, including across two
		separately-built query builders (Q1 and Q2).
		"""
		company = "_Test Company"
		cash_account = "_Test Cash - _TC"
		sales_account = "Sales - _TC"
		posting_date_2023 = "2023-06-15"
		# Create journal entry in prior period (2023)
		# Cash Dr 5000, Sales Cr 5000
		jv_2023 = make_journal_entry(
			account1=cash_account,
			account2=sales_account,
			amount=5000,
			posting_date=posting_date_2023,
			company=company,
			submit=True,
		)
		pcv = None
		jv_2024 = None
		# Remember the site-wide PCV controller setting so it can be restored.
		original_pcv_setting = frappe.db.get_single_value(
			"Accounts Settings", "use_legacy_controller_for_pcv"
		)
		try:
			# Create Period Closing Voucher for 2023
			# This will create Account Closing Balance entries
			closing_account = frappe.db.get_value(
				"Account",
				{
					"company": company,
					"root_type": "Liability",
					"is_group": 0,
					"account_type": ["not in", ["Payable", "Receivable"]],
				},
				"name",
			)
			fy_2023 = get_fiscal_year(posting_date_2023, company=company)
			frappe.db.set_single_value("Accounts Settings", "use_legacy_controller_for_pcv", 1)
			pcv = frappe.get_doc(
				{
					"doctype": "Period Closing Voucher",
					"transaction_date": "2023-12-31",
					"period_start_date": fy_2023[1],
					"period_end_date": fy_2023[2],
					"company": company,
					"fiscal_year": fy_2023[0],
					"cost_center": "_Test Cost Center - _TC",
					"closing_account_head": closing_account,
					"remarks": "Test Period Closing",
				}
			)
			pcv.insert()
			pcv.submit()
			pcv.reload()
			# Now create a small transaction in 2024 to ensure the account appears
			jv_2024 = make_journal_entry(
				account1=cash_account,
				account2=sales_account,
				amount=100,
				posting_date="2024-01-15",
				company=company,
				submit=True,
			)
			# Set up filters for Q1 2024 (after the period closing)
			filters = {
				"company": company,
				"from_fiscal_year": "2024",
				"to_fiscal_year": "2024",
				"period_start_date": "2024-01-01",
				"period_end_date": "2024-03-31",
				"filter_based_on": "Date Range",
				"periodicity": "Monthly",
				"ignore_closing_entries": True,  # Don't include PCV entries in movements
			}
			periods = [
				{"key": "2024_jan", "from_date": "2024-01-01", "to_date": "2024-01-31"},
				{"key": "2024_feb", "from_date": "2024-02-01", "to_date": "2024-02-29"},
				{"key": "2024_mar", "from_date": "2024-03-01", "to_date": "2024-03-31"},
			]
			query_builder = FinancialQueryBuilder(filters, periods)
			accounts = [
				frappe._dict({"name": cash_account, "account_name": "Cash", "account_number": "1001"}),
			]
			balances_data = query_builder.fetch_account_balances(accounts)
			# Verify Cash account has opening balance from 2023 transactions
			cash_data = balances_data.get(cash_account)
			self.assertIsNotNone(cash_data, "Cash account should exist in results")
			jan_cash = cash_data.get_period("2024_jan")
			self.assertIsNotNone(jan_cash, "January period should exist")
			# Opening balance should be from prior period
			# Cash had 5000 debit in 2023, so opening in 2024 should be >= 5000
			# (may be higher if there were other test transactions)
			self.assertEqual(
				jan_cash.opening,
				5000.0,
				"January opening should equal to balance from 2023 (5000)",
			)
			# Verify running balance logic
			# Movement in January is 100 (from jv_2024)
			self.assertEqual(jan_cash.movement, 100.0, "January movement should be 100")
			self.assertEqual(
				jan_cash.closing, jan_cash.opening + jan_cash.movement, "Closing = Opening + Movement"
			)
			# February and March should have no movement but carry the balance
			feb_cash = cash_data.get_period("2024_feb")
			self.assertEqual(feb_cash.opening, jan_cash.closing, "Feb opening = Jan closing")
			self.assertEqual(feb_cash.movement, 0.0, "February should have no movement")
			self.assertEqual(feb_cash.closing, feb_cash.opening, "Feb closing = opening when no movement")
			mar_cash = cash_data.get_period("2024_mar")
			self.assertEqual(mar_cash.opening, feb_cash.closing, "Mar opening = Feb closing")
			self.assertEqual(mar_cash.movement, 0.0, "March should have no movement")
			self.assertEqual(mar_cash.closing, mar_cash.opening, "Mar closing = opening when no movement")
			# Set up filters for Q2 2024
			filters_q2 = {
				"company": company,
				"from_fiscal_year": "2024",
				"to_fiscal_year": "2024",
				"period_start_date": "2024-04-01",
				"period_end_date": "2024-06-30",
				"filter_based_on": "Date Range",
				"periodicity": "Monthly",
				"ignore_closing_entries": True,
			}
			periods_q2 = [
				{"key": "2024_apr", "from_date": "2024-04-01", "to_date": "2024-04-30"},
				{"key": "2024_may", "from_date": "2024-05-01", "to_date": "2024-05-31"},
				{"key": "2024_jun", "from_date": "2024-06-01", "to_date": "2024-06-30"},
			]
			query_builder_q2 = FinancialQueryBuilder(filters_q2, periods_q2)
			balances_data_q2 = query_builder_q2.fetch_account_balances(accounts)
			# Verify Cash account in Q2
			cash_data_q2 = balances_data_q2.get(cash_account)
			self.assertIsNotNone(cash_data_q2, "Cash account should exist in Q2 results")
			apr_cash = cash_data_q2.get_period("2024_apr")
			self.assertIsNotNone(apr_cash, "April period should exist")
			# Opening balance in April should equal closing in March
			self.assertEqual(
				apr_cash.opening,
				mar_cash.closing,
				"April opening should equal March closing balance",
			)
			self.assertEqual(apr_cash.closing, apr_cash.opening, "April closing = opening when no movement")
		finally:
			# Clean up
			frappe.db.set_single_value(
				"Accounts Settings", "use_legacy_controller_for_pcv", original_pcv_setting or 0
			)
			if jv_2024:
				jv_2024.cancel()
			if pcv:
				pcv.reload()
				if pcv.docstatus == 1:
					pcv.cancel()
			jv_2023.cancel()

	def test_account_with_gl_entries_but_no_prior_closing_balance(self):
		"""Accounts with GL entries but no Account Closing Balance still appear in results."""
		company = "_Test Company"
		cash_account = "_Test Cash - _TC"
		bank_account = "_Test Bank - _TC"
		# Create journal entries WITHOUT any prior Period Closing Voucher
		# This ensures the account exists in gl_dict but NOT in balances_data
		jv = make_journal_entry(
			account1=cash_account,
			account2=bank_account,
			amount=2500,
			posting_date="2024-07-15",
			company=company,
			submit=True,
		)
		try:
			# Set up filters - use a period with no prior PCV
			filters = {
				"company": company,
				"from_fiscal_year": "2024",
				"to_fiscal_year": "2024",
				"period_start_date": "2024-07-01",
				"period_end_date": "2024-09-30",
				"filter_based_on": "Date Range",
				"periodicity": "Monthly",
			}
			periods = [
				{"key": "2024_jul", "from_date": "2024-07-01", "to_date": "2024-07-31"},
				{"key": "2024_aug", "from_date": "2024-08-01", "to_date": "2024-08-31"},
				{"key": "2024_sep", "from_date": "2024-09-01", "to_date": "2024-09-30"},
			]
			query_builder = FinancialQueryBuilder(filters, periods)
			# Use accounts that have GL entries but may not have Account Closing Balance
			accounts = [
				frappe._dict({"name": cash_account, "account_name": "Cash", "account_number": "1001"}),
				frappe._dict({"name": bank_account, "account_name": "Bank", "account_number": "1002"}),
			]
			balances_data = query_builder.fetch_account_balances(accounts)
			# Verify accounts are present in results even without prior closing balance
			cash_data = balances_data.get(cash_account)
			self.assertIsNotNone(cash_data, "Cash account should exist in results")
			bank_data = balances_data.get(bank_account)
			self.assertIsNotNone(bank_data, "Bank account should exist in results")
			# Verify July has the movement from journal entry
			jul_cash = cash_data.get_period("2024_jul")
			self.assertIsNotNone(jul_cash, "July period should exist for cash")
			self.assertEqual(jul_cash.movement, 2500.0, "July cash movement should be 2500")
			jul_bank = bank_data.get_period("2024_jul")
			self.assertIsNotNone(jul_bank, "July period should exist for bank")
			self.assertEqual(jul_bank.movement, -2500.0, "July bank movement should be -2500")
			# Verify subsequent periods exist with zero movement
			aug_cash = cash_data.get_period("2024_aug")
			self.assertIsNotNone(aug_cash, "August period should exist for cash")
			self.assertEqual(aug_cash.movement, 0.0, "August cash movement should be 0")
			self.assertEqual(aug_cash.opening, jul_cash.closing, "August opening = July closing")
			sep_cash = cash_data.get_period("2024_sep")
			self.assertIsNotNone(sep_cash, "September period should exist for cash")
			self.assertEqual(sep_cash.movement, 0.0, "September cash movement should be 0")
			self.assertEqual(sep_cash.opening, aug_cash.closing, "September opening = August closing")
		finally:
			jv.cancel()
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/accounts/doctype/financial_report_template/test_financial_report_engine.py",
"license": "GNU General Public License v3.0",
"lines": 1720,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
frappe/erpnext:erpnext/accounts/doctype/financial_report_template/test_financial_report_template.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
from frappe.tests import IntegrationTestCase
from frappe.tests.utils import make_test_records
# On IntegrationTestCase, the doctype test records and all
# link-field test record dependencies are recursively loaded
# Use these module variables to add/remove to/from that list
# (the framework reads them by name at class-collection time).
EXTRA_TEST_RECORD_DEPENDENCIES = []  # eg. ["User"]
IGNORE_TEST_RECORD_DEPENDENCIES = []  # eg. ["User"]
class TestFinancialReportTemplate(IntegrationTestCase):
	"""Placeholder doctype test class; behavioral tests live in the engine test module."""

	pass
class FinancialReportTemplateTestCase(IntegrationTestCase):
	"""Utility class with common setup and helper methods for all test classes"""

	@classmethod
	def setUpClass(cls):
		"""Set up test data.

		Calls ``super().setUpClass()`` first so IntegrationTestCase's own
		class-level setup (test record loading, site/context handling) runs;
		the original override skipped it, which can leave framework state
		half-initialised.
		"""
		super().setUpClass()
		make_test_records("Company")
		make_test_records("Fiscal Year")
		cls.create_test_template()

	@classmethod
	def create_test_template(cls):
		"""Create a test financial report template (idempotent) and cache it on the class."""
		if not frappe.db.exists("Financial Report Template", "Test P&L Template"):
			template = frappe.get_doc(
				{
					"doctype": "Financial Report Template",
					"template_name": "Test P&L Template",
					"report_type": "Profit and Loss Statement",
					"rows": [
						{
							"reference_code": "INC001",
							"display_name": "Income",
							"indentation_level": 0,
							"data_source": "Account Data",
							"balance_type": "Closing Balance",
							"bold_text": 1,
							"calculation_formula": '["root_type", "=", "Income"]',
						},
						{
							"reference_code": "EXP001",
							"display_name": "Expenses",
							"indentation_level": 0,
							"data_source": "Account Data",
							"balance_type": "Closing Balance",
							"bold_text": 1,
							"calculation_formula": '["root_type", "=", "Expense"]',
						},
						{
							"reference_code": "NET001",
							"display_name": "Net Profit/Loss",
							"indentation_level": 0,
							"data_source": "Calculated Amount",
							"bold_text": 1,
							"calculation_formula": "INC001 - EXP001",
						},
					],
				}
			)
			template.insert()
		cls.test_template = frappe.get_doc("Financial Report Template", "Test P&L Template")

	@staticmethod
	def create_test_template_with_rows(rows_data):
		"""Helper method to create test template with specific rows.

		Returns an unsaved document; the caller is responsible for inserting it.
		"""
		template_name = f"Test Template {frappe.generate_hash()[:8]}"
		template = frappe.get_doc(
			{"doctype": "Financial Report Template", "template_name": template_name, "rows": rows_data}
		)
		return template
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/accounts/doctype/financial_report_template/test_financial_report_template.py",
"license": "GNU General Public License v3.0",
"lines": 69,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
frappe/erpnext:erpnext/accounts/doctype/process_period_closing_voucher/process_period_closing_voucher.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import copy
from datetime import timedelta
import frappe
from frappe import qb
from frappe.model.document import Document
from frappe.query_builder.functions import Count, Max, Min, Sum
from frappe.utils import flt, get_datetime
from frappe.utils.scheduler import is_scheduler_inactive
from erpnext.accounts.doctype.account_closing_balance.account_closing_balance import (
make_closing_entries,
)
class ProcessPeriodClosingVoucher(Document):
	"""Background-processing controller for a parent Period Closing Voucher.

	Splits the PCV date range into one child row per date (and report type)
	so each date can be processed as a separate queued job.
	"""

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		from erpnext.accounts.doctype.process_period_closing_voucher_detail.process_period_closing_voucher_detail import (
			ProcessPeriodClosingVoucherDetail,
		)

		amended_from: DF.Link | None
		bs_closing_balance: DF.JSON | None
		normal_balances: DF.Table[ProcessPeriodClosingVoucherDetail]
		p_l_closing_balance: DF.JSON | None
		parent_pcv: DF.Link
		status: DF.Literal["Queued", "Running", "Paused", "Completed", "Cancelled"]
		z_opening_balances: DF.Table[ProcessPeriodClosingVoucherDetail]
	# end: auto-generated types

	def on_discard(self):
		"""Mark the run Cancelled when the document is discarded."""
		self.db_set("status", "Cancelled")

	def validate(self):
		# Every (re)save resets the run to Queued and rebuilds the work tables.
		self.status = "Queued"
		self.populate_processing_tables()

	def populate_processing_tables(self):
		"""Rebuild both child tables of per-date processing rows."""
		self.generate_pcv_dates()
		self.generate_opening_balances_dates()

	def get_dates(self, start, end):
		"""Return every date from start to end (inclusive) as a list of datetimes."""
		return [start + timedelta(days=offset) for offset in range((end - start).days + 1)]

	def generate_pcv_dates(self):
		"""One P&L row and one Balance Sheet row per date of the PCV period."""
		self.normal_balances = []
		pcv = frappe.get_doc("Period Closing Voucher", self.parent_pcv)
		dates = self.get_dates(get_datetime(pcv.period_start_date), get_datetime(pcv.period_end_date))
		for processing_date in dates:
			self.append(
				"normal_balances",
				{"processing_date": processing_date, "status": "Queued", "report_type": "Profit and Loss"},
			)
			self.append(
				"normal_balances",
				{"processing_date": processing_date, "status": "Queued", "report_type": "Balance Sheet"},
			)

	def generate_opening_balances_dates(self):
		"""For the company's first PCV, queue one Balance Sheet row per GL posting date.

		Spans the full range from the earliest to the latest GL Entry posting
		date for the company.
		"""
		self.z_opening_balances = []
		pcv = frappe.get_doc("Period Closing Voucher", self.parent_pcv)
		if pcv.is_first_period_closing_voucher():
			gl = qb.DocType("GL Entry")
			# Renamed from `min`/`max` to avoid shadowing the builtins.
			first_posting_date = (
				qb.from_(gl).select(Min(gl.posting_date)).where(gl.company.eq(pcv.company)).run()[0][0]
			)
			last_posting_date = (
				qb.from_(gl).select(Max(gl.posting_date)).where(gl.company.eq(pcv.company)).run()[0][0]
			)
			# NOTE(review): if the company has no GL Entries both aggregates are
			# None and get_datetime falls back to "now" — confirm that is intended.
			dates = self.get_dates(get_datetime(first_posting_date), get_datetime(last_posting_date))
			for processing_date in dates:
				self.append(
					"z_opening_balances",
					{"processing_date": processing_date, "status": "Queued", "report_type": "Balance Sheet"},
				)

	def on_submit(self):
		start_pcv_processing(self.name)

	def on_cancel(self):
		cancel_pcv_processing(self.name)
@frappe.whitelist()
def start_pcv_processing(docname: str):
	"""Process the next batch of queued detail rows for a Process Period Closing Voucher.

	Picks up to 4 queued rows, marks them Running and enqueues one background
	job per row; when no queued rows remain, marks the whole run Completed.
	Does nothing unless the run is currently Queued or Running.
	"""
	if frappe.db.get_value("Process Period Closing Voucher", docname, "status") in ["Queued", "Running"]:
		frappe.db.set_value("Process Period Closing Voucher", docname, "status", "Running")
		if normal_balances := frappe.db.get_all(
			"Process Period Closing Voucher Detail",
			filters={"parent": docname, "status": "Queued"},
			fields=["processing_date", "report_type", "parentfield"],
			order_by="parentfield, idx, processing_date",
			limit=4,  # batch size: at most 4 jobs enqueued per call
		):
			# Only dispatch work when the scheduler can actually run the jobs.
			if not is_scheduler_inactive():
				for x in normal_balances:
					frappe.db.set_value(
						"Process Period Closing Voucher Detail",
						{
							"processing_date": x.processing_date,
							"parent": docname,
							"report_type": x.report_type,
							"parentfield": x.parentfield,
						},
						"status",
						"Running",
					)
					frappe.enqueue(
						method="erpnext.accounts.doctype.process_period_closing_voucher.process_period_closing_voucher.process_individual_date",
						queue="long",
						# NOTE(review): timeout is passed as the string "3600" —
						# confirm frappe.enqueue coerces it to an int.
						timeout="3600",
						is_async=True,
						enqueue_after_commit=True,
						docname=docname,
						date=x.processing_date,
						report_type=x.report_type,
						parentfield=x.parentfield,
					)
		else:
			# No queued detail rows left: the whole run is done.
			frappe.db.set_value("Process Period Closing Voucher", docname, "status", "Completed")
@frappe.whitelist()
def pause_pcv_processing(docname: str):
    """Pause processing: mark the parent document and all its Queued detail rows Paused.

    Rows already Running are left alone and will finish on their own.
    """
    parent = qb.DocType("Process Period Closing Voucher")
    qb.update(parent).set(parent.status, "Paused").where(parent.name.eq(docname)).run()

    queued_rows = frappe.db.get_all(
        "Process Period Closing Voucher Detail",
        filters={"parent": docname, "status": "Queued"},
        pluck="name",
    )
    if queued_rows:
        detail = qb.DocType("Process Period Closing Voucher Detail")
        qb.update(detail).set(detail.status, "Paused").where(detail.name.isin(queued_rows)).run()
@frappe.whitelist()
def cancel_pcv_processing(docname: str):
    """Cancel processing: mark the parent document and all its Queued detail rows Cancelled.

    Rows already Running are left alone and will finish on their own.
    """
    parent = qb.DocType("Process Period Closing Voucher")
    qb.update(parent).set(parent.status, "Cancelled").where(parent.name.eq(docname)).run()

    queued_rows = frappe.db.get_all(
        "Process Period Closing Voucher Detail",
        filters={"parent": docname, "status": "Queued"},
        pluck="name",
    )
    if queued_rows:
        detail = qb.DocType("Process Period Closing Voucher Detail")
        qb.update(detail).set(detail.status, "Cancelled").where(detail.name.isin(queued_rows)).run()
@frappe.whitelist()
def resume_pcv_processing(docname: str):
    """Resume a paused run: re-queue all Paused detail rows and restart processing."""
    parent = qb.DocType("Process Period Closing Voucher")
    qb.update(parent).set(parent.status, "Running").where(parent.name.eq(docname)).run()

    paused_rows = frappe.db.get_all(
        "Process Period Closing Voucher Detail",
        filters={"parent": docname, "status": "Paused"},
        pluck="name",
    )
    if paused_rows:
        detail = qb.DocType("Process Period Closing Voucher Detail")
        qb.update(detail).set(detail.status, "Queued").where(detail.name.isin(paused_rows)).run()
    # Seed the background pipeline again for the re-queued rows.
    start_pcv_processing(docname)
def update_default_dimensions(dimension_fields, gl_entry, dimension_values):
    """Copy positional dimension values into `gl_entry`, keyed by field name.

    `dimension_values` is positional and must follow the order of
    `dimension_fields` (i.e. the order returned by `get_dimensions()`).
    Uses `zip` instead of manual indexing; if the two sequences differ in
    length, extra items on either side are ignored.

    Args:
        dimension_fields: ordered dimension fieldnames (e.g. "cost_center").
        gl_entry: dict-like GL entry to mutate in place.
        dimension_values: values aligned with `dimension_fields`.
    """
    for field, value in zip(dimension_fields, dimension_values):
        gl_entry[field] = value
def get_gle_for_pl_account(pcv, acc, balances, dimensions):
    """Build a GL Entry dict that reverses the accumulated P&L balance of `acc`.

    A net debit balance (positive) becomes a credit entry and a net credit
    balance (negative) becomes a debit entry, so the account nets to zero as
    on the PCV posting date. Dimension values are applied positionally via
    `update_default_dimensions`.
    """
    balance_in_account_currency = flt(balances.debit_in_account_currency) - flt(
        balances.credit_in_account_currency
    )
    balance_in_company_currency = flt(balances.debit) - flt(balances.credit)
    gl_entry = frappe._dict(
        {
            "company": pcv.company,
            "posting_date": pcv.period_end_date,
            "account": acc,
            "account_currency": balances.account_currency,
            # Debit side is filled only when the net balance is a credit (negative),
            # i.e. the entry reverses the account's standing balance.
            "debit_in_account_currency": abs(balance_in_account_currency)
            if balance_in_account_currency < 0
            else 0,
            "debit": abs(balance_in_company_currency) if balance_in_company_currency < 0 else 0,
            "credit_in_account_currency": abs(balance_in_account_currency)
            if balance_in_account_currency > 0
            else 0,
            "credit": abs(balance_in_company_currency) if balance_in_company_currency > 0 else 0,
            "is_period_closing_voucher_entry": 1,
            "voucher_type": "Period Closing Voucher",
            "voucher_no": pcv.name,
            "fiscal_year": pcv.fiscal_year,
            "remarks": pcv.remarks,
            "is_opening": "No",
        }
    )
    # update dimensions
    update_default_dimensions(get_dimensions(), gl_entry, dimensions)
    return gl_entry
def get_gle_for_closing_account(pcv, dimension_balance, dimensions):
    """Build the offsetting GL Entry against the PCV's closing account head.

    `dimension_balance` carries the per-dimension P&L total
    (`balance_in_company_currency`); a positive total is posted as a debit,
    a negative one as a credit.

    NOTE(review): `debit_in_account_currency`/`credit_in_account_currency`
    reuse the company-currency amounts — this looks like it assumes the
    closing account is in company currency; confirm.
    """
    balance_in_company_currency = flt(dimension_balance.balance_in_company_currency)
    debit = balance_in_company_currency if balance_in_company_currency > 0 else 0
    credit = abs(balance_in_company_currency) if balance_in_company_currency < 0 else 0
    gl_entry = frappe._dict(
        {
            "company": pcv.company,
            "posting_date": pcv.period_end_date,
            "account": pcv.closing_account_head,
            "account_currency": frappe.db.get_value("Account", pcv.closing_account_head, "account_currency"),
            "debit_in_account_currency": debit,
            "debit": debit,
            "credit_in_account_currency": credit,
            "credit": credit,
            "is_period_closing_voucher_entry": 1,
            "voucher_type": "Period Closing Voucher",
            "voucher_no": pcv.name,
            "fiscal_year": pcv.fiscal_year,
            "remarks": pcv.remarks,
            "is_opening": "No",
        }
    )
    # update dimensions
    update_default_dimensions(get_dimensions(), gl_entry, dimensions)
    return gl_entry
@frappe.whitelist()
def schedule_next_date(docname: str):
    """Chain-schedule the next Queued date, or finalize when everything is done.

    Called at the end of each `process_individual_date` job. Picks the next
    single Queued detail row and enqueues it; when no Queued rows remain and
    every row is Completed, the ledger entries are summarized and posted.
    """
    if to_process := frappe.db.get_all(
        "Process Period Closing Voucher Detail",
        filters={"parent": docname, "status": "Queued"},
        fields=["processing_date", "report_type", "parentfield"],
        order_by="parentfield, idx, processing_date",
        limit=1,
    ):
        # Don't enqueue when the scheduler is disabled; the job would never run.
        if not is_scheduler_inactive():
            # Mark Running BEFORE enqueueing so the worker's status check passes.
            frappe.db.set_value(
                "Process Period Closing Voucher Detail",
                {
                    "processing_date": to_process[0].processing_date,
                    "parent": docname,
                    "report_type": to_process[0].report_type,
                    "parentfield": to_process[0].parentfield,
                },
                "status",
                "Running",
            )
            frappe.enqueue(
                method="erpnext.accounts.doctype.process_period_closing_voucher.process_period_closing_voucher.process_individual_date",
                queue="long",
                timeout="3600",
                is_async=True,
                enqueue_after_commit=True,
                docname=docname,
                date=to_process[0].processing_date,
                report_type=to_process[0].report_type,
                parentfield=to_process[0].parentfield,
            )
    else:
        ppcvd = qb.DocType("Process Period Closing Voucher Detail")
        total_no_of_dates = (
            qb.from_(ppcvd).select(Count(ppcvd.star)).where(ppcvd.parent.eq(docname)).run()[0][0]
        )
        completed = (
            qb.from_(ppcvd)
            .select(Count(ppcvd.star))
            .where(ppcvd.parent.eq(docname) & ppcvd.status.eq("Completed"))
            .run()[0][0]
        )
        # Ensure both normal and opening balances are processed for all dates
        if total_no_of_dates == completed:
            summarize_and_post_ledger_entries(docname)
def make_dict_json_compliant(dimension_wise_balance) -> dict:
    """Return a copy of the mapping with tuple keys flattened to comma-joined strings.

    JSON cannot encode dictionaries whose keys are tuples, so each key like
    ``("CC-1", None)`` becomes ``"CC-1,None"``.
    """
    return {
        ",".join(str(part) for part in key): value
        for key, value in dimension_wise_balance.items()
    }
def get_consolidated_gles(balances, report_type) -> list:
    """Flatten the stored per-date closing balances for one report type.

    Each detail row matching `report_type` holds a JSON list of summarized GL
    entries in `closing_balance`; all of them are decoded and concatenated.
    """
    return [
        frappe._dict(gle)
        for row in balances
        if row.report_type == report_type
        for gle in frappe.json.loads(row.closing_balance)
    ]
def get_gl_entries(docname):
    """
    Calculate total closing balance of all P&L accounts across PCV start and end date.

    Returns a pair of GL-entry lists: the per-account reversal entries and the
    offsetting entries against the closing account head. As a side effect the
    consolidated P&L balance is persisted on the parent document as JSON.
    """
    ppcv = frappe.get_doc("Process Period Closing Voucher", docname)

    # calculate balance
    gl_entries = get_consolidated_gles(ppcv.normal_balances, "Profit and Loss")
    pl_dimension_wise_acc_balance = build_dimension_wise_balance_dict(gl_entries)

    # save
    json_dict = make_dict_json_compliant(pl_dimension_wise_acc_balance)
    frappe.db.set_value(
        "Process Period Closing Voucher", docname, "p_l_closing_balance", frappe.json.dumps(json_dict)
    )

    # build gl map
    pcv = frappe.get_doc("Period Closing Voucher", ppcv.parent_pcv)
    pl_accounts_reverse_gle = []
    closing_account_gle = []
    for dimensions, account_balances in pl_dimension_wise_acc_balance.items():
        for acc, balances in account_balances.items():
            # The special "balances" totals key has no debit/credit fields, so
            # its computed balance is 0 and it is skipped here.
            balance_in_company_currency = flt(balances.debit) - flt(balances.credit)
            if balance_in_company_currency:
                pl_accounts_reverse_gle.append(get_gle_for_pl_account(pcv, acc, balances, dimensions))
        # One offsetting entry per dimension combination, from the totals bucket.
        closing_account_gle.append(get_gle_for_closing_account(pcv, account_balances["balances"], dimensions))
    return pl_accounts_reverse_gle, closing_account_gle
def calculate_balance_sheet_balance(docname):
    """
    Calculate total closing balance of all Balance Sheet accounts across PCV start and end date.
    If it is first PCV, opening entries are also considered

    Persists the consolidated result on the parent document as JSON and
    returns the dimension-wise balance dictionary.
    """
    ppcv = frappe.get_doc("Process Period Closing Voucher", docname)
    # normal_balances plus z_opening_balances (populated only for the first PCV)
    gl_entries = get_consolidated_gles(ppcv.normal_balances + ppcv.z_opening_balances, "Balance Sheet")

    # build dimension wise dictionary from all GLE's
    bs_dimension_wise_acc_balance = build_dimension_wise_balance_dict(gl_entries)

    # save
    json_dict = make_dict_json_compliant(bs_dimension_wise_acc_balance)
    frappe.db.set_value(
        "Process Period Closing Voucher", docname, "bs_closing_balance", frappe.json.dumps(json_dict)
    )
    return bs_dimension_wise_acc_balance
def get_p_l_closing_entries(pl_gles, pcv):
    """Return deep copies of the P&L reversal GLEs plus a debit/credit-swapped copy of each.

    The swapped copies represent the closing balances themselves: they carry
    the PCV reference, and `is_period_closing_voucher_entry` is cleared.
    The input list is not modified.
    """
    closing_entries = copy.deepcopy(pl_gles)
    for source in pl_gles:
        swapped = copy.deepcopy(source)
        # reverse debit and credit
        swapped.debit, swapped.credit = source.credit, source.debit
        swapped.debit_in_account_currency = source.credit_in_account_currency
        swapped.credit_in_account_currency = source.debit_in_account_currency
        swapped.is_period_closing_voucher_entry = 0
        swapped.period_closing_voucher = pcv.name
        closing_entries.append(swapped)
    return closing_entries
def get_bs_closing_entries(dimension_wise_balance, pcv):
    """Build closing entries for every Balance Sheet account with a non-zero balance.

    The per-dimension "balances" totals bucket is skipped; only real account
    rows produce entries.
    """
    closing_entries = []
    for dimensions, account_balances in dimension_wise_balance.items():
        for account, balances in account_balances.items():
            if account == "balances":
                # dimension-level totals, not an account row
                continue
            if flt(balances.debit) - flt(balances.credit):
                closing_entries.append(get_closing_entry(pcv, account, balances, dimensions))
    return closing_entries
def get_closing_account_closing_entry(closing_account_gle, pcv):
    """Deep-copy the closing-account GLEs, tagging each copy with the PCV name.

    The input list is left untouched.
    """
    entries = copy.deepcopy(closing_account_gle)
    for entry in entries:
        entry.period_closing_voucher = pcv.name
    return entries
def summarize_and_post_ledger_entries(docname):
    """Final step of the background run: post GL entries and closing balances.

    Posts the P&L reversal entries and the closing-account offset to the
    General Ledger, then writes the Account Closing Balance rows, and marks
    both the PCV and this processing document as Completed.
    """
    # P&L accounts
    pl_accounts_reverse_gle, closing_account_gle = get_gl_entries(docname)
    gl_entries = pl_accounts_reverse_gle + closing_account_gle

    from erpnext.accounts.general_ledger import make_gl_entries

    if gl_entries:
        # merge_entries=False: keep one GL row per dimension/account combination
        make_gl_entries(gl_entries, merge_entries=False)

    pcv_name = frappe.db.get_value("Process Period Closing Voucher", docname, "parent_pcv")
    pcv = frappe.get_doc("Period Closing Voucher", pcv_name)

    # Balance sheet accounts
    bs_dimension_wise_acc_balance = calculate_balance_sheet_balance(docname)
    pl_closing_entries = get_p_l_closing_entries(pl_accounts_reverse_gle, pcv)
    bs_closing_entries = get_bs_closing_entries(bs_dimension_wise_acc_balance, pcv)
    closing_entries_for_closing_account = get_closing_account_closing_entry(closing_account_gle, pcv)
    closing_entries = pl_closing_entries + bs_closing_entries + closing_entries_for_closing_account
    make_closing_entries(closing_entries, pcv.name, pcv.company, pcv.period_end_date)
    frappe.db.set_value("Period Closing Voucher", pcv.name, "gle_processing_status", "Completed")
    frappe.db.set_value("Process Period Closing Voucher", docname, "status", "Completed")
def get_closing_entry(pcv, account, balances, dimensions):
    """Build an Account Closing Balance row for one account/dimension combination.

    Unlike the GL reversal entries, closing entries keep the account's actual
    debit/credit totals as-is (no side swapping).
    """
    closing_entry = frappe._dict(
        {
            "company": pcv.company,
            "closing_date": pcv.period_end_date,
            "period_closing_voucher": pcv.name,
            "account": account,
            "account_currency": balances.account_currency,
            "debit_in_account_currency": flt(balances.debit_in_account_currency),
            "debit": flt(balances.debit),
            "credit_in_account_currency": flt(balances.credit_in_account_currency),
            "credit": flt(balances.credit),
            "is_period_closing_voucher_entry": 0,
        }
    )
    # update dimensions
    update_default_dimensions(get_dimensions(), closing_entry, dimensions)
    return closing_entry
def get_dimensions():
    """Return the ordered list of ledger dimensions: the built-in defaults
    followed by any custom accounting dimensions configured on the site."""
    from erpnext.accounts.doctype.accounting_dimension.accounting_dimension import (
        get_accounting_dimensions,
    )

    return ["cost_center", "finance_book", "project", *get_accounting_dimensions()]
def get_dimension_key(res):
    """Build a hashable key: this row's dimension values in get_dimensions() order."""
    return tuple(res.get(dimension) for dimension in get_dimensions())
def build_dimension_wise_balance_dict(gl_entries):
    """Aggregate GL entries into a nested dict: dimension-tuple -> account -> totals.

    For each dimension combination, per-account debit/credit totals are
    accumulated in both account and company currency, plus a special
    "balances" bucket holding the combination's overall net balance.
    """
    dimension_balances = frappe._dict()
    for x in gl_entries:
        dimension_key = get_dimension_key(x)
        # Seed the per-account accumulator on first sight of this account.
        # NOTE(review): account_currency is taken from the first entry seen;
        # assumes all entries of an account share one currency — confirm.
        dimension_balances.setdefault(dimension_key, frappe._dict()).setdefault(
            x.account,
            frappe._dict(
                {
                    "debit_in_account_currency": 0,
                    "credit_in_account_currency": 0,
                    "debit": 0,
                    "credit": 0,
                    "account_currency": x.account_currency,
                }
            ),
        )
        dimension_balances[dimension_key][x.account].debit_in_account_currency += flt(
            x.debit_in_account_currency
        )
        dimension_balances[dimension_key][x.account].credit_in_account_currency += flt(
            x.credit_in_account_currency
        )
        dimension_balances[dimension_key][x.account].debit += flt(x.debit)
        dimension_balances[dimension_key][x.account].credit += flt(x.credit)

        # dimension-wise total balances
        dimension_balances[dimension_key].setdefault(
            "balances",
            frappe._dict(
                {
                    "balance_in_account_currency": 0,
                    "balance_in_company_currency": 0,
                }
            ),
        )
        balance_in_account_currency = flt(x.debit_in_account_currency) - flt(x.credit_in_account_currency)
        balance_in_company_currency = flt(x.debit) - flt(x.credit)
        dimension_balances[dimension_key][
            "balances"
        ].balance_in_account_currency += balance_in_account_currency
        dimension_balances[dimension_key][
            "balances"
        ].balance_in_company_currency += balance_in_company_currency
    return dimension_balances
def process_individual_date(docname: str, date, report_type, parentfield):
    """Background job: summarize one posting date's GL entries for one report type.

    Aggregates debit/credit per account and dimension combination for `date`,
    stores the result as JSON on the matching detail row, marks it Completed,
    and chains the next date via `schedule_next_date`.
    """
    # Bail out if the row was paused/cancelled between enqueue and execution.
    current_date_status = frappe.db.get_value(
        "Process Period Closing Voucher Detail",
        {"processing_date": date, "report_type": report_type, "parentfield": parentfield},
        "status",
    )
    if current_date_status != "Running":
        return

    pcv_name = frappe.db.get_value("Process Period Closing Voucher", docname, "parent_pcv")
    company = frappe.db.get_value("Period Closing Voucher", pcv_name, "company")
    dimensions = get_dimensions()
    accounts = frappe.db.get_all(
        "Account", filters={"company": company, "report_type": report_type}, pluck="name"
    )

    # summarize
    gle = qb.DocType("GL Entry")
    query = qb.from_(gle).select(gle.account)
    for dim in dimensions:
        query = query.select(gle[dim])
    # NOTE(review): account_currency is selected but not in the GROUP BY —
    # relies on the DB tolerating non-grouped columns; confirm against
    # strict-mode SQL settings.
    query = query.select(
        Sum(gle.debit).as_("debit"),
        Sum(gle.credit).as_("credit"),
        Sum(gle.debit_in_account_currency).as_("debit_in_account_currency"),
        Sum(gle.credit_in_account_currency).as_("credit_in_account_currency"),
        gle.account_currency,
    ).where(
        (gle.company.eq(company))
        & (gle.is_cancelled.eq(0))
        & (gle.posting_date.eq(date))
        & (gle.account.isin(accounts))
    )
    if parentfield == "z_opening_balances":
        # First-PCV runs additionally consider only opening entries here.
        query = query.where(gle.is_opening.eq("Yes"))
    query = query.groupby(gle.account)
    for dim in dimensions:
        query = query.groupby(gle[dim])
    res = query.run(as_dict=True)

    # save results
    frappe.db.set_value(
        "Process Period Closing Voucher Detail",
        {"processing_date": date, "parent": docname, "report_type": report_type, "parentfield": parentfield},
        "closing_balance",
        frappe.json.dumps(res),
    )
    frappe.db.set_value(
        "Process Period Closing Voucher Detail",
        {"processing_date": date, "parent": docname, "report_type": report_type, "parentfield": parentfield},
        "status",
        "Completed",
    )
    # chain call
    schedule_next_date(docname)
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/accounts/doctype/process_period_closing_voucher/process_period_closing_voucher.py",
"license": "GNU General Public License v3.0",
"lines": 467,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
frappe/erpnext:erpnext/accounts/doctype/process_period_closing_voucher/test_process_period_closing_voucher.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
# import frappe
from frappe.tests import IntegrationTestCase
# On IntegrationTestCase, the doctype test records and all
# link-field test record dependencies are recursively loaded
# Use these module variables to add/remove to/from that list
EXTRA_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
IGNORE_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
class IntegrationTestProcessPeriodClosingVoucher(IntegrationTestCase):
    """Integration-test container for the Process Period Closing Voucher doctype.

    Add tests here that exercise interactions between multiple components.
    """
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/accounts/doctype/process_period_closing_voucher/test_process_period_closing_voucher.py",
"license": "GNU General Public License v3.0",
"lines": 15,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
frappe/erpnext:erpnext/accounts/doctype/process_period_closing_voucher_detail/process_period_closing_voucher_detail.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class ProcessPeriodClosingVoucherDetail(Document):
    """Child-table row tracking the processing of one posting date for one report type.

    `closing_balance` stores the summarized GL totals for the date as JSON;
    `status` tracks the row through the background-processing lifecycle.
    """

    # begin: auto-generated types
    # This code is auto-generated. Do not modify anything in this block.

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from frappe.types import DF

        closing_balance: DF.JSON | None
        parent: DF.Data
        parentfield: DF.Data
        parenttype: DF.Data
        processing_date: DF.Date | None
        report_type: DF.Literal["Profit and Loss", "Balance Sheet"]
        status: DF.Literal["Queued", "Running", "Paused", "Completed", "Cancelled"]
    # end: auto-generated types

    pass
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/accounts/doctype/process_period_closing_voucher_detail/process_period_closing_voucher_detail.py",
"license": "GNU General Public License v3.0",
"lines": 19,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
frappe/erpnext:erpnext/accounts/doctype/tax_withholding_entry/tax_withholding_entry.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from collections import defaultdict, deque
from math import inf
import frappe
from frappe import _
from frappe.model.document import Document
from frappe.query_builder.functions import IfNull, Sum
from frappe.utils import flt
import erpnext
from erpnext.accounts.utils import get_advance_payment_doctypes
DOCTYPE = "Tax Withholding Entry"
class TaxWithholdingEntry(Document):
    """Ledger-style child row pairing a taxable document with its withholding document.

    A row is "Settled" when both sides are linked; rows with only a taxable
    side are "Under Withheld", only a withholding side "Over Withheld".
    Settled rows offset older incomplete rows via
    `_process_tax_withholding_adjustments`.
    """

    # begin: auto-generated types
    # This code is auto-generated. Do not modify anything in this block.

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from frappe.types import DF

        company: DF.Link | None
        conversion_rate: DF.Float
        created_by_migration: DF.Check
        currency: DF.Link | None
        lower_deduction_certificate: DF.Link | None
        parent: DF.Data
        parentfield: DF.Data
        parenttype: DF.Data
        party: DF.DynamicLink | None
        party_type: DF.Link | None
        status: DF.Literal["", "Settled", "Under Withheld", "Over Withheld", "Duplicate", "Cancelled"]
        tax_id: DF.Data | None
        tax_rate: DF.Percent
        tax_withholding_category: DF.Link | None
        tax_withholding_group: DF.Link | None
        taxable_amount: DF.Currency
        taxable_date: DF.Date | None
        taxable_doctype: DF.Link | None
        taxable_name: DF.DynamicLink | None
        under_withheld_reason: DF.Literal["", "Threshold Exemption", "Lower Deduction Certificate"]
        withholding_amount: DF.Currency
        withholding_date: DF.Date | None
        withholding_doctype: DF.Link | None
        withholding_name: DF.DynamicLink | None
    # end: auto-generated types

    def set_status(self, status=None):
        """Set self.status, deriving it from the current link state when not given."""
        if not status:
            status = self.get_status()
        self.status = status

    def get_status(self):
        """Derive the status from docstatus and the taxable/withholding links."""
        if self.docstatus == 2:
            return "Cancelled"

        # Reasons are genuine allowed reasons for under deduction.
        # Hence if a reason is provided, consider it as matched.
        if not self.withholding_name and not self.under_withheld_reason:
            return "Under Withheld"
        elif not self.taxable_name:
            return "Over Withheld"
        else:
            return "Settled"

    def validate_adjustments(self):
        """Disallow a row whose taxable AND withholding links both point away from the parent."""
        if self.is_taxable_different and self.is_withholding_different:
            frappe.throw(
                _(
                    "Row #{0}: Cannot create entry with different taxable AND withholding document links."
                ).format(self.idx)
            )

    def validate_tax_withheld_amount(self):
        """Check withholding_amount against taxable_amount * tax_rate, within a tolerance."""
        # Skip rows without a withholding link, or with an allowed under-withholding reason.
        if not self.withholding_name or self.under_withheld_reason:
            return

        precision = self.precision("withholding_amount")
        # small absolute tolerance for rounding differences
        allowance = 0.5
        tax_to_withheld = flt(self.taxable_amount * (self.tax_rate / 100), precision)
        diff = abs(tax_to_withheld - self.withholding_amount)
        if diff > allowance:
            frappe.throw(
                _("Row #{0}: Withholding Amount {1} does not match calculated amount {2}.").format(
                    self.idx, self.withholding_amount, tax_to_withheld
                )
            )

    @property
    def is_taxable_different(self):
        # True when the taxable link points at a document other than the parent.
        return self.taxable_doctype != self.parenttype or self.taxable_name != self.parent

    @property
    def is_withholding_different(self):
        # True when the withholding link points at a document other than the parent.
        return self.withholding_doctype != self.parenttype or self.withholding_name != self.parent

    def _process_tax_withholding_adjustments(self):
        """Offset this settled entry against older under/over withheld entries."""
        if self.status != "Settled":
            return

        # adjust old taxable (under-withheld)
        if self.is_taxable_different:
            self._adjust_against_old_entries(field_type="taxable")
        # adjust old withholding (over-withheld)
        elif self.is_withholding_different:
            self._adjust_against_old_entries(field_type="withholding")

    def _adjust_against_old_entries(self, field_type: str) -> set:
        """
        Find old entries that need adjustment and update them.
        The logic reads like: "Match up old incomplete entries with this new entry"
        Args:
            field_type: Either "taxable" or "withholding" - determines which fields to use
        """
        doctype_field = f"{field_type}_doctype"
        docname_field = f"{field_type}_name"
        amount_field = f"{field_type}_amount"
        status_to_find = "Under Withheld" if field_type == "taxable" else "Over Withheld"

        # With a zero rate, withholding_amount is always 0; match on taxable instead.
        if not self.tax_rate:
            amount_field = "taxable_amount"

        # old entries
        old_entries = frappe.get_all(
            DOCTYPE,
            filters={
                # NOTE: Allow offsetting across different categories
                # Change Filters
                "tax_withholding_category": self.tax_withholding_category,
                "status": status_to_find,
                doctype_field: self.get(doctype_field),
                docname_field: self.get(docname_field),
                "docstatus": 1,
            },
            fields="*",
        )

        value_direction = -1 if self.get(amount_field) < 0 else 1
        remaining_amount = abs(self.get(amount_field))
        docs_needing_reindex = set()
        precision = self.precision("taxable_amount")

        # update
        for old_entry_data in old_entries:
            # NOTE(review): DOCTYPE plus **row kwargs — looks unusual for
            # frappe.get_doc(doctype, name); confirm the intended call form.
            old_entry = frappe.get_doc(DOCTYPE, **old_entry_data)
            old_amount = abs(old_entry.get(amount_field))
            if old_entry.get(amount_field) * value_direction < 0:
                # sign of old entry's amount is different
                continue

            amount_we_can_match = min(old_amount, remaining_amount)
            proportion = amount_we_can_match / old_amount if old_amount else 0
            values_to_update = self._get_values_to_update(old_entry, proportion, field_type)

            if old_amount <= amount_we_can_match:
                # complete adjustment
                frappe.db.set_value(DOCTYPE, old_entry.name, values_to_update)
            else:
                # partial adjustment
                # Calculate balance values for both taxable and withholding amounts
                balance_amount = (old_amount - amount_we_can_match) * value_direction
                balance_values = self._get_balance_values_to_update(old_entry, proportion, field_type)
                balance_values[amount_field] = balance_amount
                frappe.db.set_value(DOCTYPE, old_entry.name, balance_values)

                # new entry
                # For partial adjustments, we need to proportionally adjust both taxable and withholding amounts
                values_to_update["withholding_amount"] = old_entry.withholding_amount * proportion
                values_to_update["taxable_amount"] = old_entry.taxable_amount * proportion

                # If tax rate has changed, recalculate based on new rate
                if self.tax_rate != old_entry.tax_rate:
                    if not self.tax_rate:
                        # Zero rate means no withholding
                        values_to_update["withholding_amount"] = 0
                    else:
                        values_to_update["taxable_amount"] = flt(
                            values_to_update["withholding_amount"] * 100 / self.tax_rate,
                            precision,
                        )

                new_entry = frappe.copy_doc(old_entry)
                new_entry.update(values_to_update)
                new_entry.insert()
                docs_needing_reindex.add((old_entry.parenttype, old_entry.parent))

            remaining_amount -= amount_we_can_match
            if remaining_amount <= 0:
                break
        else:
            # for-else: loop exhausted without a break, i.e. amount not fully matched.
            frappe.throw(
                _("Row #{0}: Could not find enough {1} entries to match. Remaining amount: {2}").format(
                    self.idx, status_to_find, remaining_amount
                )
            )

        _reset_idx(docs_needing_reindex)

    def _get_values_to_update(self, old_entry, proportion: float, field_type: str):
        """Values that complete the opposite side of an old entry from this entry."""
        field_to_update = "withholding" if field_type == "taxable" else "taxable"
        values = {
            f"{field_to_update}_amount": self.get(f"{field_to_update}_amount") * proportion,
            f"{field_to_update}_doctype": self.get(f"{field_to_update}_doctype"),
            f"{field_to_update}_name": self.get(f"{field_to_update}_name"),
            f"{field_to_update}_date": self.get(f"{field_to_update}_date"),
            "tax_rate": self.tax_rate,
            "status": "Duplicate",
            "under_withheld_reason": None,
        }
        if field_to_update == "taxable":
            values.update(
                currency=self.currency,
                conversion_rate=self.conversion_rate,
                under_withheld_reason=self.under_withheld_reason,
                lower_deduction_certificate=self.lower_deduction_certificate,
            )
        # NOTE: Allow offsetting across different categories
        # Update Tax Withholding Category values
        return values

    def _get_balance_values_to_update(self, old_entry, proportion: float, field_type: str):
        """Calculate the balance amounts for both taxable and withholding fields for partial adjustments"""
        field_to_update = "withholding" if field_type == "taxable" else "taxable"
        field = f"{field_to_update}_amount"
        # remainder left on the old entry after matching `proportion` of it
        proportion = 1 - proportion
        amount = flt(old_entry.get(field) * proportion, self.precision(field))
        return {field: amount}

    # CANCEL
    def _clear_old_references(self):
        """On cancel: revert sibling entries that this entry had completed."""
        if self.status not in ["Settled", "Duplicate"]:
            return

        filters = {
            "tax_withholding_category": self.tax_withholding_category,
            "taxable_doctype": self.taxable_doctype,
            "taxable_name": self.taxable_name,
            "withholding_doctype": self.withholding_doctype,
            "withholding_name": self.withholding_name,
            "name": ["!=", self.name],
            "docstatus": 1,
        }
        if self.is_taxable_different:
            # Strip the withholding side from the matched entries; they revert
            # to Under Withheld.
            frappe.db.set_value(
                DOCTYPE,
                filters,
                {
                    "withholding_name": "",
                    "withholding_doctype": "",
                    "withholding_amount": 0,
                    "withholding_date": None,
                    "under_withheld_reason": "",
                    "lower_deduction_certificate": "",
                    "status": "Under Withheld",
                },
            )
        elif self.is_withholding_different:
            if self.taxable_amount < 0:
                # Special handling for return invoice cancellation
                self._handle_return_invoice_cancellation(filters)
            else:
                # Strip the taxable side; matched entries revert to Over Withheld.
                frappe.db.set_value(
                    DOCTYPE,
                    filters,
                    {
                        "taxable_name": "",
                        "taxable_doctype": "",
                        "taxable_date": None,
                        "status": "Over Withheld",
                    },
                )

    def _handle_return_invoice_cancellation(self, filters):
        """Re-point matched entries at their withholding document and spawn
        Under Withheld remainders when a return invoice is cancelled."""
        # Get old entries that need adjustment - inspired by _adjust_against_old_entries
        old_entries = frappe.get_all(
            DOCTYPE,
            filters=filters,
            fields="*",
        )
        docs_needing_reindex = set()
        for entry in old_entries:
            frappe.db.set_value(
                DOCTYPE,
                entry.name,
                {
                    "taxable_doctype": entry.withholding_doctype,
                    "taxable_name": entry.withholding_name,
                    "taxable_date": entry.withholding_date,
                },
            )
            # cases where withholding amount is zero
            if entry.withholding_amount == 0:
                continue

            # NOTE(review): DOCTYPE plus **row kwargs — same unusual
            # frappe.get_doc call form as in _adjust_against_old_entries; confirm.
            new_entry = frappe.copy_doc(frappe.get_doc(DOCTYPE, **entry))
            values_to_update = {
                "taxable_amount": abs(entry.taxable_amount),
                "withholding_amount": 0,
                "status": "Under Withheld",
                "under_withheld_reason": "",
                "taxable_doctype": entry.withholding_doctype,
                "taxable_name": entry.withholding_name,
                "taxable_date": entry.withholding_date,
                "withholding_doctype": "",
                "withholding_name": "",
                "withholding_date": None,
            }
            new_entry.update(values_to_update)
            new_entry.insert()
            docs_needing_reindex.add((entry.parenttype, entry.parent))

        _reset_idx(docs_needing_reindex)
from erpnext.accounts.doctype.tax_withholding_category.tax_withholding_category import (
TaxWithholdingDetails,
get_tax_id_for_party,
)
class TaxWithholdingController:
def __init__(self, doc):
    """Bind the controller to a transaction document (e.g. an invoice)."""
    self.doc = doc
    # withholding entries generated/collected during calculate()
    self.entries = []
    self.precision = self.doc.precision("withholding_amount", "tax_withholding_entries")
def _get_category_details(self):
    """Get tax withholding category details for the current document"""
    category_names = self._get_category_names()
    return TaxWithholdingDetails(
        category_names,
        self.doc.tax_withholding_group,
        self.doc.posting_date,
        # NOTE(review): party_type/party are attributes set elsewhere on the
        # controller (not visible here) — confirm they are populated before
        # this is called.
        self.party_type,
        self.party,
        self.doc.company,
    ).get()
def _get_category_names(self):
    """Return the distinct withholding categories of items flagged with apply_tds."""
    return {
        row.tax_withholding_category
        for row in self.doc.items
        if row.tax_withholding_category and row.apply_tds
    }
def calculate(self):
    """Entry point: compute taxable amounts and (re)generate withholding entries.

    When the user has overridden the entries manually, generation is skipped
    and only processing of the existing entries runs.
    """
    self.category_details = self._get_category_details()
    self._update_taxable_amounts()
    if not self.doc.override_tax_withholding_entries:
        self._generate_withholding_entries()
    self._process_withholding_entries()
def _generate_withholding_entries(self):
    """Rebuild the child table of withholding entries from scratch."""
    self.doc.tax_withholding_entries = []
    self._evaluate_thresholds()
    for category in self.category_details.values():
        self.entries += self._create_entries_for_category(category)
    self.doc.extend("tax_withholding_entries", self.entries)
def _create_entries_for_category(self, category):
    """Produce the withholding entries for one category, case by case."""
    entries = []
    if not category.taxable_amount:
        return entries

    # Case 1: Threshold not crossed - create under withheld entry
    if not category.threshold_crossed:
        entries.append(self._create_under_withheld_entry(category))
        category.taxable_amount = 0
        return entries

    # Case 2: Tax on excess amount - handle threshold exemption first
    if category.unused_threshold:
        entries.append(self._create_threshold_exemption_entry(category))
        if category.taxable_amount <= 0:
            return entries

    # Case 3: Process remaining amount with historical entries
    open_entries = self._get_open_entries_for_category(category)
    under_entries = open_entries["under_withheld"]
    over_entries = open_entries["over_withheld"]

    # Case 4: Adjust Under and Over Withheld Entries
    entries.extend(self._adjust_under_over_withheld(under_entries, over_entries, category))

    # Case 5: Lower Deduction Certificate processing
    if category.ldc_unutilized_amount:
        entries.extend(self._process_ldc_entries(under_entries, over_entries, category))

    # Case 6: Regular tax withholding processing
    entries.extend(self._merge_entries(under_entries, over_entries, category))
    return entries
def _create_under_withheld_entry(self, category):
    """Create an under withheld entry when threshold is not crossed"""
    entry = self._create_default_entry(category)
    entry.update(
        {
            "taxable_amount": category.taxable_amount,
            "withholding_doctype": "",
            "withholding_name": "",
            "withholding_date": "",
            "withholding_amount": 0,
        }
    )
    return entry
def _create_threshold_exemption_entry(self, category):
    """Create entry for amount below threshold (tax on excess)"""
    exempt_amount = min(category.unused_threshold, category.taxable_amount)
    # consume the exempt portion from the category's remaining taxable amount
    category.taxable_amount -= exempt_amount
    entry = self._create_default_entry(category)
    entry.update(
        {
            "taxable_amount": exempt_amount,
            "under_withheld_reason": "Threshold Exemption",
        }
    )
    return entry
def _get_open_entries_for_category(self, category):
    """Get historical under withheld and over withheld entries for processing"""
    entries = self._get_historical_entries(category)
    linked_payments = self._get_linked_payments()
    # deques: entries are consumed from both ends by the merge logic
    open_entries = {"under_withheld": deque(), "over_withheld": deque()}

    # Process historical entries
    self._categorize_historical_entries(entries, linked_payments, open_entries)

    # Add current document as under withheld
    current_entry = self._create_default_entry(category)
    current_entry.update(
        {
            "taxable_amount": category.taxable_amount,
            "withholding_doctype": "",
            "withholding_name": "",
            "withholding_date": "",
        }
    )
    # appendleft: the current document is matched first
    open_entries["under_withheld"].appendleft(current_entry)
    return open_entries
def _categorize_historical_entries(self, entries, linked_payments, open_entries):
    """Categorize historical entries into under withheld and over withheld"""
    for entry in entries:
        if entry.status == "Under Withheld":
            open_entries["under_withheld"].append(entry)
            continue

        # Handle over withheld entries
        key = (entry.withholding_doctype, entry.withholding_name)
        if key in linked_payments:
            # Calculate proportion for linked payments
            # TODO: whether it should be entry.taxable_amount only or do we need proportion
            total_value = entry.taxable_amount - entry.withholding_amount
            if not total_value:
                continue
            proportion = linked_payments[key] / total_value
            # for handling rounding adjustments
            proportion = min(proportion, 1)
            entry.withholding_amount *= proportion
            # appendleft: linked-payment entries are consumed first
            open_entries["over_withheld"].appendleft(entry)
            continue

        # only linked payment entries are allowed
        if entry.withholding_doctype in ["Payment Entry", "Journal Entry"]:
            continue

        open_entries["over_withheld"].append(entry)
def _process_ldc_entries(self, under_entries, over_entries, category):
    """Merge entries at the Lower Deduction Certificate rate, capped by the
    certificate's unutilized amount."""
    ldc_config = {
        "under_withheld_reason": "Lower Deduction Certificate",
        "lower_deduction_certificate": category.ldc_certificate,
    }
    return self._merge_entries(
        under_entries,
        over_entries,
        category,
        tax_rate=category.ldc_rate,
        constraint=category.ldc_unutilized_amount,
        default_obj=ldc_config,
    )
def _update_taxable_amounts(self):
    """Accumulate per-category taxable amounts from the document's items."""
    # Nothing to withhold on a zero-value document.
    if not self.doc.base_net_total:
        return
    self._update_amount_for_item()
def _update_amount_for_item(self):
    """Compute each item's taxable base and add it to its category's total."""
    precision = self.doc.precision("base_net_rate", "items")
    self._update_item_wise_tax_amount()
    for item in self.doc.get("items"):
        if not (item.apply_tds and item.tax_withholding_category):
            continue

        category = self.category_details.get(item.tax_withholding_category)
        if category.tax_deduction_basis != "Gross Total":
            # Net basis: item amount excluding taxes
            taxable_amount = item.base_net_amount
        else:
            # Gross basis: item amount including its share of taxes
            taxable_amount = item.base_net_amount + item._item_total_tax_amount

        taxable_amount = flt(taxable_amount, precision)
        item._base_tax_withholding_net_total = taxable_amount
        category["taxable_amount"] += flt(taxable_amount, precision)
def _update_item_wise_tax_amount(self):
    """Compute each item's total non-withholding tax into
    item._item_total_tax_amount (used for the Gross Total basis)."""
    for item in self.doc.get("items"):
        item._item_total_tax_amount = 0
    precision = self.doc.precision("tax_amount", "taxes")
    for row in self.doc.get("_item_wise_tax_details", []):
        item = row.item
        if not (item.apply_tds and item.tax_withholding_category):
            continue
        if row.tax.is_tax_withholding_account:
            # Skip withholding rows themselves; only other taxes feed the gross base
            continue
        item._item_total_tax_amount = flt(item._item_total_tax_amount + row.amount, precision)
def _evaluate_thresholds(self):
    """
    Evaluate if thresholds are crossed for each category
    Thresholds are crossed when:
    - Single transaction threshold is exceeded
    - Cumulative threshold is exceeded
    - Threshold check is manually overridden
    """
    for category in self.category_details.values():
        category.threshold_crossed = self._is_threshold_crossed_for_category(category)
        category.unused_threshold = self._get_unused_threshold(category)
def _is_threshold_crossed_for_category(self, category):
    """Return True when withholding must be applied for *category* on this document."""
    # Manual override - always cross threshold
    if self.doc.ignore_tax_withholding_threshold:
        return True
    # Transaction threshold only mode
    if category.disable_cumulative_threshold:
        return category.taxable_amount >= category.single_threshold
    # No cumulative threshold set
    if category.cumulative_threshold == 0:
        return True
    # Tax on excess amount - always process
    if category.tax_on_excess_amount:
        return True
    # Standard cumulative/transaction threshold check
    return self._check_historical_threshold_status(category)
def _check_historical_threshold_status(self, category):
    """Decide threshold crossing from past settled/under-withheld totals."""
    entry = frappe.qb.DocType(DOCTYPE)
    # result maps status -> summed taxable_amount (from the grouped base query)
    result = frappe._dict(
        self._base_threshold_query(category).where(entry.status.isin(["Settled", "Under Withheld"])).run()
    )
    # NOTE: Once deducted, always deducted. Not checking cumulative threshold again purposefully.
    # conservative approach to avoid tax disputes as it can have conflicting views
    # https://www.taxtmi.com/forum/issue?id=118627
    if result.get("Settled", 0) > 0:
        return True
    # Check remaining threshold
    remaining_threshold = category.cumulative_threshold - result.get("Under Withheld", 0)
    if not category.disable_transaction_threshold:
        remaining_threshold = min(remaining_threshold, category.single_threshold)
    return category.taxable_amount >= remaining_threshold
def _get_unused_threshold(self, category):
    """Calculate unused threshold amount for tax on excess scenarios"""
    if not category.tax_on_excess_amount:
        return 0
    entry = frappe.qb.DocType(DOCTYPE)
    result = frappe._dict(
        self._base_threshold_query(category)
        .where(IfNull(entry.under_withheld_reason, "") == "Threshold Exemption")
        .run()
    )
    # NOTE(review): can go negative when exempted settled amounts exceed the
    # cumulative threshold — confirm whether clamping to 0 is intended.
    return category.cumulative_threshold - result.get("Settled", 0)
def _base_threshold_query(self, category):
    """Base query: summed taxable_amount per status for this party/category/company."""
    entry = frappe.qb.DocType(DOCTYPE)
    query = (
        frappe.qb.from_(entry)
        .select(entry.status, Sum(entry.taxable_amount).as_("taxable_amount"))
        .where(entry.party_type == self.party_type)
        .where(entry.tax_withholding_category == category.name)
        .where(entry.company == self.doc.company)
        .where(entry.docstatus == 1)
        .groupby(entry.status)
    )
    # NOTE: This can be a configurable option
    # To check if filter by tax_id is needed
    # Match by tax id when available, else by party name
    # (presumably so multiple party records sharing one tax id aggregate — verify)
    tax_id = get_tax_id_for_party(self.party_type, self.party)
    query = query.where(entry.tax_id == tax_id) if tax_id else query.where(entry.party == self.party)
    return query
def _get_historical_entries(self, category):
    """Fetch submitted Under Withheld entries within the category's date window,
    plus all Over Withheld entries, for this party/category/company."""
    entry = frappe.qb.DocType(DOCTYPE)
    base_query = (
        frappe.qb.from_(entry)
        .select("*")
        .where(entry.tax_withholding_category == category.name)
        .where(entry.party_type == self.party_type)
        .where(entry.party == self.party)
        .where(entry.company == self.doc.company)
        .where(entry.docstatus == 1)
    )
    # Over-withheld entries are not date-bounded; under-withheld ones are
    over_withheld_query = base_query.where(entry.status == "Over Withheld")
    return (
        base_query.where(entry.status == "Under Withheld")
        .where(entry.taxable_date.between(category.from_date, category.to_date))
        .union(over_withheld_query)
        .run(as_dict=True)
    )
def _get_linked_payments(self):
    """Map (reference_type, reference_name) of each advance row to its
    allocated amount in company currency.

    Fix: accumulate amounts when several advance rows reference the same
    voucher — previously the last row silently overwrote earlier allocations,
    understating the linked amount used for over-withheld proration.
    """
    references = frappe._dict()
    for ref in self.doc.advances:
        key = (ref.reference_type, ref.reference_name)
        amount = ref.allocated_amount * self.doc.conversion_rate
        references[key] = references.get(key, 0) + amount
    return references
def _create_default_entry(self, category):
    """Return a skeleton withholding entry for *category*, with both the
    taxable and withholding sides pointing at the current document."""
    return frappe._dict(
        {
            "company": self.doc.company,
            "party_type": self.party_type,
            "party": self.party,
            "tax_withholding_category": category.name,
            "tax_withholding_group": category.tax_withholding_group,
            "tax_rate": category.tax_rate,
            "conversion_rate": self.get_conversion_rate(),
            "taxable_doctype": self.doc.doctype,
            "taxable_name": self.doc.name,
            "taxable_date": self.doc.posting_date,
            "taxable_amount": 0,
            "withholding_doctype": self.doc.doctype,
            "withholding_name": self.doc.name,
            "withholding_date": self.doc.posting_date,
            "withholding_amount": 0,  # Will be computed later
        }
    )
def update_tax_rows(self):
    """Update tax rows in the parent document based on withholding entries"""
    account_amount_map = self._calculate_account_wise_amount()
    category_withholding_map = self._get_category_withholding_map()
    existing_taxes = {row.account_head: row for row in self.doc.taxes if row.is_tax_withholding_account}
    precision = self.doc.precision("tax_amount", "taxes")
    conversion_rate = self.get_conversion_rate()
    # Suppliers: tax is deducted (TDS); customers: tax is added (TCS)
    add_deduct_tax = "Deduct"
    if self.party_type == "Customer":
        add_deduct_tax = "Add"
    for account_head, base_amount in account_amount_map.items():
        # base_amount is in company currency; convert to document currency
        tax_amount = flt(base_amount / conversion_rate, precision)
        if not tax_amount:
            continue
        # Update existing tax row or create new one
        if existing_tax := existing_taxes.get(account_head):
            existing_tax.tax_amount = tax_amount
            existing_tax.dont_recompute_tax = 1
            tax_row = existing_tax
            for_update = True
        else:
            tax_row = self._create_tax_row(account_head, tax_amount)
            for_update = False
        tax_row.add_deduct_tax = add_deduct_tax
        # Set item-wise tax breakup for this tax row
        self._set_item_wise_tax_for_tds(
            tax_row, account_head, category_withholding_map, for_update=for_update
        )
    self._remove_zero_tax_rows()
    self.calculate_taxes_and_totals()
def _create_tax_row(self, account_head, tax_amount):
    """Append a new withholding tax row ('Actual' charge) to the document's taxes."""
    cost_center = self.doc.cost_center or erpnext.get_default_cost_center(self.doc.company)
    return self.doc.append(
        "taxes",
        {
            "is_tax_withholding_account": 1,
            "category": "Total",
            "charge_type": "Actual",
            "account_head": account_head,
            "description": account_head,
            "cost_center": cost_center,
            "tax_amount": tax_amount,
            # Prevent the generic tax engine from overwriting this amount
            "dont_recompute_tax": 1,
        },
    )
def _set_item_wise_tax_for_tds(self, tax_row, account_head, category_withholding_map, for_update=False):
    """Distribute the withheld amount for *account_head* across items,
    proportionally to each item's effective taxable base."""
    # Get all categories for this account (multiple categories can share same account)
    categories_for_account = [
        cat for cat in self.category_details.values() if cat.account_head == account_head
    ]
    if not categories_for_account:
        return
    if not hasattr(self.doc, "_item_wise_tax_details"):
        self.doc._item_wise_tax_details = []
    if for_update:
        # Drop previously computed rows for this tax row before re-adding
        self.doc._item_wise_tax_details = [
            d for d in self.doc._item_wise_tax_details if d.get("tax") != tax_row
        ]
    items = self.doc.get("items") or []
    # Total taxable per category — denominator for threshold proration
    category_totals = {}
    for item in items:
        if item.apply_tds and item.tax_withholding_category:
            item_taxable = item.get("_base_tax_withholding_net_total", 0)
            category_totals[item.tax_withholding_category] = (
                category_totals.get(item.tax_withholding_category, 0) + item_taxable
            )
    precision = self.doc.precision("tax_amount", "taxes")
    for item in items:
        if not (item.apply_tds and item.tax_withholding_category):
            continue
        category = self.category_details.get(item.tax_withholding_category)
        if not category or category.account_head != account_head:
            continue
        item_base_taxable = item.get("_base_tax_withholding_net_total") or 0
        if not category.taxable_amount or not item_base_taxable:
            continue
        total_taxable_amount = category_totals.get(category.name, 0)
        if category.unused_threshold and total_taxable_amount:
            # Proportionately deduct unused threshold from item's base taxable
            item_threshold_deduction = (
                item_base_taxable / total_taxable_amount
            ) * category.unused_threshold
            item_effective_taxable = max(0, item_base_taxable - item_threshold_deduction)
        else:
            item_effective_taxable = item_base_taxable
        withholding_amount = category_withholding_map.get(category.name, 0)
        if withholding_amount and category.taxable_amount:
            item_proportion = item_effective_taxable / category.taxable_amount
            item_tax_amount = flt(withholding_amount * item_proportion, precision)
        else:
            item_tax_amount = 0
        # Deducted taxes are stored as negative item amounts
        multiplier = -1 if tax_row.add_deduct_tax == "Deduct" else 1
        self.doc._item_wise_tax_details.append(
            frappe._dict(
                item=item,
                tax=tax_row,
                rate=category.tax_rate,
                amount=item_tax_amount * multiplier,
                taxable_amount=item_base_taxable,
            )
        )
def _get_category_withholding_map(self):
category_withholding_map = defaultdict(float)
for entry in self.doc.tax_withholding_entries:
if entry.withholding_name != self.doc.name:
continue
category_withholding_map[entry.tax_withholding_category] += entry.withholding_amount
return category_withholding_map
def _calculate_account_wise_amount(self):
account_amount_map = defaultdict(float)
for entry in self.doc.tax_withholding_entries:
if entry.withholding_name != self.doc.name:
continue
category = self.category_details.get(entry.tax_withholding_category)
account_amount_map[category.account_head] += entry.withholding_amount
return account_amount_map
def _remove_zero_tax_rows(self):
self.doc.taxes = [
row for row in self.doc.taxes if not (row.is_tax_withholding_account and not row.tax_amount)
]
def _adjust_under_over_withheld(
    self,
    under_entries: deque,
    over_entries: deque,
    category: dict,
):
    """
    Merge under withheld and over withheld entries based on the tax rate and constraint.
    If only under and over entries are available, they will be processed against current document.
    """
    if not (under_entries and over_entries):
        return []
    merged_entries = []
    while under_entries and over_entries:
        under = under_entries[0]
        over = over_entries[0]
        # The over-withheld entry's rate fixes how much taxable value a
        # given withheld amount can settle
        tax_rate = over.tax_rate
        # Calculate tax amount for this taxable amount
        tax_amount = self.compute_withheld_amount(
            under.taxable_amount,
            tax_rate,
            round_off_tax_amount=category.round_off_tax_amount,
        )
        tax_amount = flt(min(tax_amount, over.withholding_amount), self.precision)
        if tax_rate == 0:
            # Zero rate: settle by taxable amount directly (nothing to back-compute)
            taxable_amount = min(under.taxable_amount, over.taxable_amount)
        else:
            taxable_amount = flt(100 / tax_rate * tax_amount, self.precision)
        # Create merged entry
        merged_entry = under.copy()
        merged_entry.update(
            {
                "taxable_amount": taxable_amount,
                "withholding_amount": tax_amount,
                "withholding_doctype": over.withholding_doctype,
                "withholding_name": over.withholding_name,
                "withholding_date": over.withholding_date,
                "under_withheld_reason": over.under_withheld_reason,
                "tax_rate": tax_rate,
                "lower_deduction_certificate": over.lower_deduction_certificate,
            }
        )
        # Consolidate entries by document combination
        if self._should_include_entry(merged_entry):
            merged_entries.append(merged_entry)
        # Consume the matched amounts from both queue heads
        under.taxable_amount -= taxable_amount
        over.withholding_amount -= tax_amount
        if flt(under.taxable_amount, self.precision) <= 0:
            under_entries.popleft()
        if flt(over.withholding_amount, self.precision) <= 0:
            over_entries.popleft()
    return merged_entries
def _merge_entries(
    self,
    under_entries: deque,
    over_entries: deque,
    category: dict,
    tax_rate: float | None = None,
    constraint: float = inf,
    default_obj: dict | None = None,
):
    """
    Merge under withheld and over withheld entries based on the tax rate and constraint.
    If only under and over entries are available, they will be processed against current document.
    """
    merged_entries = []
    # Nothing to do when both queues are empty or the constraint is exhausted
    if not ((under_entries or over_entries) and constraint > 0):
        return merged_entries
    if tax_rate is None:
        tax_rate = category.tax_rate
    # Process remaining under entries
    constraint = self._process_under_withheld_entries(
        under_entries, category, tax_rate, constraint, default_obj, merged_entries
    )
    # Process remaining over entries
    self._process_over_withheld_entries(
        over_entries, category, tax_rate, constraint, default_obj, merged_entries
    )
    return merged_entries
def _process_under_withheld_entries(
    self, under_entries, category, tax_rate, constraint, default_obj, merged_entries
):
    """
    Process remaining Under Withheld Entries - adjust against current document
    """
    while under_entries and constraint > 0:
        entry = under_entries[0]
        # Track the sign so negative (return/credit) amounts are handled symmetrically
        value_direction = -1 if entry.taxable_amount < 0 else 1
        amount_to_process = min(entry.taxable_amount, constraint)
        if amount_to_process * value_direction <= 0:
            break
        # Create base entry and calculate withholding amount
        merged_entry = self._create_base_entry(entry, category, tax_rate, default_obj)
        merged_entry.update(
            {
                "taxable_amount": flt(amount_to_process, self.precision),
                "withholding_amount": self.compute_withheld_amount(
                    amount_to_process,
                    tax_rate,
                    round_off_tax_amount=category.round_off_tax_amount,
                ),
                # The current document performs the withholding
                "withholding_doctype": self.doc.doctype,
                "withholding_name": self.doc.name,
                "withholding_date": self.doc.posting_date,
            }
        )
        # Always include under entries
        merged_entries.append(merged_entry)
        # Update entry amounts
        entry.taxable_amount -= amount_to_process
        if flt(entry.taxable_amount * value_direction, self.precision) <= 0:
            under_entries.popleft()
        # Update constraint
        constraint -= amount_to_process
    return constraint
def _process_over_withheld_entries(
    self, over_entries, category, tax_rate, constraint, default_obj, merged_entries
):
    """
    Process remaining Over Withheld Entries - adjust existing over-withheld amounts
    """
    while over_entries and constraint > 0:
        entry = over_entries[0]
        # Track the sign so negative amounts are handled symmetrically
        value_direction = -1 if entry.taxable_amount < 0 else 1
        amount_to_process = min(entry.taxable_amount, constraint)
        if amount_to_process * value_direction <= 0:
            break
        # Create base entry and calculate withholding amount
        merged_entry = self._create_base_entry(entry, category, tax_rate, default_obj)
        merged_entry.update(
            {
                "taxable_amount": flt(amount_to_process, self.precision),
                "withholding_amount": self.compute_withheld_amount(
                    amount_to_process,
                    tax_rate,
                    round_off_tax_amount=category.round_off_tax_amount,
                ),
                # Withholding side stays on the original (over-withholding) document
                "withholding_doctype": entry.withholding_doctype,
                "withholding_name": entry.withholding_name,
                "withholding_date": entry.withholding_date,
                # Taxable side is cleared; not tied to a specific taxable document
                "taxable_doctype": "",
                "taxable_name": "",
                "taxable_date": "",
                "conversion_rate": self.get_conversion_rate(),
            }
        )
        # Only include over entries related to current document
        if self._should_include_entry(merged_entry):
            merged_entries.append(merged_entry)
        # Update entry amounts
        entry.taxable_amount -= amount_to_process
        if flt(entry.taxable_amount * value_direction, self.precision) <= 0:
            over_entries.popleft()
        # Update constraint
        constraint -= amount_to_process
    return constraint
def _create_base_entry(self, source_entry, category, tax_rate, default_obj):
entry = {}
if default_obj:
entry.update(default_obj)
entry.update(
{
"taxable_doctype": source_entry.taxable_doctype,
"taxable_name": source_entry.taxable_name,
"taxable_date": source_entry.taxable_date,
"tax_withholding_category": category.name,
"tax_rate": tax_rate,
"party_type": self.party_type,
"party": self.party,
"company": self.doc.company,
}
)
return entry
def _should_include_entry(self, entry):
return entry.get("taxable_name") == self.doc.name or entry.get("withholding_name") == self.doc.name
def compute_withheld_amount(self, taxable_amount, tax_rate, round_off_tax_amount=False):
    """Return *tax_rate* percent of *taxable_amount*, rounded to whole
    units when requested, else to the controller's precision."""
    withheld = taxable_amount * tax_rate / 100
    digits = 0 if round_off_tax_amount else self.precision
    return flt(withheld, digits)
def _process_withholding_entries(self):
    """Final processing - update tax rows and validate"""
    self.update_tax_rows()
    for entry in self.doc.tax_withholding_entries:
        entry: TaxWithholdingEntry  # annotation only, for type-checkers/IDE
        entry.set_status(entry.status)
        entry.validate_adjustments()
        entry.validate_tax_withheld_amount()
def on_submit(self):
    """Apply each entry's withholding adjustments when the document is submitted."""
    for entry in self.doc.tax_withholding_entries:
        entry: TaxWithholdingEntry  # annotation only, for type-checkers/IDE
        entry._process_tax_withholding_adjustments()
def on_cancel(self):
    """Undo entry linkage and reset status when the document is cancelled."""
    for entry in self.doc.tax_withholding_entries:
        entry: TaxWithholdingEntry  # annotation only, for type-checkers/IDE
        entry._clear_old_references()
        entry.set_status()
def _is_tax_withholding_applicable(self):
    """Return False (and clear any stale entries) when withholding should not
    be computed: TDS not applied, opening entry, or no categories resolved."""
    # Clear existing tax withholding amounts before recalculation
    self._clear_existing_tax_amounts()
    if not self.doc.apply_tds or self.doc.get("is_opening") == "Yes" or not self._get_category_names():
        self.doc.tax_withholding_entries = []
        return False
    return True
def _clear_existing_tax_amounts(self):
for row in self.doc.taxes:
if row.is_tax_withholding_account and row.tax_amount:
row.tax_amount = 0
row.base_tax_amount_after_discount_amount = 0
def calculate_taxes_and_totals(self):
    """Delegate total recomputation to the document (overridden per doctype)."""
    self.doc.calculate_taxes_and_totals()
def get_conversion_rate(self):
    """Document's conversion rate to company currency; defaults to 1."""
    return self.doc.get("conversion_rate") or 1
def on_validate(self):
    """Recalculate withholding on validate when it is applicable to this document."""
    if self._is_tax_withholding_applicable():
        self.calculate()
class PurchaseTaxWithholding(TaxWithholdingController):
    """Tax withholding controller for Purchase Invoices"""

    def __init__(self, doc):
        super().__init__(doc)
        # Party is always the supplier for purchase documents
        self.party_type = "Supplier"
        self.party = doc.supplier
class SalesTaxWithholding(TaxWithholdingController):
    """Tax withholding controller for Sales Invoices (TCS)"""

    def __init__(self, doc):
        super().__init__(doc)
        # Party is always the customer for sales documents
        self.party_type = "Customer"
        self.party = doc.customer
class PaymentTaxWithholding(TaxWithholdingController):
    """Tax withholding controller for Payment Entries"""

    def __init__(self, doc):
        super().__init__(doc)
        # Payment entries carry their own party type/name
        self.party_type = doc.party_type
        self.party = doc.party

    def _get_category_names(self):
        # A payment entry references at most one withholding category
        if not self.doc.tax_withholding_category:
            return []
        return [self.doc.tax_withholding_category]

    def _update_taxable_amounts(self):
        """Taxable base = unallocated amount plus allocations against advance
        doctypes, converted to company currency."""
        category = next(iter(self.category_details.values()))
        taxable_amount_in_party_currency = self.doc.unallocated_amount
        taxable_amount_in_party_currency += sum(
            flt(d.allocated_amount)
            for d in self.doc.references
            if d.reference_doctype in get_advance_payment_doctypes()
        )
        exchange_rate = self.get_conversion_rate()
        taxable_amount = flt(taxable_amount_in_party_currency * exchange_rate, self.precision)
        category["taxable_amount"] = taxable_amount

    def get_conversion_rate(self):
        # Receive: party currency is the source side; Pay: the target side
        if self.doc.payment_type == "Receive":
            return self.doc.source_exchange_rate or 1
        else:
            return self.doc.target_exchange_rate or 1

    def calculate_taxes_and_totals(self):
        """Payment entries recompute taxes via apply_taxes()."""
        self.doc.apply_taxes()

    def _get_open_entries_for_category(self, category):
        # for payment only over withheld
        open_entries = {"under_withheld": deque(), "over_withheld": deque()}
        current_entry = frappe._dict(
            {
                **self._create_default_entry(category),
                "taxable_amount": category.taxable_amount,
                # Taxable side is left blank; it is linked on later settlement
                "taxable_doctype": "",
                "taxable_name": "",
                "taxable_date": "",
            }
        )
        open_entries["over_withheld"].append(current_entry)
        return open_entries

    def _is_threshold_crossed_for_category(self, category):
        """For payment entries if apply_tds is checked, return True"""
        return True

    def _get_unused_threshold(self, category):
        """Always withhold Tax and whenever tax gets deducted adjust it"""
        return 0
class JournalTaxWithholding(TaxWithholdingController):
    """Tax withholding controller for Journal Entries"""

    def __init__(self, doc):
        super().__init__(doc)
        # Party/TDS-row details are resolved from the account rows below
        self.party = None
        self.party_type = None
        self.party_account = None
        self.party_row = None
        self.existing_tds_rows = []
        self.precision = None
        self.has_multiple_parties = False
        self.party_field = None
        self.reverse_field = None
        self._setup_party_info()

    def _setup_party_info(self):
        """Pick the first Customer/Supplier row as the party row, collect
        existing TDS rows and flag multiple distinct parties."""
        for row in self.doc.get("accounts"):
            if row.party_type in ("Customer", "Supplier") and row.party:
                if self.party and row.party != self.party:
                    self.has_multiple_parties = True
                if not self.party:
                    self.party = row.party
                    self.party_type = row.party_type
                    self.party_account = row.account
                    self.party_row = row
            if row.get("is_tax_withholding_account"):
                self.existing_tds_rows.append(row)
        if self.party_type:
            self._setup_direction_fields()

    def _setup_direction_fields(self):
        """
        For Supplier (TDS): party has credit, TDS reduces credit
        For Customer (TCS): party has debit, TCS increases debit
        """
        if self.party_type == "Supplier":
            self.party_field = "credit"
            self.reverse_field = "debit"
        else:  # Customer
            self.party_field = "debit"
            self.reverse_field = "credit"
        self.precision = self.doc.precision(self.party_field, self.party_row)

    def _get_category_names(self):
        # A journal entry references at most one withholding category
        if not self.doc.tax_withholding_category:
            return []
        return [self.doc.tax_withholding_category]

    def _update_taxable_amounts(self):
        """Set the single category's taxable amount to the entry's net total."""
        if not self.category_details:
            return
        net_amount = self._calculate_net_total()
        category = next(iter(self.category_details.values()))
        category["taxable_amount"] = net_amount

    def _calculate_net_total(self):
        """Net total from all rows except the party row, tax/chargeable
        accounts and existing TDS rows, measured on the party's reverse side."""
        from erpnext.accounts.report.general_ledger.general_ledger import get_account_type_map

        account_type_map = get_account_type_map(self.doc.company)
        return flt(
            sum(
                d.get(self.reverse_field) - d.get(self.party_field)
                for d in self.doc.get("accounts")
                if account_type_map.get(d.account) not in ("Tax", "Chargeable")
                and d.account != self.party_account
                and not d.get("is_tax_withholding_account")
            ),
            self.precision,
        )

    def get_conversion_rate(self):
        # The party row's exchange rate converts to company currency
        return self.party_row.get("exchange_rate", 1.0)

    def calculate_taxes_and_totals(self):
        """Recompute journal totals after amounts were changed."""
        self.doc.set_amounts_in_company_currency()
        self.doc.set_total_debit_credit()
        self.doc.set_against_account()

    def update_tax_rows(self):
        """Create/update the TDS account row and adjust the party row accordingly."""
        if not self._should_apply_tds():
            # Passing None removes every existing TDS row
            self._cleanup_duplicate_tds_rows(None)
            return
        if self.has_multiple_parties:
            frappe.throw(_("Cannot apply TDS against multiple parties in one entry"))
        account_amount_map = self._calculate_account_wise_amount()
        if not account_amount_map:
            return
        self._reset_existing_tds()
        for account_head, tax_amount in account_amount_map.items():
            if not tax_amount:
                continue
            self._create_or_update_tds_row(account_head, tax_amount)
            self._update_party_amount(tax_amount, is_reversal=False)
        self._recalculate_totals()

    def _should_apply_tds(self):
        # Journal withholding is limited to debit/credit note vouchers
        return self.doc.apply_tds and self.doc.voucher_type in ("Debit Note", "Credit Note")

    def _reset_existing_tds(self):
        """Zero out existing TDS rows and back out their effect on the party row."""
        for row in self.existing_tds_rows:
            # TDS amount is always in credit (liability to government)
            tds_amount = flt(row.get("credit") - row.get("debit"), self.precision)
            if not tds_amount:
                continue
            self._update_party_amount(tds_amount, is_reversal=True)
            # zero_out_tds_row
            row.update(
                {
                    "credit": 0,
                    "credit_in_account_currency": 0,
                    "debit": 0,
                    "debit_in_account_currency": 0,
                }
            )

    def _update_party_amount(self, amount, is_reversal=False):
        """Apply (or reverse) the TDS adjustment on the party row, in both
        company currency and the row's account currency."""
        amount = flt(amount, self.precision)
        amount_in_party_currency = flt(amount / self.party_row.get("exchange_rate", 1), self.precision)
        # Determine which field the party amount is in
        active_field = self.party_field if self.party_row.get(self.party_field) else self.reverse_field
        # If amount is in reverse field, flip the signs
        if active_field == self.reverse_field:
            amount = -amount
            amount_in_party_currency = -amount_in_party_currency
        # Direction multiplier based on party type:
        # Customer (TCS): +1 (add to debit)
        # Supplier (TDS): -1 (subtract from credit)
        direction = 1 if self.party_type == "Customer" else -1
        # Reversal inverts the direction
        if is_reversal:
            direction = -direction
        adjustment = amount * direction
        adjustment_in_party_currency = amount_in_party_currency * direction
        active_field_account_currency = f"{active_field}_in_account_currency"
        self.party_row.update(
            {
                active_field: flt(self.party_row.get(active_field) + adjustment, self.precision),
                active_field_account_currency: flt(
                    self.party_row.get(active_field_account_currency) + adjustment_in_party_currency,
                    self.precision,
                ),
            }
        )

    def _create_or_update_tds_row(self, account_head, tax_amount):
        """Find or append the TDS account row and set its credit amounts."""
        from erpnext.accounts.utils import get_account_currency
        from erpnext.setup.utils import get_exchange_rate as _get_exchange_rate

        account_currency = get_account_currency(account_head)
        company_currency = frappe.get_cached_value("Company", self.doc.company, "default_currency")
        exchange_rate = _get_exchange_rate(account_currency, company_currency, self.doc.posting_date)
        tax_amount = flt(tax_amount, self.precision)
        tax_amount_in_account_currency = flt(tax_amount / exchange_rate, self.precision)
        tax_row = None
        for row in self.doc.get("accounts"):
            if row.account == account_head and row.get("is_tax_withholding_account"):
                tax_row = row
                break
        if not tax_row:
            tax_row = self.doc.append(
                "accounts",
                {
                    "account": account_head,
                    "account_currency": account_currency,
                    "exchange_rate": exchange_rate,
                    "cost_center": self.doc.get("cost_center")
                    or erpnext.get_default_cost_center(self.doc.company),
                    "credit": 0,
                    "credit_in_account_currency": 0,
                    "debit": 0,
                    "debit_in_account_currency": 0,
                    "is_tax_withholding_account": 1,
                },
            )
        # TDS/TCS is always credited (liability to government)
        tax_row.update(
            {
                "credit": tax_amount,
                "credit_in_account_currency": tax_amount_in_account_currency,
                "debit": 0,
                "debit_in_account_currency": 0,
            }
        )
        self._cleanup_duplicate_tds_rows(tax_row)

    def _cleanup_duplicate_tds_rows(self, current_tax_row):
        """Remove every TDS row except *current_tax_row* (None removes all)."""
        rows_to_remove = [
            row
            for row in self.doc.get("accounts")
            if row.get("is_tax_withholding_account") and row != current_tax_row
        ]
        for row in rows_to_remove:
            self.doc.remove(row)

    def _recalculate_totals(self):
        # NOTE(review): same body as calculate_taxes_and_totals above — could delegate
        self.doc.set_amounts_in_company_currency()
        self.doc.set_total_debit_credit()
        self.doc.set_against_account()

    def _is_tax_withholding_applicable(self):
        """Journal entries need apply_tds, a supported voucher type and a category."""
        if not self._should_apply_tds():
            self.doc.tax_withholding_entries = []
            return False
        if not self.doc.tax_withholding_category:
            self.doc.tax_withholding_entries = []
            return False
        return True

    def _get_linked_payments(self):
        """Journal Entry doesn't have advances like invoices"""
        return frappe._dict()
def _reset_idx(docs_to_reset_idx):
    """Re-number (idx) the submitted child entries of each (doctype, docname)
    pair sequentially, in a single bulk update."""
    updates = {}
    for doctype, docname in docs_to_reset_idx:
        names = frappe.get_all(
            DOCTYPE,
            filters={"parent": docname, "parenttype": doctype, "docstatus": 1},
            pluck="name",
        )
        for idx, name in enumerate(names, start=1):
            updates[name] = {"idx": idx}
    if updates:
        # update_modified=False: renumbering is housekeeping, not a user edit
        frappe.db.bulk_update(DOCTYPE, updates, update_modified=False)
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/accounts/doctype/tax_withholding_entry/tax_withholding_entry.py",
"license": "GNU General Public License v3.0",
"lines": 1163,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
frappe/erpnext:erpnext/accounts/doctype/tax_withholding_entry/test_tax_withholding_entry.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
# import frappe
from frappe.tests import IntegrationTestCase
# On IntegrationTestCase, the doctype test records and all
# link-field test record dependencies are recursively loaded
# Use these module variables to add/remove to/from that list
EXTRA_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
IGNORE_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
class IntegrationTestTaxWithholdingEntry(IntegrationTestCase):
    """
    Integration tests for TaxWithholdingEntry.
    Use this class for testing interactions between multiple components.
    """

    # No scenarios yet; placeholder keeps the doctype's test module importable.
    pass
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/accounts/doctype/tax_withholding_entry/test_tax_withholding_entry.py",
"license": "GNU General Public License v3.0",
"lines": 15,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
frappe/erpnext:erpnext/accounts/doctype/tax_withholding_group/tax_withholding_group.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class TaxWithholdingGroup(Document):
    """Doctype model for Tax Withholding Group; fields are auto-generated
    and no custom behavior is defined."""

    # begin: auto-generated types
    # This code is auto-generated. Do not modify anything in this block.

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from frappe.types import DF

        group_name: DF.Data
    # end: auto-generated types

    pass
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/accounts/doctype/tax_withholding_group/tax_withholding_group.py",
"license": "GNU General Public License v3.0",
"lines": 13,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
frappe/erpnext:erpnext/accounts/doctype/tax_withholding_group/test_tax_withholding_group.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
# import frappe
from frappe.tests import IntegrationTestCase
# On IntegrationTestCase, the doctype test records and all
# link-field test record dependencies are recursively loaded
# Use these module variables to add/remove to/from that list
EXTRA_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
IGNORE_TEST_RECORD_DEPENDENCIES = [] # eg. ["User"]
class IntegrationTestTaxWithholdingGroup(IntegrationTestCase):
    """
    Integration tests for TaxWithholdingGroup.
    Use this class for testing interactions between multiple components.
    """

    # No scenarios yet; placeholder keeps the doctype's test module importable.
    pass
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/accounts/doctype/tax_withholding_group/test_tax_withholding_group.py",
"license": "GNU General Public License v3.0",
"lines": 15,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
frappe/erpnext:erpnext/accounts/report/custom_financial_statement/custom_financial_statement.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from erpnext.accounts.doctype.financial_report_template.financial_report_engine import (
FinancialReportEngine,
)
def execute(filters: dict | None = None):
    """Run the custom financial statement via the report engine when a
    report template is selected; otherwise produce no report."""
    if not filters or not filters.report_template:
        return None
    return FinancialReportEngine().execute(filters)
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/accounts/report/custom_financial_statement/custom_financial_statement.py",
"license": "GNU General Public License v3.0",
"lines": 8,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
frappe/erpnext:erpnext/patches/v15_0/migrate_old_item_wise_tax_detail_data_to_table.py | import click
import frappe
from frappe import parse_json
from frappe.model.document import bulk_insert
from frappe.utils import flt
DOCTYPES_TO_PATCH = {
"Sales Taxes and Charges": [
"Sales Invoice",
"POS Invoice",
"Sales Order",
"Delivery Note",
"Quotation",
],
"Purchase Taxes and Charges": [
"Purchase Invoice",
"Purchase Order",
"Purchase Receipt",
"Supplier Quotation",
],
}
TAX_WITHHOLDING_DOCS = (
"Purchase Invoice",
"Purchase Order",
"Purchase Receipt",
)
def execute():
    """Backfill Item Wise Tax Detail rows from the legacy JSON
    item_wise_tax_detail field, processing documents in chunks of 1000."""
    for tax_doctype, doctypes in DOCTYPES_TO_PATCH.items():
        for doctype in doctypes:
            # Only submitted documents that still carry the legacy JSON field
            docnames = frappe.get_all(
                tax_doctype,
                filters={
                    "item_wise_tax_detail": ["is", "set"],
                    "docstatus": ["=", 1],
                    "parenttype": ["=", doctype],
                },
                pluck="parent",
            )
            total_docs = len(docnames)
            if not total_docs:
                continue
            chunk_size = 1000
            with click.progressbar(
                range(0, total_docs, chunk_size), label=f"Migrating {total_docs} {doctype}s"
            ) as bar:
                for index in bar:
                    chunk = docnames[index : index + chunk_size]
                    doc_info = get_doc_details(chunk, doctype)
                    if not doc_info:
                        # no valid invoices found
                        continue
                    docs = [d.name for d in doc_info]  # valid invoices
                    # Delete existing item-wise tax details to avoid duplicates
                    delete_existing_tax_details(docs, doctype)
                    taxes = get_taxes_for_docs(docs, tax_doctype, doctype)
                    items = get_items_for_docs(docs, doctype)
                    compiled_docs = compile_docs(doc_info, taxes, items, doctype, tax_doctype)
                    rows_to_insert = []
                    for doc in compiled_docs:
                        if not (doc.taxes and doc.items):
                            continue
                        rows_to_insert.extend(ItemTax().get_item_wise_tax_details(doc))
                    if rows_to_insert:
                        bulk_insert("Item Wise Tax Detail", rows_to_insert, commit_chunks=True)
def get_taxes_for_docs(parents, tax_doctype, doctype):
    """Fetch all tax rows of *tax_doctype* belonging to the given parent documents."""
    tax = frappe.qb.DocType(tax_doctype)
    return (
        frappe.qb.from_(tax)
        .select("*")
        .where(tax.parenttype == doctype)
        .where(tax.parent.isin(parents))
        .run(as_dict=True)
    )
def get_items_for_docs(parents, doctype):
    """Fetch the item-row fields needed for tax recomputation; apply_tds is
    included only for doctypes that support tax withholding."""
    item = frappe.qb.DocType(f"{doctype} Item")
    additional_fields = []
    if doctype in TAX_WITHHOLDING_DOCS:
        additional_fields.append(item.apply_tds)
    return (
        frappe.qb.from_(item)
        .select(
            item.name,
            item.parent,
            item.item_code,
            item.item_name,
            item.base_net_amount,
            item.qty,
            item.item_tax_rate,
            *additional_fields,
        )
        .where(item.parenttype == doctype)
        .where(item.parent.isin(parents))
        .run(as_dict=True)
    )
def get_doc_details(parents, doctype):
	"""Fetch header-level totals for the given documents, adding the
	withholding net total for TDS-capable doctypes."""
	inv = frappe.qb.DocType(doctype)
	fields = [inv.name, inv.base_net_total, inv.company]
	if doctype in TAX_WITHHOLDING_DOCS:
		fields.append(inv.base_tax_withholding_net_total)
	return frappe.qb.from_(inv).select(*fields).where(inv.name.isin(parents)).run(as_dict=True)
def compile_docs(doc_info, taxes, items, doctype, tax_doctype):
	"""
	Compile docs, so that each one could be accessed as if it's a single doc.
	"""
	compiled = {
		doc.name: frappe._dict(**doc, taxes=[], items=[], doctype=doctype, tax_doctype=tax_doctype)
		for doc in doc_info
	}
	for tax_row in taxes:
		compiled[tax_row.parent]["taxes"].append(tax_row)
	for item_row in items:
		compiled[item_row.parent]["items"].append(item_row)
	return compiled.values()
def delete_existing_tax_details(doc_names, doctype):
	"""
	Remove previously generated Item Wise Tax Detail rows for the given
	documents so a re-run of the migration does not create duplicates.
	"""
	if doc_names:
		frappe.db.delete("Item Wise Tax Detail", {"parent": ["in", doc_names], "parenttype": doctype})
class ItemTax:
	"""Computes per-item tax splits from the legacy tax rows of a compiled doc."""

	def get_item_wise_tax_details(self, doc):
		"""
		This method calculates tax amounts for each item-tax combination.

		Returns a list of unsaved `Item Wise Tax Detail` docs; rounding
		differences are folded into the last taxable item of each tax row.
		"""
		item_wise_tax_details = []
		company_currency = frappe.get_cached_value("Company", doc.company, "default_currency")
		precision = frappe.get_precision(doc.tax_doctype, "tax_amount", currency=company_currency)
		tax_differences = frappe._dict()
		last_taxable_items = frappe._dict()

		# Initialize tax differences with expected amounts
		for tax_row in doc.taxes:
			if tax_row.base_tax_amount_after_discount_amount:
				multiplier = -1 if tax_row.get("add_deduct_tax") == "Deduct" else 1
				tax_differences[tax_row.name] = tax_row.base_tax_amount_after_discount_amount * multiplier

		idx = 1
		for item in doc.get("items"):
			item_proportion = item.base_net_amount / doc.base_net_total if doc.base_net_total else 0
			for tax_row in doc.taxes:
				tax_rate = 0
				tax_amount = 0
				if not tax_row.base_tax_amount_after_discount_amount:
					continue
				charge_type = tax_row.charge_type
				if tax_row.item_wise_tax_detail:
					# tax rate
					tax_rate = self._get_item_tax_rate(item, tax_row)
					# tax amount
					if tax_rate:
						multiplier = (
							item.qty if charge_type == "On Item Quantity" else item.base_net_amount / 100
						)
						tax_amount = multiplier * tax_rate
					else:
						# eg: charge_type == actual
						item_key = item.item_code or item.item_name
						item_tax_detail = self._get_item_tax_details(tax_row).get(item_key, {})
						tax_amount = item_tax_detail.get("tax_amount", 0) * item_proportion
				# Actual rows where no item_wise_tax_detail
				elif charge_type == "Actual":
					if tax_row.get("is_tax_withholding_account"):
						# TDS applies only to items flagged apply_tds, proportional
						# to the withholding net total rather than the net total.
						if not item.get("apply_tds") or not doc.get("base_tax_withholding_net_total"):
							item_proportion = 0
						else:
							item_proportion = item.base_net_amount / doc.base_tax_withholding_net_total
					tax_amount = tax_row.base_tax_amount_after_discount_amount * item_proportion
				if tax_row.get("add_deduct_tax") == "Deduct":
					tax_amount *= -1
				tax_doc = get_item_tax_doc(item, tax_row, tax_rate, tax_amount, idx, precision)
				item_wise_tax_details.append(tax_doc)
				# Update tax differences and track last taxable item
				if tax_amount:
					tax_differences[tax_row.name] -= tax_amount
					last_taxable_items[tax_row.name] = tax_doc
				idx += 1

		# Handle rounding errors by applying differences to last taxable items
		self._handle_rounding_differences(tax_differences, last_taxable_items)
		return item_wise_tax_details

	def _handle_rounding_differences(self, tax_differences, last_taxable_items):
		"""
		Handle rounding errors by applying the difference to the last taxable item
		"""
		for tax_row, diff in tax_differences.items():
			if not diff or tax_row not in last_taxable_items:
				continue
			rounded_difference = flt(diff, 5)
			# Only absorb small gaps; a larger difference indicates bad source
			# data rather than rounding, and is left untouched.
			if abs(rounded_difference) <= 0.5:
				last_item_tax_doc = last_taxable_items[tax_row]
				last_item_tax_doc.amount = flt(last_item_tax_doc.amount + rounded_difference, 5)

	def _get_item_tax_details(self, tax_row):
		# Memoize the parsed JSON on the tax row itself.
		# FIX: the previous code assigned `tax_row.__tax_details`, which Python
		# name-mangles to `_ItemTax__tax_details` inside this class body, while
		# the getattr() probe used the literal string "__tax_details" — so the
		# cache never hit and the JSON was re-parsed on every call. Use an
		# unmangled attribute name so the memoization actually works.
		if getattr(tax_row, "_parsed_tax_details", None) is None:
			tax_row._parsed_tax_details = parse_item_wise_tax_details(
				tax_row.get("item_wise_tax_detail") or "{}"
			)
		return tax_row._parsed_tax_details

	def _get_item_tax_rate(self, item, tax_row):
		# NOTE: Use item tax rate as same item code
		# could have different tax rates in same invoice
		item_tax_rates = frappe.parse_json(item.item_tax_rate or {})
		if item_tax_rates and tax_row.account_head in item_tax_rates:
			return item_tax_rates[tax_row.account_head]
		return flt(tax_row.rate)
def get_item_tax_doc(item, tax, rate, tax_value, idx, precision=2):
	"""Build one (unsaved) `Item Wise Tax Detail` doc for an item/tax pair."""
	detail = {
		"doctype": "Item Wise Tax Detail",
		"name": frappe.generate_hash(),
		"idx": idx,
		"item_row": item.name,
		"tax_row": tax.name,
		"rate": rate,
		"amount": flt(tax_value, precision),
		"taxable_amount": item.base_net_amount,
		"docstatus": tax.docstatus,
		"parent": tax.parent,
		"parenttype": tax.parenttype,
		"parentfield": "item_wise_tax_details",
	}
	return frappe.get_doc(detail)
def parse_item_wise_tax_details(item_wise_tax_detail):
	"""Normalise the legacy `item_wise_tax_detail` JSON into
	{item_key: {tax_rate, tax_amount}} regardless of the stored shape."""
	normalised = {}
	try:
		parsed = parse_json(item_wise_tax_detail)
	except Exception:
		return normalised
	# This is stale data from 2009 found in a database
	if isinstance(parsed, int | float):
		return normalised
	for item_key, tax_data in parsed.items():
		if isinstance(tax_data, list) and len(tax_data) >= 2:
			# Modern shape: [rate, amount, ...]
			normalised[item_key] = frappe._dict(
				tax_rate=tax_data[0] or 0,
				tax_amount=tax_data[1] or 0,
			)
		elif isinstance(tax_data, str):
			# Very old shape: a bare rate stored as a string.
			normalised[item_key] = frappe._dict(tax_rate=flt(tax_data), tax_amount=0.0)
		elif isinstance(tax_data, dict):
			normalised[item_key] = tax_data
	return normalised
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v15_0/migrate_old_item_wise_tax_detail_data_to_table.py",
"license": "GNU General Public License v3.0",
"lines": 251,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
frappe/erpnext:erpnext/patches/v15_0/toggle_legacy_controller_for_period_closing.py | import frappe
def execute():
	"""
	Description:
	Enable Legacy controller for Period Closing Voucher
	"""
	# Opt existing sites into the legacy PCV controller so upgrade keeps behaviour.
	frappe.db.set_single_value("Accounts Settings", "use_legacy_controller_for_pcv", 1)
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v15_0/toggle_legacy_controller_for_period_closing.py",
"license": "GNU General Public License v3.0",
"lines": 7,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
frappe/erpnext:erpnext/patches/v16_0/fix_barcode_typo.py | import frappe
def execute():
	"""Correct the mistyped barcode type "EAN-12" to the valid "EAN-13"."""
	query = (
		frappe.qb.update("Item Barcode")
		.set("barcode_type", "EAN-13")
		.where(frappe.qb.Field("barcode_type") == "EAN-12")
	)
	query.run()
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/fix_barcode_typo.py",
"license": "GNU General Public License v3.0",
"lines": 5,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/patches/v16_0/migrate_account_freezing_settings_to_company.py | import frappe
def execute():
	"""Copy account-freezing settings from the Accounts Settings single
	to per-company fields on every Company."""
	# Read tabSingles directly; the source fields may already have been
	# removed from the Accounts Settings schema in this release.
	rows = frappe.db.sql(
		"""
        SELECT field, value
        FROM `tabSingles`
        WHERE doctype='Accounts Settings'
        AND field IN ('acc_frozen_upto', 'frozen_accounts_modifier')
        """,
		as_dict=True,
	)
	values = {row["field"]: row["value"] for row in rows}
	frozen_till = values.get("acc_frozen_upto")
	modifier = values.get("frozen_accounts_modifier")
	if not frozen_till and not modifier:
		return
	for company in frappe.get_all("Company", pluck="name"):
		frappe.db.set_value(
			"Company",
			company,
			{
				"accounts_frozen_till_date": frozen_till,
				"role_allowed_for_frozen_entries": modifier,
			},
		)
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/migrate_account_freezing_settings_to_company.py",
"license": "GNU General Public License v3.0",
"lines": 25,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/patches/v16_0/migrate_budget_records_to_new_structure.py | import frappe
from frappe.utils import add_months, flt, get_first_day, get_last_day
from erpnext.accounts.doctype.accounting_dimension.accounting_dimension import (
get_accounting_dimensions,
)
def execute():
	"""Migrate every draft and submitted Budget to the new per-account structure."""
	remove_old_property_setter()
	for budget_name in frappe.db.get_list(
		"Budget",
		filters={"docstatus": ["in", [0, 1]]},
		pluck="name",
	):
		migrate_single_budget(budget_name)
def remove_old_property_setter():
	"""Drop the obsolete naming-series Property Setter for Budget, if present."""
	filters = {
		"doc_type": "Budget",
		"field_name": "naming_series",
		"property": "options",
		"value": "Budget-.YYYY.-",
	}
	setter_name = frappe.db.get_value("Property Setter", filters, "name")
	if setter_name:
		frappe.delete_doc("Property Setter", setter_name, force=1)
def migrate_single_budget(budget_name):
	"""Split one legacy Budget (one child row per account) into separate new
	Budget documents, then cancel or delete the original."""
	budget_doc = frappe.get_doc("Budget", budget_name)
	account_rows = frappe.get_all(
		"Budget Account",
		filters={"parent": budget_name},
		fields=["account", "budget_amount"],
		order_by="idx asc",
	)
	if not account_rows:
		return
	# Drop the legacy child rows first so the old document can be cancelled.
	frappe.db.delete("Budget Account", filters={"parent": budget_doc.name})
	percentage_allocations = get_percentage_allocations(budget_doc)
	fiscal_year = frappe.get_cached_value(
		"Fiscal Year",
		budget_doc.fiscal_year,
		["name", "year_start_date", "year_end_date"],
		as_dict=True,
	)
	for row in account_rows:
		create_new_budget_from_row(budget_doc, fiscal_year, row, percentage_allocations)
	# Submitted budgets are cancelled (preserving the audit trail);
	# drafts are removed outright.
	if budget_doc.docstatus == 1:
		budget_doc.cancel()
	else:
		frappe.delete_doc("Budget", budget_name)
def get_percentage_allocations(budget_doc):
	"""Return the monthly allocation percentages for a budget: the linked
	Monthly Distribution when present, otherwise an even 12-way split."""
	if not budget_doc.monthly_distribution:
		return [100 / 12] * 12
	distribution = frappe.get_cached_doc("Monthly Distribution", budget_doc.monthly_distribution)
	return [flt(row.percentage_allocation) for row in distribution.percentages]
def create_new_budget_from_row(budget_doc, fiscal_year, account_row, percentage_allocations):
	"""Create (and possibly submit) one new-structure Budget for a single
	legacy Budget Account row, copying core fields, accounting dimensions,
	action settings and a monthly distribution schedule."""
	new_budget = frappe.new_doc("Budget")
	core_fields = ["budget_against", "company", "cost_center", "project"]
	for field in core_fields:
		new_budget.set(field, budget_doc.get(field))
	# Carry over any custom accounting dimensions set on the old budget.
	for fieldname in get_accounting_dimensions():
		if budget_doc.get(fieldname):
			new_budget.set(fieldname, budget_doc.get(fieldname))
	new_budget.from_fiscal_year = fiscal_year.name
	new_budget.to_fiscal_year = fiscal_year.name
	new_budget.budget_start_date = fiscal_year.year_start_date
	new_budget.budget_end_date = fiscal_year.year_end_date
	new_budget.account = account_row.account
	new_budget.budget_amount = flt(account_row.budget_amount)
	new_budget.distribution_frequency = "Monthly"
	# All-equal percentages imply an even distribution.
	new_budget.distribute_equally = 1 if len(set(percentage_allocations)) == 1 else 0
	copy_fields = [
		"applicable_on_material_request",
		"action_if_annual_budget_exceeded_on_mr",
		"action_if_accumulated_monthly_budget_exceeded_on_mr",
		"applicable_on_purchase_order",
		"action_if_annual_budget_exceeded_on_po",
		"action_if_accumulated_monthly_budget_exceeded_on_po",
		"applicable_on_booking_actual_expenses",
		"action_if_annual_budget_exceeded",
		"action_if_accumulated_monthly_budget_exceeded",
		"applicable_on_cumulative_expense",
		"action_if_annual_exceeded_on_cumulative_expense",
		"action_if_accumulated_monthly_exceeded_on_cumulative_expense",
	]
	for field in copy_fields:
		new_budget.set(field, budget_doc.get(field))
	# One distribution row per month, walking forward from the year start.
	current_start = fiscal_year.year_start_date
	for percentage in percentage_allocations:
		new_budget.append(
			"budget_distribution",
			{
				"start_date": get_first_day(current_start),
				"end_date": get_last_day(current_start),
				"percent": percentage,
				"amount": new_budget.budget_amount * percentage / 100,
			},
		)
		current_start = add_months(current_start, 1)
	# Skip validations/links: legacy data may no longer satisfy current rules.
	new_budget.flags.ignore_validate = True
	new_budget.flags.ignore_links = True
	new_budget.insert(ignore_permissions=True, ignore_mandatory=True)
	if budget_doc.docstatus == 1:
		new_budget.submit()
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/migrate_budget_records_to_new_structure.py",
"license": "GNU General Public License v3.0",
"lines": 105,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
frappe/erpnext:erpnext/patches/v16_0/migrate_tax_withholding_data.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
"""
Migration patch for Tax Withholding Entry data.
This patch migrates historical TDS/TCS data from the old structure to the new
Tax Withholding Entry child table structure.
Old Structure:
- Purchase Invoice: taxes table (is_tax_withholding_account), tax_withheld_vouchers, advance_tax
- Sales Invoice: taxes table (TDS amount without is_tax_withholding_account checkbox)
- Payment Entry: advance_taxes_and_charges table with allocated_amount
- Journal Entry: accounts table with is_tax_withholding_account (but not reliable)
New Structure:
- All doctypes: tax_withholding_entries child table with detailed tracking
"""
from collections import defaultdict
import frappe
from frappe.query_builder import Case
from frappe.query_builder.functions import IfNull, Max, Sum
from frappe.utils import flt, now
def execute():
	"""Entry point: migrate historical TDS/TCS data into Tax Withholding Entry rows."""
	tds_accounts = get_tds_accounts()
	if not tds_accounts:
		return
	tax_rate_map = get_tax_rate_map()
	column_cache = get_column_cache()
	party_tax_id_cache = {}
	# Clean up any existing migration entries
	frappe.db.delete("Tax Withholding Entry", filters={"created_by_migration": 1})
	# Migrate data from each document type
	# Purchase Invoice migration also handles Payment Entry TDS (allocated and unallocated)
	PurchaseInvoiceMigrator(tds_accounts, tax_rate_map, column_cache, party_tax_id_cache).migrate()
	migrate_sales_invoices(tds_accounts, tax_rate_map, column_cache, party_tax_id_cache)
	migrate_journal_entries(tds_accounts, tax_rate_map, column_cache, party_tax_id_cache)
	# Copy tax_withholding_category to item level
	copy_category_to_items_for_purchase(column_cache)
	copy_category_to_items_for_sales(column_cache)
def get_column_cache():
	"""Probe once which optional columns exist, keyed by doctype."""
	cache = {}
	# Party doctypes may carry either a `pan` or a generic `tax_id` column.
	for party_doctype in ("Supplier", "Customer"):
		cache[party_doctype] = {
			"pan": frappe.db.has_column(party_doctype, "pan"),
			"tax_id": frappe.db.has_column(party_doctype, "tax_id"),
		}
	# Invoice (and invoice item) doctypes may carry a withholding category.
	for doctype in (
		"Purchase Invoice",
		"Purchase Invoice Item",
		"Sales Invoice",
		"Sales Invoice Item",
	):
		cache[doctype] = {
			"tax_withholding_category": frappe.db.has_column(doctype, "tax_withholding_category"),
		}
	return cache
def get_tds_accounts():
	"""Return TDS account lookups, or None when no Tax Withholding Accounts exist.

	Shape: {"account_map": {(company, account): category-or-None},
	        "accounts_by_company": {company: {account, ...}}}
	"""
	twa = frappe.qb.DocType("Tax Withholding Account")
	rows = (
		frappe.qb.from_(twa).select(twa.account, twa.company, twa.parent.as_("category")).run(as_dict=True)
	)
	if not rows:
		return None
	account_map = {}
	accounts_by_company = {}
	for row in rows:
		key = (row.company, row.account)
		# An account shared by multiple categories is ambiguous -> None.
		account_map[key] = None if key in account_map else row.category
		accounts_by_company.setdefault(row.company, set()).add(row.account)
	return {"account_map": account_map, "accounts_by_company": accounts_by_company}
def get_tax_rate_map():
	"""Map each Tax Withholding Category to its list of dated rate rows."""
	twr = frappe.qb.DocType("Tax Withholding Rate")
	twc = frappe.qb.DocType("Tax Withholding Category")
	rows = (
		frappe.qb.from_(twr)
		.join(twc)
		.on(twr.parent == twc.name)
		.select(
			twr.parent,
			twr.tax_withholding_rate,
			twr.from_date,
			twr.to_date,
			twc.tax_on_excess_amount,
		)
		.run(as_dict=True)
	)
	grouped = defaultdict(list)
	for row in rows:
		grouped[row.parent].append(row)
	return dict(grouped)
def get_tax_rate_for_date(tax_rate_map, category, posting_date):
	"""Return (rate, tax_on_excess) applicable on `posting_date`, else (0, False)."""
	rates = tax_rate_map.get(category) if category and posting_date else None
	if not rates:
		return 0, False
	for rate_row in rates:
		# Rows without a complete validity window are ignored.
		if not (rate_row.from_date and rate_row.to_date):
			continue
		if rate_row.from_date <= posting_date <= rate_row.to_date:
			return rate_row.tax_withholding_rate, bool(rate_row.tax_on_excess_amount)
	return 0, False
def get_party_tax_id(party_type, party, column_cache, party_tax_id_cache):
	"""Fetch (and memoize in `party_tax_id_cache`) the party's tax id,
	preferring `pan` over `tax_id` when both columns exist."""
	if not party:
		return None
	key = (party_type, party)
	if key not in party_tax_id_cache:
		columns = column_cache.get(party_type, {})
		value = None
		if columns.get("pan"):
			value = frappe.db.get_value(party_type, party, "pan")
		elif columns.get("tax_id"):
			value = frappe.db.get_value(party_type, party, "tax_id")
		party_tax_id_cache[key] = value
	return party_tax_id_cache[key]
def determine_status(taxable_name, withholding_name, under_withheld_reason, is_duplicate=False):
	"""Determine the status of a Tax Withholding Entry."""
	if is_duplicate:
		return "Duplicate"
	# A stated reason means any shortfall is legitimate; both docs present
	# means the tax is fully accounted for.
	if under_withheld_reason or (taxable_name and withholding_name):
		return "Settled"
	if taxable_name:
		# Taxable doc exists but tax was never deducted.
		return "Under Withheld"
	if withholding_name:
		# Tax deducted without a corresponding taxable doc.
		return "Over Withheld"
	return ""
def bulk_insert_entries(all_entries):
	"""
	Bulk insert Tax Withholding Entries.
	all_entries: dict of {(parent_doctype, parent_name): [entries]}

	All rows are inserted as submitted (docstatus=1) and flagged
	created_by_migration=1 so a re-run can clean them up.
	"""
	if not all_entries:
		return
	# Get existing names to avoid collisions
	existing_names = set(frappe.get_all("Tax Withholding Entry", pluck="name"))

	def generate_unique_name():
		# Retry until a hash not already present is found; track it locally so
		# names generated within this batch cannot collide with each other.
		while True:
			name = frappe.generate_hash(length=10)
			if name not in existing_names:
				existing_names.add(name)
				return name

	# Prepare all entries with proper fields
	# NOTE: order must match the per-row tuples built below.
	fields = [
		"name",
		"creation",
		"modified",
		"modified_by",
		"owner",
		"docstatus",
		"parent",
		"parentfield",
		"parenttype",
		"idx",
		"company",
		"party_type",
		"party",
		"tax_id",
		"tax_withholding_category",
		"tax_withholding_group",
		"taxable_amount",
		"tax_rate",
		"withholding_amount",
		"taxable_doctype",
		"taxable_name",
		"taxable_date",
		"withholding_doctype",
		"withholding_name",
		"withholding_date",
		"status",
		"under_withheld_reason",
		"currency",
		"conversion_rate",
		"created_by_migration",
	]
	current_time = now()
	current_user = frappe.session.user
	values = []
	for (parent_doctype, parent_name), entries in all_entries.items():
		for idx, entry in enumerate(entries, start=1):
			# Determine status
			status = determine_status(
				entry.get("taxable_name"),
				entry.get("withholding_name"),
				entry.get("under_withheld_reason"),
				entry.get("is_duplicate", False),
			)
			values.append(
				(
					generate_unique_name(),  # name
					current_time,  # creation
					current_time,  # modified
					current_user,  # modified_by
					current_user,  # owner
					1,  # docstatus (submitted)
					parent_name,  # parent
					"tax_withholding_entries",  # parentfield
					parent_doctype,  # parenttype
					idx,  # idx
					entry.get("company"),
					entry.get("party_type"),
					entry.get("party"),
					entry.get("tax_id"),
					entry.get("tax_withholding_category"),
					entry.get("tax_withholding_group", ""),
					flt(entry.get("taxable_amount"), 2),
					flt(entry.get("tax_rate"), 4),
					flt(entry.get("withholding_amount"), 2),
					entry.get("taxable_doctype", ""),
					entry.get("taxable_name", ""),
					entry.get("taxable_date"),
					entry.get("withholding_doctype", ""),
					entry.get("withholding_name", ""),
					entry.get("withholding_date"),
					status,
					entry.get("under_withheld_reason", ""),
					entry.get("currency", ""),
					flt(entry.get("conversion_rate"), 9) or 1,
					1,  # created_by_migration
				)
			)
	if values:
		frappe.db.bulk_insert("Tax Withholding Entry", fields, values, ignore_duplicates=True)
# =============================================================================
# PURCHASE INVOICE MIGRATION
# =============================================================================
class PurchaseInvoiceMigrator:
def __init__(self, tds_accounts, tax_rate_map, column_cache, party_tax_id_cache):
self.tds_accounts = tds_accounts
self.tax_rate_map = tax_rate_map
self.column_cache = column_cache
self.party_tax_id_cache = party_tax_id_cache
# Build TDS account set
self.all_tds_accounts = set()
for accounts in tds_accounts["accounts_by_company"].values():
self.all_tds_accounts.update(accounts)
# Raw query results
self._invoices_with_tds = []
self._all_withheld_vouchers = []
self._all_advance_taxes = []
self._pe_tds_entries = []
# Lookups
self.invoice_taxes = {} # {invoice_name: {"info": row, "tax_rows": [], "tds_total": float}}
self.withheld_by_invoice = defaultdict(list) # {parent_invoice: [vouchers]}
self.advance_by_invoice = defaultdict(list) # {parent_invoice: [advance_taxes]}
self.pe_allocated = defaultdict(float) # {pe_name: total_allocated}
self.pe_taxes = {} # {pe_name: {"info": row, "tax_rows": [], "tds_total": float}}
self.invoice_info = {} # {invoice_name: row}
# Sets for tracking
self.invoices_with_twv = set()
self.invoices_with_advance_tax = set()
self.tds_paid_by_other = {} # {taxable_invoice: withholding_invoice}
# Date lookups
self.pe_dates = {}
self.pi_dates = {}
self._pi_names_for_dates = set()
self._pe_names_for_dates = set()
# Output
self.all_entries = {} # {(doctype, name): [entries]}
def migrate(self):
if not self.all_tds_accounts:
return
self._fetch_data()
self._build_lookups()
self._fetch_dates()
self._process_invoices()
self._process_pe_overwithheld()
bulk_insert_entries(self.all_entries)
# -------------------------------------------------------------------------
# Data Fetching
# -------------------------------------------------------------------------
def _fetch_data(self):
pi = frappe.qb.DocType("Purchase Invoice")
ptc = frappe.qb.DocType("Purchase Taxes and Charges")
twv = frappe.qb.DocType("Tax Withheld Vouchers")
at = frappe.qb.DocType("Advance Tax")
pe = frappe.qb.DocType("Payment Entry")
atc = frappe.qb.DocType("Advance Taxes and Charges")
# Query 1: Invoices with TDS in taxes table
self._invoices_with_tds = (
frappe.qb.from_(pi)
.join(ptc)
.on(ptc.parent == pi.name)
.select(
pi.name,
pi.supplier,
pi.company,
pi.posting_date,
pi.base_net_total,
pi.base_tax_withholding_net_total,
pi.tax_withholding_category,
pi.is_return,
pi.currency,
pi.conversion_rate,
ptc.account_head,
ptc.base_tax_amount_after_discount_amount,
)
.where(pi.docstatus == 1)
.where(ptc.account_head.isin(list(self.all_tds_accounts)))
.run(as_dict=True)
)
# Query 2: Tax withheld vouchers
self._all_withheld_vouchers = (
frappe.qb.from_(twv)
# JV / PE withholding possible
.left_join(pi)
.on(twv.parent == pi.name)
.select(
twv.parent,
twv.voucher_type,
twv.voucher_name,
twv.taxable_amount,
pi.supplier,
pi.company,
pi.posting_date,
pi.base_net_total,
pi.base_tax_withholding_net_total,
pi.tax_withholding_category,
pi.is_return,
pi.currency,
pi.conversion_rate,
)
.where(twv.parenttype == "Purchase Invoice")
.where(pi.docstatus == 1)
.run(as_dict=True)
)
# Query 3: Advance taxes with PI info
self._all_advance_taxes = (
frappe.qb.from_(at)
.join(pi)
.on(at.parent == pi.name)
.select(
at.parent,
at.reference_type,
at.reference_name,
at.account_head,
at.allocated_amount,
pi.supplier,
pi.company,
pi.posting_date,
pi.base_net_total,
pi.base_tax_withholding_net_total,
pi.tax_withholding_category,
pi.is_return,
pi.currency,
pi.conversion_rate,
)
.where(at.parenttype == "Purchase Invoice")
.where(at.reference_type == "Payment Entry")
.where(pi.docstatus == 1)
.run(as_dict=True)
)
# Query 4: Payment Entries with TDS
self._pe_tds_entries = (
frappe.qb.from_(pe)
.join(atc)
.on(atc.parent == pe.name)
.select(
pe.name.as_("payment_entry"),
pe.party_type,
pe.party,
pe.company,
pe.posting_date,
pe.paid_amount,
pe.tax_withholding_category,
pe.paid_from_account_currency,
pe.paid_to_account_currency,
pe.source_exchange_rate,
pe.target_exchange_rate,
pe.payment_type,
atc.account_head,
atc.base_tax_amount,
atc.add_deduct_tax,
)
.where(pe.docstatus == 1)
.where(pe.apply_tds == 1)
.where(atc.account_head.isin(list(self.all_tds_accounts)))
.run(as_dict=True)
)
# -------------------------------------------------------------------------
# Lookup Building
# -------------------------------------------------------------------------
def _build_lookups(self):
self._build_invoice_taxes_lookup()
self._build_withheld_vouchers_lookup()
self._build_advance_taxes_lookup()
self._build_pe_taxes_lookup()
self._build_invoice_info_lookup()
def _build_invoice_taxes_lookup(self):
for row in self._invoices_with_tds:
inv_name = row.name
if inv_name not in self.invoice_taxes:
self.invoice_taxes[inv_name] = {"info": row, "tax_rows": [], "tds_total": 0}
self.invoice_taxes[inv_name]["tax_rows"].append(row)
self.invoice_taxes[inv_name]["tds_total"] += abs(flt(row.base_tax_amount_after_discount_amount))
def _build_withheld_vouchers_lookup(self):
for row in self._all_withheld_vouchers:
self.withheld_by_invoice[row.parent].append(row)
self.invoices_with_twv.add(row.parent)
# Track cross-invoice TDS payment
if row.voucher_name != row.parent and row.parent in self.invoice_taxes:
if row.voucher_name not in self.tds_paid_by_other:
self.tds_paid_by_other[row.voucher_name] = row.parent
if row.voucher_type != "Purchase Invoice":
continue
self._pi_names_for_dates.add(row.voucher_name)
def _build_advance_taxes_lookup(self):
for row in self._all_advance_taxes:
self.advance_by_invoice[row.parent].append(row)
pe_name = row.reference_name
self.pe_allocated[pe_name] += flt(row.allocated_amount)
self._pe_names_for_dates.add(pe_name)
self.invoices_with_advance_tax.add(row.parent)
def _build_pe_taxes_lookup(self):
for row in self._pe_tds_entries:
pe_name = row.payment_entry
self._pe_names_for_dates.add(pe_name)
if pe_name not in self.pe_taxes:
self.pe_taxes[pe_name] = {"info": row, "tax_rows": [], "tds_total": 0}
self.pe_taxes[pe_name]["tax_rows"].append(row)
amount = abs(flt(row.base_tax_amount))
if row.add_deduct_tax == "Deduct":
self.pe_taxes[pe_name]["tds_total"] += amount
else:
self.pe_taxes[pe_name]["tds_total"] -= amount
def _build_invoice_info_lookup(self):
for row in self._invoices_with_tds:
if row.name not in self.invoice_info:
self.invoice_info[row.name] = row
for row in self._all_withheld_vouchers:
if row.parent not in self.invoice_info:
self.invoice_info[row.parent] = row
for row in self._all_advance_taxes:
if row.parent not in self.invoice_info:
self.invoice_info[row.parent] = row
def _fetch_dates(self):
pe = frappe.qb.DocType("Payment Entry")
pi = frappe.qb.DocType("Purchase Invoice")
if self._pe_names_for_dates:
pe_date_rows = (
frappe.qb.from_(pe)
.select(pe.name, pe.posting_date)
.where(pe.name.isin(list(self._pe_names_for_dates)))
.run(as_dict=True)
)
self.pe_dates = {row.name: row.posting_date for row in pe_date_rows}
if self._pi_names_for_dates:
pi_date_rows = (
frappe.qb.from_(pi)
.select(pi.name, pi.posting_date)
.where(pi.name.isin(list(self._pi_names_for_dates)))
.run(as_dict=True)
)
self.pi_dates = {row.name: row.posting_date for row in pi_date_rows}
# -------------------------------------------------------------------------
# Invoice Processing
# -------------------------------------------------------------------------
def _process_invoices(self):
all_invoice_names = (
set(self.invoice_taxes.keys()) | self.invoices_with_twv | self.invoices_with_advance_tax
)
for invoice_name in all_invoice_names:
self._process_invoice(invoice_name)
def _process_invoice(self, invoice_name):
info = self.invoice_info.get(invoice_name)
if not info:
return
# Build context for this invoice
ctx = self._build_invoice_context(invoice_name, info)
entries = []
# Process advance tax allocations (PE -> PI)
entries.extend(self._process_advance_taxes(invoice_name, ctx))
# Process TDS paid in current invoice
if ctx["total_tds_in_invoice"]:
entries.extend(self._process_invoice_tds(invoice_name, ctx))
# Handle under-withheld (TWV exists but no TDS)
elif invoice_name in self.invoices_with_twv:
entry = self._process_underwithheld(invoice_name, ctx)
if entry:
entries.append(entry)
if entries:
self._add_entries("Purchase Invoice", invoice_name, entries)
def _build_invoice_context(self, invoice_name, info):
# Get category
category = info.tax_withholding_category
if not category and invoice_name in self.invoice_taxes:
for tax_row in self.invoice_taxes[invoice_name]["tax_rows"]:
account_key = (info.company, tax_row.account_head)
category = self.tds_accounts["account_map"].get(account_key)
if category:
break
# Get party info
party_type = "Supplier"
party = info.supplier
tax_id = get_party_tax_id(party_type, party, self.column_cache, self.party_tax_id_cache)
tax_rate, tax_on_excess = get_tax_rate_for_date(self.tax_rate_map, category, info.posting_date)
# Current invoice's taxable amount
current_taxable = abs(info.base_tax_withholding_net_total or info.base_net_total)
# Get TDS total
has_tds = invoice_name in self.invoice_taxes
total_tds_in_invoice = abs(self.invoice_taxes[invoice_name]["tds_total"]) if has_tds else 0
# Get related data
advance_taxes = self.advance_by_invoice.get(invoice_name, [])
withheld_vouchers = self.withheld_by_invoice.get(invoice_name, [])
# Calculate past taxable from withheld vouchers
past_taxable_total = sum(
flt(v.taxable_amount) for v in withheld_vouchers if v.voucher_name != invoice_name
)
return {
"info": info,
"category": category,
"party_type": party_type,
"party": party,
"tax_id": tax_id,
"tax_rate": tax_rate,
"tax_on_excess": tax_on_excess,
"current_taxable": current_taxable,
"past_taxable_total": past_taxable_total,
"total_tds_in_invoice": total_tds_in_invoice,
"advance_taxes": advance_taxes,
"withheld_vouchers": withheld_vouchers,
}
def _process_advance_taxes(self, invoice_name, ctx):
entries = []
info = ctx["info"]
for adv_tax in ctx["advance_taxes"]:
pe_name = adv_tax.reference_name
pe_date = self.pe_dates.get(pe_name)
allocated_amount = flt(adv_tax.allocated_amount)
if allocated_amount <= 0:
continue
if ctx["tax_rate"]:
taxable_for_allocation = flt(allocated_amount * 100 / ctx["tax_rate"], 2)
else:
taxable_for_allocation = ctx["current_taxable"]
ctx["current_taxable"] += taxable_for_allocation
# Entry in Purchase Invoice
entry_in_pi = self._create_entry(
ctx,
taxable_amount=taxable_for_allocation,
withholding_amount=allocated_amount,
taxable_doctype="Purchase Invoice",
taxable_name=invoice_name,
taxable_date=info.posting_date,
withholding_doctype="Payment Entry",
withholding_name=pe_name,
withholding_date=pe_date,
)
entries.append(entry_in_pi)
# Duplicate entry in Payment Entry
entry_in_pe = entry_in_pi.copy()
entry_in_pe["is_duplicate"] = True
self._add_entries("Payment Entry", pe_name, [entry_in_pe])
return entries
def _process_invoice_tds(self, invoice_name, ctx):
entries = []
info = ctx["info"]
tax_rate = ctx["tax_rate"]
tds_for_past = 0
tds_for_current_and_past = abs(ctx["total_tds_in_invoice"])
# Calculate TDS split between current and past invoices
if not ctx["tax_on_excess"] and ctx["past_taxable_total"] > 0 and tax_rate:
tds_for_past = flt(ctx["past_taxable_total"] * tax_rate / 100, 2)
tds_for_current = max(0, tds_for_current_and_past - tds_for_past)
else:
tds_for_current = tds_for_current_and_past
# Entry for current invoice
if tds_for_current:
if tax_rate:
calc_taxable = flt(tds_for_current * 100 / tax_rate, 2)
else:
calc_taxable = ctx["current_taxable"]
# Handle threshold exemption for tax_on_excess categories
if ctx["tax_on_excess"] and calc_taxable < ctx["current_taxable"]:
taxable_exemption_amount = flt(ctx["current_taxable"] - calc_taxable, 2)
# Create threshold exemption entry (no TDS on threshold amount)
threshold_entry = self._create_entry(
ctx,
taxable_amount=taxable_exemption_amount,
withholding_amount=0,
taxable_doctype="Purchase Invoice",
taxable_name=invoice_name,
taxable_date=info.posting_date,
withholding_doctype="Purchase Invoice",
withholding_name=invoice_name,
withholding_date=info.posting_date,
under_withheld_reason="Threshold Exemption",
)
entries.append(threshold_entry)
main_entry = self._create_entry(
ctx,
taxable_amount=calc_taxable,
withholding_amount=tds_for_current,
taxable_doctype="Purchase Invoice",
taxable_name=invoice_name,
taxable_date=info.posting_date,
withholding_doctype="Purchase Invoice",
withholding_name=invoice_name,
withholding_date=info.posting_date,
)
entries.append(main_entry)
# Process withheld vouchers (TDS for past invoices paid here)
if tds_for_past:
entries.extend(self._process_withheld_vouchers(invoice_name, ctx))
return entries
def _process_withheld_vouchers(self, invoice_name, ctx):
    """Create entries for past vouchers whose TDS was withheld by this invoice.

    Each past voucher (other than the current invoice itself) yields one
    entry recorded against the current invoice, plus a duplicate-flagged
    copy attached to the past voucher.
    """
    info = ctx["info"]
    rate = ctx["tax_rate"]
    result = []
    for past in ctx["withheld_vouchers"]:
        if past.voucher_name == invoice_name:
            # The current invoice's own TDS is handled elsewhere.
            continue
        past_date = self.pi_dates.get(past.voucher_name) or info.posting_date
        past_taxable = flt(past.taxable_amount)
        past_tds = flt(past_taxable * rate / 100, 2) if rate else 0
        # Entry recorded against the current (withholding) invoice.
        current_entry = self._create_entry(
            ctx,
            taxable_amount=past_taxable,
            withholding_amount=past_tds,
            taxable_doctype=past.voucher_type,
            taxable_name=past.voucher_name,
            taxable_date=past_date,
            withholding_doctype="Purchase Invoice",
            withholding_name=invoice_name,
            withholding_date=info.posting_date,
        )
        result.append(current_entry)
        # Duplicate-flagged copy attached to the past voucher itself.
        duplicate = dict(current_entry, is_duplicate=True)
        self._add_entries(past.voucher_type, past.voucher_name, [duplicate])
    return result
def _process_underwithheld(self, invoice_name, ctx):
    """Return a zero-withholding entry for an invoice with no TDS deducted.

    Returns None when the TDS for this invoice was already paid through
    another invoice.
    """
    if invoice_name in self.tds_paid_by_other:
        # TDS settled via a different document; nothing to record here.
        return None
    info = ctx["info"]
    base_taxable = info.base_tax_withholding_net_total or info.base_net_total
    reason = "Threshold Exemption" if ctx["tax_on_excess"] else ""
    return self._create_entry(
        ctx,
        taxable_amount=base_taxable,
        withholding_amount=0,
        taxable_doctype="Purchase Invoice",
        taxable_name=invoice_name,
        taxable_date=info.posting_date,
        withholding_doctype="",
        withholding_name="",
        withholding_date=None,
        under_withheld_reason=reason,
    )
# -------------------------------------------------------------------------
# Payment Entry Over-Withheld Processing
# -------------------------------------------------------------------------
def _process_pe_overwithheld(self):
    """Record entries for Payment Entry TDS that was never allocated to an invoice.

    For every Payment Entry carrying TDS, the portion not allocated to any
    invoice is recorded as a standalone withholding entry (no taxable
    document), with the taxable amount back-calculated from the rate where
    one is available.
    """
    for pe_name, data in self.pe_taxes.items():
        info = data["info"]
        total_tds = data["tds_total"]
        if not total_tds:
            continue
        # Calculate unallocated TDS
        total_allocated = self.pe_allocated.get(pe_name, 0)
        unallocated_tds = total_tds - total_allocated
        if unallocated_tds <= 0:
            # Fully allocated (or over-allocated) — nothing residual to record.
            continue
        # Get category
        category = info.tax_withholding_category
        party_type = info.party_type
        party = info.party
        tax_id = get_party_tax_id(party_type, party, self.column_cache, self.party_tax_id_cache)
        tax_rate, _ = get_tax_rate_for_date(self.tax_rate_map, category, info.posting_date)
        if tax_rate:
            # Back-calculate taxable base from the residual TDS.
            unallocated_taxable = flt(unallocated_tds * 100 / tax_rate, 2)
        else:
            # No rate known — fall back to the full paid amount.
            unallocated_taxable = info.paid_amount
        # Get currency and conversion rate based on payment type
        if info.payment_type == "Receive":
            currency = info.paid_from_account_currency
            conversion_rate = info.source_exchange_rate or 1
        else:
            currency = info.paid_to_account_currency
            conversion_rate = info.target_exchange_rate or 1
        # No taxable document: only the withholding side references the PE.
        entry = {
            "company": info.company,
            "party_type": party_type,
            "party": party,
            "tax_id": tax_id,
            "tax_withholding_category": category,
            "taxable_amount": unallocated_taxable,
            "tax_rate": tax_rate,
            "withholding_amount": unallocated_tds,
            "taxable_doctype": "",
            "taxable_name": "",
            "taxable_date": None,
            "withholding_doctype": "Payment Entry",
            "withholding_name": pe_name,
            "withholding_date": info.posting_date,
            "currency": currency,
            "conversion_rate": conversion_rate,
        }
        self._add_entries("Payment Entry", pe_name, [entry])
# -------------------------------------------------------------------------
# Helper Methods
# -------------------------------------------------------------------------
def _create_entry(self, ctx, **kwargs):
    """Assemble a withholding entry dict from shared context plus overrides.

    For return documents the taxable and withholding amounts (when given)
    are negated before being merged with the common fields from ctx.
    """
    info = ctx["info"]
    if info.is_return:
        for field in ("taxable_amount", "withholding_amount"):
            if field in kwargs:
                kwargs[field] = -kwargs[field]
    entry = {
        "company": info.company,
        "party_type": ctx["party_type"],
        "party": ctx["party"],
        "tax_id": ctx["tax_id"],
        "tax_withholding_category": ctx["category"],
        "tax_rate": ctx["tax_rate"],
        "currency": info.currency,
        "conversion_rate": info.conversion_rate or 1,
    }
    entry.update(kwargs)
    return entry
def _add_entries(self, parent_doctype, parent_name, entries):
    """Append *entries* under the (parent_doctype, parent_name) bucket.

    Args:
        parent_doctype: doctype of the owning document.
        parent_name: name of the owning document.
        entries: list of entry dicts to accumulate.
    """
    # setdefault replaces the original explicit membership check + assignment.
    self.all_entries.setdefault((parent_doctype, parent_name), []).extend(entries)
# =============================================================================
# SALES INVOICE MIGRATION
# =============================================================================
def migrate_sales_invoices(tds_accounts, tax_rate_map, column_cache, party_tax_id_cache):
    """
    Migrate Sales Invoice TCS data.

    Sales Invoice TCS is simpler - only tax on excess amount for current invoice.
    No tax_withheld_vouchers or advance_tax tables.
    Tax is identified from GL Entry on TCS accounts.

    Args:
        tds_accounts: dict with "accounts_by_company" and "account_map".
        tax_rate_map: rate lookup consumed by get_tax_rate_for_date.
        column_cache: doctype -> column presence cache.
        party_tax_id_cache: memoization dict for party tax IDs.
    """
    gle = frappe.qb.DocType("GL Entry")
    si = frappe.qb.DocType("Sales Invoice")
    cust = frappe.qb.DocType("Customer")
    # Build conditions for TCS accounts
    all_tcs_accounts = set()
    for _company, accounts in tds_accounts["accounts_by_company"].items():
        all_tcs_accounts.update(accounts)
    if not all_tcs_accounts:
        # No TCS accounts configured anywhere — nothing to migrate.
        return
    # Get Sales Invoices with TCS amounts aggregated
    # Use conditional sum to aggregate TCS amounts only from TCS accounts
    tcs_accounts_list = list(all_tcs_accounts)
    tcs_entries = (
        frappe.qb.from_(si)
        .left_join(gle)
        .on((gle.voucher_no == si.name) & (gle.voucher_type == "Sales Invoice") & (gle.is_cancelled == 0))
        .join(cust)
        .on(si.customer == cust.name)
        .select(
            si.name.as_("invoice_name"),
            si.posting_date,
            si.customer,
            si.company,
            si.base_grand_total,
            si.is_return,
            si.currency,
            si.conversion_rate,
            cust.tax_withholding_category,
            Sum(Case().when(gle.account.isin(tcs_accounts_list), gle.credit - gle.debit).else_(0)).as_(
                "total_tcs"
            ),
            # Max() picks any one TCS account for category fallback below.
            Max(Case().when(gle.account.isin(tcs_accounts_list), gle.account).else_(None)).as_("account"),
        )
        .where(si.docstatus == 1)
        .groupby(si.name)
        .run(as_dict=True)
    )
    all_entries = {}
    category_wise_invoices = defaultdict(list)
    for row in tcs_entries:
        total_tcs = row.total_tcs
        net_grand_total = abs(row.base_grand_total - total_tcs)
        # Get category: prefer the customer's, fall back to the account map.
        category = row.tax_withholding_category
        if not category:
            account_key = (row.company, row.account)
            category = tds_accounts["account_map"].get(account_key)
        # Party info
        party_type = "Customer"
        party = row.customer
        tax_id = get_party_tax_id(party_type, party, column_cache, party_tax_id_cache)
        tax_rate, _ = get_tax_rate_for_date(tax_rate_map, category, row.posting_date)
        if not tax_rate:
            # Without a rate the taxable base cannot be derived — skip.
            continue
        # Back-calculate taxable amount
        calculated_taxable = 0
        if tax_rate and total_tcs:
            calculated_taxable = flt(total_tcs * 100 / tax_rate, 2)
        # For returns, amounts are negative
        if row.is_return:
            total_tcs = -abs(total_tcs)
            calculated_taxable = -abs(calculated_taxable)
        entries = []
        # Handle threshold exemption for categories
        # NOTE: Default tax_on_excess is True for Sales Invoice
        if abs(calculated_taxable) < net_grand_total:
            taxable_exemption_amount = flt(net_grand_total - abs(calculated_taxable), 2)
            if row.is_return:
                taxable_exemption_amount = -abs(taxable_exemption_amount)
            threshold_entry = {
                "company": row.company,
                "party_type": party_type,
                "party": party,
                "tax_id": tax_id,
                "tax_withholding_category": category,
                "taxable_amount": taxable_exemption_amount,
                "tax_rate": tax_rate,
                "withholding_amount": 0,
                "taxable_doctype": "Sales Invoice",
                "taxable_name": row.invoice_name,
                "taxable_date": row.posting_date,
                "withholding_doctype": "Sales Invoice",
                "withholding_name": row.invoice_name,
                "withholding_date": row.posting_date,
                "under_withheld_reason": "Threshold Exemption",
                "currency": row.currency,
                "conversion_rate": row.conversion_rate or 1,
            }
            entries.append(threshold_entry)
        entry = {
            "company": row.company,
            "party_type": party_type,
            "party": party,
            "tax_id": tax_id,
            "tax_withholding_category": category,
            "taxable_amount": calculated_taxable,
            "tax_rate": tax_rate,
            "withholding_amount": total_tcs,
            "taxable_doctype": "Sales Invoice",
            "taxable_name": row.invoice_name,
            "taxable_date": row.posting_date,
            "withholding_doctype": "Sales Invoice",
            "withholding_name": row.invoice_name,
            "withholding_date": row.posting_date,
            "currency": row.currency,
            "conversion_rate": row.conversion_rate or 1,
        }
        entries.append(entry)
        all_entries[("Sales Invoice", row.invoice_name)] = entries
        category_wise_invoices[category].append(row.invoice_name)
    bulk_insert_entries(all_entries)
    # Update Sales Invoice and Sales Invoice Item
    for category, invoices in category_wise_invoices.items():
        frappe.db.set_value("Sales Invoice", {"name": ("in", invoices)}, {"apply_tds": 1})
        frappe.db.set_value(
            "Sales Invoice Item",
            {"parent": ("in", invoices), "parenttype": "Sales Invoice"},
            {"tax_withholding_category": category, "apply_tds": 1},
        )
# =============================================================================
# JOURNAL ENTRY MIGRATION
# =============================================================================
def migrate_journal_entries(tds_accounts, tax_rate_map, column_cache, party_tax_id_cache):
    """
    Migrate Journal Entry TDS data.

    For Journal Entry, we rely on GL Entry to identify TDS transactions.
    Party info is obtained from JE Account rows that are NOT TDS accounts.

    Args:
        tds_accounts: dict with "accounts_by_company" and "account_map".
        tax_rate_map: rate lookup consumed by get_tax_rate_for_date.
        column_cache: doctype -> column presence cache.
        party_tax_id_cache: memoization dict for party tax IDs.
    """
    gle = frappe.qb.DocType("GL Entry")
    je = frappe.qb.DocType("Journal Entry")
    jea = frappe.qb.DocType("Journal Entry Account")
    # Build conditions for TDS accounts
    all_tds_accounts = set()
    for _company, accounts in tds_accounts["accounts_by_company"].items():
        all_tds_accounts.update(accounts)
    if not all_tds_accounts:
        # No TDS accounts configured anywhere — nothing to migrate.
        return
    # Get Journal Entries with TDS entries in GL
    tds_gl_entries = (
        frappe.qb.from_(gle)
        .join(je)
        .on(gle.voucher_no == je.name)
        .select(
            gle.voucher_no.as_("journal_entry"),
            gle.account,
            gle.credit,
            gle.debit,
            gle.posting_date,
            je.company,
            je.tax_withholding_category,
            je.voucher_type,
            je.total_debit,
        )
        .where(gle.voucher_type == "Journal Entry")
        .where(gle.is_cancelled == 0)
        .where(je.docstatus == 1)
        .where(gle.account.isin(list(all_tds_accounts)))
        .run(as_dict=True)
    )
    # Get all JE parties in bulk - only from non-TDS account rows
    je_names = list({row.journal_entry for row in tds_gl_entries})
    je_parties = {}
    if je_names:
        # Get party from JE Account rows that are NOT TDS accounts
        jea_party_rows = (
            frappe.qb.from_(jea)
            .select(
                jea.parent,
                jea.party_type,
                jea.party,
                jea.account,
                jea.account_currency,
                jea.exchange_rate,
            )
            .where(jea.parent.isin(je_names))
            .where(jea.party_type.isnotnull())
            .where(jea.party_type != "")
            .where(jea.party.isnotnull())
            .where(jea.party != "")
            .where(jea.account.notin(list(all_tds_accounts)))  # Exclude TDS account rows
            .run(as_dict=True)
        )
        # First matching party row wins for each JE.
        for row in jea_party_rows:
            if row.parent not in je_parties:
                je_parties[row.parent] = (row.party_type, row.party, row.account_currency, row.exchange_rate)
    # Group by journal entry
    je_taxes = {}
    for row in tds_gl_entries:
        if row.journal_entry not in je_taxes:
            je_taxes[row.journal_entry] = {"info": row, "gl_rows": []}
        je_taxes[row.journal_entry]["gl_rows"].append(row)
    all_entries = {}
    category_wise_jes = defaultdict(set)
    for je_name, data in je_taxes.items():
        info = data["info"]
        # Assume TCS not allowed in Journal Entry
        # Calculate total TDS (credit - debit)
        total_tds = sum(flt(row.credit) - flt(row.debit) for row in data["gl_rows"])
        if total_tds <= 0:
            # Ignore TDS payment entries
            continue
        # Get category: prefer the JE's own, fall back to the account map.
        category = info.tax_withholding_category
        if not category:
            for gl_row in data["gl_rows"]:
                account_key = (info.company, gl_row.account)
                category = tds_accounts["account_map"].get(account_key)
                if category:
                    break
        # Get party from JE accounts (non-TDS rows)
        party_type = None
        party = None
        account_currency = ""
        exchange_rate = 1
        if je_name in je_parties:
            party_type, party, account_currency, exchange_rate = je_parties[je_name]
        tax_id = get_party_tax_id(party_type, party, column_cache, party_tax_id_cache) if party else None
        tax_rate, tax_on_excess = get_tax_rate_for_date(tax_rate_map, category, info.posting_date)
        # Back-calculate taxable amount (capped at the JE's total debit)
        calculated_taxable = 0
        if tax_rate and total_tds:
            calculated_taxable = min(flt(total_tds * 100 / tax_rate, 2), info.total_debit)
        entries = []
        # Handle threshold exemption for tax_on_excess categories
        if tax_on_excess and calculated_taxable < info.total_debit:
            taxable_exemption_amount = flt(info.total_debit - abs(calculated_taxable), 2)
            threshold_entry = {
                "company": info.company,
                "party_type": party_type,
                "party": party,
                "tax_id": tax_id,
                "tax_withholding_category": category,
                "taxable_amount": taxable_exemption_amount,
                "tax_rate": tax_rate,
                "withholding_amount": 0,
                "taxable_doctype": "Journal Entry",
                "taxable_name": je_name,
                "taxable_date": info.posting_date,
                "withholding_doctype": "Journal Entry",
                "withholding_name": je_name,
                "withholding_date": info.posting_date,
                "under_withheld_reason": "Threshold Exemption",
                "currency": account_currency or "",
                "conversion_rate": flt(exchange_rate, 9) or 1,
            }
            entries.append(threshold_entry)
        entry = {
            "company": info.company,
            "party_type": party_type,
            "party": party,
            "tax_id": tax_id,
            "tax_withholding_category": category,
            "taxable_amount": calculated_taxable,
            "tax_rate": tax_rate,
            "withholding_amount": total_tds,
            "taxable_doctype": "Journal Entry",
            "taxable_name": je_name,
            "taxable_date": info.posting_date,
            "withholding_doctype": "Journal Entry",
            "withholding_name": je_name,
            "withholding_date": info.posting_date,
            "currency": account_currency or "",
            "conversion_rate": flt(exchange_rate, 9) or 1,
        }
        entries.append(entry)
        all_entries[("Journal Entry", je_name)] = entries
        category_wise_jes[category].add(je_name)
    bulk_insert_entries(all_entries)
    for category, je_names in category_wise_jes.items():
        frappe.db.set_value(
            "Journal Entry",
            {"name": ("in", list(je_names))},
            {"apply_tds": 1, "tax_withholding_category": category},
        )
# =============================================================================
# ITEM LEVEL CATEGORY COPY
# =============================================================================
def copy_category_to_items_for_purchase(column_cache):
    """Copy the Purchase Invoice's withholding category onto its TDS-applied
    items wherever the item-level category is still empty."""
    has_parent_field = column_cache.get("Purchase Invoice", {}).get("tax_withholding_category")
    has_item_field = column_cache.get("Purchase Invoice Item", {}).get("tax_withholding_category")
    if not (has_parent_field and has_item_field):
        # Either custom field is missing — nothing to copy.
        return
    pi = frappe.qb.DocType("Purchase Invoice")
    pi_item = frappe.qb.DocType("Purchase Invoice Item", alias="item")
    (
        frappe.qb.update(pi_item)
        .join(pi)
        .on(pi_item.parent == pi.name)
        .set(pi_item.tax_withholding_category, pi.tax_withholding_category)
        .where(pi.tax_withholding_category.isnotnull())
        .where(pi.tax_withholding_category != "")
        .where(pi_item.apply_tds == 1)
        .where(IfNull(pi_item.tax_withholding_category, "") == "")
        .run()
    )
def copy_category_to_items_for_sales(column_cache):
    """Copy each Customer's withholding category onto Sales Invoice Items
    wherever the item-level category is still empty."""
    if not column_cache.get("Sales Invoice Item", {}).get("tax_withholding_category"):
        # Item-level custom field absent — nothing to copy.
        return
    si = frappe.qb.DocType("Sales Invoice")
    si_item = frappe.qb.DocType("Sales Invoice Item", alias="item")
    cust = frappe.qb.DocType("Customer", alias="customer")
    (
        frappe.qb.update(si_item)
        .join(si)
        .on(si_item.parent == si.name)
        .join(cust)
        .on(si.customer == cust.name)
        .set(si_item.tax_withholding_category, cust.tax_withholding_category)
        .where(cust.tax_withholding_category.isnotnull())
        .where(cust.tax_withholding_category != "")
        .where(IfNull(si_item.tax_withholding_category, "") == "")
        .run()
    )
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/migrate_tax_withholding_data.py",
"license": "GNU General Public License v3.0",
"lines": 1075,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
frappe/erpnext:erpnext/patches/v16_0/populate_budget_distribution_total.py | import frappe
from frappe.utils import flt
def execute():
    """Backfill budget_distribution_total on draft and submitted Budgets."""
    for budget in frappe.get_all("Budget", filters={"docstatus": ["in", [0, 1]]}, fields=["name"]):
        doc = frappe.get_doc("Budget", budget.name)
        distribution_total = sum(flt(row.amount) for row in doc.budget_distribution)
        # db_set avoids bumping the modified timestamp during migration.
        doc.db_set("budget_distribution_total", distribution_total, update_modified=False)
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/populate_budget_distribution_total.py",
"license": "GNU General Public License v3.0",
"lines": 8,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/patches/v16_0/set_company_wise_warehouses.py | import frappe
def execute():
    """Copy the global default manufacturing warehouses onto their owning Companies."""
    defaults = frappe.get_single_value(
        "Manufacturing Settings",
        ["default_wip_warehouse", "default_fg_warehouse", "default_scrap_warehouse"],
        as_dict=True,
    )
    for fieldname, warehouse in defaults.items():
        if not warehouse:
            continue
        # The warehouse's own company decides which Company record to update.
        owner_company = frappe.get_value("Warehouse", warehouse, "company")
        frappe.db.set_value("Company", owner_company, fieldname, warehouse)
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/set_company_wise_warehouses.py",
"license": "GNU General Public License v3.0",
"lines": 11,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/patches/v16_0/set_mr_picked_qty.py | import frappe
def execute():
    """Aggregate submitted Pick List quantities back onto Material Request Items."""
    rows = frappe.get_all(
        "Pick List Item",
        filters={"material_request_item": ["is", "set"], "docstatus": 1},
        fields=["material_request_item", {"SUM": "picked_qty", "as": "picked_qty"}],
        group_by="material_request_item",
    )
    if not rows:
        return
    updates = {row.material_request_item: {"picked_qty": row.picked_qty} for row in rows}
    frappe.db.bulk_update("Material Request Item", updates)
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/set_mr_picked_qty.py",
"license": "GNU General Public License v3.0",
"lines": 10,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/patches/v16_0/set_post_change_gl_entries_on_pos_settings.py | import frappe
def execute():
    """Carry the Accounts Settings post_change_gl_entries flag over to POS Settings."""
    singles = frappe.qb.DocType("Singles")
    rows = (
        frappe.qb.from_(singles)
        .select("value")
        .where((singles.doctype == "Accounts Settings") & (singles.field == "post_change_gl_entries"))
    ).run(as_dict=1)
    if not rows:
        return
    # Missing stored value defaults to enabled (1).
    flag = int(rows[0].get("value", 1))
    frappe.db.set_single_value("POS Settings", "post_change_gl_entries", flag)
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/set_post_change_gl_entries_on_pos_settings.py",
"license": "GNU General Public License v3.0",
"lines": 12,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/patches/v16_0/set_valuation_method_on_companies.py | import frappe
def execute():
    """Propagate the global Stock Settings valuation method to every Company."""
    method = frappe.get_single_value("Stock Settings", "valuation_method")
    for name in frappe.get_all("Company", pluck="name"):
        frappe.db.set_value("Company", name, "valuation_method", method)
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/set_valuation_method_on_companies.py",
"license": "GNU General Public License v3.0",
"lines": 5,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/patches/v16_0/update_account_categories_for_existing_accounts.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
from erpnext.accounts.doctype.account.chart_of_accounts.chart_of_accounts import get_chart_metadata_fields
from erpnext.accounts.doctype.account.chart_of_accounts.verified import standard_chart_of_accounts
from erpnext.accounts.doctype.financial_report_template.financial_report_template import (
sync_financial_report_templates,
)
def execute():
    """Sync financial report templates, then backfill account categories on
    existing accounts from the standard chart of accounts mapping."""
    sync_financial_report_templates()
    update_account_categories()
def update_account_categories():
    """Map standard-chart categories onto uncategorised accounts of every company."""
    mapping = get_standard_account_category_mapping()
    updates = {}
    for company in frappe.get_all("Company", pluck="name"):
        map_account_categories_for_company(company, mapping, updates)
    if updates:
        frappe.db.bulk_update("Account", updates)
def get_standard_account_category_mapping():
    """Return {account_name: account_category} extracted from the standard
    chart of accounts.

    Recursively walks the nested chart dict, skipping chart metadata keys.

    Returns:
        dict: account name -> account category string.
    """
    account_mapping = {}
    # Hoisted out of the traversal: the metadata field names are invariant,
    # but the original re-fetched them for every key at every nesting level.
    metadata_fields = get_chart_metadata_fields()

    def _extract_account_mapping(chart_data):
        for account_name, account_details in chart_data.items():
            if account_name in metadata_fields:
                continue
            # Non-dict values are leaf attributes, not accounts.
            if isinstance(account_details, dict):
                if account_details.get("account_category"):
                    account_mapping[account_name] = account_details["account_category"]
                _extract_account_mapping(account_details)

    _extract_account_mapping(standard_chart_of_accounts.get())
    return account_mapping
def map_account_categories_for_company(company, account_mapping, mapped_account_categories):
    """Collect category updates (into mapped_account_categories) for the
    company's accounts that have no account_category set."""
    uncategorised = frappe.get_all(
        "Account",
        filters={"company": company, "account_category": ["is", "not set"]},
        fields=["name", "account_name"],
    )
    for account in uncategorised:
        category = account_mapping.get(account.account_name)
        if not category:
            # No standard-chart match for this account name.
            continue
        mapped_account_categories[account.name] = {"account_category": category}
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/update_account_categories_for_existing_accounts.py",
"license": "GNU General Public License v3.0",
"lines": 47,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
frappe/erpnext:erpnext/patches/v16_0/update_corrected_cancelled_status.py | import frappe
def execute():
    """Normalise the US spelling 'Canceled' to 'Cancelled' on affected doctypes."""
    # Same fix applied to both doctypes, Stock Closing Entry first.
    for doctype in ("Stock Closing Entry", "Call Log"):
        table = frappe.qb.DocType(doctype)
        (
            frappe.qb.update(table)
            .set(table.status, "Cancelled")
            .where(table.status == "Canceled")
        ).run()
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/update_corrected_cancelled_status.py",
"license": "GNU General Public License v3.0",
"lines": 12,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/patches/v16_0/update_currency_exchange_settings_for_frankfurter.py | import frappe
def execute():
    """Move Currency Exchange Settings from frankfurter.app to frankfurter.dev."""
    meta = frappe.get_meta("Currency Exchange Settings")
    settings = frappe.get_doc("Currency Exchange Settings")
    dev_option_available = "frankfurter.dev" in meta.get_options("service_provider").split("\n")
    # Only migrate sites currently on frankfurter.app, and only when the
    # new option exists in the select field.
    if not dev_option_available or settings.service_provider != "frankfurter.app":
        return
    settings.service_provider = "frankfurter.dev"
    settings.set_parameters_and_result()
    settings.flags.ignore_validate = True
    settings.save()
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/update_currency_exchange_settings_for_frankfurter.py",
"license": "GNU General Public License v3.0",
"lines": 13,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/patches/v16_0/update_serial_batch_entries.py | import frappe
def execute():
    """Denormalise Serial and Batch Bundle fields onto their child entries.

    Copies voucher and transaction metadata from each bundle to all of its
    Serial and Batch Entry rows in a single SQL UPDATE join.
    """
    # Table may not exist on sites created before the feature was introduced.
    if frappe.db.has_table("Serial and Batch Entry"):
        frappe.db.sql(
            """
            UPDATE `tabSerial and Batch Entry` SABE, `tabSerial and Batch Bundle` SABB
            SET
                SABE.posting_datetime = SABB.posting_datetime,
                SABE.voucher_type = SABB.voucher_type,
                SABE.voucher_no = SABB.voucher_no,
                SABE.voucher_detail_no = SABB.voucher_detail_no,
                SABE.type_of_transaction = SABB.type_of_transaction,
                SABE.is_cancelled = SABB.is_cancelled,
                SABE.item_code = SABB.item_code
            WHERE SABE.parent = SABB.name
        """
        )
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/update_serial_batch_entries.py",
"license": "GNU General Public License v3.0",
"lines": 17,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
frappe/erpnext:erpnext/patches/v16_0/update_tax_withholding_field_in_payment_entry.py | import frappe
from frappe.query_builder import DocType
def execute():
    """Copy the legacy apply_tax_withholding_amount flag into apply_tds."""
    if not frappe.db.has_column("Payment Entry", "apply_tax_withholding_amount"):
        # The legacy column never existed on this site — nothing to copy.
        return
    payment_entry = DocType("Payment Entry")
    frappe.qb.update(payment_entry).set(
        payment_entry.apply_tds, payment_entry.apply_tax_withholding_amount
    ).run()
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v16_0/update_tax_withholding_field_in_payment_entry.py",
"license": "GNU General Public License v3.0",
"lines": 7,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/stock/dashboard_chart_source/stock_value_by_item_group/stock_value_by_item_group.py | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from typing import Any
import frappe
from frappe import _
from frappe.query_builder.functions import Sum
from frappe.utils.dashboard import cache_source
@frappe.whitelist()
@cache_source
def get(
    chart_name: str | None = None,
    chart: Any = None,
    no_cache: Any = None,
    filters: dict | str | None = None,
    from_date: Any = None,
    to_date: Any = None,
    timespan: Any = None,
    time_interval: Any = None,
    heatmap_year: Any = None,
):
    """Dashboard chart source: top item groups by total stock value."""
    if isinstance(filters, str) and filters:
        filters = frappe.parse_json(filters)
    # Fall back to the site's default company when none was supplied.
    company = (filters or {}).get("company") or frappe.defaults.get_defaults().company
    labels, values = get_stock_value_by_item_group(company)
    return {
        "labels": labels,
        "datasets": [{"name": _("Stock Value"), "values": values}],
    }
def get_stock_value_by_item_group(company):
    """Return (labels, values) for the ten item groups with highest stock value."""
    bin_table = frappe.qb.DocType("Bin")
    item_table = frappe.qb.DocType("Item")
    warehouse_filters = [["is_group", "=", 0]]
    if company:
        warehouse_filters.append(["company", "=", company])
    warehouses = frappe.get_list("Warehouse", pluck="name", filters=warehouse_filters)
    total_value = Sum(bin_table.stock_value)
    query = (
        frappe.qb.from_(bin_table)
        .inner_join(item_table)
        .on(bin_table.item_code == item_table.name)
        .select(item_table.item_group, total_value.as_("stock_value"))
        .groupby(item_table.item_group)
        .orderby(total_value, order=frappe.qb.desc)
        .limit(10)
    )
    if warehouses:
        query = query.where(bin_table.warehouse.isin(warehouses))
    labels, values = [], []
    for row in query.run(as_dict=True):
        # Skip groups whose total is zero/None.
        if row.stock_value:
            labels.append(_(row.item_group))
            values.append(row.stock_value)
    return labels, values
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/stock/dashboard_chart_source/stock_value_by_item_group/stock_value_by_item_group.py",
"license": "GNU General Public License v3.0",
"lines": 58,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
frappe/erpnext:erpnext/stock/report/stock_qty_vs_batch_qty/stock_qty_vs_batch_qty.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import json
import frappe
from frappe import _
from erpnext.stock.doctype.batch.batch import get_batch_qty
def execute(filters=None):
    """Report entry point: return (columns, rows) for the given filters."""
    filters = filters or {}
    return get_columns(), get_data(filters)
def get_columns() -> list[dict]:
    """Return the report's column definitions."""

    def link(label, fieldname, options):
        return {
            "label": _(label),
            "fieldname": fieldname,
            "fieldtype": "Link",
            "options": options,
            "width": 200,
        }

    def qty(label, fieldname):
        return {"label": _(label), "fieldname": fieldname, "fieldtype": "Float", "width": 150}

    return [
        link("Item Code", "item_code", "Item"),
        {"label": _("Item Name"), "fieldname": "item_name", "fieldtype": "Data", "width": 200},
        link("Batch", "batch", "Batch"),
        qty("Batch Qty", "batch_qty"),
        qty("Stock Qty", "stock_qty"),
        qty("Difference", "difference"),
    ]
def get_data(filters=None):
    """Return batches whose stored batch_qty differs from the SLE-derived qty."""
    filters = filters or {}
    item_code = filters.get("item")
    batch_name = filters.get("batch")
    sle_rows = (
        get_batch_qty(
            item_code=item_code,
            batch_no=batch_name,
            for_stock_levels=True,
            consider_negative_batches=True,
            ignore_reserved_stock=True,
        )
        or []
    )
    # Sum the ledger-derived quantity per batch.
    stock_qty_map = {}
    for sle in sle_rows:
        key = sle.get("batch_no")
        if not key:
            continue
        stock_qty_map[key] = stock_qty_map.get(key, 0) + (sle.get("qty") or 0)
    batch = frappe.qb.DocType("Batch")
    query = (
        frappe.qb.from_(batch)
        .select(batch.name, batch.item, batch.item_name, batch.batch_qty)
        .where(batch.disabled == 0)
    )
    if item_code:
        query = query.where(batch.item == item_code)
    if batch_name:
        query = query.where(batch.name == batch_name)
    rows = []
    for record in query.run(as_dict=True) or []:
        stored_qty = record.get("batch_qty") or 0
        actual_qty = stock_qty_map.get(record.get("name"), 0)
        delta = actual_qty - stored_qty
        # Only report mismatches.
        if delta == 0:
            continue
        rows.append(
            {
                "item_code": record.get("item"),
                "item_name": record.get("item_name"),
                "batch": record.get("name"),
                "batch_qty": stored_qty,
                "stock_qty": actual_qty,
                "difference": delta,
            }
        )
    return rows
@frappe.whitelist()
def update_batch_qty(selected_batches: str | None = None):
    """Recompute and persist batch_qty for the selected report rows.

    Args:
        selected_batches: JSON-encoded list of report rows, each carrying
            "batch" and "item_code". No-op when empty/None.
    """
    if not selected_batches:
        return
    for row in json.loads(selected_batches):
        batch_name = row.get("batch")
        batches = get_batch_qty(
            batch_no=batch_name,
            item_code=row.get("item_code"),
            for_stock_levels=True,
            consider_negative_batches=True,
            ignore_reserved_stock=True,
        )
        # Treat a missing/None qty as 0 (consistent with get_data's
        # `or 0` guard); the original `+= batch.get("qty")` raised
        # TypeError when a row carried qty=None.
        batch_qty = sum((b.get("qty") or 0) for b in (batches or []))
        frappe.db.set_value("Batch", batch_name, "batch_qty", batch_qty)
    frappe.msgprint(_("Batch Qty updated successfully"), alert=True)
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/stock/report/stock_qty_vs_batch_qty/stock_qty_vs_batch_qty.py",
"license": "GNU General Public License v3.0",
"lines": 97,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
frappe/erpnext:erpnext/accounts/doctype/item_wise_tax_detail/item_wise_tax_detail.py | # Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class ItemWiseTaxDetail(Document):
    """Child-table row linking an item row to a tax row with its per-item
    taxable amount, rate, and tax amount. No custom behaviour."""

    # begin: auto-generated types
    # This code is auto-generated. Do not modify anything in this block.

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from frappe.types import DF

        amount: DF.Currency
        item_row: DF.Data
        parent: DF.Data
        parentfield: DF.Data
        parenttype: DF.Data
        rate: DF.Float
        tax_row: DF.Data
        taxable_amount: DF.Currency
    # end: auto-generated types

    pass
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/accounts/doctype/item_wise_tax_detail/item_wise_tax_detail.py",
"license": "GNU General Public License v3.0",
"lines": 20,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | license |
frappe/erpnext:erpnext/controllers/tests/test_item_wise_inventory_account.py | # Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
from frappe.tests import IntegrationTestCase
from frappe.utils import add_days, today
from erpnext.manufacturing.doctype.production_plan.test_production_plan import make_bom
from erpnext.manufacturing.doctype.work_order.test_work_order import make_wo_order_test_record
from erpnext.manufacturing.doctype.work_order.work_order import make_stock_entry
from erpnext.stock.doctype.delivery_note.test_delivery_note import create_delivery_note
from erpnext.stock.doctype.item.test_item import make_item
from erpnext.stock.doctype.purchase_receipt.test_purchase_receipt import make_purchase_receipt
class TestItemWiseInventoryAccount(IntegrationTestCase):
	"""Integration tests for item-wise (and item-group-wise) inventory accounting.

	Each test posts stock transactions in a dedicated company (created by
	``make_company`` with ``enable_item_wise_inventory_account`` on) and then
	asserts that, per item or item group, the GL Entry amount on the item's
	inventory account equals the Stock Ledger Entry ``stock_value_difference``.
	"""

	def setUp(self):
		# Shared fixtures: the dedicated test company, its abbreviation
		# (used to build account names), and its non-group "Stores" warehouse.
		self.company = make_company()
		self.company_abbr = frappe.db.get_value("Company", self.company, "abbr")
		self.default_warehouse = frappe.db.get_value(
			"Warehouse",
			{"company": self.company, "is_group": 0, "warehouse_name": ("like", "%Stores%")},
		)

	def test_item_account_for_purchase_receipt_entry(self):
		"""Purchase Receipt: GL debit per item account must match the SLE value."""
		items = {
			"Stock Item A": {"is_stock_item": 1},
			"Stock Item B": {"is_stock_item": 1, "has_serial_no": 1, "serial_no_series": "SER-TT-.####"},
		}

		# Create each item with its own inventory account.
		for item_name, item_data in items.items():
			item = make_item(
				item_name,
				properties=item_data,
			)
			account = self.add_inventory_account(item)
			items[item_name]["account"] = account

		pr = make_purchase_receipt(
			item_code="Stock Item A",
			qty=5,
			rate=100,
			warehouse=self.default_warehouse,
			company=self.company,
			do_not_submit=True,
		)
		pr.append(
			"items",
			{
				"item_code": "Stock Item B",
				"qty": 2,
				"rate": 200,
				"warehouse": self.default_warehouse,
			},
		)
		pr.submit()

		# For each item, the debit on its dedicated account must equal the
		# stock value difference recorded in the ledger.
		for row in items:
			item_code = row
			account = items[item_code]["account"]
			sle_value = frappe.db.get_value(
				"Stock Ledger Entry",
				{"voucher_type": "Purchase Receipt", "voucher_no": pr.name, "item_code": item_code},
				"stock_value_difference",
			)
			gl_value = frappe.db.get_value(
				"GL Entry",
				{
					"voucher_type": "Purchase Receipt",
					"voucher_no": pr.name,
					"account": account,
				},
				"debit",
			)
			self.assertEqual(sle_value, gl_value, f"GL Entry not created for {item_code} correctly")

	def test_item_account_for_delivery_note_entry(self):
		"""Delivery Note: GL credit per item account must match the (negative) SLE value."""
		items = {
			"Stock Item A": {"is_stock_item": 1},
			"Stock Item B": {"is_stock_item": 1, "has_serial_no": 1, "serial_no_series": "SER-TT-.####"},
		}

		for item_name, item_data in items.items():
			item = make_item(
				item_name,
				properties=item_data,
			)
			account = self.add_inventory_account(item)
			items[item_name]["account"] = account

		# Bring stock in first so the delivery has something to consume.
		pr = make_purchase_receipt(
			item_code="Stock Item A",
			qty=5,
			rate=100,
			warehouse=self.default_warehouse,
			company=self.company,
			do_not_submit=True,
		)
		pr.append(
			"items",
			{
				"item_code": "Stock Item B",
				"qty": 2,
				"rate": 200,
				"warehouse": self.default_warehouse,
			},
		)
		pr.submit()

		dn = create_delivery_note(
			item_code="Stock Item A",
			qty=5,
			rate=200,
			warehouse=self.default_warehouse,
			company=self.company,
			cost_center=frappe.db.get_value("Company", self.company, "cost_center"),
			expense_account=frappe.db.get_value("Company", self.company, "default_expense_account"),
			do_not_submit=True,
		)
		dn.append(
			"items",
			{
				"item_code": "Stock Item B",
				"qty": 2,
				"rate": 300,
				"warehouse": self.default_warehouse,
			},
		)
		dn.submit()

		for row in items:
			item_code = row
			account = items[item_code]["account"]
			sle_value = frappe.db.get_value(
				"Stock Ledger Entry",
				{"voucher_type": "Delivery Note", "voucher_no": dn.name, "item_code": item_code},
				"stock_value_difference",
			)
			# An outgoing entry credits the inventory account; negate to
			# compare with the (negative) stock value difference.
			gl_value = (
				frappe.db.get_value(
					"GL Entry",
					{
						"voucher_type": "Delivery Note",
						"voucher_no": dn.name,
						"account": account,
					},
					"credit",
				)
				* -1
			)
			self.assertEqual(sle_value, gl_value, f"GL Entry not created for {item_code} correctly")

	def test_item_account_for_backdated_purchase_receipt(self):
		"""A backdated Purchase Receipt must reprocess the later Delivery Note's
		GL entries so they still match the ledger values."""
		items = {
			"Bottle Item A": {"is_stock_item": 1},
		}

		for item_name, item_data in items.items():
			item = make_item(
				item_name,
				properties=item_data,
			)
			account = self.add_inventory_account(item)
			items[item_name]["account"] = account

		make_purchase_receipt(
			item_code="Bottle Item A",
			qty=5,
			rate=100,
			warehouse=self.default_warehouse,
			company=self.company,
		)

		dn = create_delivery_note(
			item_code="Bottle Item A",
			qty=5,
			rate=200,
			warehouse=self.default_warehouse,
			company=self.company,
			cost_center=frappe.db.get_value("Company", self.company, "cost_center"),
			expense_account=frappe.db.get_value("Company", self.company, "default_expense_account"),
		)

		# Sanity check before backdating.
		for row in items:
			item_code = row
			account = items[item_code]["account"]
			sle_value = frappe.db.get_value(
				"Stock Ledger Entry",
				{"voucher_type": "Delivery Note", "voucher_no": dn.name, "item_code": item_code},
				"stock_value_difference",
			)
			gl_value = (
				frappe.db.get_value(
					"GL Entry",
					{
						"voucher_type": "Delivery Note",
						"voucher_no": dn.name,
						"account": account,
					},
					"credit",
				)
				* -1
			)
			self.assertEqual(sle_value, gl_value, f"GL Entry not created for {item_code} correctly")

		# Backdated receipt at a different rate forces revaluation of the
		# already-submitted delivery note.
		make_purchase_receipt(
			item_code="Bottle Item A",
			posting_date=add_days(today(), -1),
			qty=5,
			rate=200,
			warehouse=self.default_warehouse,
			company=self.company,
		)

		for row in items:
			item_code = row
			account = items[item_code]["account"]
			sle_value = frappe.db.get_value(
				"Stock Ledger Entry",
				{"voucher_type": "Delivery Note", "voucher_no": dn.name, "item_code": item_code},
				"stock_value_difference",
			)
			gl_value = (
				frappe.db.get_value(
					"GL Entry",
					{
						"voucher_type": "Delivery Note",
						"voucher_no": dn.name,
						"account": account,
					},
					"credit",
				)
				* -1
			)
			self.assertEqual(sle_value, gl_value, f"GL Entry not created for {item_code} correctly")
			# 5 units are consumed at the revalued rate of 200 => -1000.
			self.assertEqual(sle_value, 1000.0 * -1, f"GL Entry not created for {item_code} correctly")

	def test_item_group_account_for_purchase_receipt_entry(self):
		"""Purchase Receipt with item-group-level accounts: the GL debit per
		group account must equal the summed SLE value of the group's items."""
		items = {
			"Stock Item C": {"is_stock_item": 1, "item_group": "Test Item Group C"},
			"Stock Item C1": {"is_stock_item": 1, "item_group": "Test Item Group C", "qty": 3, "rate": 150},
			"Stock Item D": {
				"is_stock_item": 1,
				"has_serial_no": 1,
				"serial_no_series": "SER-TT-.####",
				"item_group": "Test Item Group D",
				"qty": 2,
				"rate": 250,
			},
			"Stock Item D1": {"is_stock_item": 1, "item_group": "Test Item Group D", "qty": 4, "rate": 300},
		}

		for row in items:
			self.make_item_group(items[row]["item_group"])

		# Inventory accounts are attached to the item groups, not the items.
		inventory_account_dict = frappe._dict()
		for item_name, item_data in items.items():
			item_data = frappe._dict(item_data)
			make_item(
				item_name,
				properties=item_data,
			)
			item_group = frappe.get_doc("Item Group", item_data.item_group)
			account = self.add_inventory_account(item_group, "item_group_defaults")
			inventory_account_dict[item_data.item_group] = account

		pr = make_purchase_receipt(
			item_code="Stock Item C",
			qty=5,
			rate=100,
			warehouse=self.default_warehouse,
			company=self.company,
			do_not_submit=True,
		)
		for item_code, values in items.items():
			if item_code == "Stock Item C":
				continue
			pr.append(
				"items",
				{
					"item_code": item_code,
					"qty": values.get("qty", 1),
					"rate": values.get("rate", 200),
					"warehouse": self.default_warehouse,
				},
			)
		pr.submit()

		for item_group, account in inventory_account_dict.items():
			# NOTE: `items` is rebound here from the fixture dict to the list
			# of item names belonging to the current group.
			items = frappe.get_all(
				"Item",
				filters={"item_group": item_group},
				pluck="name",
			)
			sle_value = frappe.get_all(
				"Stock Ledger Entry",
				filters={
					"voucher_type": "Purchase Receipt",
					"voucher_no": pr.name,
					"item_code": ("in", items),
				},
				fields=[{"SUM": "stock_value_difference", "as": "value"}],
			)
			gl_value = frappe.db.get_value(
				"GL Entry",
				{
					"voucher_type": "Purchase Receipt",
					"voucher_no": pr.name,
					"account": account,
				},
				"debit",
			)
			self.assertEqual(sle_value[0].value, gl_value, f"GL Entry not created for {item_code} correctly")

	def test_item_group_account_for_delivery_note_entry(self):
		"""Delivery Note with item-group-level accounts: the GL credit per
		group account must equal the summed (negative) SLE value of the group."""
		items = {
			"Stock Item E": {"is_stock_item": 1, "item_group": "Test Item Group E"},
			"Stock Item E1": {"is_stock_item": 1, "item_group": "Test Item Group E", "qty": 3, "rate": 150},
			"Stock Item F": {
				"is_stock_item": 1,
				"has_serial_no": 1,
				"serial_no_series": "SER-TT-.####",
				"item_group": "Test Item Group F",
				"qty": 2,
				"rate": 250,
			},
			"Stock Item F1": {"is_stock_item": 1, "item_group": "Test Item Group F", "qty": 4, "rate": 300},
		}

		for row in items:
			self.make_item_group(items[row]["item_group"])

		inventory_account_dict = frappe._dict()
		for item_name, item_data in items.items():
			item_data = frappe._dict(item_data)
			make_item(
				item_name,
				properties=item_data,
			)
			item_group = frappe.get_doc("Item Group", item_data.item_group)
			account = self.add_inventory_account(item_group, "item_group_defaults")
			inventory_account_dict[item_data.item_group] = account

		pr = make_purchase_receipt(
			item_code="Stock Item E",
			qty=5,
			rate=100,
			warehouse=self.default_warehouse,
			company=self.company,
			do_not_submit=True,
		)
		for item_code, values in items.items():
			if item_code == "Stock Item E":
				continue
			pr.append(
				"items",
				{
					"item_code": item_code,
					"qty": values.get("qty", 1),
					"rate": values.get("rate", 200),
					"warehouse": self.default_warehouse,
				},
			)
		pr.submit()

		dn = create_delivery_note(
			item_code="Stock Item E",
			qty=5,
			rate=200,
			warehouse=self.default_warehouse,
			company=self.company,
			cost_center=frappe.db.get_value("Company", self.company, "cost_center"),
			expense_account=frappe.db.get_value("Company", self.company, "default_expense_account"),
			do_not_submit=True,
		)
		for item_code, values in items.items():
			if item_code == "Stock Item E":
				continue
			dn.append(
				"items",
				{
					"item_code": item_code,
					"qty": values.get("qty", 1),
					"rate": values.get("rate", 200),
					"warehouse": self.default_warehouse,
				},
			)
		dn.submit()

		for item_group, account in inventory_account_dict.items():
			# NOTE: `items` is rebound here to the group's item names.
			items = frappe.get_all(
				"Item",
				filters={"item_group": item_group},
				pluck="name",
			)
			sle_value = frappe.get_all(
				"Stock Ledger Entry",
				filters={"voucher_type": "Delivery Note", "voucher_no": dn.name, "item_code": ("in", items)},
				fields=[{"SUM": "stock_value_difference", "as": "value"}],
			)
			gl_value = (
				frappe.db.get_value(
					"GL Entry",
					{
						"voucher_type": "Delivery Note",
						"voucher_no": dn.name,
						"account": account,
					},
					"credit",
				)
				* -1
			)
			self.assertEqual(sle_value[0].value, gl_value, f"GL Entry not created for {item_code} correctly")

	def make_item_group(self, item_name):
		"""Return an Item Group with the given name, creating a leaf group if missing."""
		if not frappe.db.exists("Item Group", item_name):
			item_group = frappe.get_doc(
				{
					"doctype": "Item Group",
					"item_group_name": item_name,
					"is_group": 0,
				}
			)
			item_group.insert()
			return item_group
		return frappe.get_doc("Item Group", item_name)

	def add_inventory_account(self, item, table_name=None):
		"""Create a Stock account named after *item* and attach it as the
		default inventory account in the item's (or item group's) defaults
		child table. Returns the account name."""
		if not table_name:
			table_name = "item_defaults"

		account = item.name + " - " + self.company_abbr
		if not frappe.db.exists("Account", account):
			account_doc = frappe.get_doc(
				{
					"doctype": "Account",
					"account_name": item.name,
					"account_type": "Stock",
					"company": self.company,
					"is_group": 0,
					"parent_account": "Stock Assets - " + self.company_abbr,
				}
			)
			account_doc.insert()

		# Only add a defaults row once per item/company combination.
		if not frappe.db.get_value("Item Default", {"parent": item.name, "company": self.company}):
			item.append(
				table_name,
				{
					"company": self.company,
					"default_inventory_account": account,
					"default_warehouse": self.default_warehouse,
				},
			)
			item.save()

		return account

	def test_item_account_for_manufacture_entry(self):
		"""Manufacture Stock Entry: each item's GL amount (debit for the
		finished good into FG warehouse, credit for consumed raw material)
		must match its SLE value."""
		items = {
			"Stock Item A1": {"is_stock_item": 1},
			"Stock Item B1": {"is_stock_item": 1, "has_serial_no": 1, "serial_no_series": "SER-TT-.####"},
		}

		for item_name, item_data in items.items():
			item = make_item(
				item_name,
				properties=item_data,
			)
			account = self.add_inventory_account(item)
			items[item_name]["account"] = account

		# Stock the raw material, set up a BOM and run a Work Order through
		# transfer + manufacture.
		make_purchase_receipt(
			item_code="Stock Item B1",
			qty=5,
			rate=100,
			warehouse=self.default_warehouse,
			company=self.company,
		)
		bom = make_bom(
			item="Stock Item A1",
			company=self.company,
			source_warehouse=self.default_warehouse,
			raw_materials=["Stock Item B1"],
		)
		wip_warehouse = frappe.db.get_value(
			"Warehouse",
			{"company": self.company, "is_group": 0, "warehouse_name": ("like", "%Work In Progress%")},
		)
		fg_warehouse = frappe.db.get_value(
			"Warehouse",
			{"company": self.company, "is_group": 0, "warehouse_name": ("like", "%Finished Goods%")},
		)
		wo_order = make_wo_order_test_record(
			item="Stock Item A1",
			qty=5,
			company=self.company,
			source_warehouse=self.default_warehouse,
			bom=bom.name,
			wip_warehouse=wip_warehouse,
			fg_warehouse=fg_warehouse,
		)

		stock_entry = frappe.get_doc(make_stock_entry(wo_order.name, "Material Transfer for Manufacture", 5))
		stock_entry.submit()
		stock_entry = frappe.get_doc(make_stock_entry(wo_order.name, "Manufacture", 5))
		stock_entry.submit()

		for row in stock_entry.items:
			item_code = row.item_code
			account = items[item_code]["account"]
			sle_value = frappe.db.get_value(
				"Stock Ledger Entry",
				{"voucher_type": "Stock Entry", "voucher_no": stock_entry.name, "item_code": item_code},
				"stock_value_difference",
			)
			# Rows landing in the FG warehouse are debits; consumed rows are credits.
			field = "debit" if row.t_warehouse == fg_warehouse else "credit"
			gl_value = frappe.db.get_value(
				"GL Entry",
				{
					"voucher_type": "Stock Entry",
					"voucher_no": stock_entry.name,
					"account": account,
				},
				field,
			)
			if row.s_warehouse:
				gl_value = gl_value * -1
			self.assertEqual(sle_value, gl_value, f"GL Entry not created for {item_code} correctly")
def make_company():
	"""Return the name of the shared test company, creating it on first use.

	The company enables both perpetual inventory and item-wise inventory
	accounting, which the tests in this module depend on.
	"""
	company_name = "_Test Company for Item Wise Inventory Account"
	if frappe.db.exists("Company", company_name):
		return company_name

	doc = frappe.get_doc(
		{
			"doctype": "Company",
			"company_name": company_name,
			"abbr": "_TCIWIA",
			"default_currency": "INR",
			"country": "India",
			"enable_perpetual_inventory": 1,
			"enable_item_wise_inventory_account": 1,
		}
	)
	doc.insert()
	return doc.name
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/controllers/tests/test_item_wise_inventory_account.py",
"license": "GNU General Public License v3.0",
"lines": 504,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
frappe/erpnext:erpnext/patches/v15_0/set_asset_status_if_not_already_set.py | import frappe
from frappe.query_builder import DocType
def execute():
	"""Patch: backfill Asset.status for draft Assets where it was never set."""
	asset = DocType("Asset")
	status_missing = asset.status.isnull() | (asset.status == "")
	(
		frappe.qb.update(asset)
		.set(asset.status, "Draft")
		.where((asset.docstatus == 0) & status_missing)
		.run()
	)
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/patches/v15_0/set_asset_status_if_not_already_set.py",
"license": "GNU General Public License v3.0",
"lines": 10,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
frappe/erpnext:erpnext/controllers/subcontracting_inward_controller.py | import frappe
from frappe import _, bold
from frappe.query_builder import Case
from frappe.utils import flt, get_link_to_form
from erpnext.stock.serial_batch_bundle import get_serial_batch_list_from_item
class SubcontractingInwardController:
	def validate_subcontracting_inward(self):
		"""Run every subcontracting-inward validation/derivation in order.

		Intended to be called from the host document's validate hook; the
		individual steps below are defined on this mixin.
		"""
		self.validate_inward_order()
		self.set_allow_zero_valuation_rate()
		self.validate_warehouse_()
		self.validate_serial_batch_for_return_or_delivery()
		self.validate_delivery()
		self.update_customer_provided_item_cost()
	def on_submit_subcontracting_inward(self):
		"""Propagate a submitted entry to the linked Subcontracting Inward Order:
		update order/received/scrap quantities, reserve stock, and refresh status."""
		self.update_inward_order_item()
		self.update_inward_order_received_items()
		self.update_inward_order_scrap_items()
		self.create_stock_reservation_entries_for_inward()
		self.update_inward_order_status()
	def on_cancel_subcontracting_inward(self):
		"""Reverse the effects of a cancelled entry on the linked inward order.

		Cancellation-specific guards (manufacture, delivery, receipt) run
		between the quantity roll-backs; order matters here.
		"""
		self.update_inward_order_item()
		self.validate_manufacture_entry_cancel()
		self.validate_delivery()
		self.validate_receive_from_customer_cancel()
		self.update_inward_order_received_items()
		self.update_inward_order_scrap_items()
		self.remove_reference_for_additional_items()
		self.update_inward_order_status()
def validate_purpose(self):
if self.subcontracting_inward_order and self.purpose not in [
"Receive from Customer",
"Return Raw Material to Customer",
"Manufacture",
"Subcontracting Delivery",
"Subcontracting Return",
"Material Transfer for Manufacture",
]:
self.subcontracting_inward_order = None
def validate_inward_order(self):
if self.subcontracting_inward_order:
match self.purpose:
case "Receive from Customer":
self.validate_material_receipt()
case purpose if purpose in ["Return Raw Material to Customer", "Subcontracting Return"]:
self.validate_returns()
case "Material Transfer for Manufacture":
self.validate_material_transfer()
case "Manufacture":
self.validate_manufacture()
	def validate_material_receipt(self):
		"""Validate a "Receive from Customer" entry.

		Rules enforced per row:
		- the item must be flagged ``is_customer_provided_item``;
		- a row referencing an existing received-item detail may not change
		  its item code;
		- an additional row (no ``scio_detail``) must name the finished-good
		  order row it is received against (``against_fg``), and each
		  (item, finished good) pair may appear only once.
		"""
		rm_item_fg_combo = []  # (item_code, against_fg) pairs already seen
		for item in self.items:
			if not frappe.get_cached_value("Item", item.item_code, "is_customer_provided_item"):
				frappe.throw(
					_("Row #{0}: Item {1} is not a Customer Provided Item.").format(
						item.idx,
						get_link_to_form("Item", item.item_code),
					)
				)
			if (
				item.scio_detail
				and frappe.get_cached_value(
					"Subcontracting Inward Order Received Item", item.scio_detail, "rm_item_code"
				)
				!= item.item_code
			):
				frappe.throw(
					_(
						"Row #{0}: Item {1} mismatch. Changing of item code is not permitted, add another row instead."
					).format(item.idx, get_link_to_form("Item", item.item_code))
				)

			if not item.scio_detail:  # item is additional
				if item.against_fg:
					if (item.item_code, item.against_fg) not in rm_item_fg_combo:
						rm_item_fg_combo.append((item.item_code, item.against_fg))
					else:
						frappe.throw(
							_(
								"Row #{0}: Customer Provided Item {1} against Subcontracting Inward Order Item {2} ({3}) cannot be added multiple times."
							).format(
								item.idx,
								get_link_to_form("Item", item.item_code),
								bold(item.against_fg),
								get_link_to_form(
									"Item",
									frappe.get_cached_value(
										"Subcontracting Inward Order Item", item.against_fg, "item_code"
									),
								),
							)
						)
				else:
					frappe.throw(
						_(
							"Row #{0}: Please select the Finished Good Item against which this Customer Provided Item will be used."
						).format(item.idx)
					)
	def validate_returns(self):
		"""Validate raw-material and finished-good return entries.

		Each row must reference an inward-order detail (``scio_detail``) and
		keep its original item code; returned quantity may not exceed what is
		still available to return (received minus work-order usage for raw
		materials, delivered minus already-returned for finished goods).
		"""
		for item in self.items:
			if not item.scio_detail:
				frappe.throw(
					_("Row #{0}: Item {1} is not a part of Subcontracting Inward Order {2}").format(
						item.idx,
						get_link_to_form("Item", item.item_code),
						get_link_to_form("Subcontracting Inward Order", self.subcontracting_inward_order),
					)
				)
			# NOTE(review): the `or` compares against the FIRST non-empty
			# lookup only — the Received Item's rm_item_code, falling back to
			# the Order Item's item_code when the first lookup returns nothing.
			# Confirm this precedence is intended.
			elif item.item_code != (
				frappe.get_cached_value(
					"Subcontracting Inward Order Received Item", item.scio_detail, "rm_item_code"
				)
				or frappe.get_cached_value("Subcontracting Inward Order Item", item.scio_detail, "item_code")
			):
				frappe.throw(
					_("Row #{0}: Item {1} mismatch. Changing of item code is not permitted.").format(
						item.idx, get_link_to_form("Item", item.item_code)
					)
				)

			if self.purpose == "Return Raw Material to Customer":
				data = frappe.get_value(
					"Subcontracting Inward Order Received Item",
					item.scio_detail,
					["received_qty", "returned_qty", "work_order_qty"],
					as_dict=True,
				)
				if data.returned_qty + item.transfer_qty > data.received_qty - data.work_order_qty:
					frappe.throw(
						_(
							"Row #{0}: Returned quantity cannot be greater than available quantity for Item {1}"
						).format(item.idx, get_link_to_form("Item", item.item_code))
					)
			else:
				data = frappe.get_value(
					"Subcontracting Inward Order Item",
					item.scio_detail,
					["returned_qty", "delivered_qty"],
					as_dict=True,
				)
				if item.transfer_qty > data.delivered_qty - data.returned_qty:
					frappe.throw(
						_(
							"Row #{0}: Returned quantity cannot be greater than available quantity to return for Item {1}"
						).format(item.idx, get_link_to_form("Item", item.item_code))
					)
def validate_material_transfer(self):
customer_warehouse = frappe.get_cached_value(
"Subcontracting Inward Order", self.subcontracting_inward_order, "customer_warehouse"
)
item_codes = []
for item in self.items:
if not frappe.get_cached_value("Item", item.item_code, "is_customer_provided_item"):
continue
elif item.s_warehouse != customer_warehouse:
frappe.throw(
_("Row #{0}: For Customer Provided Item {1}, Source Warehouse must be {2}").format(
item.idx,
get_link_to_form("Item", item.item_code),
get_link_to_form("Warehouse", customer_warehouse),
)
)
elif item.item_code in item_codes:
frappe.throw(
_(
"Row #{0}: Customer Provided Item {1} cannot be added multiple times in the Subcontracting Inward process."
).format(
item.idx,
get_link_to_form("Item", item.item_code),
)
)
else:
work_order_items = frappe.get_all(
"Work Order Item",
{"parent": self.work_order, "docstatus": 1, "is_customer_provided_item": 1},
["item_code", "transferred_qty", "required_qty", "stock_reserved_qty"],
)
wo_item_dict = frappe._dict(
{
wo_item.item_code: frappe._dict(
{
"transferred_qty": wo_item.transferred_qty,
"required_qty": wo_item.required_qty,
"stock_reserved_qty": wo_item.stock_reserved_qty,
}
)
for wo_item in work_order_items
}
)
if wo_item := wo_item_dict.get(item.item_code):
if wo_item.transferred_qty + item.transfer_qty > max(
wo_item.required_qty, wo_item.stock_reserved_qty
):
frappe.throw(
_(
"Row #{0}: Overconsumption of Customer Provided Item {1} against Work Order {2} is not allowed in the Subcontracting Inward process."
).format(
item.idx,
get_link_to_form("Item", item.item_code),
get_link_to_form("Work Order", self.work_order),
)
)
else:
item_codes.append(item.item_code)
else:
frappe.throw(
_("Row #{0}: Customer Provided Item {1} is not a part of Work Order {2}").format(
item.idx,
get_link_to_form("Item", item.item_code),
get_link_to_form("Work Order", self.work_order),
)
)
def validate_manufacture(self):
if next(item for item in self.items if item.is_finished_item).t_warehouse != (
fg_warehouse := frappe.get_cached_value("Work Order", self.work_order, "fg_warehouse")
):
frappe.throw(
_(
"Target Warehouse for Finished Good must be same as Finished Good Warehouse {1} in Work Order {2} linked to the Subcontracting Inward Order."
).format(
get_link_to_form("Warehouse", fg_warehouse),
get_link_to_form("Work Order", self.work_order),
)
)
items = [
item
for item in self.get("items")
if not item.is_finished_item
and not item.is_scrap_item
and frappe.get_cached_value("Item", item.item_code, "is_customer_provided_item")
]
customer_warehouse = frappe.get_cached_value(
"Subcontracting Inward Order", self.subcontracting_inward_order, "customer_warehouse"
)
if frappe.get_cached_value("Work Order", self.work_order, "skip_transfer"):
table = frappe.qb.DocType("Subcontracting Inward Order Received Item")
query = (
frappe.qb.from_(table)
.select(
table.rm_item_code,
(table.received_qty - table.returned_qty).as_("total_qty"),
table.consumed_qty,
table.name,
)
.where(
(table.docstatus == 1)
& (table.parent == self.subcontracting_inward_order)
& (
table.reference_name
== frappe.get_cached_value(
"Work Order", self.work_order, "subcontracting_inward_order_item"
)
)
& (table.rm_item_code.isin([item.item_code for item in items]))
)
)
rm_item_dict = frappe._dict(
{
d.rm_item_code: frappe._dict(
{"name": d.name, "total_qty": d.total_qty, "qty": d.consumed_qty}
)
for d in query.run(as_dict=True)
}
)
item_codes = []
for item in items:
if rm := rm_item_dict.get(item.item_code):
if rm.qty + item.transfer_qty > rm.total_qty:
frappe.throw(
_(
"Row #{0}: Customer Provided Item {1} exceeds quantity available through Subcontracting Inward Order"
).format(item.idx, get_link_to_form("Item", item.item_code), item.transfer_qty)
)
elif item.s_warehouse != customer_warehouse:
frappe.throw(
_(
"Row #{0}: For Customer Provided Item {1}, Source Warehouse must be {2}"
).format(
item.idx,
get_link_to_form("Item", item.item_code),
get_link_to_form("Warehouse", customer_warehouse),
)
)
elif item.item_code in item_codes:
frappe.throw(
_(
"Row #{0}: Customer Provided Item {1} cannot be added multiple times in the Subcontracting Inward process."
).format(
item.idx,
get_link_to_form("Item", item.item_code),
)
)
else:
item_codes.append(item.item_code)
else:
frappe.throw(
_(
"Row #{0}: Customer Provided Item {1} is not a part of Subcontracting Inward Order {2}"
).format(
item.idx,
get_link_to_form("Item", item.item_code),
get_link_to_form("Subcontracting Inward Order", self.subcontracting_inward_order),
)
)
else:
work_order_items = frappe.get_all(
"Work Order Item",
{"parent": self.work_order, "docstatus": 1, "is_customer_provided_item": 1},
["item_code", "transferred_qty", "consumed_qty"],
)
wo_item_dict = frappe._dict(
{
wo_item.item_code: frappe._dict(
{"transferred_qty": wo_item.transferred_qty, "consumed_qty": wo_item.consumed_qty}
)
for wo_item in work_order_items
}
)
item_codes = []
for item in items:
if wo_item := wo_item_dict.get(item.item_code):
if wo_item.consumed_qty + item.transfer_qty > wo_item.transferred_qty:
frappe.throw(
_(
"Row #{0}: Overconsumption of Customer Provided Item {1} against Work Order {2} is not allowed in the Subcontracting Inward process."
).format(
item.idx,
get_link_to_form("Item", item.item_code),
get_link_to_form("Work Order", self.work_order),
)
)
elif item.item_code in item_codes:
frappe.throw(
_(
"Row #{0}: Customer Provided Item {1} cannot be added multiple times in the Subcontracting Inward process."
).format(
item.idx,
get_link_to_form("Item", item.item_code),
)
)
else:
item_codes.append(item.item_code)
else:
frappe.throw(
_("Row #{0}: Customer Provided Item {1} is not a part of Work Order {2}").format(
item.idx,
get_link_to_form("Item", item.item_code),
get_link_to_form("Work Order", self.work_order),
)
)
def set_allow_zero_valuation_rate(self):
if self.subcontracting_inward_order:
if self.purpose in ["Subcontracting Delivery", "Subcontracting Return", "Manufacture"]:
for item in self.items:
if (item.is_finished_item or item.is_scrap_item) and item.valuation_rate == 0:
item.allow_zero_valuation_rate = 1
	def validate_warehouse_(self):
		"""For receipt/return/transfer entries linked to an inward order, the
		customer-side warehouse of each row must be the order's customer
		warehouse (target for receipts, source for returns/transfers)."""
		if self.subcontracting_inward_order and self.purpose in [
			"Receive from Customer",
			"Return Raw Material to Customer",
			"Material Transfer for Manufacture",
		]:
			customer_warehouse = frappe.get_cached_value(
				"Subcontracting Inward Order", self.subcontracting_inward_order, "customer_warehouse"
			)
			for item in self.items:
				# During transfer, non-customer-provided items may come from
				# any warehouse; only customer-provided rows are checked.
				if self.purpose == "Material Transfer for Manufacture" and not frappe.get_cached_value(
					"Item", item.item_code, "is_customer_provided_item"
				):
					continue

				# Exactly one of s_warehouse/t_warehouse is relevant per purpose.
				if (item.s_warehouse or item.t_warehouse) != customer_warehouse:
					if item.t_warehouse:
						frappe.throw(
							_(
								"Row #{0}: Target Warehouse must be same as Customer Warehouse {1} from the linked Subcontracting Inward Order"
							).format(item.idx, get_link_to_form("Warehouse", customer_warehouse))
						)
					else:
						frappe.throw(
							_(
								"Row #{0}: Source Warehouse must be same as Customer Warehouse {1} from the linked Subcontracting Inward Order"
							).format(item.idx, get_link_to_form("Warehouse", customer_warehouse))
						)
	def validate_serial_batch_for_return_or_delivery(self):
		"""Ensure serial/batch numbers on return and delivery rows belong to
		the stock reserved for the linked inward order.

		For deliveries and raw-material returns only the still-pending
		reservation entries count; for "Subcontracting Return" the
		already-delivered ones do (only delivered stock can be returned).
		"""
		if self.subcontracting_inward_order and self.purpose in [
			"Return Raw Material to Customer",
			"Subcontracting Delivery",
			"Subcontracting Return",
		]:
			for item in self.items:
				serial_nos, batch_nos = self.get_serial_nos_and_batches_from_sres(
					item.scio_detail, only_pending=self.purpose != "Subcontracting Return"
				)
				serial_list, batch_list = get_serial_batch_list_from_item(item)
				if serial_list and (
					incorrect_serial_nos := [sn for sn in serial_list if sn not in serial_nos]
				):
					frappe.throw(
						_(
							"Row #{0}: Serial No(s) {1} are not a part of the linked Subcontracting Inward Order. Please select valid Serial No(s)."
						).format(
							item.idx,
							", ".join([get_link_to_form("Serial No", sn) for sn in incorrect_serial_nos]),
						)
					)
				if batch_list and (
					incorrect_batch_nos := [bn for bn in batch_list if bn not in list(batch_nos.keys())]
				):
					frappe.throw(
						_(
							"Row #{0}: Batch No(s) {1} is not a part of the linked Subcontracting Inward Order. Please select valid Batch No(s)."
						).format(
							item.idx,
							", ".join([get_link_to_form("Batch No", bn) for bn in incorrect_batch_nos]),
						)
					)
	def get_serial_nos_and_batches_from_sres(self, scio_detail, only_pending=True):
		"""Collect serial and batch numbers reserved against an inward-order
		detail row from submitted Stock Reservation Entries.

		Args:
			scio_detail: name of the inward-order detail row the reservation
				entries point at via ``voucher_detail_no``.
			only_pending: when True, include only entries with undelivered
				qty; when False, only entries with delivered qty > 0.

		Returns:
			(serial_nos, batch_nos): a list of distinct serial numbers and a
			dict mapping batch number -> qty.
		"""
		serial_nos, batch_nos = [], frappe._dict()
		table = frappe.qb.DocType("Stock Reservation Entry")
		child_table = frappe.qb.DocType("Serial and Batch Entry")
		query = (
			frappe.qb.from_(table)
			.join(child_table)
			.on(table.name == child_table.parent)
			.select(child_table.serial_no, child_table.batch_no, child_table.qty)
			.where((table.docstatus == 1) & (table.voucher_detail_no == scio_detail))
		)
		if only_pending:
			query = query.where(child_table.qty != child_table.delivered_qty)
		else:
			query = query.where(child_table.delivered_qty > 0)

		for d in query.run(as_dict=True):
			if d.serial_no and d.serial_no not in serial_nos:
				serial_nos.append(d.serial_no)
			# NOTE(review): if the same batch appears in several reservation
			# rows, only the FIRST row's qty is kept (not summed) — confirm
			# this is intended by the callers.
			if d.batch_no and d.batch_no not in batch_nos:
				batch_nos[d.batch_no] = d.qty

		return serial_nos, batch_nos
	def validate_delivery(self):
		"""Validate "Subcontracting Delivery" entries.

		On save/submit the detailed quantity checks run
		(``validate_delivery_on_save``); on other actions (cancel) each
		non-scrap row's order detail must not end up with more returned than
		delivered quantity.
		"""
		if self.purpose == "Subcontracting Delivery":
			if self._action in ["save", "submit"]:
				self.validate_delivery_on_save()
			else:
				for item in self.items:
					if not item.is_scrap_item:
						delivered_qty, returned_qty = frappe.get_value(
							"Subcontracting Inward Order Item",
							item.scio_detail,
							["delivered_qty", "returned_qty"],
						)
						if returned_qty > delivered_qty:
							frappe.throw(
								_(
									"Row #{0}: Cannot cancel this Stock Entry as returned quantity cannot be greater than delivered quantity for Item {1} in the linked Subcontracting Inward Order"
								).format(item.idx, get_link_to_form("Item", item.item_code))
							)
def validate_delivery_on_save(self):
allow_delivery_of_overproduced_qty = frappe.get_single_value(
"Selling Settings", "allow_delivery_of_overproduced_qty"
)
for item in self.items:
if not item.scio_detail:
frappe.throw(
_("Row #{0}: Item {1} is not a part of Subcontracting Inward Order {2}").format(
item.idx,
get_link_to_form("Item", item.item_code),
get_link_to_form("Subcontracting Inward Order", self.subcontracting_inward_order),
)
)
from pypika.terms import ValueWrapper
table = frappe.qb.DocType("Subcontracting Inward Order Item")
query = (
frappe.qb.from_(table)
.select(
(
Case()
.when(
(table.produced_qty < table.qty)
| ValueWrapper(allow_delivery_of_overproduced_qty),
table.produced_qty,
)
.else_(table.qty)
- table.delivered_qty
).as_("max_allowed_qty")
)
.where((table.name == item.scio_detail) & (table.docstatus == 1))
)
max_allowed_qty = query.run(pluck="max_allowed_qty")
if max_allowed_qty:
max_allowed_qty = max_allowed_qty[0]
else:
table = frappe.qb.DocType("Subcontracting Inward Order Scrap Item")
query = (
frappe.qb.from_(table)
.select((table.produced_qty - table.delivered_qty).as_("max_allowed_qty"))
.where((table.name == item.scio_detail) & (table.docstatus == 1))
)
max_allowed_qty = query.run(pluck="max_allowed_qty")[0]
if item.transfer_qty > max_allowed_qty:
frappe.throw(
_(
"Row #{0}: Quantity of Item {1} cannot be more than {2} {3} against Subcontracting Inward Order {4}"
).format(
item.idx,
get_link_to_form("Item", item.item_code),
bold(max_allowed_qty),
bold(
frappe.get_cached_value(
"Subcontracting Inward Order Item"
if not item.is_scrap_item
else "Subcontracting Inward Order Scrap Item",
item.scio_detail,
"stock_uom",
)
),
get_link_to_form("Subcontracting Inward Order", self.subcontracting_inward_order),
)
)
	def update_customer_provided_item_cost(self):
		"""On "Receive from Customer", zero out each row's valuation rate
		(customer stock carries no cost for us) while remembering the
		customer-declared per-unit cost, including apportioned additional
		cost, in ``customer_provided_item_cost``."""
		if self.purpose == "Receive from Customer":
			for item in self.items:
				item.valuation_rate = 0
				# NOTE(review): divides by transfer_qty — assumes rows always
				# carry a non-zero quantity here; confirm upstream validation.
				item.customer_provided_item_cost = flt(
					item.basic_rate + (item.additional_cost / item.transfer_qty), item.precision("basic_rate")
				)
	def validate_receive_from_customer_cancel(self):
		"""Block cancelling a "Receive from Customer" entry when the stock it
		brought in is already (partly) committed to a Work Order: after
		removing this entry's qty, the remaining received-minus-returned qty
		must still cover the work-order qty."""
		if self.purpose == "Receive from Customer":
			for item in self.items:
				scio_rm_item = frappe.get_value(
					"Subcontracting Inward Order Received Item",
					item.scio_detail,
					["received_qty", "returned_qty", "work_order_qty"],
					as_dict=True,
				)
				if (
					scio_rm_item.received_qty - scio_rm_item.returned_qty - item.transfer_qty
				) < scio_rm_item.work_order_qty:
					frappe.throw(
						_("Row #{0}: Work Order exists against full or partial quantity of Item {1}").format(
							item.idx, get_link_to_form("Item", item.item_code)
						)
					)
	def validate_manufacture_entry_cancel(self):
		"""Block cancelling a "Manufacture" entry when downstream documents
		depend on its output.

		Guards, in order: the finished good's produced qty may not drop below
		its delivered qty; each scrap row's produced qty may not drop below
		its delivered qty; and supplier-added (non-customer-provided,
		additional) raw materials may not end up with more billed-minus-
		returned than consumed qty.
		"""
		if self.subcontracting_inward_order and self.purpose == "Manufacture":
			fg_item_name = frappe.get_cached_value(
				"Work Order", self.work_order, "subcontracting_inward_order_item"
			)
			produced_qty, delivered_qty = frappe.get_value(
				"Subcontracting Inward Order Item", fg_item_name, ["produced_qty", "delivered_qty"]
			)
			if produced_qty < delivered_qty:
				frappe.throw(
					_(
						"Cannot cancel this Manufacturing Stock Entry as quantity of Finished Good produced cannot be less than quantity delivered in the linked Subcontracting Inward Order."
					)
				)

			for item in [item for item in self.items if not item.is_finished_item]:
				if item.is_scrap_item:
					scio_scrap_item = frappe.get_value(
						"Subcontracting Inward Order Scrap Item",
						{
							"docstatus": 1,
							"item_code": item.item_code,
							"warehouse": item.t_warehouse,
							"reference_name": fg_item_name,
						},
						["produced_qty", "delivered_qty"],
						as_dict=True,
					)
					if (
						scio_scrap_item
						and scio_scrap_item.delivered_qty > scio_scrap_item.produced_qty - item.transfer_qty
					):
						frappe.throw(
							_(
								"Row #{0}: Cannot cancel this Manufacturing Stock Entry as quantity of Scrap Item {1} produced cannot be less than quantity delivered."
							).format(item.idx, get_link_to_form("Item", item.item_code))
						)
				else:
					scio_rm_item = frappe.get_value(
						"Subcontracting Inward Order Received Item",
						{
							"docstatus": 1,
							"rm_item_code": item.item_code,
							"warehouse": item.s_warehouse,
							"is_customer_provided_item": 0,
							"is_additional_item": 1,
						},
						["consumed_qty", "billed_qty", "returned_qty"],
						as_dict=True,
					)
					if scio_rm_item and (scio_rm_item.billed_qty - scio_rm_item.returned_qty) > (
						scio_rm_item.consumed_qty - item.transfer_qty
					):
						frappe.throw(
							_(
								"Row #{0}: Cannot cancel this Manufacturing Stock Entry as billed quantity of Item {1} cannot be greater than consumed quantity."
							).format(item.idx, get_link_to_form("Item", item.item_code))
						)
def update_inward_order_item(self):
if self.purpose == "Manufacture" and (
scio_item_name := frappe.get_cached_value(
"Work Order", self.work_order, "subcontracting_inward_order_item"
)
):
if scio_item_name:
frappe.get_doc(
"Subcontracting Inward Order Item", scio_item_name
).update_manufacturing_qty_fields()
elif self.purpose in ["Subcontracting Delivery", "Subcontracting Return"]:
fieldname = "delivered_qty" if self.purpose == "Subcontracting Delivery" else "returned_qty"
for item in self.items:
doctype = (
"Subcontracting Inward Order Item"
if not item.is_scrap_item
else "Subcontracting Inward Order Scrap Item"
)
frappe.db.set_value(
doctype,
item.scio_detail,
fieldname,
frappe.get_value(doctype, item.scio_detail, fieldname)
+ (item.transfer_qty if self._action == "submit" else -item.transfer_qty),
)
def update_inward_order_received_items(self):
if self.subcontracting_inward_order:
match self.purpose:
case "Receive from Customer":
self.update_inward_order_received_items_for_raw_materials_receipt()
case "Manufacture":
self.update_inward_order_received_items_for_manufacture()
case "Return Raw Material to Customer":
scio_rm_names = {
item.scio_detail: item.transfer_qty
if self._action == "submit"
else -item.transfer_qty
for item in self.items
}
case_expr = Case()
table = frappe.qb.DocType("Subcontracting Inward Order Received Item")
for scio_rm_name, qty in scio_rm_names.items():
case_expr = case_expr.when(table.name == scio_rm_name, table.returned_qty + qty)
frappe.qb.update(table).set(table.returned_qty, case_expr).where(
(table.name.isin(list(scio_rm_names.keys()))) & (table.docstatus == 1)
).run()
	def update_inward_order_received_items_for_raw_materials_receipt(self):
		"""Sync SCIO Received Item rows after a "Receive from Customer" entry.

		Rows already linked via ``scio_detail`` get their received_qty and
		(weighted-average) rate updated in one bulk CASE query; unlinked rows
		get a brand-new additional customer-provided Received Item created,
		submitted, and linked back via ``scio_detail``. Rows that end up with
		neither required nor received qty are deleted.
		"""
		# scio_detail -> {transfer_qty, rate} for rows that already have a link.
		data = frappe._dict()
		for item in self.items:
			if item.scio_detail:
				data[item.scio_detail] = frappe._dict(
					{"transfer_qty": item.transfer_qty, "rate": item.customer_provided_item_cost}
				)
			else:
				# No existing SCIO row: create an additional customer-provided one.
				# NOTE(review): idx from db.count is race-prone under concurrency — confirm acceptable.
				scio_rm = frappe.new_doc(
					"Subcontracting Inward Order Received Item",
					parent=self.subcontracting_inward_order,
					parenttype="Subcontracting Inward Order",
					parentfield="received_items",
					idx=frappe.db.count(
						"Subcontracting Inward Order Received Item",
						{"parent": self.subcontracting_inward_order},
					)
					+ 1,
					rm_item_code=item.item_code,
					stock_uom=item.stock_uom,
					warehouse=item.t_warehouse,
					received_qty=item.transfer_qty,
					consumed_qty=0,
					work_order_qty=0,
					returned_qty=0,
					rate=item.customer_provided_item_cost,
					is_customer_provided_item=True,
					is_additional_item=True,
					reference_name=item.against_fg,
					main_item_code=frappe.get_cached_value(
						"Subcontracting Inward Order Item", item.against_fg, "item_code"
					),
				)
				scio_rm.insert()
				scio_rm.submit()
				# Link the stock entry row back to the newly created SCIO row.
				item.db_set("scio_detail", scio_rm.name)
		if data:
			precision = self.precision("customer_provided_item_cost", "items")
			result = frappe.get_all(
				"Subcontracting Inward Order Received Item",
				filters={
					"parent": self.subcontracting_inward_order,
					"name": ["in", list(data.keys())],
					"docstatus": 1,
				},
				fields=["rate", "name", "required_qty", "received_qty"],
			)
			deleted_docs = []
			table = frappe.qb.DocType("Subcontracting Inward Order Received Item")
			case_expr_qty, case_expr_rate = Case(), Case()
			for d in result:
				# Submit adds qty, cancel removes it.
				current_qty = flt(data[d.name].transfer_qty) * (1 if self._action == "submit" else -1)
				current_rate = flt(data[d.name].rate)
				# Calculate weighted average rate
				old_total = d.rate * d.received_qty
				current_total = current_rate * current_qty
				d.received_qty = d.received_qty + current_qty
				d.rate = (
					flt((old_total + current_total) / d.received_qty, precision) if d.received_qty else 0.0
				)
				if not d.required_qty and not d.received_qty:
					# Fully unwound additional row: remove it entirely.
					deleted_docs.append(d.name)
					frappe.delete_doc("Subcontracting Inward Order Received Item", d.name)
				else:
					case_expr_qty = case_expr_qty.when(table.name == d.name, d.received_qty)
					case_expr_rate = case_expr_rate.when(table.name == d.name, d.rate)
			# Bulk-update survivors in one query (qty and rate CASEs together).
			if final_list := list(set(data.keys()) - set(deleted_docs)):
				frappe.qb.update(table).set(table.received_qty, case_expr_qty).set(
					table.rate, case_expr_rate
				).where((table.name.isin(final_list)) & (table.docstatus == 1)).run()
	def update_inward_order_received_items_for_manufacture(self):
		"""Sync SCIO Received Item consumed quantities after a Manufacture entry.

		Raw-material rows (neither finished nor scrap) are keyed by
		(item_code, warehouse) — customer-provided items map to the order's
		customer_warehouse, self-procured ones to their source warehouse.
		Matching Received Item rows get consumed_qty adjusted via one CASE
		update; additional self-procured rows that net out to zero are deleted;
		self-procured materials with no matching row get a new additional
		Received Item created and submitted.
		"""
		customer_warehouse = frappe.get_cached_value(
			"Subcontracting Inward Order", self.subcontracting_inward_order, "customer_warehouse"
		)
		items = [item for item in self.items if not item.is_finished_item and not item.is_scrap_item]
		# (item_code, effective warehouse) -> signed qty delta (+ on submit, - on cancel).
		item_code_wh = frappe._dict(
			{
				(
					item.item_code,
					customer_warehouse
					if frappe.get_cached_value("Item", item.item_code, "is_customer_provided_item")
					else item.s_warehouse,
				): item.transfer_qty if self._action == "submit" else -item.transfer_qty
				for item in items
			}
		)
		item_codes, warehouses = zip(*list(item_code_wh.keys()), strict=True)
		table = frappe.qb.DocType("Subcontracting Inward Order Received Item")
		data = (
			frappe.qb.from_(table)
			.select(
				table.name,
				table.rm_item_code,
				table.is_customer_provided_item,
				table.consumed_qty,
				table.warehouse,
				table.is_additional_item,
			)
			.where(
				(table.docstatus == 1)
				& (table.rm_item_code.isin(list(set(item_codes))))
				& (
					(table.warehouse.isin(list(set(warehouses)))) | (table.warehouse.isnull())
				)  # warehouse will always be null for non additional self procured raw materials
				& (table.parent == self.subcontracting_inward_order)
				& (
					table.reference_name
					== frappe.get_cached_value(
						"Work Order", self.work_order, "subcontracting_inward_order_item"
					)
				)
			)
		)
		if data := data.run(as_dict=True):
			deleted_docs, used_item_wh = [], []
			case_expr = Case()
			for d in data:
				if not d.warehouse:
					# Null-warehouse rows: claim the first not-yet-used entry-row
					# warehouse for this item code so each SCIO row maps uniquely.
					d.warehouse = next(
						key[1]
						for key in item_code_wh.keys()
						if key[0] == d.rm_item_code and key not in used_item_wh
					)
					used_item_wh.append((d.rm_item_code, d.warehouse))
				qty = d.consumed_qty + item_code_wh[(d.rm_item_code, d.warehouse)]
				# Customer-provided and non-additional rows are kept even at zero;
				# only additional self-procured rows that net to zero are deleted.
				if qty or d.is_customer_provided_item or not d.is_additional_item:
					case_expr = case_expr.when((table.name == d.name), qty)
				else:
					deleted_docs.append(d.name)
					frappe.delete_doc("Subcontracting Inward Order Received Item", d.name)
			if final_list := list(set([d.name for d in data]) - set(deleted_docs)):
				frappe.qb.update(table).set(table.consumed_qty, case_expr).where(
					(table.name.isin(final_list)) & (table.docstatus == 1)
				).run()
			main_item_code = next(fg for fg in self.items if fg.is_finished_item).item_code
			# Self-procured materials with no matching Received Item row yet:
			# create an additional row for each.
			for extra_item in [
				item
				for item in items
				if not frappe.get_cached_value("Item", item.item_code, "is_customer_provided_item")
				and (item.item_code, item.s_warehouse)
				not in [(d.rm_item_code, d.warehouse) for d in data if not d.is_customer_provided_item]
			]:
				doc = frappe.new_doc(
					"Subcontracting Inward Order Received Item",
					parent=self.subcontracting_inward_order,
					parenttype="Subcontracting Inward Order",
					parentfield="received_items",
					idx=frappe.db.count(
						"Subcontracting Inward Order Received Item",
						{"parent": self.subcontracting_inward_order},
					)
					+ 1,
					main_item_code=main_item_code,
					rm_item_code=extra_item.item_code,
					stock_uom=extra_item.stock_uom,
					reference_name=frappe.get_cached_value(
						"Work Order", self.work_order, "subcontracting_inward_order_item"
					),
					required_qty=0,
					consumed_qty=extra_item.transfer_qty,
					warehouse=extra_item.s_warehouse,
					is_additional_item=True,
				)
				doc.insert()
				doc.submit()
	def update_inward_order_scrap_items(self):
		"""Sync SCIO Scrap Item produced quantities after a Manufacture entry.

		Scrap rows are keyed by (item_code, t_warehouse). Existing SCIO scrap
		rows get produced_qty adjusted via one CASE update (+ on submit, - on
		cancel); rows whose produced_qty would reach zero on cancel are
		deleted; scrap items with no matching SCIO row get a new Scrap Item
		created and submitted.
		"""
		if (scio := self.subcontracting_inward_order) and self.purpose == "Manufacture":
			scrap_items_list = [item for item in self.items if item.is_scrap_item]
			# (item_code, target warehouse) -> signed qty delta.
			scrap_items = frappe._dict(
				{
					(item.item_code, item.t_warehouse): item.transfer_qty
					if self._action == "submit"
					else -item.transfer_qty
					for item in scrap_items_list
				}
			)
			if scrap_items:
				item_codes, warehouses = zip(*list(scrap_items.keys()), strict=True)
				item_codes = list(item_codes)
				warehouses = list(warehouses)
				result = frappe.get_all(
					"Subcontracting Inward Order Scrap Item",
					filters={
						"item_code": ["in", item_codes],
						"warehouse": ["in", warehouses],
						"reference_name": frappe.get_cached_value(
							"Work Order", self.work_order, "subcontracting_inward_order_item"
						),
						"docstatus": 1,
					},
					fields=["name", "item_code", "warehouse", "produced_qty"],
				)
				if result:
					# (item_code, warehouse) -> {name, produced_qty} for existing rows.
					scrap_item_dict = frappe._dict(
						{
							(d.item_code, d.warehouse): frappe._dict(
								{"name": d.name, "produced_qty": d.produced_qty}
							)
							for d in result
						}
					)
					deleted_docs = []
					case_expr = Case()
					table = frappe.qb.DocType("Subcontracting Inward Order Scrap Item")
					for key, value in scrap_item_dict.items():
						# Cancelling the entry that produced the full qty: drop the row.
						if self._action == "cancel" and value.produced_qty - abs(scrap_items.get(key)) == 0:
							deleted_docs.append(value.name)
							frappe.delete_doc("Subcontracting Inward Order Scrap Item", value.name)
						else:
							case_expr = case_expr.when(
								table.name == value.name, value.produced_qty + scrap_items.get(key)
							)
					if final_list := list(
						set([v.name for v in scrap_item_dict.values()]) - set(deleted_docs)
					):
						frappe.qb.update(table).set(table.produced_qty, case_expr).where(
							(table.name.isin(final_list)) & (table.docstatus == 1)
						).run()
				fg_item_code = next(fg for fg in self.items if fg.is_finished_item).item_code
				# Scrap items not represented on the SCIO yet: create new rows.
				for scrap_item in [
					item
					for item in scrap_items_list
					if (item.item_code, item.t_warehouse) not in [(d.item_code, d.warehouse) for d in result]
				]:
					doc = frappe.new_doc(
						"Subcontracting Inward Order Scrap Item",
						parent=scio,
						parenttype="Subcontracting Inward Order",
						parentfield="scrap_items",
						idx=frappe.db.count("Subcontracting Inward Order Scrap Item", {"parent": scio}) + 1,
						item_code=scrap_item.item_code,
						fg_item_code=fg_item_code,
						stock_uom=scrap_item.stock_uom,
						warehouse=scrap_item.t_warehouse,
						produced_qty=scrap_item.transfer_qty,
						delivered_qty=0,
						reference_name=frappe.get_value(
							"Work Order", self.work_order, "subcontracting_inward_order_item"
						),
					)
					doc.insert()
					doc.submit()
def cancel_stock_reservation_entries_for_inward(self):
if self.purpose == "Receive from Customer":
table = frappe.qb.DocType("Stock Reservation Entry")
query = (
frappe.qb.from_(table)
.select(table.name)
.where(
(table.docstatus == 1)
& (table.voucher_detail_no.isin([item.scio_detail for item in self.items]))
)
)
for sre in query.run(pluck="name"):
frappe.get_doc("Stock Reservation Entry", sre).cancel()
def remove_reference_for_additional_items(self):
if self.subcontracting_inward_order:
items = [
item
for item in self.items
if item.scio_detail
and (
not frappe.db.exists("Subcontracting Inward Order Received Item", item.scio_detail)
and not frappe.db.exists("Subcontracting Inward Order Item", item.scio_detail)
and not frappe.db.exists("Subcontracting Inward Order Scrap Item", item.scio_detail)
)
]
for item in items:
item.db_set("scio_detail", None)
	def create_stock_reservation_entries_for_inward(self):
		"""Create one submitted Stock Reservation Entry per item row for a
		"Receive from Customer" entry, reserving the full transferred qty
		against the linked Subcontracting Inward Order.

		Serial/batch-tracked rows copy their Serial and Batch Bundle entries
		into the SRE so reservation is per serial/batch rather than by qty.
		"""
		if self.purpose == "Receive from Customer":
			for item in self.items:
				# Refresh the row — scio_detail may have been set by an earlier
				# hook in this transaction (TODO confirm that's why reload is needed).
				item.reload()
				sre = frappe.new_doc("Stock Reservation Entry")
				sre.company = self.company
				sre.voucher_type = "Subcontracting Inward Order"
				# Reserve the entire transferred quantity.
				sre.voucher_qty = sre.reserved_qty = sre.available_qty = item.transfer_qty
				sre.voucher_no = self.subcontracting_inward_order
				sre.voucher_detail_no = item.scio_detail
				sre.item_code = item.item_code
				sre.stock_uom = item.stock_uom
				sre.warehouse = item.t_warehouse or item.s_warehouse
				sre.has_serial_no = frappe.get_cached_value("Item", item.item_code, "has_serial_no")
				sre.has_batch_no = frappe.get_cached_value("Item", item.item_code, "has_batch_no")
				sre.reservation_based_on = "Qty" if not item.serial_and_batch_bundle else "Serial and Batch"
				if item.serial_and_batch_bundle:
					# Mirror the bundle's serial/batch rows onto the reservation.
					sabb = frappe.get_doc("Serial and Batch Bundle", item.serial_and_batch_bundle)
					for entry in sabb.entries:
						sre.append(
							"sb_entries",
							{
								"serial_no": entry.serial_no,
								"batch_no": entry.batch_no,
								"qty": entry.qty,
								"warehouse": entry.warehouse,
							},
						)
				sre.submit()
			frappe.msgprint(_("Stock Reservation Entries Created"), alert=True, indicator="green")
	def adjust_stock_reservation_entries_for_return(self):
		"""Adjust Stock Reservation Entry delivered quantities when raw material
		is returned to the customer (or the return is cancelled).

		Two paths per item row:
		- Serial/batch-tracked: match the row's serials/batches against the
		  SRE's Serial and Batch Entry children and adjust delivered_qty at
		  both the child and parent level.
		- Plain qty: consume the open reservations (oldest first) until the
		  row's transfer_qty is covered.
		On submit quantities are added; on cancel they are subtracted.
		"""
		if self.purpose == "Return Raw Material to Customer":
			for item in self.items:
				serial_list, batch_list = get_serial_batch_list_from_item(item)
				if serial_list or batch_list:
					table = frappe.qb.DocType("Stock Reservation Entry")
					child_table = frappe.qb.DocType("Serial and Batch Entry")
					# SRE children matching this row's reservation link.
					query = (
						frappe.qb.from_(table)
						.join(child_table)
						.on(table.name == child_table.parent)
						.select(
							table.name.as_("sre_name"),
							child_table.name.as_("sbe_name"),
							child_table.batch_no,
							child_table.qty,
						)
						.where((table.docstatus == 1) & (table.voucher_detail_no == item.scio_detail))
					)
					if serial_list:
						query = query.where(child_table.serial_no.isin(serial_list))
					if batch_list:
						query = query.where(child_table.batch_no.isin(batch_list))
					result = query.run(as_dict=True)
					# Per-SRE totals to roll up onto the parent afterwards.
					qty_to_deliver = {row.sre_name: 0 for row in result}
					consumed_qty = {batch: 0 for batch in batch_list}
					for row in result:
						if serial_list:
							# Serial-tracked: each child is qty 1; mark delivered on
							# submit, un-delivered on cancel.
							frappe.get_doc("Serial and Batch Entry", row.sbe_name).db_set(
								"delivered_qty", 1 if self._action == "submit" else 0
							)
							qty_to_deliver[row.sre_name] += row.qty
						elif batch_list and not serial_list:
							# Batch-only: cap by the qty this entry's bundle actually
							# moved for that batch (bundle qtys can be negative, hence abs).
							sabe_qty = abs(
								frappe.get_value(
									"Serial and Batch Entry",
									{"parent": item.serial_and_batch_bundle, "batch_no": row.batch_no},
									"qty",
								)
							)
							qty = min(row.qty, sabe_qty)
							sbe_doc = frappe.get_doc("Serial and Batch Entry", row.sbe_name)
							sbe_doc.db_set(
								"delivered_qty",
								sbe_doc.delivered_qty + (qty if self._action == "submit" else -qty),
							)
							qty_to_deliver[row.sre_name] += qty
							consumed_qty[row.batch_no] += qty
					# Roll child deltas up to each parent SRE and refresh its
					# status and the reserved qty on the bin.
					for sre_name, qty in qty_to_deliver.items():
						sre_doc = frappe.get_doc("Stock Reservation Entry", sre_name)
						sre_doc.db_set(
							"delivered_qty",
							sre_doc.delivered_qty + (qty if self._action == "submit" else -qty),
						)
						sre_doc.update_status()
						sre_doc.update_reserved_stock_in_bin()
				else:
					# Untracked items: consume open reservations FIFO by creation.
					table = frappe.qb.DocType("Stock Reservation Entry")
					query = (
						frappe.qb.from_(table)
						.select(
							table.name,
							(table.reserved_qty - table.delivered_qty).as_("qty"),
						)
						.where(
							(table.docstatus == 1)
							& (table.voucher_detail_no == item.scio_detail)
							& (table.delivered_qty < table.reserved_qty)
						)
						.orderby(table.creation)
					)
					sre_list = query.run(as_dict=True)
					voucher_qty = item.transfer_qty
					for sre in sre_list:
						# Take as much as this reservation still has open.
						qty = min(sre.qty, voucher_qty)
						sre_doc = frappe.get_doc("Stock Reservation Entry", sre.name)
						sre_doc.db_set(
							"delivered_qty",
							sre_doc.delivered_qty + (qty if self._action == "submit" else -qty),
						)
						sre_doc.update_status()
						sre_doc.update_reserved_stock_in_bin()
						voucher_qty -= qty
						if voucher_qty <= 0:
							break
def update_inward_order_status(self):
if self.subcontracting_inward_order:
from erpnext.subcontracting.doctype.subcontracting_inward_order.subcontracting_inward_order import (
update_subcontracting_inward_order_status,
)
update_subcontracting_inward_order_status(self.subcontracting_inward_order)
@frappe.whitelist()
@frappe.validate_and_sanitize_search_inputs
def get_fg_reference_names(
	doctype: str, txt: str, searchfield: str, start: int, page_len: int, filters: dict
):
	"""Link-field search query: finished-good rows of a Subcontracting Inward Order.

	Returns (name, item_code, delivery_warehouse) tuples from submitted
	Subcontracting Inward Order Item rows of ``filters["parent"]`` whose
	item_code matches ``txt``, paginated by ``start``/``page_len`` and
	ordered by idx. Inputs are sanitized by the decorator.
	"""
	return frappe.get_all(
		"Subcontracting Inward Order Item",
		limit_start=start,
		limit_page_length=page_len,
		# f-string replaces the dated "%%%s%%" % txt formatting; same "%txt%" pattern.
		filters={"parent": filters.get("parent"), "item_code": ("like", f"%{txt}%"), "docstatus": 1},
		fields=["name", "item_code", "delivery_warehouse"],
		as_list=True,
		order_by="idx",
	)
| {
"repo_id": "frappe/erpnext",
"file_path": "erpnext/controllers/subcontracting_inward_controller.py",
"license": "GNU General Public License v3.0",
"lines": 1046,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.