# velai/services/utils/fal_service.py
from __future__ import annotations
import os
from typing import Any, Mapping
from services.exceptions import GenerationError
try:
import fal_client # type: ignore[import-not-found]
except ModuleNotFoundError: # pragma: no cover
fal_client = None # type: ignore[assignment]
# Fal text uses the OpenRouter "any LLM" router endpoint
DEFAULT_FAL_TEXT_MODEL = os.getenv("FAL_TEXT_MODEL", "openrouter/router")
# Default image model: nano banana as main image backend
DEFAULT_FAL_IMAGE_MODEL = os.getenv("FAL_IMAGE_MODEL", "fal-ai/nano-banana")
def _ensure_fal_client(api_key: str | None = None) -> Any:
    """Return the imported ``fal_client`` module, configuring auth if needed.

    Args:
        api_key: Optional fal.ai API key. When provided it is exported as
            the ``FAL_KEY`` environment variable, which ``fal_client`` reads
            for authentication. An explicitly passed key takes precedence
            over any pre-existing ``FAL_KEY`` value in the environment.

    Returns:
        The ``fal_client`` module object.

    Raises:
        GenerationError: If the ``fal-client`` package is not installed.
    """
    if fal_client is None:
        raise GenerationError(
            "The 'fal-client' package is required to use the fal.ai backends."
        )
    if api_key:
        # Fix: the previous setdefault() silently ignored an explicitly
        # supplied key whenever FAL_KEY was already set, making it
        # impossible for callers to override the ambient credential.
        # An explicit argument should win over the environment.
        os.environ["FAL_KEY"] = api_key
    return fal_client
def run_fal(
    model: str,
    arguments: Mapping[str, Any],
    api_key: str | None = None,
) -> dict[str, Any]:
    """Run a fal.ai model synchronously and return the raw response.

    Args:
        model: The fal.ai model identifier to invoke.
        arguments: Payload forwarded to the model (copied before sending).
        api_key: Optional fal.ai API key passed through to client setup.

    Returns:
        The raw response dictionary produced by the model.

    Raises:
        GenerationError: If the client is unavailable, no FAL_KEY is
            configured, the request fails, or the response is not a dict.
    """
    client = _ensure_fal_client(api_key)
    # Without a key the request would fail deep inside the client with an
    # opaque auth error, so fail fast here with actionable guidance.
    if not os.getenv("FAL_KEY"):
        raise GenerationError(
            "FAL_KEY environment variable is not set. "
            "Please set it in your .env file or environment. "
            "Get your API key from https://fal.ai/dashboard"
        )
    try:
        result = client.run(model, arguments=dict(arguments))
    except Exception as exc:  # pragma: no cover
        # Preserve the underlying error text (or at least the exception
        # class name) so the wrapped message stays debuggable.
        detail = str(exc) or type(exc).__name__
        raise GenerationError(
            f"fal.ai request to {model!r} failed: {detail}"
        ) from exc
    # fal responses are expected to be JSON-object-shaped; refuse to pass
    # anything else through to callers that index into the result.
    if not isinstance(result, dict):
        raise GenerationError(
            f"fal.ai model {model!r} returned an unexpected response type "
            f"{type(result).__name__}."
        )
    return result