# NU-KIOSK-API / backend/__init__.py
# Author: Monish BV
# Commit c2b7a7b — "Add kiosk-api: stripped backend for speech integration"
"""
Core package for the CS Kiosk prototype.
The project follows a modular layout inspired by Satyrn:
- ``backend.data``: catalog registry, data sources, and utilities.
- ``backend.tools``: analytic blueprints, analysis engine, conversation runtime helpers.
- ``backend.providers``: LLM clients/adapters.
- ``backend.responders``: responder implementations (``Responder``, ``LLMResponder``).
- ``backend.mcp``: MCP integration, re-exported here as a submodule.
"""
from .data import (
DataCatalog,
EntityRecords,
RelationshipDefinition,
load_default_catalog,
DataSource,
CSVSource,
FeedListSource,
default_sources,
canonicalize_name,
generate_name_variants,
tokenize_name,
)
from .tools import (
Blueprint,
BlueprintResult,
AnalysisContext,
Fact,
FacultyByTopicBlueprint,
LocationBlueprint,
CenterBlueprint,
AdvisorshipBlueprint,
StaffSupportBlueprint,
UpcomingEventsBlueprint,
OfficeHoursBlueprint,
PersonLookupBlueprint,
AnalysisEngine,
)
from .providers import (
BaseLLM,
ChatMessage,
LLMResponse,
ProviderConfig,
ToolCall,
EchoProvider,
OpenAIChat,
ClaudeProvider,
GeminiGenerative,
)
from .responders import Responder, LLMResponder
from . import mcp
# Explicit public API for ``from backend import *``.
# Each layer keeps its own export list so it is obvious which submodule a
# name comes from; the concatenation below yields the same flat ``__all__``.
_DATA_EXPORTS = [
    "DataCatalog",
    "EntityRecords",
    "RelationshipDefinition",
    "load_default_catalog",
    "DataSource",
    "CSVSource",
    "FeedListSource",
    "default_sources",
    "canonicalize_name",
    "generate_name_variants",
    "tokenize_name",
]
_TOOLS_EXPORTS = [
    "Blueprint",
    "BlueprintResult",
    "AnalysisContext",
    "Fact",
    "FacultyByTopicBlueprint",
    "LocationBlueprint",
    "CenterBlueprint",
    "AdvisorshipBlueprint",
    "StaffSupportBlueprint",
    "UpcomingEventsBlueprint",
    "OfficeHoursBlueprint",
    "PersonLookupBlueprint",
    "AnalysisEngine",
]
_LLM_EXPORTS = [
    "BaseLLM",
    "ChatMessage",
    "LLMResponse",
    "ProviderConfig",
    "ToolCall",
    "EchoProvider",
    "OpenAIChat",
    "ClaudeProvider",
    "GeminiGenerative",
    "Responder",
    "LLMResponder",
    "mcp",
]
__all__ = _DATA_EXPORTS + _TOOLS_EXPORTS + _LLM_EXPORTS