import { useState } from "react";
import Dropdown from "./Dropdown";
import NumberInput from "./NumberInput";
import { useAPI } from "../hooks/useAPI";
import { devLog, devError } from "../utils/devLogger";

/**
 * Modal popup for running a text-generation request against a selected model.
 *
 * @param {boolean} isOpen - Whether the popup is visible; renders null when false.
 * @param {Function} onClose - Parent callback invoked after local state is reset on close.
 * @param {Array} models - Available models; presumably fed to a model Dropdown in the JSX below — confirm against the render body.
 */
const InferencePopup = ({ isOpen, onClose, models }) => {
  // Form inputs and request-lifecycle state.
  const [selectedModel, setSelectedModel] = useState("");
  const [prompt, setPrompt] = useState("");
  const [response, setResponse] = useState("");
  const [isLoading, setIsLoading] = useState(false);
  const [error, setError] = useState("");
  // Generation parameters (defaults: 512 new tokens, temperature 0.7).
  const [maxNewTokens, setMaxNewTokens] = useState(512);
  const [temperature, setTemperature] = useState(0.7);

  const { inference, checkTaskStatus } = useAPI();

  // Submits the prompt to the backend, then polls the returned task id via
  // checkTaskStatus until it yields a response or an error. Every terminal
  // path (success, task error, server error, missing task id, thrown
  // exception) clears isLoading.
  const handleInference = async () => {
    // Guard: both a model and a non-blank prompt are required.
    if (!selectedModel || !prompt.trim()) {
      setError("Please select a model and enter a prompt");
      return;
    }

    setIsLoading(true);
    setError("");
    setResponse("");

    try {
      const inferenceData = {
        model_name: selectedModel,
        prompt: prompt,
        max_new_tokens: maxNewTokens,
        temperature: temperature,
      };

      devLog("Starting inference with data:", inferenceData);
      const result = await inference(inferenceData);
      devLog("Got inference result:", result);

      if (result && result.task_id) {
        // Async flow: the server queued a task; poll it for the final result.
        checkTaskStatus(
          result.task_id,
          (taskResult) => {
            devLog("Inference task result:", taskResult);
            if (taskResult && taskResult.response) {
              setResponse(taskResult.response);
            } else if (taskResult && taskResult.error) {
              setError(`Inference failed: ${taskResult.error}`);
            } else {
              // Task finished but carried neither a response nor an error.
              setError("No response received from the model");
            }
            setIsLoading(false);
          },
          (errorMessage) => {
            // Error callback for the task status check itself (polling failed).
            devError("Inference task failed:", errorMessage);
            setError(`Task failed: ${errorMessage}`);
            setIsLoading(false);
          }
        );
      } else if (result && result.error) {
        // Check if it's a server error ("HTTP 5" matches any 5xx status text)
        // so server-side failures get a distinct prefix.
        const isServerError = result.error.includes("HTTP 5");
        const errorPrefix = isServerError ? "🔴 Server Error: " : "Error: ";
        setError(`${errorPrefix}${result.error}`);
        setIsLoading(false);
      } else {
        // Reply was neither a task handle nor an explicit error.
        setError("No task ID received");
        setIsLoading(false);
      }
    } catch (err) {
      devError("Inference error:", err);
      setError(`Error: ${err.message}`);
      setIsLoading(false);
    }
  };

  // Resets transient form/request state, then delegates to the parent's
  // onClose. NOTE(review): maxNewTokens/temperature are NOT reset here, so
  // they survive a close/reopen cycle — confirm that is intentional.
  const handleClose = () => {
    setSelectedModel("");
    setPrompt("");
    setResponse("");
    setError("");
    setIsLoading(false);
    onClose();
  };

  // Render nothing when the popup is closed.
  if (!isOpen) return null;

  return (
{/* Header */}

Model Inference