// NOTE(review): The JSX markup in this file appears to have been stripped or
// garbled during extraction — each `return (` below is followed by bare text
// fragments (element tags, classNames, and input attributes are missing;
// line "e.target.blur()} ..." is the tail of an <input> tag). Restore the
// markup from version control before shipping. Only comments are added here.
import { useEffect, useState } from "react";
import { Info, CaretDown, CaretUp } from "@phosphor-icons/react";
import paths from "@/utils/paths";
import System from "@/models/system";
import PreLoader from "@/components/Preloader";
import { LMSTUDIO_COMMON_URLS } from "@/utils/constants";
import useProviderEndpointAutoDiscovery from "@/hooks/useProviderEndpointAutoDiscovery";

/**
 * Settings panel for the LM Studio LLM provider.
 *
 * @param {object} props
 * @param {object} [props.settings] - Persisted provider settings; this
 *   component reads `settings.LMStudioBasePath` and
 *   `settings.LMStudioTokenLimit`.
 * @param {boolean} [props.showAlert=false] - When true, renders a banner
 *   telling the user an embedding service must also be configured.
 */
export default function LMStudioOptions({ settings, showAlert = false }) {
  // Auto-discovers a reachable LM Studio endpoint by probing the common URL
  // list, seeded with any previously saved base path. `autoDetecting` is
  // renamed to `loading` for use as a spinner flag in the (stripped) JSX.
  const {
    autoDetecting: loading,
    basePath,
    basePathValue,
    showAdvancedControls,
    setShowAdvancedControls,
    handleAutoDetectClick,
  } = useProviderEndpointAutoDiscovery({
    provider: "lmstudio",
    initialBasePath: settings?.LMStudioBasePath,
    ENDPOINTS: LMSTUDIO_COMMON_URLS,
  });

  // Token limit covering both context and response; falls back to 4096 when
  // unset. NOTE(review): `||` also overrides a saved value of 0 — presumably
  // intentional (a 0-token limit is unusable), but confirm before changing
  // to `??`.
  const [maxTokens, setMaxTokens] = useState(
    settings?.LMStudioTokenLimit || 4096
  );

  // Keep the numeric state in sync with the (stripped) number input below.
  const handleMaxTokensChange = (e) => {
    setMaxTokens(Number(e.target.value));
  };

  return (
{showAlert && (

LMStudio as your LLM requires you to set an embedding service to use.

Manage embedding →
)}
e.target.blur()} required={true} autoComplete="off" />

Maximum number of tokens for context and response.

); }

/**
 * Dropdown for choosing which LM Studio model to chat with.
 *
 * Fetches the model list from the given endpoint and re-fetches whenever the
 * endpoint changes. While loading — or when no models were found — it renders
 * a disabled placeholder instead of a populated select.
 *
 * @param {object} props
 * @param {object} [props.settings] - Persisted provider settings.
 *   NOTE(review): not referenced in the surviving logic; presumably used in
 *   the stripped JSX to mark the saved model as selected — confirm.
 * @param {string|null} [props.basePath=null] - LM Studio base URL to query;
 *   when null/empty, no fetch is attempted.
 */
function LMStudioModelSelection({ settings, basePath = null }) {
  const [customModels, setCustomModels] = useState([]);
  const [loading, setLoading] = useState(true);

  // Re-fetch the model list whenever the base path changes.
  useEffect(() => {
    async function findCustomModels() {
      // No endpoint yet: clear any stale models and stop the spinner.
      if (!basePath) {
        setCustomModels([]);
        setLoading(false);
        return;
      }
      setLoading(true);
      try {
        const { models } = await System.customModels(
          "lmstudio",
          null,
          basePath
        );
        // Guard against a missing `models` key in the response.
        setCustomModels(models || []);
      } catch (error) {
        // Non-fatal: log and fall back to an empty list so the placeholder
        // UI renders instead of crashing the settings page.
        console.error("Failed to fetch custom models:", error);
        setCustomModels([]);
      }
      setLoading(false);
    }
    findCustomModels();
  }, [basePath]);

  // NOTE(review): loose equality `==` here — the rest of the visible logic
  // implies strict comparison is intended (`customModels.length === 0`);
  // flagged but left unchanged in this documentation-only pass.
  if (loading || customModels.length == 0) {
    return (

Select the LM Studio model you want to use. Models will load after entering a valid LM Studio URL.

); } return (

Choose the LM Studio model you want to use for your conversations.

); }