Spaces:
Paused
Paused
File size: 7,795 Bytes
178e38e | 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 | import { useState, useMemo, useCallback } from "preact/hooks";
import { useT } from "@shared/i18n/context";
import { CopyButton } from "./CopyButton";
// Upstream API protocols a snippet can target; one UI tab per protocol.
type Protocol = "openai" | "anthropic" | "gemini";
// Client languages/tools we render example code for.
type CodeLang = "python" | "node" | "curl";
// Tab metadata for the protocol selector, in display order.
const protocols: { id: Protocol; label: string }[] = [
{ id: "openai", label: "OpenAI" },
{ id: "anthropic", label: "Anthropic" },
{ id: "gemini", label: "Gemini" },
];
// Tab metadata for the language selector, in display order.
const langs: { id: CodeLang; label: string }[] = [
{ id: "python", label: "Python" },
{ id: "node", label: "Node.js" },
{ id: "curl", label: "cURL" },
];
/**
 * Build ready-to-paste client snippets for every protocol/language pair.
 *
 * Keys are "<protocol>-<lang>" (e.g. "openai-python", "gemini-curl");
 * values are complete example sources with the credentials and model
 * already substituted.
 *
 * @param baseUrl OpenAI-compatible endpoint, used verbatim as base_url/baseURL.
 * @param apiKey credential rendered literally into every snippet.
 * @param model model identifier rendered into every snippet.
 * @param origin site origin; "/v1" (Anthropic) or "/v1beta" (Gemini) is appended.
 * @param reasoningEffort effort level; "" or "medium" (the implicit default)
 *   emits no reasoning_effort parameter in any snippet.
 */
function buildExamples(
  baseUrl: string,
  apiKey: string,
  model: string,
  origin: string,
  reasoningEffort: string
): Record<string, string> {
  // Single shared predicate (previously copy-pasted three times): only a
  // non-default effort is spelled out explicitly in the generated code.
  const showEffort = Boolean(reasoningEffort) && reasoningEffort !== "medium";
  const effortLine = showEffort ? `\n reasoning_effort="${reasoningEffort}",` : "";
  const effortJson = showEffort ? `,\n "reasoning_effort": "${reasoningEffort}"` : "";
  const effortJs = showEffort ? `\n reasoning_effort: "${reasoningEffort}",` : "";
  return {
    "openai-python": `from openai import OpenAI
client = OpenAI(
base_url="${baseUrl}",
api_key="${apiKey}",
)
response = client.chat.completions.create(
model="${model}",
messages=[{"role": "user", "content": "Hello"}],${effortLine}
)
print(response.choices[0].message.content)`,
    "openai-curl": `curl ${baseUrl}/chat/completions \\
-H "Content-Type: application/json" \\
-H "Authorization: Bearer ${apiKey}" \\
-d '{
"model": "${model}",
"messages": [{"role": "user", "content": "Hello"}]${effortJson}
}'`,
    "openai-node": `import OpenAI from "openai";
const client = new OpenAI({
baseURL: "${baseUrl}",
apiKey: "${apiKey}",
});
const stream = await client.chat.completions.create({
model: "${model}",
messages: [{ role: "user", content: "Hello" }],${effortJs}
stream: true,
});
for await (const chunk of stream) {
process.stdout.write(chunk.choices[0]?.delta?.content || "");
}`,
    "anthropic-python": `import anthropic
client = anthropic.Anthropic(
base_url="${origin}/v1",
api_key="${apiKey}",
)
message = client.messages.create(
model="${model}",
max_tokens=1024,
messages=[{"role": "user", "content": "Hello"}],
)
print(message.content[0].text)`,
    "anthropic-curl": `curl ${origin}/v1/messages \\
-H "Content-Type: application/json" \\
-H "x-api-key: ${apiKey}" \\
-H "anthropic-version: 2023-06-01" \\
-d '{
"model": "${model}",
"max_tokens": 1024,
"messages": [{"role": "user", "content": "Hello"}]
}'`,
    "anthropic-node": `import Anthropic from "@anthropic-ai/sdk";
const client = new Anthropic({
baseURL: "${origin}/v1",
apiKey: "${apiKey}",
});
const message = await client.messages.create({
model: "${model}",
max_tokens: 1024,
messages: [{ role: "user", content: "Hello" }],
});
console.log(message.content[0].text);`,
    "gemini-python": `from google import genai
client = genai.Client(
api_key="${apiKey}",
http_options={"base_url": "${origin}/v1beta"},
)
response = client.models.generate_content(
model="${model}",
contents="Hello",
)
print(response.text)`,
    "gemini-curl": `curl "${origin}/v1beta/models/${model}:generateContent?key=${apiKey}" \\
-H "Content-Type: application/json" \\
-d '{
"contents": [{"role": "user", "parts": [{"text": "Hello"}]}]
}'`,
    "gemini-node": `import { GoogleGenAI } from "@google/genai";
const ai = new GoogleGenAI({
apiKey: "${apiKey}",
httpOptions: { baseUrl: "${origin}/v1beta" },
});
const response = await ai.models.generateContent({
model: "${model}",
contents: "Hello",
});
console.log(response.text);`,
  };
}
/** Props for the CodeExamples integration-snippets panel. */
interface CodeExamplesProps {
/** OpenAI-protocol base URL, substituted verbatim into the OpenAI snippets. */
baseUrl: string;
/** API key rendered literally into every generated snippet. */
apiKey: string;
/** Bare model id; effort/tier suffixes may be appended for display. */
model: string;
/** Reasoning effort; "medium" is treated as the default and never shown. */
reasoningEffort: string;
/** Service tier; "fast" appends a "-fast" suffix to the model name. */
serviceTier: string | null;
}
/**
 * Integration-snippets panel: a row of protocol tabs (OpenAI / Anthropic /
 * Gemini) above language tabs (Python / Node.js / cURL), rendering the
 * matching generated snippet in a copyable code block.
 */
export function CodeExamples({ baseUrl, apiKey, model, reasoningEffort, serviceTier }: CodeExamplesProps) {
  const t = useT();
  const [protocol, setProtocol] = useState<Protocol>("openai");
  const [codeLang, setCodeLang] = useState<CodeLang>("python");
  const origin = typeof window !== "undefined" ? window.location.origin : "";

  // CLI users can encode effort/tier as model-name suffixes; build that form.
  const displayModel = useMemo(() => {
    const parts = [model];
    if (reasoningEffort && reasoningEffort !== "medium") parts.push(reasoningEffort);
    if (serviceTier === "fast") parts.push("fast");
    return parts.join("-");
  }, [model, reasoningEffort, serviceTier]);

  // Once a suffix is baked into the model name, suppress the separate
  // reasoning_effort parameter by reporting the default effort instead.
  let explicitEffort: string;
  if (displayModel === model) {
    explicitEffort = reasoningEffort;
  } else {
    explicitEffort = "medium";
  }

  const examples = useMemo(
    () => buildExamples(baseUrl, apiKey, displayModel, origin, explicitEffort),
    [baseUrl, apiKey, displayModel, origin, explicitEffort]
  );
  const currentCode = examples[`${protocol}-${codeLang}`] || "Loading...";
  const getCode = useCallback(() => currentCode, [currentCode]);

  // Tailwind class bundles for the two tab rows (active / inactive states).
  const tabOn =
    "px-6 py-3 text-[0.82rem] font-semibold text-primary border-b-2 border-primary bg-white dark:bg-card-dark transition-colors";
  const tabOff =
    "px-6 py-3 text-[0.82rem] font-medium text-slate-500 dark:text-text-dim hover:text-slate-700 dark:hover:text-text-main hover:bg-slate-50 dark:hover:bg-[#21262d] border-b-2 border-transparent transition-colors";
  const pillOn =
    "px-3 py-1.5 text-xs font-semibold rounded bg-white dark:bg-[#21262d] text-slate-800 dark:text-text-main shadow-sm border border-transparent dark:border-border-dark transition-all";
  const pillOff =
    "px-3 py-1.5 text-xs font-medium rounded text-slate-500 dark:text-text-dim hover:text-slate-700 dark:hover:text-text-main hover:bg-white/50 dark:hover:bg-[#21262d] border border-transparent transition-all";

  return (
    <section class="flex flex-col gap-4">
      <h2 class="text-[0.95rem] font-bold">{t("integrationExamples")}</h2>
      <div class="bg-white dark:bg-card-dark border border-gray-200 dark:border-border-dark rounded-xl overflow-hidden shadow-sm transition-colors">
        {/* Protocol Tabs */}
        <div class="flex border-b border-gray-200 dark:border-border-dark bg-slate-50/50 dark:bg-bg-dark/30">
          {protocols.map((proto) => (
            <button
              key={proto.id}
              onClick={() => setProtocol(proto.id)}
              class={proto.id === protocol ? tabOn : tabOff}
            >
              {proto.label}
            </button>
          ))}
        </div>
        {/* Language Tabs & Code */}
        <div class="p-5">
          <div class="flex items-center justify-between mb-4">
            <div class="flex gap-2 p-1 bg-slate-100 dark:bg-bg-dark dark:border dark:border-border-dark rounded-lg">
              {langs.map((lang) => (
                <button
                  key={lang.id}
                  onClick={() => setCodeLang(lang.id)}
                  class={lang.id === codeLang ? pillOn : pillOff}
                >
                  {lang.label}
                </button>
              ))}
            </div>
          </div>
          {/* Code Block */}
          <div class="relative group rounded-lg overflow-hidden bg-[#0d1117] text-slate-300 font-mono text-xs border border-slate-800 dark:border-border-dark">
            <div class="absolute right-2 top-2 z-10 opacity-0 group-hover:opacity-100 transition-opacity">
              <CopyButton getText={getCode} variant="label" />
            </div>
            <div class="p-4 overflow-x-auto">
              <pre class="m-0"><code>{currentCode}</code></pre>
            </div>
          </div>
        </div>
      </div>
    </section>
  );
}
|