| | import gradio as gr |
| | import requests |
| | import json |
| |
|
| |
|
| | def _normalize_endpoint(endpoint: str) -> str: |
| | endpoint = (endpoint or "").strip() |
| | return endpoint[:-1] if endpoint.endswith("/") else endpoint |
| |
|
| |
|
def _azure_chat_completions(endpoint, api_key, model, messages, temperature=0.7, max_tokens=600, timeout=60):
    """POST a chat-completions request to an Azure OpenAI endpoint.

    Returns the decoded JSON response body on success; raises
    ``requests.HTTPError`` (via ``raise_for_status``) on non-2xx responses.
    """
    base = _normalize_endpoint(endpoint)
    url = f"{base}/openai/v1/chat/completions"

    request_headers = {
        "Content-Type": "application/json",
        "api-key": api_key.strip(),
    }

    body = {
        "model": model.strip(),
        "messages": messages,
        "temperature": float(temperature),
        "max_tokens": int(max_tokens),
    }

    response = requests.post(url, headers=request_headers, json=body, timeout=timeout)
    response.raise_for_status()
    return response.json()
def handle_message(message, history, endpoint, api_key, model, system_prompt, temperature, max_tokens):
    """Gradio chat handler: rebuild the conversation and call Azure OpenAI.

    Returns either the assistant's reply or a human-readable error string
    (ChatInterface renders whatever string comes back).
    """
    # Validate credentials up front — no point building a payload we can't send.
    if not endpoint or not api_key or not model:
        return "Please provide Endpoint, API Key, and Model."

    messages = []
    if system_prompt and system_prompt.strip():
        messages.append({"role": "system", "content": system_prompt.strip()})

    # Replay prior turns. Support both history shapes Gradio may supply:
    # (user, assistant) tuples and openai-style {"role", "content"} dicts.
    for turn in history or []:
        if isinstance(turn, dict):
            if turn.get("content"):
                messages.append({"role": turn.get("role", "user"), "content": turn["content"]})
        else:
            user_text, assistant_text = turn
            if user_text:
                messages.append({"role": "user", "content": user_text})
            if assistant_text:
                messages.append({"role": "assistant", "content": assistant_text})

    messages.append({"role": "user", "content": message})

    try:
        data = _azure_chat_completions(
            endpoint=endpoint,
            api_key=api_key,
            model=model,
            messages=messages,
            temperature=temperature,
            max_tokens=max_tokens,
        )
        return data["choices"][0]["message"]["content"]

    except requests.HTTPError as e:
        # Surface the service's JSON error body when available; fall back
        # to the bare exception text otherwise.
        try:
            err_json = e.response.json()
            return f"HTTP {e.response.status_code}: {json.dumps(err_json, indent=2)}"
        except Exception:
            return f"HTTP error: {str(e)}"

    except Exception as e:
        return f"Error: {str(e)}"
# Wire the handler into a Gradio chat UI; the extra inputs map 1:1 onto
# handle_message's (endpoint, api_key, model, system_prompt, temperature,
# max_tokens) parameters, in order.
demo = gr.ChatInterface(
    fn=handle_message,
    additional_inputs=[
        gr.Textbox(label="Azure OpenAI Endpoint", placeholder="https://ai-hubXXXX.openai.azure.com"),
        gr.Textbox(label="API Key", placeholder="paste your key here", type="password"),
        gr.Textbox(label="Model (or deployment name)", placeholder="e.g., gpt-4o-mini"),
        gr.Textbox(label="System prompt (optional)", value="You are a helpful assistant."),
        gr.Slider(0.0, 1.5, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(50, 2000, value=600, step=50, label="Max tokens"),
    ],
    title="Azure OpenAI Chat Completion",
    description="Enter your Azure OpenAI endpoint + key, then chat.",
)

# Only start the web server when executed as a script, so the module can be
# imported (e.g. for testing) without the launch side effect.
if __name__ == "__main__":
    demo.launch()