updated1
Browse files
app.py
CHANGED
|
@@ -27,8 +27,12 @@ if initial_api_key:
|
|
| 27 |
client = AsyncOpenAI(api_key=initial_api_key)
|
| 28 |
|
| 29 |
# Predefined system prompts
|
|
|
|
|
|
|
|
|
|
|
|
|
| 30 |
SYSTEM_PROMPTS = {
|
| 31 |
-
|
| 32 |
"Creative Writer": "You are a creative writing assistant. Help users with storytelling, creative writing, poetry, and imaginative content. Be expressive and inspiring.",
|
| 33 |
"Code Expert": "You are a programming expert. Provide clear, well-commented code solutions, explain programming concepts, and help debug issues. Focus on best practices and clean code.",
|
| 34 |
"Academic Tutor": "You are an academic tutor. Explain complex concepts clearly, provide step-by-step solutions, and help students understand difficult topics across various subjects.",
|
|
@@ -93,7 +97,7 @@ async def chat_response_stream(message, history, api_key, model, temperature, ma
|
|
| 93 |
if client is None:
|
| 94 |
history.append({"role": "user", "content": message})
|
| 95 |
history.append({"role": "assistant", "content": "🔑 Please provide your OpenAI API key to start the conversation."})
|
| 96 |
-
|
| 97 |
|
| 98 |
# Update model
|
| 99 |
current_model = model
|
|
@@ -107,7 +111,7 @@ async def chat_response_stream(message, history, api_key, model, temperature, ma
|
|
| 107 |
if not message.strip():
|
| 108 |
history.append({"role": "user", "content": message})
|
| 109 |
history.append({"role": "assistant", "content": "⚠️ Please enter a message to continue our conversation."})
|
| 110 |
-
|
| 111 |
|
| 112 |
# Add user message to history
|
| 113 |
history.append({"role": "user", "content": message})
|
|
@@ -570,9 +574,9 @@ with gr.Blocks(css=custom_css, title="🤖 ChatGPT-like AI Assistant", theme=gr.
|
|
| 570 |
# Use async generator for streaming
|
| 571 |
return chat_response_stream(message, history, api_key, model, temperature, max_tokens, system_prompt_choice, custom_system_prompt)
|
| 572 |
|
|
|
|
| 573 |
def on_system_prompt_change(choice):
    """Show the custom system-prompt textbox only while "Custom" is selected.

    Returns a Gradio update toggling the component's visibility.
    """
    # Visibility tracks the dropdown selection directly — one expression
    # instead of an if/else pair.
    return gr.update(visible=(choice == "Custom"))
|
|
|
|
| 27 |
client = AsyncOpenAI(api_key=initial_api_key)
|
| 28 |
|
| 29 |
# Predefined system prompts
|
| 30 |
+
# Constants for system prompt choices
|
| 31 |
+
DEFAULT_ASSISTANT = "Default Assistant"
|
| 32 |
+
CUSTOM_PROMPT = "Custom"
|
| 33 |
+
|
| 34 |
SYSTEM_PROMPTS = {
|
| 35 |
+
DEFAULT_ASSISTANT: "You are a helpful, creative, and intelligent AI assistant. You provide accurate, detailed, and engaging responses while being friendly and professional.",
|
| 36 |
"Creative Writer": "You are a creative writing assistant. Help users with storytelling, creative writing, poetry, and imaginative content. Be expressive and inspiring.",
|
| 37 |
"Code Expert": "You are a programming expert. Provide clear, well-commented code solutions, explain programming concepts, and help debug issues. Focus on best practices and clean code.",
|
| 38 |
"Academic Tutor": "You are an academic tutor. Explain complex concepts clearly, provide step-by-step solutions, and help students understand difficult topics across various subjects.",
|
|
|
|
| 97 |
if client is None:
|
| 98 |
history.append({"role": "user", "content": message})
|
| 99 |
history.append({"role": "assistant", "content": "🔑 Please provide your OpenAI API key to start the conversation."})
|
| 100 |
+
yield history, ""
|
| 101 |
|
| 102 |
# Update model
|
| 103 |
current_model = model
|
|
|
|
| 111 |
if not message.strip():
|
| 112 |
history.append({"role": "user", "content": message})
|
| 113 |
history.append({"role": "assistant", "content": "⚠️ Please enter a message to continue our conversation."})
|
| 114 |
+
yield history, ""
|
| 115 |
|
| 116 |
# Add user message to history
|
| 117 |
history.append({"role": "user", "content": message})
|
|
|
|
| 574 |
# Use async generator for streaming
|
| 575 |
return chat_response_stream(message, history, api_key, model, temperature, max_tokens, system_prompt_choice, custom_system_prompt)
|
| 576 |
|
| 577 |
+
# In on_system_prompt_change function:
|
| 578 |
def on_system_prompt_change(choice):
    """Toggle visibility of the custom system-prompt input.

    The textbox is shown only while the CUSTOM_PROMPT option is the
    current dropdown choice; any other choice hides it.
    """
    is_custom = choice == CUSTOM_PROMPT
    return gr.update(visible=is_custom)
|