# Hugging Face Space: Azure OpenAI + Redis Cloud chat demo (page status: Sleeping)
# Standard library.
import os

# Third-party: UI, cache, and LLM client.
import gradio as gr
import redis
from openai import AzureOpenAI
# Redis Cloud connection — the password is stored as a Hugging Face secret
# (REDIS_PASSWORD), everything else is fixed for this managed instance.
_REDIS_CONFIG = {
    "host": "redis-12628.c14.us-east-1-2.ec2.redns.redis-cloud.com",
    "port": 12628,
    "decode_responses": True,  # return str instead of bytes from GET
    "username": "default",
    "password": os.getenv("REDIS_PASSWORD"),
}
redis_client = redis.Redis(**_REDIS_CONFIG)
# Azure OpenAI client.
# .strip() guards against stray whitespace/newlines pasted into the HF secrets.
# The `or ""` fallback fixes a crash: os.getenv() returns None when the secret
# is missing, and None.strip() raised AttributeError with no useful message —
# an empty string lets the SDK report the missing credential clearly instead.
client = AzureOpenAI(
    api_key=(os.getenv("AZURE_OPENAI_API_KEY") or "").strip(),
    api_version="2025-01-01-preview",
    azure_endpoint=(os.getenv("AZURE_OPENAI_ENDPOINT") or "").strip(),
)

# Azure deployment name passed as `model=` to chat.completions.create().
# BUG FIX: the original value was " gpt-4.1" with a leading space — not a
# valid deployment name, so every completion call would fail with a
# DeploymentNotFound/404 error.
DEPLOYMENT_NAME = "gpt-4.1"
def chat_with_ai(user_input):
    """Answer *user_input*, serving from the Redis cache when possible.

    The raw user text is used directly as the Redis key. Cache hits are
    returned with a "[From Redis]" prefix; fresh completions are returned
    with "[From OpenAI]" and cached for one hour.

    Args:
        user_input: The user's message (str, may be None/empty).

    Returns:
        A prefixed reply string, or a prompt to type something when the
        input is empty or whitespace-only.
    """
    # BUG FIX: the original `if not user_input` let whitespace-only input
    # (e.g. "   ") through to a billed API call.
    if not user_input or not user_input.strip():
        return "Please type something."

    # Check Redis first — a hit skips the slower, billed OpenAI call.
    cached = redis_client.get(user_input)
    if cached:
        return f"[From Redis] {cached}"

    # Otherwise query Azure OpenAI.
    response = client.chat.completions.create(
        model=DEPLOYMENT_NAME,
        messages=[{"role": "user", "content": user_input}],
        max_tokens=150,
    )
    # BUG FIX: message.content can be None (e.g. content filter), which made
    # .strip() raise AttributeError; fall back to "" before stripping.
    output = (response.choices[0].message.content or "").strip()

    # Cache the answer for an hour (setex TTL is in seconds).
    redis_client.setex(user_input, 3600, output)
    return f"[From OpenAI] {output}"
# --- Gradio UI ----------------------------------------------------------
with gr.Blocks(title="Azure OpenAI + Redis Cloud Chat") as demo:
    gr.Markdown("# 💬 Azure OpenAI + Redis Cloud Demo")

    # Chat history on top; input row (textbox + send button) below it.
    with gr.Row():
        chatbot = gr.Chatbot()
    with gr.Row():
        msg = gr.Textbox(placeholder="Type your message here...")
        send = gr.Button("Send")

    def respond(message, history):
        """Append the (user, bot) exchange to the history and clear the box."""
        reply = chat_with_ai(message)
        history.append((message, reply))
        return history, ""

    # Both clicking Send and pressing Enter in the textbox submit a message.
    for _trigger in (send.click, msg.submit):
        _trigger(respond, [msg, chatbot], [chatbot, msg])
if __name__ == "__main__":
    # 0.0.0.0:7860 makes the app reachable from outside the HF container.
    demo.launch(
        debug=True,
        server_name="0.0.0.0",
        server_port=7860,
        pwa=True,
    )