Spaces:
Build error
Build error
Update app.py
Browse files
app.py
CHANGED
|
@@ -1,74 +1,78 @@
|
|
|
|
|
|
|
|
| 1 |
import requests
|
| 2 |
import gradio as gr
|
| 3 |
|
| 4 |
-
# Azure OpenAI
|
| 5 |
-
OPENAI_ENDPOINT = "https://rag-openai-service-ak.openai.azure.com/openai/deployments/gpt-4-rag-ak/completions?api-version=2023-05-15"
|
| 6 |
OPENAI_KEY = "B1XyCaz87o456EVD949oODcGC8KTAEQsNLI7Yq5cnYKk41SMY9PtJQQJ99AKACHYHv6XJ3w3AAABACOGAaCZ"
|
|
|
|
| 7 |
|
| 8 |
-
#
|
| 9 |
-
SEARCH_ENDPOINT = "https://rag-search-service-ak.search.windows.net"
|
| 10 |
-
SEARCH_KEY = "Kq2Ww1XBwGCvV4JXTMvWT6qo1O9HprGo74elTSNYHiAzSeDETx4y"
|
| 11 |
-
SEARCH_INDEX = "rag-index-ak"
|
| 12 |
-
|
| 13 |
def query_openai(prompt):
|
| 14 |
headers = {
|
| 15 |
"Content-Type": "application/json",
|
| 16 |
-
"
|
| 17 |
}
|
| 18 |
payload = {
|
| 19 |
"prompt": prompt,
|
| 20 |
-
"max_tokens":
|
| 21 |
-
"temperature": 0.7
|
| 22 |
-
"stop": "\n"
|
| 23 |
-
}
|
| 24 |
-
response = requests.post(OPENAI_ENDPOINT, headers=headers, json=payload)
|
| 25 |
-
response.raise_for_status()
|
| 26 |
-
return response.json()["choices"][0]["text"]
|
| 27 |
-
|
| 28 |
-
def query_search_service(query):
|
| 29 |
-
url = f"{SEARCH_ENDPOINT}/indexes/{SEARCH_INDEX}/docs/search?api-version=2021-04-30-Preview"
|
| 30 |
-
headers = {
|
| 31 |
-
"Content-Type": "application/json",
|
| 32 |
-
"api-key": SEARCH_KEY
|
| 33 |
-
}
|
| 34 |
-
payload = {
|
| 35 |
-
"search": query,
|
| 36 |
-
"top": 3
|
| 37 |
}
|
| 38 |
-
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 42 |
|
|
|
|
| 43 |
def process_query(question):
|
| 44 |
-
|
| 45 |
-
|
| 46 |
-
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
|
| 50 |
-
# Query the OpenAI model
|
| 51 |
-
return query_openai(full_prompt)
|
| 52 |
|
|
|
|
| 53 |
def chatbot_ui(predefined_question, custom_question):
|
| 54 |
-
question = predefined_question if
|
|
|
|
|
|
|
| 55 |
return process_query(question)
|
| 56 |
|
| 57 |
-
#
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 58 |
with gr.Blocks() as demo:
|
| 59 |
gr.Markdown("# Azure-Powered RAG Chatbot")
|
| 60 |
-
|
| 61 |
with gr.Row():
|
| 62 |
predefined_question = gr.Dropdown(
|
| 63 |
-
|
| 64 |
-
label="Select a predefined question"
|
|
|
|
| 65 |
)
|
| 66 |
-
custom_question = gr.Textbox(
|
| 67 |
-
|
| 68 |
-
|
| 69 |
-
|
| 70 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 71 |
|
| 72 |
-
|
| 73 |
|
| 74 |
-
demo.launch()
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import time
|
| 3 |
import requests
|
| 4 |
import gradio as gr
|
| 5 |
|
| 6 |
+
# Azure OpenAI Configuration
# SECURITY(review): this API key is a hard-coded secret committed to the repo
# and must be rotated. It is kept only as a fallback so existing deployments
# keep working; set OPENAI_KEY / OPENAI_ENDPOINT in the environment instead.
OPENAI_KEY = os.environ.get(
    "OPENAI_KEY",
    "B1XyCaz87o456EVD949oODcGC8KTAEQsNLI7Yq5cnYKk41SMY9PtJQQJ99AKACHYHv6XJ3w3AAABACOGAaCZ",
)
# Completions URL for the gpt-4-rag-ak deployment (api-version pinned).
OPENAI_ENDPOINT = os.environ.get(
    "OPENAI_ENDPOINT",
    "https://rag-openai-service-ak.openai.azure.com/openai/deployments/gpt-4-rag-ak/completions?api-version=2023-05-15",
)
|
| 9 |
|
| 10 |
+
# Retry mechanism for handling rate limits
def query_openai(prompt):
    """Send *prompt* to the Azure OpenAI completions endpoint and return the
    parsed JSON response body.

    Retries with exponential backoff (2, 4, 8, 16, 32 s) when the service
    answers HTTP 429. Raises requests.HTTPError for any other error status,
    and Exception after five consecutive rate-limit responses.
    """
    headers = {
        "Content-Type": "application/json",
        # FIX: Azure OpenAI key auth uses the `api-key` header. The previous
        # `Authorization: Bearer <key>` form is only valid for Azure AD
        # tokens and gets 401 with a plain API key.
        "api-key": OPENAI_KEY,
    }
    payload = {
        "prompt": prompt,
        "max_tokens": 100,
        "temperature": 0.7,
    }
    max_retries = 5
    # Attempt 1..5; sleep grows as 2**attempt after each 429 (matches the
    # original while-loop: 5 requests max, backoff after every 429).
    for attempt in range(1, max_retries + 1):
        # timeout added so a stalled connection cannot hang the app forever
        # (requests has no default timeout).
        response = requests.post(
            OPENAI_ENDPOINT, headers=headers, json=payload, timeout=30
        )
        if response.status_code != 429:
            response.raise_for_status()
            return response.json()
        print(f"Rate limit hit. Retrying in {2 ** attempt} seconds...")
        time.sleep(2 ** attempt)
    raise Exception("Exceeded maximum retries due to rate limits.")
|
| 33 |
|
| 34 |
+
# Process user queries
def process_query(question):
    """Wrap *question* in a RAG prompt, query the model, and extract the text.

    Any failure (network error, rate-limit exhaustion, malformed or empty
    response) is converted into an ``"Error: ..."`` string so the UI never
    sees a raised exception.
    """
    prompt = f"Answer the following question based on the documents: {question}"
    try:
        result = query_openai(prompt)
        first_choice = result.get("choices", [{}])[0]
        return first_choice.get("text", "No response available.")
    except Exception as exc:
        return f"Error: {str(exc)}"
|
|
|
|
|
|
|
| 42 |
|
| 43 |
+
# Gradio UI
def chatbot_ui(predefined_question, custom_question):
    """Resolve which question to answer and dispatch it to the RAG pipeline.

    The dropdown selection takes precedence over the free-text box; when
    both are empty (or None), a prompt asking for input is returned instead
    of querying the backend.
    """
    question = predefined_question or custom_question
    if question:
        return process_query(question)
    return "Please provide a question to proceed."
|
| 49 |
|
| 50 |
+
# Predefined questions
# Canned prompts shown in the dropdown so users can try the bot without
# typing; the free-text box remains available as an alternative.
predefined_questions = [
    "What are the career opportunities in AI?",
    "What is the CPT start date?",
    "Explain the MEng Handbook policies.",
]
|
| 56 |
+
|
| 57 |
+
# Gradio Interface
# Builds the chat page: a dropdown of canned questions, a free-text box,
# a submit button, and a read-only response box wired to chatbot_ui.
# NOTE(review): nesting below is reconstructed from a diff view; the two
# question inputs are assumed to share the Row — confirm intended layout.
with gr.Blocks() as demo:
    gr.Markdown("# Azure-Powered RAG Chatbot")
    with gr.Row():
        # Canned prompts from predefined_questions; interactive so the
        # user can change the selection.
        predefined_question = gr.Dropdown(
            choices=predefined_questions,
            label="Select a predefined question",
            interactive=True
        )
        # Free-form alternative to the dropdown.
        custom_question = gr.Textbox(
            label="Or type your own question",
            placeholder="Type your custom question here...",
        )
    submit_btn = gr.Button("Submit")
    # Output box; interactive=False makes it read-only for the user.
    chatbot_response = gr.Textbox(
        label="Chatbot Response",
        interactive=False
    )

    # Wire the button: both inputs are passed to chatbot_ui, its return
    # value fills the response box.
    submit_btn.click(chatbot_ui, inputs=[predefined_question, custom_question], outputs=chatbot_response)

# Start the Gradio server (blocking call).
demo.launch()
|