# Snapshot of a Hugging Face Space app file (commit 950a339, 4,677 bytes).
# `import GPTDriveIntegration` binds the MODULE object; calling it on the next
# line would raise "TypeError: 'module' object is not callable". Import the
# class of the same name from the module instead.
from GPTDriveIntegration import GPTDriveIntegration

import gradio as gr

# Single shared backend instance used by all request handlers below.
gpt_drive = GPTDriveIntegration()
def process_user_query(query, search_terms_input):
    """Answer a user question via the GPT + Google Drive backend.

    Parameters
    ----------
    query : str
        The user's question. Blank/whitespace-only input short-circuits
        with a prompt to enter a question.
    search_terms_input : str
        Optional comma-separated terms used to narrow the Drive file search.

    Returns
    -------
    tuple[str, str]
        (answer text, markdown-formatted "Sources used" list — empty string
        when the backend reports no sources).
    """
    if not query.strip():
        return "Please enter a question.", ""

    # Bug fix: the search-term parsing was commented out, so the UI's
    # "Search Terms" field was silently ignored. Re-enable it, dropping
    # empty entries produced by stray commas.
    search_terms = None
    if search_terms_input and search_terms_input.strip():
        search_terms = [
            term.strip()
            for term in search_terms_input.split(",")
            if term.strip()
        ]

    result = gpt_drive.process_query(query, search_terms)

    answer = result["answer"]
    sources = result["sources"]

    # Restore the bullet glyph (original "β’" was mojibake for "•").
    sources_text = ""
    if sources:
        sources_text = "**Sources used:**\n" + "\n".join(
            f"• {source}" for source in sources
        )
    return answer, sources_text
def check_setup():
    """Report the connection status of the Google Drive and OpenAI APIs.

    Reads the module-level ``gpt_drive`` instance's ``drive_initialized`` /
    ``openai_initialized`` flags and, on failure, its ``drive_error`` /
    ``openai_error`` attributes (falling back to "Not configured").

    Returns
    -------
    str
        One status line per API, joined with newlines. The ✅/❌ glyphs
        restore the original intent (the source was mojibake-garbled,
        splitting string literals across lines).
    """
    status_messages = []

    # Google Drive API status.
    if gpt_drive.drive_initialized:
        status_messages.append("✅ Google Drive API: Connected")
    else:
        status_messages.append(
            f"❌ Google Drive API: {getattr(gpt_drive, 'drive_error', 'Not configured')}"
        )

    # OpenAI API status.
    if gpt_drive.openai_initialized:
        status_messages.append("✅ OpenAI API: Connected")
    else:
        status_messages.append(
            f"❌ OpenAI API: {getattr(gpt_drive, 'openai_error', 'Not configured')}"
        )

    return "\n".join(status_messages)
# ---------------------------------------------------------------------------
# Gradio interface. Layout: a 2/3-width column for the query + answer panels
# and a 1/3-width column for status and setup instructions.
# ---------------------------------------------------------------------------
with gr.Blocks(title="Augusta's Anatomy Reading Assistant", theme=gr.themes.Soft()) as app:
    gr.Markdown("# 🤖 Augusta's Anatomy bot")  # original "π€" was mojibake for 🤖
    gr.Markdown("Ask questions about your anatomy books using AI!")

    with gr.Row():
        with gr.Column(scale=2):
            # Main query interface.
            with gr.Group():
                gr.Markdown("### Ask a Question")
                query_input = gr.Textbox(
                    label="Your Question",
                    placeholder="Ask me any question about your anatomy books?",
                    lines=3,
                )
                search_terms_input = gr.Textbox(
                    label="Search Terms (optional)",
                    placeholder="Enter comma-separated terms to search for specific files",
                    lines=1,
                )
                submit_btn = gr.Button("Search & Ask", variant="primary", size="lg")

            # Results section.
            with gr.Group():
                gr.Markdown("### Answer")
                answer_output = gr.Textbox(
                    label="AI Response",
                    lines=10,
                    interactive=False,
                )
                sources_output = gr.Textbox(
                    label="Sources",
                    lines=3,
                    interactive=False,
                )

        with gr.Column(scale=1):
            # Status and setup info.
            with gr.Group():
                gr.Markdown("### System Status")
                status_btn = gr.Button("Check Status", size="sm")
                status_output = gr.Textbox(
                    label="API Status",
                    lines=4,
                    interactive=False,
                )

            with gr.Group():
                gr.Markdown("### Setup Instructions")
                # Blank lines between sections are required for markdown to
                # render the headings and bullet lists correctly.
                gr.Markdown(
                    """
**Important Notes:**

1. The assistant can only answer from documents that have been shared with it.

**File Types Supported:**
- Google Docs
- Google Sheets
- PDF files
- Text files

**Tips:**
- Use specific search terms for better results
- The system searches the top 3 most relevant files
- Ask clear, specific questions for better answers
"""
                )

    # Event handlers.
    submit_btn.click(
        fn=process_user_query,
        inputs=[query_input, search_terms_input],
        outputs=[answer_output, sources_output],
    )
    status_btn.click(
        fn=check_setup,
        outputs=status_output,
    )

    # Example queries users can click to pre-fill the inputs.
    with gr.Row():
        gr.Examples(
            examples=[
                ["What is morbid Anatomy?", "morbid, Anatomy"],
                [
                    "The transmission of nerves from one neuron to another is as a result of what?",
                    "neuron, nerves, Dr Clement",
                ],
            ],
            inputs=[query_input, search_terms_input],
        )
# Script entry point: launch the app only when run directly, not on import.
if __name__ == "__main__":
    # share=True exposes a temporary public Gradio URL; debug=True surfaces
    # tracebacks in the browser. (Removed trailing scrape junk "|" and fixed
    # the malformed "debug =True" spacing.)
    app.launch(share=True, debug=True)