# Chainlit chatbot with document analysis (Gemini + Docling).
import os

import chainlit as cl
import google.generativeai as genai
from docling.document_converter import DocumentConverter
from dotenv import load_dotenv

# Load environment variables from a local .env file (expects GEMINI_API_KEY).
load_dotenv()

# Fail fast at import time if the API key is missing.
GOOGLE_API_KEY = os.getenv("GEMINI_API_KEY")
if not GOOGLE_API_KEY:
    raise ValueError("GEMINI_API_KEY not found in .env file")

genai.configure(api_key=GOOGLE_API_KEY)

# Shared module-level clients: one Gemini model and one Docling converter,
# reused across all chat sessions.
model = genai.GenerativeModel('gemini-1.5-flash-latest')
converter = DocumentConverter()
@cl.on_chat_start
async def start_chat():
    """Initialize a fresh chat session and greet the user.

    Fix: the pasted source lacked the ``@cl.on_chat_start`` decorator;
    without it Chainlit never invokes this handler.
    """
    # Start each session with no document loaded.
    cl.user_session.set("document_content", "")
    await cl.Message(
        content="Hello! I'm a chatbot with document analysis capabilities. "
        "Send me a URL starting with '/url' to analyze a document, "
        "then ask questions about it. Example: `/url https://example.com/document.pdf`"
    ).send()
@cl.on_message
async def main(message: cl.Message):
    """Route an incoming chat message.

    ``/url <link>`` converts the linked document with Docling and caches
    its markdown in the user session; any other message is answered by
    Gemini, with the cached document (if any) prepended as context.

    Fix: the pasted source lacked the ``@cl.on_message`` decorator, and
    sliced the command with ``[5:]`` even though "/url" is 4 characters —
    a message like ``/urlhttps://...`` would lose the URL's first char.
    """
    user_message = message.content

    # Handle URL input.
    if user_message.startswith("/url"):
        try:
            # "/url" is 4 chars; strip() absorbs any separating whitespace.
            url = user_message[4:].strip()
            msg = cl.Message(content=f"Processing document from {url}...")
            await msg.send()

            # Convert the remote document to markdown text.
            result = converter.convert(url)
            document_content = result.document.export_to_markdown()

            # Cache the document for follow-up questions in this session.
            cl.user_session.set("document_content", document_content)
            await cl.Message(
                content=f"✅ Document loaded successfully! "
                f"You can now ask questions about it."
            ).send()
            return
        except Exception as e:
            # Best-effort: surface conversion/network errors to the user.
            await cl.Message(
                content=f"❌ Error processing document: {str(e)}"
            ).send()
            return

    # Handle regular questions, grounding on the loaded document if present.
    document_content = cl.user_session.get("document_content")
    if document_content:
        # Combine document context with the question.
        prompt = f"Document content:\n{document_content}\n\nQuestion: {user_message}\nAnswer:"
    else:
        prompt = user_message

    # NOTE(review): generate_content is a blocking call inside an async
    # handler; consider cl.make_async or the async client for large prompts.
    response = model.generate_content(prompt)

    # Send the answer back to the chat.
    await cl.Message(
        content=response.text
    ).send()