"""Gradio chat demo: RAG over user-uploaded files, answered via LlamaIndex.

Flow: the user uploads .pdf/.txt/.html files in a multimodal chat box; each
question triggers a fresh LlamaIndex vector index over every file seen so far
in the conversation, and the query engine's answer is returned as text.
"""

import os
import sys

import anthropic
import gradio as gr
from dotenv import load_dotenv
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader

# Pull ANTHROPIC_API_KEY (and friends) from a local .env file into os.environ.
load_dotenv()

title = "Gaia Anthropic Claude 3.5 Sonnet Chat RAG URL Demo"
description = "Example of an assistant with Gradio, RAG from url and Claude 3.5 Sonnet via its API"
placeholder = "Vous pouvez me posez une question sur ce contexte, appuyer sur Entrée pour valider"
placeholder_url = "Extract text from this url"

llm_model = 'claude-3-5-sonnet-latest'

# API key comes from the environment (.env); used when wiring up the LLM client.
env_api_key = os.environ.get("ANTHROPIC_API_KEY")


def answer(message, history):
    """Answer a chat question by querying a RAG index over all uploaded files.

    Args:
        message: Gradio multimodal message dict with "text" and "files" keys.
        history: list of prior messages; file uploads appear as user messages
            whose content is a tuple of file paths.

    Returns:
        str: the query engine's answer, or a hint when no file is uploaded yet.
    """
    files = []
    # Gather files uploaded earlier in the conversation...
    for msg in history:
        if msg['role'] == "user" and isinstance(msg['content'], tuple):
            files.append(msg['content'][0])
    # ...plus any files attached to the current message.
    files.extend(message["files"])

    # SimpleDirectoryReader raises ValueError on an empty input_files list;
    # fail gracefully with a user-facing hint instead of crashing the chat.
    if not files:
        return "Please upload a .pdf, .txt or .html file before asking a question."

    documents = SimpleDirectoryReader(input_files=files).load_data()
    # NOTE: the index is rebuilt from scratch on every question — fine for a
    # demo; cache it per session if document sets grow.
    index = VectorStoreIndex.from_documents(documents)
    query_engine = index.as_query_engine()
    return str(query_engine.query(message["text"]))


# Use the module-level title/description/placeholder constants directly:
# the original passed a different hard-coded title and then immediately
# overwrote it with `demo.title = title`, and left description/placeholder unused.
demo = gr.ChatInterface(
    answer,
    type="messages",
    title=title,
    description=description,
    textbox=gr.MultimodalTextbox(
        placeholder=placeholder,
        file_types=[".pdf", ".txt", ".html"],
    ),
    multimodal=True,
)

if __name__ == "__main__":
    # share=True exposes a temporary public Gradio link.
    demo.launch(share=True)