Alexandre committed on
Commit
267a41a
·
1 Parent(s): c5229a1

initial commit

Browse files
Files changed (3) hide show
  1. .gitignore +1 -0
  2. app.py +33 -0
  3. requirements.txt +10 -0
.gitignore ADDED
@@ -0,0 +1 @@
 
 
1
+ __pycache__
app.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
"""Gradio chat demo streaming responses from the Mistral AI API."""
import os

import gradio as gr
from llama_index.llms.mistralai import MistralAI
from llama_index.core.llms import ChatMessage

# NOTE: the original `from llm import predict` was removed — `predict` is
# (re)defined below, and the module name `llm` is shadowed by the `llm`
# variable created further down, so that import was dead code.

# UI copy. The placeholder strings are shown to the user verbatim.
title = "Gaia Mistral Chat RAG PDF Demo"
description = "Example of an assistant with Gradio, RAG from PDF documents and Mistral AI via its API"
placeholder = (
    "Vous pouvez me posez une question sur ce contexte, appuyer sur Entrée pour valider"
)
placeholder_url = "Extract text from this url"
llm_model = "mistral-medium"

# SECURITY: the original commit hard-coded a Mistral API key here. A secret
# committed to a public repository must be treated as leaked and revoked;
# read the key from the environment instead.
env_api_key = os.environ.get("MISTRAL_API_KEY")
query_engine = None  # reserved for a future RAG query engine; unused here


llm = MistralAI(api_key=env_api_key, model=llm_model)


def predict(message, history):
    """Stream a chat completion for `message`, replaying prior turns.

    Parameters
    ----------
    message : str
        The new user message.
    history : list[tuple[str, str]]
        Gradio chat history as (user_message, assistant_reply) pairs.

    Yields
    ------
    str
        The cumulative partial assistant reply, one yield per streamed chunk.
    """
    messages = []
    # BUG FIX: the original loop body built bare tuples (trailing commas)
    # and never appended them, so the model received no history at all.
    # It also labelled assistant turns role="system"; use role="assistant".
    for human, assistant in history:
        messages.append(ChatMessage(role="user", content=human))
        messages.append(ChatMessage(role="assistant", content=assistant))

    messages.append(ChatMessage(role="user", content=message))
    response = llm.stream_chat(messages)
    partial_message = ""
    for chunk in response:
        partial_message = partial_message + chunk.delta
        yield partial_message


chat = gr.ChatInterface(predict).launch()
requirements.txt ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ pypdf
2
+ mistralai
3
+ llama-index
4
+ llama-index-llms-mistralai
5
+ chromadb
6
+ fastapi
7
+ uvicorn
8
+ gradio
9
+ gradio_client
10
+ starlette