cdupland committed on
Commit
9252b8e
·
2 Parent(s): 68aa090 9c5caa5

Merge branch 'main' of https://huggingface.co/spaces/bziiit/OpenData-Bordeaux-RSE into main

Browse files
.gitattributes CHANGED
@@ -34,3 +34,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
  DATA_bziiit/op.pdf filter=lfs diff=lfs merge=lfs -text
 
 
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
  DATA_bziiit/op.pdf filter=lfs diff=lfs merge=lfs -text
37
+ DATA_bziiit/vectorstore_op/index.faiss filter=lfs diff=lfs merge=lfs -text
.gitignore CHANGED
@@ -1,4 +1,4 @@
1
  __pycache__/
2
- .streamlit/secrets.toml
3
- .streamlit/.env
4
- DATA_bziiit/vectorstore_op
 
1
  __pycache__/
2
+ .streamlit
3
+ DATA_bziiit/vectorstore_op
4
+ .env
.streamlit/.env DELETED
@@ -1,4 +0,0 @@
1
- API_TOKEN_PERPLEXITYAI = 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
2
- OPENAI_API_KEY = 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
3
- FIRECRAWL_API_KEY = fc-381ecdb1175147aab5d2b48023961491
4
- MISTRAL_API_KEY = Hcm1IhfT38thMSaBIMt0t6L1SmrbgbXj
 
 
 
 
 
DATA_bziiit/vectorstore_op/index.faiss ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0d308f98f3798328b0c1efdfb5ab5ade0e8b7b93f7f32ee439b54aa060879067
3
+ size 1228845
DATA_bziiit/vectorstore_op/index.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7b492225278bd4ba23d11fe72fa16f8abd9a023babcc6734901740ba34fd0ba7
3
+ size 106874
chat_te.py CHANGED
@@ -20,12 +20,12 @@ def get_docs_from_pdf(file):
20
  return docs
21
 
22
  def get_doc_chunks(docs):
23
- text_splitter = SemanticChunker(OpenAIEmbeddings())
24
  chunks = text_splitter.split_documents(docs)
25
  return chunks
26
 
27
  def get_vectorstore_from_docs(doc_chunks):
28
- embedding = OpenAIEmbeddings()
29
  vectorstore = FAISS.from_documents(documents=doc_chunks, embedding=embedding)
30
  return vectorstore
31
 
@@ -47,7 +47,7 @@ def create_db(file):
47
  # docs = get_docs_from_pdf(file)
48
  # doc_chunks = get_doc_chunks(docs)
49
  # vectorstore = get_vectorstore_from_docs(doc_chunks)
50
- vectorstore = FAISS.load_local("index", OpenAIEmbeddings())
51
  return vectorstore
52
 
53
  def get_response(chain,user_query, chat_history):
@@ -81,7 +81,7 @@ def display_chat_te():
81
  AIMessage(content="Salut, posez-moi vos question sur la transistion ecologique."),
82
  ]
83
  if "chain" not in st.session_state:
84
- db=create_db("DATA_bziiit/op.pdf")
85
  chain = get_conversation_chain(db)
86
  st.session_state.chain = chain
87
 
 
20
  return docs
21
 
22
  def get_doc_chunks(docs):
23
+ text_splitter = SemanticChunker(OpenAIEmbeddings(model="text-embedding-3-large"))
24
  chunks = text_splitter.split_documents(docs)
25
  return chunks
26
 
27
  def get_vectorstore_from_docs(doc_chunks):
28
+ embedding = OpenAIEmbeddings(model="text-embedding-3-large")
29
  vectorstore = FAISS.from_documents(documents=doc_chunks, embedding=embedding)
30
  return vectorstore
31
 
 
47
  # docs = get_docs_from_pdf(file)
48
  # doc_chunks = get_doc_chunks(docs)
49
  # vectorstore = get_vectorstore_from_docs(doc_chunks)
50
+ vectorstore = FAISS.load_local(file, OpenAIEmbeddings(model="text-embedding-3-large"),allow_dangerous_deserialization= True)
51
  return vectorstore
52
 
53
  def get_response(chain,user_query, chat_history):
 
81
  AIMessage(content="Salut, posez-moi vos question sur la transistion ecologique."),
82
  ]
83
  if "chain" not in st.session_state:
84
+ db=create_db("./DATA_bziiit/vectorstore_op")
85
  chain = get_conversation_chain(db)
86
  st.session_state.chain = chain
87
 
chat_with_pps.py CHANGED
@@ -7,6 +7,7 @@ from langchain_core.prompts import ChatPromptTemplate
7
  from langchain_mistralai.chat_models import ChatMistralAI
8
  from download_chart import construct_plot
9
  from prompt import get_prompts_list
 
10
 
11
 
12
  load_dotenv()
@@ -87,6 +88,7 @@ def choose_model(index):
87
  def display_chat():
88
  # app config
89
  st.title("Chatbot")
 
90
 
91
  models_name = {
92
  "Mistral (FR)":1,
@@ -147,6 +149,7 @@ def display_chat():
147
  if isinstance(last_message, HumanMessage):
148
  with st.chat_message("AI"):
149
  response = st.write_stream(get_response(last_message.content, st.session_state.chat_history,format_context(st.session_state['pp_grouped'],st.session_state['Nom de la marque']),st.session_state.model))
 
150
  st.session_state.chat_history.append(AIMessage(content=response))
151
 
152
 
@@ -165,7 +168,8 @@ def display_chat():
165
  with st.chat_message("AI"):
166
  st.markdown(f"**{st.session_state.model}**")
167
  response = st.write_stream(get_response(user_query, st.session_state.chat_history,format_context(st.session_state['pp_grouped'],st.session_state['Nom de la marque']),st.session_state.model))
168
- if "cartographie des parties prenantes" in message.content:
 
169
  display_chart()
170
 
171
  st.session_state.chat_history.append(AIMessage(content=response))
 
7
  from langchain_mistralai.chat_models import ChatMistralAI
8
  from download_chart import construct_plot
9
  from prompt import get_prompts_list
10
+ from st_copy_to_clipboard import st_copy_to_clipboard
11
 
12
 
13
  load_dotenv()
 
88
  def display_chat():
89
  # app config
90
  st.title("Chatbot")
91
+
92
 
93
  models_name = {
94
  "Mistral (FR)":1,
 
149
  if isinstance(last_message, HumanMessage):
150
  with st.chat_message("AI"):
151
  response = st.write_stream(get_response(last_message.content, st.session_state.chat_history,format_context(st.session_state['pp_grouped'],st.session_state['Nom de la marque']),st.session_state.model))
152
+ st_copy_to_clipboard(response)
153
  st.session_state.chat_history.append(AIMessage(content=response))
154
 
155
 
 
168
  with st.chat_message("AI"):
169
  st.markdown(f"**{st.session_state.model}**")
170
  response = st.write_stream(get_response(user_query, st.session_state.chat_history,format_context(st.session_state['pp_grouped'],st.session_state['Nom de la marque']),st.session_state.model))
171
+ st_copy_to_clipboard(response)
172
+ if "cartographie" in response:
173
  display_chart()
174
 
175
  st.session_state.chat_history.append(AIMessage(content=response))
requirements.txt CHANGED
@@ -37,3 +37,4 @@ kaleido
37
  langchain-core
38
  langchain-mistralai
39
  firecrawl-py
 
 
37
  langchain-core
38
  langchain-mistralai
39
  firecrawl-py
40
+ st_copy_to_clipboard