jmlon committed on
Commit
f2a5478
·
1 Parent(s): 676ddb6

Implemented Groq

Browse files
Files changed (2) hide show
  1. app02-chatRag.py +15 -4
  2. modules.md +7 -1
app02-chatRag.py CHANGED
@@ -17,6 +17,9 @@ from langchain_community.embeddings import HuggingFaceEmbeddings
17
  # GeminiPro
18
  from langchain_google_genai import ChatGoogleGenerativeAI
19
 
 
 
 
20
  # Pinecone vector database
21
  from pinecone import Pinecone, ServerlessSpec
22
  from langchain_pinecone import PineconeVectorStore
@@ -26,10 +29,18 @@ setid = "global"
26
 
27
  embeddings = HuggingFaceEmbeddings(model_name=os.getenv("EMBEDDINGS_MODEL"))
28
 
 
29
  # model = ChatOpenAI(temperature=0.0)
30
- model = ChatGoogleGenerativeAI(
31
- model="gemini-pro", temperature=0.1, convert_system_message_to_human=True
32
- )
 
 
 
 
 
 
 
33
 
34
  pc = Pinecone(
35
  api_key=os.getenv("PINECONE_API_KEY")
@@ -81,7 +92,7 @@ def rag_query(question: str, history: list[list[str]]):
81
  | pipeLog
82
  | model
83
  )
84
- response = chain.invoke({ "chat_history": chat_history, "question": "Que es consenso?"})
85
  return response.content
86
 
87
 
 
17
  # GeminiPro
18
  from langchain_google_genai import ChatGoogleGenerativeAI
19
 
20
+ # Groq
21
+ from langchain_groq import ChatGroq
22
+
23
  # Pinecone vector database
24
  from pinecone import Pinecone, ServerlessSpec
25
  from langchain_pinecone import PineconeVectorStore
 
29
 
30
  embeddings = HuggingFaceEmbeddings(model_name=os.getenv("EMBEDDINGS_MODEL"))
31
 
32
+ # OpenAI
33
  # model = ChatOpenAI(temperature=0.0)
34
+
35
+ # Gemini
36
+ # model = ChatGoogleGenerativeAI(
37
+ # model="gemini-pro", temperature=0.1, convert_system_message_to_human=True
38
+ # )
39
+
40
+ # Groq
41
+ # llama2-70b-4096 (4k), mixtral-8x7b-32768 (32k)
42
+ model = ChatGroq(model_name='mixtral-8x7b-32768')
43
+
44
 
45
  pc = Pinecone(
46
  api_key=os.getenv("PINECONE_API_KEY")
 
92
  | pipeLog
93
  | model
94
  )
95
+ response = chain.invoke({ "chat_history": chat_history, "question": question })
96
  return response.content
97
 
98
 
modules.md CHANGED
@@ -1,5 +1,7 @@
1
  ```sh
2
 
 
 
3
  pip install \
4
  gradio \
5
  langchain langchain-community \
@@ -8,7 +10,11 @@ pip install \
8
  langchain-pinecone \
9
  huggingface_hub
10
 
11
- pip install ipykernel IProgress ipywidgets --upgrade
 
 
 
 
12
 
13
  # python-dotenv \
14
  # pinecone-client==2.2.4 \
 
1
  ```sh
2
 
3
+ pip install ipykernel IProgress ipywidgets --upgrade
4
+
5
  pip install \
6
  gradio \
7
  langchain langchain-community \
 
10
  langchain-pinecone \
11
  huggingface_hub
12
 
13
+ # Groq API
14
+ pip install langchain_groq
15
+
16
+
17
+
18
 
19
  # python-dotenv \
20
  # pinecone-client==2.2.4 \