Spaces:
Sleeping
Sleeping
Update models.py
Browse files
models.py
CHANGED
|
@@ -8,6 +8,7 @@ from llama_index.core import Settings
 from groq import Groq
 from mem0 import MemoryClient
 from config import config
+from langchain_openai import ChatOpenAI
 
 # Initialize embedding function for Chroma
 embedding_function = chromadb.utils.embedding_functions.OpenAIEmbeddingFunction(
@@ -24,7 +25,7 @@ embedding_model = OpenAIEmbeddings(
 )
 
 # Initialize Chat OpenAI model
-llm =
+llm =ChatOpenAI(
     openai_api_base=config.OPENAI_API_BASE,
     openai_api_key=config.API_KEY,
     model=config.CHAT_MODEL,