Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -9,6 +9,11 @@ from chromadb.utils.embedding_functions import OpenAIEmbeddingFunction
|
|
| 9 |
client = OpenAI(api_key=os.getenv("OPENAI_KEY"))
|
| 10 |
pp = pprint.PrettyPrinter(indent=4)
|
| 11 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 12 |
def generate_response(messages):
|
| 13 |
model_name = os.getenv("MODEL_NAME")
|
| 14 |
response = client.chat.completions.create(model=model_name, messages=messages, temperature=0.5, max_tokens=250)
|
|
@@ -18,10 +23,11 @@ def generate_response(messages):
|
|
| 18 |
return response.choices[0].message
|
| 19 |
|
| 20 |
def chat_interface(user_input):
|
|
|
|
| 21 |
chroma_client = chromadb.Client()
|
| 22 |
embedding_function = OpenAIEmbeddingFunction(api_key=os.getenv("OPENAI_KEY"), model_name=os.getenv("EMBEDDING_MODEL"))
|
| 23 |
collection = chroma_client.create_collection(name="conversations", embedding_function=embedding_function)
|
| 24 |
-
|
| 25 |
messages = [{"role": "system", "content": "You are a kind and friendly chatbot"}]
|
| 26 |
results = collection.query(query_texts=[user_input], n_results=2)
|
| 27 |
for res in results['documents'][0]:
|
|
@@ -29,6 +35,19 @@ def chat_interface(user_input):
|
|
| 29 |
messages.append({"role": "user", "content": user_input})
|
| 30 |
|
| 31 |
response = generate_response(messages)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 32 |
return response
|
| 33 |
|
| 34 |
def main():
|
|
|
|
| 9 |
# OpenAI client for chat completions; the key is read from the OPENAI_KEY env
# var (note: not the SDK's conventional OPENAI_API_KEY — confirm deployment).
client = OpenAI(api_key=os.getenv("OPENAI_KEY"))
# Pretty-printer; not used in the visible code — presumably for debugging elsewhere.
pp = pprint.PrettyPrinter(indent=4)

# Module-level conversation state, mutated in place by chat_interface():
current_id = 0      # monotonically increasing counter used to mint unique history ids
chat_history = []   # message texts, user and assistant turns interleaved
chat_metadata = []  # parallel list of {"role": ...} dicts, one per chat_history entry
history_ids = []    # parallel list of "id_<n>" strings handed to the Chroma collection
|
| 17 |
def generate_response(messages):
    """Send *messages* to the OpenAI chat-completions API and return the reply message.

    NOTE(review): this text is a scraped diff view that omits the original
    file's lines 20-22 between the API call and the return statement, so part
    of this function's body is not shown here.
    """
    # Model is configured via the MODEL_NAME env var; if unset this is None and
    # the API call below would fail — confirm the deployment always sets it.
    model_name = os.getenv("MODEL_NAME")
    # Short, moderately deterministic replies: temperature 0.5, capped at 250 tokens.
    response = client.chat.completions.create(model=model_name, messages=messages, temperature=0.5, max_tokens=250)
    # ... (original lines 20-22 elided in the diff view) ...
    return response.choices[0].message
|
| 24 |
|
| 25 |
def chat_interface(user_input):
    """Answer *user_input* using retrieved prior turns as context, then record the exchange.

    Returns the assistant message object produced by generate_response().

    NOTE(review): this text is a scraped diff view that omits the original
    file's line 34 (the body of the retrieval loop below), so that statement
    cannot be shown here.
    """
    global current_id
    # NOTE(review): a brand-new in-memory Chroma client and collection are built
    # on EVERY call, so the query below can only ever match what this same call
    # adds from the module-level lists — retrieval across calls does not work as
    # apparently intended; a persistent client/get_or_create_collection would.
    chroma_client = chromadb.Client()
    embedding_function = OpenAIEmbeddingFunction(api_key=os.getenv("OPENAI_KEY"), model_name=os.getenv("EMBEDDING_MODEL"))
    collection = chroma_client.create_collection(name="conversations", embedding_function=embedding_function)

    # Conversation sent to the model: system prompt + retrieved context + new input.
    messages = [{"role": "system", "content": "You are a kind and friendly chatbot"}]
    # Fetch the two stored turns most similar to the new input.
    results = collection.query(query_texts=[user_input], n_results=2)
    for res in results['documents'][0]:
        # ... (loop body elided in the diff view; presumably appends res to messages) ...
    messages.append({"role": "user", "content": user_input})

    response = generate_response(messages)
    # Record both sides of the exchange in the module-level parallel lists.
    chat_metadata.append({"role":"user"})
    # BUG(review): `input_text` is undefined in this scope — the parameter is
    # `user_input` — so this line raises NameError at runtime.
    chat_history.append(input_text)
    chat_metadata.append({"role":"assistant"})
    chat_history.append(response.content)
    # Mint one fresh id for the user turn and one for the assistant turn.
    current_id += 1
    history_ids.append(f"id_{current_id}")
    current_id += 1
    history_ids.append(f"id_{current_id}")
    # NOTE(review): the FULL history lists are re-added each call; against a
    # persistent collection this would re-insert previously stored ids.
    collection.add(
        documents=chat_history,
        metadatas=chat_metadata,
        ids=history_ids
    )
    return response
|
| 52 |
|
| 53 |
def main():
|