Psiska committed on
Commit
970c09c
·
1 Parent(s): 3d2943f

Deleted memory tool

Browse files
app.py CHANGED
@@ -2,8 +2,6 @@ import os, threading
2
  import gradio as gr
3
  from crew import run_crew
4
  from utils import get_questions
5
- from tools.memory_tools import memory_tool
6
-
7
 
8
 
9
  def ask(question, openai_api_key, gemini_api_key, anthropic_api_key, file_name = ""):
 
2
  import gradio as gr
3
  from crew import run_crew
4
  from utils import get_questions
 
 
5
 
6
 
7
  def ask(question, openai_api_key, gemini_api_key, anthropic_api_key, file_name = ""):
crew.py CHANGED
@@ -4,7 +4,6 @@
4
  # https://ai.google.dev/gemini-api/docs
5
 
6
  import os
7
- from tools.memory_tools import memory_tool
8
  from crewai import Agent, Crew, Task
9
  from crewai.agents.agent_builder.base_agent import BaseAgent
10
  from crewai.project import CrewBase, agent, crew, task
@@ -122,8 +121,7 @@ class GAIACrew():
122
  verbose=True
123
  ))
124
  return agents
125
-
126
-
127
  @task
128
  def manager_task(self) -> Task:
129
  # Build the Task object from your YAML
@@ -139,8 +137,6 @@ class GAIACrew():
139
 
140
  return task
141
 
142
-
143
-
144
  def get_crew(self) -> Crew:
145
  return Crew(
146
  agents=self.agents,
@@ -149,7 +145,6 @@ class GAIACrew():
149
  )
150
 
151
 
152
-
153
  def run_crew(question, file_path):
154
  # 0) Prepend file data if needed
155
  final_question = question
 
4
  # https://ai.google.dev/gemini-api/docs
5
 
6
  import os
 
7
  from crewai import Agent, Crew, Task
8
  from crewai.agents.agent_builder.base_agent import BaseAgent
9
  from crewai.project import CrewBase, agent, crew, task
 
121
  verbose=True
122
  ))
123
  return agents
124
+
 
125
  @task
126
  def manager_task(self) -> Task:
127
  # Build the Task object from your YAML
 
137
 
138
  return task
139
 
 
 
140
  def get_crew(self) -> Crew:
141
  return Crew(
142
  agents=self.agents,
 
145
  )
146
 
147
 
 
148
  def run_crew(question, file_path):
149
  # 0) Prepend file data if needed
150
  final_question = question
faiss_index/index.faiss DELETED
Binary file (30.8 kB)
 
faiss_index/index.pkl DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:04bd4a34c4fb5c05840d6d26afa952c49bad6fd9e93951096a45584c4ad69dbb
3
- size 893
 
 
 
 
tools/memory_tools.py DELETED
@@ -1,91 +0,0 @@
1
- # tools/memory_tools.py
2
-
3
- import os
4
- from crewai.tools import tool
5
- from langchain.schema import Document
6
- from langchain.embeddings import OpenAIEmbeddings
7
- from langchain_community.vectorstores import FAISS # faiss-cpu must be installed
8
-
9
- INDEX_DIR = "faiss_index"
10
- _embeddings = None # will hold our OpenAIEmbeddings
11
- _vectorstore = None # will hold our FAISS instance
12
-
13
- def _init_embeddings():
14
- global _embeddings
15
- if _embeddings is None:
16
- _embeddings = OpenAIEmbeddings(
17
- openai_api_key=os.environ.get("OPENAI_API_KEY", "")
18
- )
19
- return _embeddings
20
-
21
- def _load_or_build_index(doc: Document = None):
22
- """
23
- - If there's an in-memory index but the on-disk folder was removed, reset it.
24
- - If there's an in-memory index and the folder still exists, reuse it.
25
- - Else, if there's an on-disk index, load it.
26
- - Else, if a single `doc` is provided, create a new index from it.
27
- """
28
- global _vectorstore
29
- emb = _init_embeddings()
30
-
31
- # 1) If we had an in-memory index but the INDEX_DIR was deleted, clear it so we rebuild
32
- if _vectorstore is not None and not os.path.isdir(INDEX_DIR):
33
- _vectorstore = None
34
-
35
- # 2) If we now have an in-memory index, reuse it
36
- if _vectorstore is not None:
37
- return _vectorstore
38
-
39
- # 3) On‐disk index?
40
- if os.path.isdir(INDEX_DIR):
41
- _vectorstore = FAISS.load_local(
42
- INDEX_DIR, emb, allow_dangerous_deserialization=True
43
- )
44
- return _vectorstore
45
-
46
- # 4) No index yet, but we're saving the first doc
47
- if doc is not None:
48
- _vectorstore = FAISS.from_documents([doc], emb)
49
- _vectorstore.save_local(INDEX_DIR)
50
- return _vectorstore
51
-
52
- # 5) Otherwise, no index and no doc to build from
53
- return None
54
-
55
- @tool("Memory Tool")
56
- def memory_tool(action: str, text: str) -> str:
57
- """
58
- action: "save" to store the user message, or "load" to retrieve similar past messages.
59
- text: the message to save, or the query for load.
60
-
61
- Returns:
62
- - on "save": "Saved"
63
- - on "load": up to 3 similar messages joined by newline, or "" if none
64
- - otherwise: "Invalid action"
65
- """
66
- act = action.strip().lower()
67
-
68
- if act == "save":
69
- # Wrap the text in a Document
70
- doc = Document(page_content=text)
71
-
72
- # Build or load the index (if it's the first doc, we pass it here)
73
- vs = _load_or_build_index(doc)
74
-
75
- # If we already had an index, just add the new doc
76
- if vs and os.path.isdir(INDEX_DIR):
77
- vs.add_documents([doc])
78
- vs.save_local(INDEX_DIR)
79
-
80
- return "Saved"
81
-
82
- elif act == "load":
83
- vs = _load_or_build_index()
84
- if not vs:
85
- return "" # no history yet
86
-
87
- hits = vs.similarity_search(text, k=3)
88
- return "\n".join(d.page_content for d in hits)
89
-
90
- else:
91
- return "Invalid action"