Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -104,36 +104,6 @@ with gr.Blocks(title="CryptoSphere AI Agent") as interface:
|
|
| 104 |
)
|
| 105 |
|
| 106 |
interface.launch()
|
| 107 |
-
# Vector-store + embedding imports.
# FIX: HuggingFaceEmbeddings is imported from `langchain_community.embeddings`
# (consistent with the FAISS import above); the old `langchain.embeddings`
# path is deprecated and removed in current LangChain releases.
from langchain_community.vectorstores import FAISS
from langchain_community.embeddings import HuggingFaceEmbeddings

# Load the FAISS index once at startup so each query only pays for retrieval,
# not for re-building the embedder / index.
emb = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
# NOTE(security): allow_dangerous_deserialization unpickles the saved index.
# Acceptable only because "faiss_blockchain" is a locally produced artifact —
# never load an untrusted index this way.
db = FAISS.load_local("faiss_blockchain", emb, allow_dangerous_deserialization=True)
|
| 113 |
-
|
| 114 |
-
def blockchain_query(user_input):
    """Answer a blockchain question via RAG.

    Retrieves the three FAISS chunks most similar to *user_input*, embeds
    them in a prompt, and asks the chat model to answer from that context.

    Returns the model's reply string, or an "❌ Error: ..." string if any
    step fails (best-effort: the UI should never see an exception).
    """
    try:
        # Retrieval step: top-3 nearest chunks from the vector store.
        retrieved = db.similarity_search(user_input, k=3)
        context = "\n\n".join(doc.page_content for doc in retrieved)

        final_prompt = f"""
User question: {user_input}

Relevant blockchain context:
{context}

Answer using ONLY the context + your own understanding.
Be concise and technically correct.
"""

        # Generation step: single-turn completion grounded in the context.
        completion = client.chat.completions.create(
            model="llama-3.3-70b-versatile",
            messages=[{"role": "user", "content": final_prompt}],
        )
        return completion.choices[0].message.content

    except Exception as exc:  # surface any failure to the caller/UI as text
        return f"❌ Error: {exc}"
|
| 138 |
|
| 139 |
|
|
|
|
| 104 |
)
|
| 105 |
|
| 106 |
interface.launch()
|
|
|
|
|
|
|
| 107 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 108 |
|
| 109 |
|