Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -3,7 +3,6 @@ from huggingface_hub import InferenceClient
|
|
| 3 |
import torch
|
| 4 |
from sentence_transformers import SentenceTransformer
|
| 5 |
|
| 6 |
-
|
| 7 |
client = InferenceClient("microsoft/phi-4")
|
| 8 |
|
| 9 |
#Loading the bio spec txt file
|
|
@@ -27,7 +26,7 @@ def preprocess_text(text):
|
|
| 27 |
bio_chunks = preprocess_text(bio_spec_text)
|
| 28 |
|
| 29 |
#Loading sentence transformer model and then embedding the chunks (idrk it was on colab)
|
| 30 |
-
embedding_model =
|
| 31 |
|
| 32 |
chunk_embeddings = embedding_model.encode(bio_chunks, convert_to_tensor=True)
|
| 33 |
|
|
@@ -56,13 +55,12 @@ def respond(message, history):
|
|
| 56 |
global chosen_topic
|
| 57 |
|
| 58 |
#Getting the relevant parts from the txt file
|
| 59 |
-
|
| 60 |
relevant_chunks = get_top_chunks(message, chunk_embeddings, bio_chunks, top_k=4)
|
| 61 |
spec_content = "\n".join(relevant_chunks)
|
| 62 |
|
| 63 |
system_prompt = (
|
| 64 |
f"You are a friendly GCSE Biology tutor focusing on **{chosen_topic}**.\n"
|
| 65 |
-
f"Use the following specification excerpts to answer:\n{
|
| 66 |
)
|
| 67 |
|
| 68 |
messages = [{"role": "system", "content": system_prompt}]
|
|
|
|
| 3 |
import torch
|
| 4 |
from sentence_transformers import SentenceTransformer
|
| 5 |
|
|
|
|
| 6 |
client = InferenceClient("microsoft/phi-4")
|
| 7 |
|
| 8 |
#Loading the bio spec txt file
|
|
|
|
| 26 |
bio_chunks = preprocess_text(bio_spec_text)
|
| 27 |
|
| 28 |
#Loading sentence transformer model and then embedding the chunks (idrk it was on colab)
|
| 29 |
+
embedding_model = SentenceTransformer("all-MiniLM-L6-v2")
|
| 30 |
|
| 31 |
chunk_embeddings = embedding_model.encode(bio_chunks, convert_to_tensor=True)
|
| 32 |
|
|
|
|
| 55 |
global chosen_topic
|
| 56 |
|
| 57 |
#Getting the relevant parts from the txt file
|
|
|
|
| 58 |
relevant_chunks = get_top_chunks(message, chunk_embeddings, bio_chunks, top_k=4)
|
| 59 |
spec_content = "\n".join(relevant_chunks)
|
| 60 |
|
| 61 |
system_prompt = (
|
| 62 |
f"You are a friendly GCSE Biology tutor focusing on **{chosen_topic}**.\n"
|
| 63 |
+
f"Use the following specification excerpts to answer:\n{spec_content}"
|
| 64 |
)
|
| 65 |
|
| 66 |
messages = [{"role": "system", "content": system_prompt}]
|