import os
import re
import gradio as gr
from langchain.prompts import PromptTemplate
from langchain_openai import ChatOpenAI
from langchain_chroma import Chroma
from langchain_community.embeddings import HuggingFaceEmbeddings
# Load the sentence-transformer embedding model and open the Chroma vector
# store that was persisted to disk (built offline from the Geometry SOL guide).
# NOTE(review): assumes "geometry_db" was created with this same embedding
# model — mixing models silently degrades similarity search; confirm.
embedding_model = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
vector_store = Chroma(
    embedding_function=embedding_model,
    persist_directory="geometry_db",  # relative folder inside your Hugging Face Space
    collection_name="geometry_sol"
)
# Load the OpenAI key (set this in Hugging Face Space Secrets) and fail fast
# with a clear message if it is missing. The previous code assigned
# os.getenv(...) straight into os.environ, which raises a confusing
# TypeError ("str expected, not NoneType") when the secret is absent,
# and is a no-op when it is present.
_openai_key = os.getenv("OPENAI_API_KEY")
if not _openai_key:
    raise RuntimeError(
        "OPENAI_API_KEY is not set. Add it to your Hugging Face Space Secrets."
    )
os.environ["OPENAI_API_KEY"] = _openai_key

# GPT-3.5 chat model used for all RAG answers; low temperature keeps the
# SOL-code lookups close to verbatim.
llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0.3)
# Unified prompt that lets the LLM auto-detect the user's intent (SOL lookup,
# lesson plan, worksheet, proof, or flashcards) instead of asking the user to
# pick a response type. Filled with the retrieved SOL text ("context") and the
# raw user question ("query") in rag_query().
template = PromptTemplate(
    input_variables=["context", "query"],
    template="""
You are a Virginia high school Geometry assistant. Based on the user question below, determine the correct response type and answer accordingly:
User Question:
{query}
Based on the following SOL text:
{context}
Response Rules:
- If the question is asking for an **SOL number**, respond with:
1. The exact SOL code (e.g., G.RLT.1)
2. The exact description line from the SOL guide
⚠️ Do not summarize. Only copy directly from the context.
- If the user asks for a **lesson plan**, provide:
- Simple explanation of the concept
- Real-world example
- Engaging class activity
Format the output clearly with bullet points.
- If the user asks for a **worksheet**, include:
- Concept summary
- A worked example
- 3 practice problems
Format the output clearly with bullet points.
- If the user asks for **proofs**, include:
- Student-friendly explanation
- Real-world connection
- One short class activity
Format the output clearly with bullet points.
- If the user asks for **flashcards**, generate 5 cards, each with:
- A clear question
- A short answer
Format the output clearly with bullet points.
Only answer one way depending on the intent of the question.
"""
)
# Optional shortcut: answer simple rectangle-area questions locally instead of
# calling the LLM. Accepts "l"/"length" and "w"/"width", in either order.
def try_math_solver(query):
    """Return a rectangle-area answer for *query*, or None if it is not one.

    Recognizes questions like "area of a rectangle with l = 4 and w = 5".
    Unlike the previous single left-to-right pattern (which required ``l``
    before ``w``), the two dimensions may appear in either order and may be
    spelled out ("length = 4, width = 5"). Word boundaries prevent matching
    letters embedded in other words.
    """
    q = query.lower()
    # Cheap pre-filter so unrelated questions skip the regex work entirely.
    if "rectangle" not in q:
        return None
    length = re.search(r"\bl(?:ength)?\s*=\s*(\d+)", q)
    width = re.search(r"\bw(?:idth)?\s*=\s*(\d+)", q)
    if length and width:
        l, w = int(length.group(1)), int(width.group(1))
        return f"The area of the rectangle is {l} × {w} = {l * w} square units."
    return None
# RAG pipeline: retrieve SOL passages, fill the unified prompt, ask the LLM.
def rag_query(query):
    """Answer *query* by retrieving the 2 closest SOL passages from the
    vector store and running them through the intent-aware prompt."""
    retrieved = vector_store.similarity_search(query, k=2)
    sol_context = "\n\n".join(doc.page_content for doc in retrieved)
    filled = template.format_prompt(context=sol_context, query=query)
    answer = llm.invoke(filled.to_string())
    return answer.content
# Gradio entry point: try the local math shortcut first, then fall back to RAG.
def ask_geometry_sol(query):
    """Answer a Geometry SOL question; never raises (errors become text)."""
    shortcut = try_math_solver(query)
    if shortcut:
        return shortcut
    try:
        return rag_query(query)
    except Exception as e:
        # Surface failures (network, API key, vector store) in the UI instead
        # of crashing the Space.
        return f"⚠️ Error: {type(e).__name__} - {str(e)}"
# Gradio UI: a single free-text box — intent (lesson plan, worksheet, proof,
# flashcards, SOL lookup) is detected by the prompt, so no manual response-type
# selector is needed.
iface = gr.Interface(
    fn=ask_geometry_sol,
    inputs=gr.Textbox(label="Enter your Geometry SOL question or topic"),
    outputs="text",
    title="📘 Virginia Geometry SOL Assistant",
    description="Ask about any 2023 Geometry SOL (Standards of Learning). The assistant will auto-detect if you want a lesson plan, worksheet, proof, flashcards, or SOL reference."
)

# Launch only when run as a script (Hugging Face Spaces also imports this file).
if __name__ == "__main__":
    iface.launch()