File size: 1,833 Bytes
ce7ae74
 
92c1761
ce7ae74
 
92c1761
ce7ae74
 
9085743
92c1761
 
ce7ae74
 
 
 
 
 
 
 
 
 
 
 
 
 
92c1761
 
ce7ae74
 
 
 
 
 
 
 
 
92c1761
ce7ae74
 
 
 
92c1761
 
ce7ae74
 
 
 
 
92c1761
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
# Standard library
import json
import os

# Third-party
import gradio as gr
from huggingface_hub import InferenceClient

MODEL_ID = "tiiuae/falcon-7b-instruct"  # keep the big model; it is served remotely

# Hosted inference client -- no model weights are loaded in this process.
# Reads the API token from the HF_TOKEN environment variable (None if unset).
client = InferenceClient(model=MODEL_ID, token=os.getenv("HF_TOKEN"))

# Instruction prefix sent ahead of every problem statement.
# G1-G5 are Yes/No gatekeeper criteria; S1-S5 are 0-3 rubric scores.
# (Fixed: a stray " df" typo at the end of the key list leaked into every
# model request and could confuse the JSON-only instruction.)
SYSTEM_PROMPT = """You are an evaluator of undergraduate research problem statements.
Return only a JSON object with the following keys:
G1,G2,G3,G4,G5 (Yes/No) and S1–S5 (0-3) and comments.
"""

def assess(statement):
    """Evaluate a research problem statement with the hosted LLM.

    Args:
        statement: The student's problem-statement text.

    Returns:
        A ``(json_str, verdict)`` tuple: the raw JSON checklist block
        extracted from the model response, and a one-line human-readable
        verdict string.
    """
    prompt = f"{SYSTEM_PROMPT}\nProblem statement:\n\"\"\"{statement}\"\"\"\nChecklist:"
    response = client.text_generation(
        prompt,
        max_new_tokens=256,
        temperature=0.1,  # near-deterministic scoring
        top_p=0.9,
        stream=False,
    )
    json_str = _extract_json(response)
    return json_str, _verdict(json_str)


def _extract_json(text):
    """Return the outermost ``{...}`` span of *text*, or ``"{}"`` if absent."""
    start = text.find("{")
    end = text.rfind("}") + 1
    # Guard BOTH ends: the original only checked "{", so a response with an
    # opening brace but no closing one produced text[start:0] == "" instead
    # of the intended "{}" fallback.
    if start == -1 or end == 0:
        return "{}"
    return text[start:end]


def _verdict(json_str):
    """Map the checklist JSON to a verdict line.

    Any gatekeeper G1-G5 answered "No" fails outright; otherwise the summed
    S1-S5 scores (range 0-15) are bucketed into three quality tiers.
    """
    try:
        checklist = json.loads(json_str)
        gate_fail = any(checklist.get(f"G{i}") == "No" for i in range(1, 6))
        score = sum(int(checklist.get(f"S{i}", 0)) for i in range(1, 6))
    # Broad catch is deliberate: malformed JSON (ValueError), a non-dict
    # top level (AttributeError), or a non-numeric score (ValueError) must
    # all degrade to a parse-error verdict, never crash the UI callback.
    except Exception as e:
        return f"⚠️ JSON parse error: {e}"
    if gate_fail:
        return "❌ Mandatory gatekeeper criterion failed."
    if score >= 11:
        return "✅ Meets requirements (Exceptional/Proficient)."
    if score >= 8:
        return "⚠️ Adequate but needs refinement."
    return "❌ Needs major improvement."

# Wire the evaluator into a simple single-function web UI.
statement_input = gr.Textbox(lines=8, label="Paste REU Problem Statement")
checklist_output = gr.Code(label="Checklist JSON")
verdict_output = gr.Textbox(label="Verdict")

iface = gr.Interface(
    fn=assess,
    inputs=statement_input,
    outputs=[checklist_output, verdict_output],
    title="🧠 REU Problem Statement Evaluator",
    description="Uses the Hugging Face Inference API so no large model is loaded in the Space itself.",
)
iface.launch()