# app.py — Hugging Face Space "Question Complexity Classifier" (page header
# residue from scrape: "Spaces: Running").
import re

import joblib
import gradio as gr
from sentence_transformers import SentenceTransformer

# Sentence embedding model used to featurize incoming questions.
enc = SentenceTransformer("all-MiniLM-L6-v2")
# Pre-trained classifier and its training-time metadata, loaded from disk.
clf = joblib.load("model.pkl")
meta = joblib.load("meta.pkl")
# CoT character-length cutoff that defined the "complex" label at training time.
threshold = meta["threshold"]
# Demo questions, ordered roughly from trivially simple to research-grade hard.
_EXAMPLE_QUESTIONS = (
    "What is 2 + 2?",
    "Write a Python function to reverse a linked list.",
    "Explain how transformers handle long-range dependencies in NLP.",
    "What microscopic mechanisms reconcile correlated insulating phases with unconventional superconductivity in magic-angle twisted bilayer graphene?",
)
# gr.Examples expects one row (list) per example input.
EXAMPLES = [[question] for question in _EXAMPLE_QUESTIONS]
def predict(q: str) -> tuple[str, str]:
    """Classify a question as requiring long chain-of-thought reasoning or not.

    Args:
        q: Free-text question entered in the UI.

    Returns:
        A ``(label, confidence)`` pair of display strings. Blank/whitespace-only
        input yields em-dash placeholders for both fields.
    """
    if not q.strip():
        # NOTE(review): original placeholder was mojibake ("β"); restored to "—".
        return "—", "—"
    emb = enc.encode([q])
    prob = clf.predict_proba(emb)[0][1]  # probability of the "complex" class
    # NOTE(review): emoji were mojibake ("π΄"/"π’"); restored to 🔴/🟢.
    label = "🔴 Complex" if prob > 0.5 else "🟢 Simple"
    # Confidence is the probability of whichever class was chosen.
    confidence = f"{max(prob, 1 - prob):.1%}"
    return label, confidence
# --- Gradio UI --------------------------------------------------------------
with gr.Blocks(title="Question Complexity Classifier") as demo:
    # NOTE(review): heading emoji was mojibake ("π§"); restored to 🧠.
    gr.Markdown(
        """
# 🧠 Question Complexity Classifier
Predicts whether a question requires **long chain-of-thought reasoning** or not.
Trained on 20k samples from [KIMI-K2.5-700000x](https://huggingface.co/datasets/ianncity/KIMI-K2.5-700000x) reasoning traces.
Complexity proxy: CoT length > {:.0f} chars = Complex.
""".format(threshold)
    )
    with gr.Row():
        inp = gr.Textbox(label="Question", lines=4, placeholder="Enter any question...")
        with gr.Column():
            out_label = gr.Textbox(label="Complexity")
            out_conf = gr.Textbox(label="Confidence")
    btn = gr.Button("Predict", variant="primary")
    # Both the button click and pressing Enter in the textbox run prediction.
    btn.click(predict, inputs=inp, outputs=[out_label, out_conf])
    inp.submit(predict, inputs=inp, outputs=[out_label, out_conf])
    gr.Examples(examples=EXAMPLES, inputs=inp, label="Try these")

demo.launch()