import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
from peft import PeftModel
import re
import json
from pathlib import Path

# ==================== CONFIGURATION ====================

# Base models
BASE_MODELS = {
    "PHI-2 (2.7B)": "microsoft/phi-2",
    "SmolLM2 (135M)": "HuggingFaceTB/SmolLM2-135M",
}

# Adapter configurations - update with your HuggingFace username
# Format: "username/repo-name" or local path
ADAPTERS = {
    "PHI-2 (2.7B)": {
        "No Fine-tuning (Base Model)": None,
        "Baseline Fine-tuned": "CrystalRaindropsFall/phi2-gsm8k-baseline",
        "Curriculum: Answer Length": "CrystalRaindropsFall/phi2-gsm8k-curriculum-answer-length",
        "Curriculum: Complexity Score": "CrystalRaindropsFall/phi2-gsm8k-curriculum-complexity",
    },
    "SmolLM2 (135M)": {
        "No Fine-tuning (Base Model)": None,
        "Baseline Fine-tuned": "CrystalRaindropsFall/smolLM2-gsm8k-baseline",
        "Curriculum: Answer Length": "CrystalRaindropsFall/smolLM2-gsm8k-curriculum-answer-length",
        "Curriculum: Complexity Score": "CrystalRaindropsFall/smolLM2-gsm8k-curriculum-complexity",
    },
}

# Sample math problems
SAMPLE_PROBLEMS = [
    "Janet's ducks lay 16 eggs per day. She eats three for breakfast every morning and bakes muffins for her friends every day with four. She sells the remainder at the farmers' market daily for $2 per fresh duck egg. How much in dollars does she make every day at the farmers' market?",
    "A robe takes 2 bolts of blue fiber and half that much white fiber. How many bolts in total does it take?",
    "Josh decides to try flipping a house. He buys a house for $80,000 and then puts in $50,000 in repairs. This increased the value of the house by 150%. How much profit did he make?",
    "James decides to run 3 sprints 3 times a week. He runs 60 meters each sprint. How many total meters does he run a week?",
    "A store sells pencils for $0.50 each and notebooks for $3.00 each. If Sarah buys 6 pencils and 4 notebooks, how much does she spend in total?",
    "Mike has 45 apples. He gives 1/3 of them to his friend and then buys 12 more apples. How many apples does Mike have now?",
    "A train travels 120 miles in 2 hours. At the same speed, how far will it travel in 5 hours?",
]

# ==================== MODEL LOADING ====================

class ModelCache:
    """Cache loaded models to avoid reloading.

    Holds exactly one (base model, adapter) combination at a time; switching
    to a different combination releases the old weights first.
    """

    def __init__(self):
        # Identity of the currently loaded combination (None until first load).
        self.current_base = None
        self.current_adapter = None
        self.model = None
        self.tokenizer = None
        self.pipe = None

    def load_model(self, base_model_name, adapter_path=None):
        """Return a text-generation pipeline for ``base_model_name``.

        If ``adapter_path`` is given (local directory or Hugging Face Hub
        repo id), a PEFT adapter is attached on top of the base model; on
        adapter-load failure the base model is used alone. The result is
        cached, so repeated requests for the same combination are free.
        """
        cache_key = f"{base_model_name}_{adapter_path}"
        current_key = f"{self.current_base}_{self.current_adapter}"

        # Fast path: the requested combination is already loaded.
        if cache_key == current_key and self.pipe is not None:
            return self.pipe

        # Release the previous model before loading a new one.
        # BUGFIX: assign None rather than `del` the attributes. With `del`,
        # any exception later in this method left the instance without these
        # attributes, and every subsequent call raised AttributeError on the
        # `self.model is not None` / `self.pipe is not None` checks.
        if self.model is not None:
            self.model = None
            self.tokenizer = None
            self.pipe = None
            torch.cuda.empty_cache()

        print(f"Loading {base_model_name}...")

        # Load tokenizer; neither base model ships a dedicated pad token,
        # so fall back to EOS for padding.
        tokenizer = AutoTokenizer.from_pretrained(base_model_name)
        if tokenizer.pad_token is None:
            tokenizer.pad_token = tokenizer.eos_token
            tokenizer.pad_token_id = tokenizer.eos_token_id
        tokenizer.padding_side = "left"

        # Load base model
        model = AutoModelForCausalLM.from_pretrained(
            base_model_name,
            device_map="auto",
            torch_dtype=torch.float16,
        )

        # Attach the LoRA adapter if one was requested.
        # `PeftModel.from_pretrained` accepts both local directories and Hub
        # repo ids, so a single guarded call replaces the original duplicated
        # branches — and the local-path case now degrades to the base model
        # instead of crashing the UI, matching the Hub-path behavior.
        if adapter_path:
            print(f"Loading adapter from {adapter_path}...")
            try:
                model = PeftModel.from_pretrained(model, adapter_path)
            except Exception as e:
                print(f"Warning: Could not load adapter from {adapter_path}: {e}")
                print("Using base model only")

        # Create pipeline
        pipe = pipeline(
            "text-generation",
            model=model,
            tokenizer=tokenizer,
            max_new_tokens=512,
            do_sample=False,  # Deterministic for math
            pad_token_id=tokenizer.pad_token_id,
        )

        # Cache the newly loaded combination.
        self.current_base = base_model_name
        self.current_adapter = adapter_path
        self.model = model
        self.tokenizer = tokenizer
        self.pipe = pipe
        return pipe

# Global cache
model_cache = ModelCache()

# ==================== HELPER FUNCTIONS ====================

def extract_answer(text):
    """Extract the final numerical answer from generated text.

    Prefers the GSM8K ``#### <number>`` marker; otherwise falls back to the
    last number appearing anywhere in the text. Returns the answer as a
    string, or "No answer found" if no number is present.
    """
    # Strip thousands separators so answers like "#### 1,000" parse as 1000
    # (the original regex stopped at the first comma).
    cleaned = text.replace(",", "")
    # Look for #### format (GSM8K style)
    match = re.search(r"####\s*(-?\d+\.?\d*)", cleaned)
    if match:
        return match.group(1).rstrip(".")
    # Fallback: find last number
    numbers = re.findall(r"-?\d+\.?\d*", cleaned)
    if numbers:
        return numbers[-1].rstrip(".")
    return "No answer found"

def format_solution(generated_text, question):
    """Format the solution for display.

    Returns a (solution_text, final_answer) pair. The prompt prefix is
    stripped because the model echoes it back in its output.
    """
    # Remove the question from the output (model echoes it)
    solution = generated_text.replace(f"Question: {question}\nAnswer:", "").strip()
    # Extract answer
    final_answer = extract_answer(generated_text)
    return solution, final_answer

# ==================== GRADIO INTERFACE ====================

def solve_math_problem(base_model, adapter_choice, question, max_tokens, temperature):
    """Main function to solve math problems.

    Looks up the selected base model and adapter, generates an answer for
    ``question``, and returns a markdown-formatted string (or an error
    message — exceptions are caught so the UI never crashes).
    """
    try:
        # Get model path
        base_model_path = BASE_MODELS[base_model]
        adapter_path = ADAPTERS[base_model].get(adapter_choice)

        # Load model
        pipe = model_cache.load_model(base_model_path, adapter_path)

        # Format prompt
        prompt = f"Question: {question}\nAnswer:"

        # Generate (greedy decoding when temperature is 0 for reproducibility)
        outputs = pipe(
            prompt,
            max_new_tokens=max_tokens,
            do_sample=temperature > 0,
            temperature=temperature if temperature > 0 else None,
        )
        generated_text = outputs[0]["generated_text"]

        # Format output
        solution, final_answer = format_solution(generated_text, question)

        # Create formatted output
        output = f"""### Solution Steps:
{solution}

### Final Answer: **{final_answer}**
"""
        return output

    except Exception as e:
        # NOTE: the original message started with mojibake ("โ"); repaired
        # to the intended cross mark.
        return f"❌ Error: {str(e)}\n\nPlease check that the model and adapter are correctly loaded."
def update_adapter_choices(base_model): """Update adapter dropdown based on selected base model""" adapters = list(ADAPTERS[base_model].keys()) return gr.Dropdown(choices=adapters, value=adapters[0]) def load_sample_problem(sample_idx): """Load a sample problem""" if sample_idx is None or sample_idx >= len(SAMPLE_PROBLEMS): return SAMPLE_PROBLEMS[0] return SAMPLE_PROBLEMS[sample_idx] # ==================== BUILD INTERFACE ==================== def create_demo(): """Create the Gradio interface""" with gr.Blocks( theme=gr.themes.Soft(), title="Curriculum Design Matters: Math Reasoning Demo" ) as demo: gr.Markdown( """ # ๐ Curriculum Design Matters: Training LLMs for Math Reasoning