Spaces:
Sleeping
Sleeping
from transformers import AutoModelForCausalLM, AutoTokenizer
from ai_sentence import load_model
# Generate a multiple-choice question
def generate_mcq(word, model_name):
    """Generate a beginner-level English multiple-choice question for *word*.

    Loads the tokenizer/model pair via ``load_model`` and prompts it to
    produce a question with four options (A-D) and a marked correct answer.

    Args:
        word: The vocabulary word the question should use.
        model_name: Model identifier passed through to ``load_model``.

    Returns:
        The generated question text (special tokens removed, prompt excluded).
    """
    tokenizer, model = load_model(model_name)
    prompt = f"Write a simple multiple-choice English question for beginners using the word '{word}'. Provide 4 options labeled A, B, C, D, and mark the correct answer."
    inputs = tokenizer(prompt, return_tensors="pt")
    outputs = model.generate(
        **inputs,
        max_new_tokens=50,
        # Fix: without do_sample=True, generate() uses greedy decoding and
        # silently ignores temperature/top_p.
        do_sample=True,
        temperature=0.7,
        top_p=0.9,
    )
    # Causal LMs echo the prompt tokens in the output; slice them off so
    # only the newly generated question is returned.
    prompt_len = inputs["input_ids"].shape[1]
    question = tokenizer.decode(outputs[0][prompt_len:], skip_special_tokens=True)
    return question
# Answer checking (to be extended later)
def check_answer(user_answer, correct_answer):
    """Return True if the user's answer exactly matches the correct answer."""
    is_correct = user_answer == correct_answer
    return is_correct
# Score calculation (to be extended later)
def calculate_score(total, correct):
    """Format the result as '<correct>/<total> 分' (points)."""
    return "{}/{} 分".format(correct, total)