# NOTE(review): removed Hugging Face Spaces page residue ("Spaces:", "Build error")
# that had been pasted into the source — it is not valid Python. The markdown
# table pipes wrapping every line below were the likely cause of the build error.
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
import torch

# -------------------------------
# Load TinyLlama Model (LLaMA-based)
# -------------------------------
model_name = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.float32,  # full float32 — safest on CPU-only Spaces hardware
    # NOTE(review): device_map="auto" requires the `accelerate` package at
    # runtime; a missing `accelerate` entry in requirements.txt is a common
    # cause of Spaces build errors — confirm it is declared.
    device_map="auto",
)

# Text-generation pipeline wrapping the model + tokenizer; shared by the UI.
pipe = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    max_new_tokens=256,  # cap on generated length per request
    temperature=0.7,     # mild randomness; only effective with do_sample=True
    do_sample=True,
)
# -------------------------------
# College Assistant Function
# -------------------------------
def college_ai(question):
    """Answer a student's question using the TinyLlama text-generation pipeline.

    Args:
        question: Free-form question text from the UI. ``None``, empty, or
            whitespace-only input is rejected without calling the model.

    Returns:
        The model's answer as a plain string, or a short prompt asking the
        user to enter a question.
    """
    # Guard against empty AND whitespace-only input before spending model time
    # (the original check let "   " through to the pipeline).
    if not question or not question.strip():
        return "Please ask a question."

    prompt = f"""
You are a helpful college assistant.
Answer clearly for students.
Student Question: {question}
Answer:
"""
    result = pipe(prompt)[0]["generated_text"]

    # The pipeline echoes the prompt back in generated_text; keep only the
    # text after the final "Answer:" marker.
    answer = result.split("Answer:")[-1].strip()
    return answer
# -------------------------------
# Gradio UI
# -------------------------------
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("# 🎓 College AI Assistant (LLaMA Powered)")
    gr.Markdown("Ask any academic question!")

    user_input = gr.Textbox(
        label="Enter Your Question",
        placeholder="Example: Explain Machine Learning",
    )
    output = gr.Textbox(label="AI Answer")
    ask_btn = gr.Button("Ask AI")

    # Wire the button to the assistant function.
    ask_btn.click(college_ai, inputs=user_input, outputs=output)

# Launch only when run as a script — the Spaces Gradio SDK serves `demo`
# itself when importing the module, and an unconditional launch() can
# conflict with that.
if __name__ == "__main__":
    demo.launch()