# app.py — Smart Finance Advisor (Gradio + Mistral-7B-Instruct)
# Provenance (Hugging Face Space): "Update app.py" by aaysush, commit 6f75649 (verified)
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer, TextGenerationPipeline
# Load model and tokenizer
# Hugging Face model repo for the instruction-tuned Mistral 7B checkpoint.
model_id = "mistralai/Mistral-7B-Instruct-v0.2"
tokenizer = AutoTokenizer.from_pretrained(model_id)
# NOTE(review): loads full-precision weights; a 7B model may exhaust memory on
# CPU-only hardware — consider torch_dtype/device_map options if available here.
model = AutoModelForCausalLM.from_pretrained(model_id)
# Shared text-generation pipeline used by generate_advice(); caps each reply
# at 256 newly generated tokens.
pipe = TextGenerationPipeline(model=model, tokenizer=tokenizer, max_new_tokens=256)
def generate_advice(user_input):
    """Generate financial advice for a user's question via the Mistral pipeline.

    Parameters
    ----------
    user_input : str
        The finance question; blank or whitespace-only input is rejected.

    Returns
    -------
    str
        The model's generated advice, or a validation message for empty input.
    """
    if not user_input.strip():
        return "Please enter a valid finance question."
    # Mistral-Instruct chat format: wrap the instruction in [INST] ... [/INST].
    prompt = f"[INST] You are a knowledgeable financial advisor for Indian students. Provide detailed, practical advice for this question:\n\n{user_input} [/INST]"
    # return_full_text=False makes the pipeline return only the newly generated
    # tokens. This is more reliable than str.replace()-ing the prompt out of
    # the full text, which fails silently if decoding alters whitespace and
    # would also remove the prompt text anywhere it happened to recur.
    outputs = pipe(prompt, return_full_text=False)
    return outputs[0]["generated_text"].strip()
# Build and serve the Gradio UI for the finance Q&A assistant.
question_box = gr.Textbox(
    lines=2,
    placeholder="Ask something like: How to invest as a college student?",
)
demo = gr.Interface(
    fn=generate_advice,
    inputs=question_box,
    outputs="text",
    title="Smart Finance Advisor",
    description="Ask financial questions: budgeting, investing, saving. Targeted for Indian students. Powered by Mistral 7B.",
)
demo.launch()