# FinGPT Compliance Agents - Inference Example
from transformers import AutoTokenizer, AutoModelForCausalLM
from peft import PeftModel
import torch

# Load the base model, then apply the fine-tuned LoRA adapter on top of it.
# The tokenizer is loaded from the adapter repo so any added/special tokens
# saved during fine-tuning are picked up.
base_model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-3.2-1B-Instruct")
model = PeftModel.from_pretrained(base_model, "your-username/fingpt-compliance-agents")
tokenizer = AutoTokenizer.from_pretrained("your-username/fingpt-compliance-agents")
model.eval()  # inference only: disable dropout etc.


def analyze_financial_text(text):
    """Run the fine-tuned compliance model on *text* and return its output.

    Parameters
    ----------
    text : str
        The financial text to analyze.

    Returns
    -------
    str
        The decoded generation (includes the prompt, since the full
        output sequence is decoded).
    """
    prompt = f"Analyze this financial text: {text}"
    inputs = tokenizer(prompt, return_tensors="pt")
    with torch.no_grad():
        outputs = model.generate(
            **inputs,
            max_new_tokens=512,
            # do_sample=True is required for temperature to take effect;
            # without it generation is greedy and temperature=0.7 is
            # silently ignored (transformers emits a warning).
            do_sample=True,
            temperature=0.7,
        )
    response = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return response


if __name__ == "__main__":
    # Smoke test — only runs when executed as a script, not on import.
    result = analyze_financial_text(
        "Company X reported strong quarterly earnings with 15% revenue growth."
    )
    print(result)