import gradio as gr
from transformers import pipeline

# Model used for generation; swap in a legal-domain fine-tune if one is available.
model_name = "meta-llama/Llama-3.2-1B"

# Build the pipeline once at module load so the model is not reloaded per request.
pipe = pipeline("text-generation", model=model_name)


def legal_assistant(query):
    """Generate a legal-perspective answer for *query*.

    Parameters
    ----------
    query : str
        The user's legal question, as typed into the Gradio textbox.

    Returns
    -------
    str
        The model's generated answer (the echoed prompt is excluded).
    """
    # Frame the question so the model answers from a legal-advice perspective.
    input_text = f"Legal Assistant: {query}\nProvide me with a legal perspective or explanation."
    # max_new_tokens bounds only the *generated* text. The original max_length=200
    # counted prompt tokens too, so a long question could leave no room for an
    # answer. return_full_text=False strips the echoed prompt from the output,
    # so the caller receives only the model's continuation.
    result = pipe(
        input_text,
        max_new_tokens=200,
        num_return_sequences=1,
        return_full_text=False,
    )
    return result[0]["generated_text"]


# Gradio UI: one text input (the question), one text output (the answer).
iface = gr.Interface(
    fn=legal_assistant,
    inputs=gr.Textbox(label="Ask the AI Advocate a Legal Question"),
    outputs=gr.Textbox(label="Legal Response"),
    title="Code Legalist - Your AI Legal Advocate",
    description="Ask any legal-related questions, and get answers from the AI-powered advocate assistant.",
)

# Guard the launch so importing this module (e.g. from tests or another app)
# does not immediately start a public-sharing server.
if __name__ == "__main__":
    iface.launch(share=True)