import os

import gradio as gr
from groq import Groq

# API key is read from the "Historia" environment variable (the env var name
# doubles as the app name here).
api_key = os.getenv("Historia")

# Initialize the Groq client with the API key.
client = Groq(api_key=api_key)


def teach_history(user_input):
    """Ask the Llama 3.1 model a history question and return its full answer.

    Parameters
    ----------
    user_input : str
        The question or topic entered by the user.

    Returns
    -------
    str
        The complete model response, assembled from the streamed chunks.
    """
    completion = client.chat.completions.create(
        model="llama-3.1-70b-versatile",
        messages=[
            {
                "role": "system",
                "content": (
                    "You are an experienced historian with wide and deep "
                    "knowledge in world history. You are a professor of "
                    "history with 30 years of experience."
                ),
            },
            {"role": "user", "content": user_input},
        ],
        temperature=0.8,
        max_tokens=4096,
        top_p=1,
        stream=True,
        stop=None,
    )
    # The API streams the answer in chunks; a chunk's delta content may be
    # None (e.g. the final chunk), hence the `or ""`. Joining is linear,
    # unlike repeated `+=` which can degrade to quadratic string building.
    return "".join(chunk.choices[0].delta.content or "" for chunk in completion)


# Gradio Blocks interface: a centered title/subtitle above a single column
# with the response box, the question input, and a submit button.
with gr.Blocks() as demo:
    # NOTE(review): the original heading markup was garbled (stripped HTML
    # spread across several lines); reconstructed as centered headings per
    # the commented-out "# Historia" / "### Learn History ..." intent.
    gr.Markdown("<h1 style='text-align: center;'>Historia</h1>")
    gr.Markdown(
        "<h3 style='text-align: center;'>"
        "Learn History with a Knowledgeable Historian</h3>"
    )
    with gr.Row():
        with gr.Column():
            output = gr.Textbox(label="Response", lines=10)
            user_input = gr.Textbox(label="Enter your question or topic")
            submit_button = gr.Button("Submit")
            submit_button.click(
                fn=teach_history, inputs=user_input, outputs=output
            )

demo.launch()