|
|
import os

import gradio as gr
from groq import Groq
|
|
|
|
|
|
|
|
# Read the Groq API key from the environment — never hard-code secrets in
# source (the previous key was committed and must be considered compromised;
# revoke it in the Groq console). Export before running:
#   export GROQ_API_KEY="gsk_..."
GROQ_API_KEY = os.environ.get("GROQ_API_KEY")
if not GROQ_API_KEY:
    raise RuntimeError(
        "GROQ_API_KEY environment variable is not set. "
        "Get a key from https://console.groq.com and export it first."
    )

# Single shared client instance reused by every request.
client = Groq(api_key=GROQ_API_KEY)
|
|
|
|
|
|
|
|
def get_traffic_recommendation(input_situation: str) -> str:
    """Ask the Groq-hosted LLaMA model for traffic-flow optimization advice.

    Args:
        input_situation: Free-text description of the traffic scenario.

    Returns:
        The model's suggestion, or a human-readable error/notice string so
        the Gradio UI always gets text back instead of crashing.
    """
    # Guard against empty input: avoids a pointless (and billable) API call.
    if not input_situation or not input_situation.strip():
        return "Please describe a traffic scenario first."

    try:
        chat_completion = client.chat.completions.create(
            messages=[
                {
                    "role": "user",
                    "content": f"Suggest a solution to optimize traffic flow in the following situation:\n\n{input_situation}",
                }
            ],
            model="llama-3.3-70b-versatile",
        )
        return chat_completion.choices[0].message.content
    except Exception as e:
        # Boundary handler: surface API/network failures to the UI as text
        # rather than letting the Gradio callback raise.
        return f"Error: {e}"
|
|
|
|
|
|
|
|
# Wire the recommendation function into a simple one-field web UI.
scenario_input = gr.Textbox(lines=5, placeholder="Describe the traffic scenario...")

demo = gr.Interface(
    fn=get_traffic_recommendation,
    inputs=scenario_input,
    outputs="text",
    title="Traffic Flow Optimizer",
    description="Enter a traffic scenario to get suggestions for improving traffic flow using the LLaMA-3 model powered by Groq.",
)
|
|
|
|
|
# Start the Gradio web server only when run as a script (not on import).
if __name__ == "__main__":


    demo.launch()
|
|
|