| | import os |
| | import streamlit as st |
| | from groq import Groq |
| | from dotenv import load_dotenv |
| |
|
| | |
# Load environment variables from a local .env file (if present) and read
# the Groq API key.  Fail fast with a clear message when the key is missing:
# constructing/using the client with no key would otherwise crash later
# with an opaque SDK error instead of actionable feedback in the UI.
load_dotenv()
GROQ_API_KEY = os.getenv("GROQ_API_KEY")

if not GROQ_API_KEY:
    st.error("GROQ_API_KEY is not set. Add it to your environment or a .env file.")
    st.stop()

# Single module-level client, reused by every request in this session.
client = Groq(api_key=GROQ_API_KEY)
| |
|
| | |
# --- Page header and input widgets ---
st.title("AI Code Generator & Debugger")
st.write("Generate and debug code in multiple programming languages.")

# Languages offered in the selector; the display name is forwarded into
# the model prompt as-is.
SUPPORTED_LANGUAGES = ["Python", "Java", "JavaScript", "PHP", "C++", "C#"]

language = st.selectbox("Select Programming Language", SUPPORTED_LANGUAGES)
mode = st.radio("Select Mode", ["Generate Code", "Debug Code"])
user_input = st.text_area("Enter your prompt or code snippet")
submit = st.button("Submit")
| |
|
def get_groq_response(prompt):
    """Send *prompt* to the Groq chat API and return the complete reply text.

    The request is made in streaming mode; the streamed chunks are
    accumulated and joined into a single string before returning.
    """
    messages = [
        {"role": "system", "content": "You are an expert AI code assistant."},
        {"role": "user", "content": prompt},
    ]
    stream = client.chat.completions.create(
        model="deepseek-r1-distill-llama-70b",
        messages=messages,
        temperature=0.6,
        max_completion_tokens=4096,
        top_p=0.95,
        stream=True,
        stop=None,
    )
    # Delta content can be None on some chunks (e.g. role-only deltas),
    # hence the `or ""` guard.
    parts = []
    for chunk in stream:
        parts.append(chunk.choices[0].delta.content or "")
    return "".join(parts)
| |
|
if submit:
    if user_input.strip():
        # The target language already comes from the selectbox, so the
        # prompt text does not need to name a language.  The previous
        # keyword gate (`any(lang in user_input ...)`) rejected perfectly
        # valid prompts such as "write a bubble sort function" with
        # "Invalid question" just because no language name appeared.
        prompt = f"{mode} {language} code: {user_input}"
        response = get_groq_response(prompt)
        # Map display names to identifiers the syntax highlighter
        # understands — "c++" / "c#" are not valid highlighter names.
        highlight = {"C++": "cpp", "C#": "csharp"}.get(language, language.lower())
        st.code(response, language=highlight)
    else:
        st.warning("Please enter a prompt or code snippet.")
| |
|