# test_llama / app.py
# Source: Hugging Face Space "chandan10/test_llama" (commit ddb9697, "Update app.py").
import gradio as gr
import pdfplumber
from groq import Groq
import os
def file_to_string(file):
    """Extract text from an uploaded file.

    Args:
        file: A file-like object with a ``name`` attribute (as supplied by
            a Gradio ``File`` component). ``.txt`` uploads must expose a
            ``read()`` returning UTF-8 bytes.

    Returns:
        str: The extracted text for ``.pdf``/``.txt`` uploads, or an
        explanatory message for any other extension.
    """
    # Lower-case the name so ".PDF" / ".Txt" uploads are recognized too.
    name = file.name.lower()
    if name.endswith(".pdf"):
        with pdfplumber.open(file.name) as pdf:
            # extract_text() returns None for image-only pages; substitute ""
            # so joining never raises TypeError. join avoids quadratic +=.
            return "".join(page.extract_text() or "" for page in pdf.pages)
    elif name.endswith(".txt"):
        return file.read().decode('utf-8')
    else:
        return "Unsupported file format."
def generate_summary(file, prompt, model):
    """Send the prompt (plus optional uploaded-file text) to the Groq chat API.

    Args:
        file: Optional upload accepted by ``file_to_string``, or None.
        prompt: The user's instruction; required (empty prompt short-circuits).
        model: Groq model id chosen in the UI dropdown.

    Returns:
        str: The model's reply, or a human-readable error message.
    """
    if not prompt:
        return "Please provide a prompt."
    file_text = file_to_string(file) if file else ""
    # Append the file's text after the prompt when a file was supplied.
    full_prompt = f"{prompt}\n{file_text}" if file_text else prompt
    try:
        client = Groq(api_key=os.environ.get("GROQ_API_KEY"))
        completion = client.chat.completions.create(
            model=model,
            messages=[
                {
                    "role": "user",
                    "content": full_prompt
                }
            ],
            temperature=0,
            max_tokens=1024,
            top_p=0.9,
            # NOTE: response_format={"type": "json_object"} was removed. Groq's
            # JSON mode rejects requests whose messages do not mention "JSON",
            # so it broke arbitrary prompts in this general-purpose UI.
            stop=None,
        )
        return completion.choices[0].message.content
    except Exception as e:
        # UI boundary: surface any API/auth/network failure as output text
        # instead of crashing the Gradio callback.
        return f"An error occurred: {str(e)}"
def clear_output():
    """Reset the UI: clear the file input, the prompt box, and the output box."""
    cleared_file = None
    cleared_prompt = ""
    cleared_text = ""
    return cleared_file, cleared_prompt, cleared_text
# --- Gradio UI wiring: inputs, buttons, output, and event handlers. ---
with gr.Blocks() as iface:
    gr.Markdown("LLAMA 70B Groq API")
    with gr.Row():
        # Inputs: optional upload, the user's prompt, and the Groq model id.
        file_input = gr.File(label="Upload File (Optional)")
        prompt_input = gr.Textbox(label="Prompt", placeholder="Enter your prompt here...", lines=3)
        model_dropdown = gr.Dropdown(label="Choose Model", choices=["llama-3.1-8b-instant", "llama-3.1-70b-versatile"], value="llama-3.1-70b-versatile")
    with gr.Row():
        clear_button = gr.Button("Clear", size="small")
        submit_button = gr.Button("Generate Output")
    # Single read-only textbox that receives the model reply (or error text).
    output = gr.Textbox(label="Output", lines=10, placeholder="Output will appear here...")
    # Generate: feed (file, prompt, model) into generate_summary, show result.
    submit_button.click(
        fn=generate_summary,
        inputs=[file_input, prompt_input, model_dropdown],
        outputs=[output]
    )
    # Clear: reset file input, prompt, and output via clear_output's 3-tuple.
    clear_button.click(
        fn=clear_output,
        inputs=[],
        outputs=[file_input, prompt_input, output]
    )
# Launch the app when the module is executed (no __main__ guard in this Space).
iface.launch()