"""PromptMate — a small Gradio app that answers questions via the
Hugging Face Inference API (Meta-Llama-3-8B-Instruct)."""

import os

import gradio as gr
from huggingface_hub import InferenceClient

# One shared client for the whole app. HF_TOKEN must be set in the
# environment; os.getenv returns None otherwise, which InferenceClient
# treats as anonymous (rate-limited) access.
client = InferenceClient(api_key=os.getenv("HF_TOKEN"))
# Define the function for generating the response
def generate_response(question, prompt):
    """Return the model's answer to *question*, steered by *prompt*.

    Parameters
    ----------
    question : str
        The end-user's question.
    prompt : str
        System-level instructions that guide how the model answers.

    Returns
    -------
    str
        The generated reply text from the model.
    """
    # The system message must come FIRST: chat templates apply system
    # instructions as context for the user turn that follows. The original
    # order (user, then system) caused the guiding prompt to be weakened
    # or ignored by the model.
    messages = [
        {"role": "system", "content": prompt},
        {"role": "user", "content": question},
    ]
    completion = client.chat.completions.create(
        model="meta-llama/Meta-Llama-3-8B-Instruct",
        messages=messages,
        max_tokens=1024,
    )
    # The returned message is a huggingface_hub output dataclass; attribute
    # access is the documented way to read its content.
    return completion.choices[0].message.content
# Design the Gradio interface
def create_ui():
    """Assemble and return the PromptMate Gradio Blocks interface."""
    with gr.Blocks() as demo:
        # Page header, tagline, and a worked example, separated by rules.
        gr.HTML("<h1 style='color: #4CAF50; text-align: center;'>Welcome to PromptMate!</h1>")
        gr.HTML("<p style='color: #2c3e50; text-align: center;'>Ask a question and get a detailed response with the power of AI!</p>")
        gr.HTML("<hr style='border-top: 2px solid #4CAF50;'>")
        gr.HTML("<h2 style='color: #34495e; text-align: center;'>Example Usage:</h2>")
        gr.HTML("<p style='color: #2c3e50; text-align: center;'>Question: <em>What is the capital of France?</em><br>Prompt: <em>Provide a detailed answer using geographical and cultural context.</em></p>")
        gr.HTML("<hr style='border-top: 2px solid #4CAF50;'>")

        # Inputs, pre-filled with the example shown above.
        with gr.Row():
            question_box = gr.Textbox(
                label="Your Question",
                placeholder="Type your question here...",
                lines=2,
                max_lines=4,
                elem_id="question_input",
                value="What is the capital of France?",
            )
        with gr.Row():
            prompt_box = gr.Textbox(
                label="Prompt",
                placeholder="Provide a prompt to guide the response...",
                lines=4,
                max_lines=6,
                elem_id="prompt_input",
                value="Provide a detailed answer using geographical and cultural context.",
            )

        # Trigger button and the read-only response area.
        with gr.Row():
            generate_btn = gr.Button("Generate Response", elem_id="submit_button")
        answer_box = gr.Textbox(
            label="Generated Response",
            placeholder="Your AI-generated response will appear here...",
            lines=6,
            max_lines=8,
            interactive=False,
            elem_id="output",
        )

        # Wire the button to the backend call.
        generate_btn.click(
            fn=generate_response,
            inputs=[question_box, prompt_box],
            outputs=answer_box,
        )

    return demo
# Run the app
# Build and launch the app only when run as a script (not on import).
# (A stray "|" scraping artifact after launch() was removed — it was not
# valid Python and broke the file.)
if __name__ == "__main__":
    app = create_ui()
    app.launch()