import gradio as gr
import requests
import json
import os
# Set your API key as an environment variable
# Important: In the Hugging Face Space, set this as a secret
HF_API_KEY = os.environ.get("HF_API_KEY", "")
def analyze_shampoo(prompt):
    """
    Send *prompt* to the Hugging Face Inference API and return the result.

    Uses the hosted phi-3-mini endpoint instead of loading the model
    locally. Never raises: network/HTTP/parse failures are returned as a
    human-readable "Error: ..." string so the Gradio UI always has
    something to display.

    Args:
        prompt: Free-text analysis prompt typed by the user.

    Returns:
        The model's generated text, or an error/diagnostic string.
    """
    # Fail fast before doing any network work.
    if not HF_API_KEY:
        return "Error: No API key provided. Please add the HF_API_KEY secret to your Space."

    API_URL = "https://api-inference.huggingface.co/models/microsoft/phi-3-mini-4k-instruct"
    headers = {
        "Authorization": f"Bearer {HF_API_KEY}",
        "Content-Type": "application/json",
    }
    payload = {
        "inputs": prompt,
        "parameters": {
            "max_new_tokens": 2048,
            "temperature": 0.7,
            "top_p": 0.95,
            # Return only the completion, not the echoed prompt.
            "return_full_text": False,
        },
    }
    try:
        # Bug fix: the original call had no timeout, so a stalled inference
        # endpoint would hang this request (and the Gradio worker) forever.
        response = requests.post(API_URL, headers=headers, json=payload, timeout=120)
        response.raise_for_status()  # surface HTTP 4xx/5xx as exceptions
        result = response.json()
    except requests.exceptions.RequestException as e:
        # Connection errors, timeouts, and HTTP error statuses.
        return f"Error: {str(e)}"
    except ValueError as e:
        # response.json() raises ValueError when the body is not JSON.
        return f"Error: {str(e)}"

    if isinstance(result, list) and len(result) > 0:
        # Standard Inference API shape: [{"generated_text": "..."}]
        return result[0].get('generated_text', 'No response generated')
    return f"Unexpected response format: {result}"
# Wire up the Gradio UI: one prompt box in, one result box out.
prompt_box = gr.Textbox(lines=10, placeholder="Enter your analysis prompt here...")
result_box = gr.Textbox(lines=20)

interface = gr.Interface(
    fn=analyze_shampoo,
    inputs=prompt_box,
    outputs=result_box,
    title="Phi-3 Shampoo Analyzer",
    description="Analyze shampoo ingredients based on user profiles using the Hugging Face Inference API",
)

# Start the app server (blocks until the Space is shut down).
interface.launch()