"""Gradio app: generate short stories or poems via the Hugging Face Inference API."""

import os

import gradio as gr
from huggingface_hub import InferenceClient

# Hugging Face Inference API client; token is read from the environment.
# NOTE(review): if HF_API_TOKEN is unset this passes api_key=None and requests
# will fail at call time — the error is surfaced in the UI by generate_text.
client = InferenceClient(api_key=os.environ.get("HF_API_TOKEN"))


def generate_text(category, theme, tone, length):
    """Return a generated story or poem from the chat-completion endpoint.

    Parameters mirror the UI widgets: ``category`` ("Story"/"Poem"),
    free-text ``theme``, ``tone`` ("Happy"/"Sad"/...), and ``length``
    ("Short"/"Medium"/"Long"). On any API failure the exception text is
    returned as a string (not raised) so the Gradio output box shows it.
    """
    prompt = f"Write a {length} {tone} {category} about {theme}."
    try:
        completion = client.chat.completions.create(
            model="mistralai/Mistral-7B-Instruct-v0.1",
            messages=[{"role": "user", "content": prompt}],
        )
        return completion.choices[0].message.content
    except Exception as e:  # broad by design: surface every failure in the UI
        return f"Error: {e}"


# --- Gradio UI ---------------------------------------------------------------
with gr.Blocks() as demo:
    gr.Markdown("## 📝 Creative Writing Generator")
    category = gr.Dropdown(["Story", "Poem"], label="Category")
    theme = gr.Textbox(label="Theme", placeholder="e.g. friendship, time travel, lost love")
    tone = gr.Dropdown(["Happy", "Sad", "Funny", "Dark", "Inspiring"], label="Tone")
    length = gr.Dropdown(["Short", "Medium", "Long"], label="Length")
    generate_button = gr.Button("Generate")
    output = gr.Textbox(label="Generated Content", lines=10)
    generate_button.click(
        fn=generate_text,
        inputs=[category, theme, tone, length],
        outputs=output,
    )

# Guard the launch so the module can be imported (e.g. by tests or a server
# wrapper) without starting the Gradio web server as a side effect.
if __name__ == "__main__":
    demo.launch()