Spaces:
Sleeping
Sleeping
| import gradio as gr | |
| from llama_cpp import Llama | |
| from huggingface_hub import hf_hub_download | |
| import time | |
# --- Configuration ---
# Hugging Face Hub repo and GGUF file for the quantized model.
MODEL_REPO = "Kezovic/iris-q4gguf-baseline-10k"
MODEL_FILE = "llama-3.2-1b-instruct.Q4_K_M.gguf"
CONTEXT_WINDOW = 2048   # llama.cpp context size (prompt + completion tokens)
MAX_NEW_TOKENS = 400    # hard cap on generated tokens per poem
TEMPERATURE = 1.5       # deliberately high for creative, varied output
# --- Model Loading ---
llm = None  # set by load_llm(); remains None if loading fails


def load_llm():
    """Download the GGUF model from the Hub and initialise llama.cpp.

    On success the module-level `llm` holds a ready `Llama` instance.
    On any failure the error is printed and `llm` stays None; the
    generation function reports this to the user instead of crashing.
    """
    global llm
    print("Downloading model...")
    try:
        local_path = hf_hub_download(repo_id=MODEL_REPO, filename=MODEL_FILE)
        llm = Llama(
            model_path=local_path,
            n_ctx=CONTEXT_WINDOW,
            n_threads=2,      # modest CPU Space; keep thread count low
            verbose=False,
            min_p=0.1,        # min-p sampling floor to tame high temperature
        )
    except Exception as e:  # best-effort boundary: report, don't crash the app
        print(f"Error loading model: {e}")
    else:
        print("Model loaded successfully!")


load_llm()
# --- Generation Function ---
def generate_poem(format_type, persona, topic, progress=gr.Progress()):
    """Generate a poem of the requested format, in the given persona's voice.

    Args:
        format_type: Poem format chosen in the UI (e.g. "Ballad", "Ode").
        persona: Narrator voice chosen in the UI (e.g. "Grumpy Pirate").
        topic: Free-text subject of the poem.
        progress: Gradio progress tracker (injected by Gradio per request;
            the gr.Progress() default is the documented Gradio pattern).

    Returns:
        The generated poem text, or a human-readable error message when the
        model is unavailable or the topic is missing.
    """
    # 1. VISUAL FEEDBACK: immediately show the bar.
    # This works because the output Textbox has a fixed height.
    progress(0, desc="Consulting the Muse...")
    time.sleep(0.2)  # force a tiny pause so the eye catches the bar

    # Guard clauses: explicit None check (don't rely on the model object's
    # truthiness), and reject empty or whitespace-only topics.
    if llm is None:
        return "Error: Model not loaded."
    if not topic or not topic.strip():
        return "Please enter a topic!"

    # 2. Progress update
    progress(0.2, desc=f"Summoning {persona}...")
    time.sleep(0.3)

    # System-voice descriptions for each selectable persona.
    persona_map = {
        "Grumpy Pirate": "You are a grumpy and annoyed pirate captain. Use salty nautical slang, complain about the sea and use 'Arrr' and 'matey'.",
        "Philosopher": "You are a philosopher. Use metaphors and reflective musings about the human condition.",
        "Ancient Wizard": "You are an ancient wizard. Speak in mystical tones. Use references to magic.",
        "Shakespearean Actor": "You are a Shakespearean actor. Use Early Modern English (thee, thou) and dramatic flair.",
        "Sarcastic Teenager": "You are a sarcastic teenager. Use dry humor."
    }
    # Structural instructions for each selectable poem format.
    format_map = {
        "Free Verse": "Write in Free Verse about the topic. Focus on vivid imagery and emotion.",
        "Ballad": "Write a Ballad about the topic. It should tell a narrative story with a clear beginning, middle, and end. Use four-line stanzas (quatrains) and a simple rhyme scheme like A-B-C-B. Keep the language simple and musical.",
        "Ode": "Write an Ode about the topic. Use elevated, expressive language to praise or celebrate the subject. Focus on strong emotion, rich imagery, and admiration.",
        "Elegy": "Write an Elegy about the topic. Use a somber, reflective tone to mourn a loss or contemplate death. Focus on sorrow, remembrance, and emotional depth.",
        "Hymn": "Write a Hymn about the topic. Use a reverent, uplifting tone. Focus on praise, devotion, or spiritual reflection, with a rhythmic, chant-like flow.",
        "Epic": "Write an Epic about the topic. Use grand, dramatic language to tell a heroic story."
    }

    # Fall back to neutral instructions if an unknown choice slips through.
    selected_voice = persona_map.get(persona, "You are a helpful assistant.")
    selected_constraint = format_map.get(format_type, "Write a poem.")

    full_prompt = (
        f"Write a poem with rhymes.\n"
        f"{selected_voice}\n"
        f"{selected_constraint}\n"
        f"Your response should only contain the poem.\n"
        f"The {format_type} should focus on this topic: '{topic}'.\n\n"
    )

    # 3. Progress update
    progress(0.4, desc="Drafting Masterpiece...")

    # Raw completion call; stop sequences guard against the model echoing
    # instruction-style scaffolding.
    output = llm(
        prompt=full_prompt,
        max_tokens=MAX_NEW_TOKENS,
        temperature=TEMPERATURE,
        stop=["### Instruction:", "### Human:"],
        echo=False
    )

    # 4. Progress update
    progress(0.9, desc="Polishing rhymes...")
    time.sleep(0.2)

    return output['choices'][0]['text'].strip()
# --- UI Layout ---
# NOTE: the output uses a gr.Textbox (not gr.Markdown) with a fixed number
# of lines so a visible box exists on load, giving the progress bar a home.
with gr.Blocks(title="The Poetry Workshop", theme=gr.themes.Soft()) as demo:
    gr.Markdown("# 🖋️ The Poetry Workshop")
    gr.Markdown("Your own personal muse to help you get started with poetry.")

    with gr.Group():
        # Row 1: "I want to write a <format> in the style of <persona>"
        with gr.Row(equal_height=True):
            gr.Markdown("### I want to write a ")
            poem_format = gr.Dropdown(
                choices=["Ballad", "Ode", "Elegy", "Hymn", "Epic", "Free Verse"],
                value="Ballad",
                label="Poem Type",
                show_label=False,
                container=False,
                scale=2,
            )
            gr.Markdown("### in the style of ")
            poem_persona = gr.Dropdown(
                choices=["Grumpy Pirate", "Philosopher", "Shakespearean Actor", "Ancient Wizard", "Sarcastic Teenager"],
                value="Grumpy Pirate",
                label="Persona",
                show_label=False,
                container=False,
                scale=3,
            )

        # Row 2: free-text topic plus the generate button.
        with gr.Row(equal_height=True):
            gr.Markdown("### about this topic: ")
            poem_topic = gr.Textbox(
                placeholder="e.g., my broken laptop, the smell of rain, a lost sock",
                label="Topic",
                show_label=False,
                scale=5,
            )
            create_button = gr.Button("✨ Create Masterpiece", variant="primary", scale=1)

    gr.Markdown("---")

    # Read-only, pre-sized output box so the loading bar has a place to live.
    poem_output = gr.Textbox(
        label="Your Poem",
        lines=12,
        interactive=False,
        placeholder="Your masterpiece will appear here...",
    )

    create_button.click(
        fn=generate_poem,
        inputs=[poem_format, poem_persona, poem_topic],
        outputs=[poem_output],
    )

if __name__ == "__main__":
    demo.launch()