Hugging Face Spaces app (page status at capture: Runtime error)
import gradio as gr
from huggingface_hub import hf_hub_download
from llama_cpp import Llama
import os

# Model configuration: quantized Falcon3-1B-Instruct in GGUF format.
MODEL_REPO = "tiiuae/Falcon3-1B-Instruct-GGUF"
MODEL_FILE = "falcon3-1b-instruct-q4_k_m.gguf"

# Fetch the GGUF weights into a local cache directory (no-op if already cached).
print("Downloading model...")
model_path = hf_hub_download(
    repo_id=MODEL_REPO,
    filename=MODEL_FILE,
    cache_dir="./models",
)

# Load the model for CPU inference.
print("Loading model...")
llm = Llama(
    model_path=model_path,
    n_ctx=2048,       # context window size in tokens
    n_threads=4,      # CPU threads used for inference
    n_gpu_layers=0,   # Set to higher value if GPU available
)
def generate_ppt_script(topic, num_slides=5, temperature=0.7, max_tokens=1500):
    """Generate a slide-by-slide PowerPoint presentation script for *topic*.

    Args:
        topic: Subject of the presentation.
        num_slides: Number of slides to request (coerced to int — Gradio
            sliders may deliver floats).
        temperature: Sampling temperature for the LLM.
        max_tokens: Upper bound on generated tokens (coerced to int).

    Returns:
        The generated script text, or a short instruction message when the
        topic is empty.
    """
    # Guard: with a blank topic the model would just invent a subject.
    if not topic or not topic.strip():
        return "Please enter a presentation topic."

    # Sliders can hand back floats; llama-cpp expects integers here.
    num_slides = int(num_slides)
    max_tokens = int(max_tokens)

    prompt = f"""You are a professional presentation writer. Create a detailed PowerPoint presentation script for the topic: "{topic}"
Generate a script with {num_slides} slides. For each slide, provide:
1. Slide number and title
2. Key points to include (3-5 bullet points)
3. Speaker notes/talking points
Format the output clearly with slide numbers and sections.
Presentation Script:"""

    # Run the completion; echo=False keeps the prompt out of the returned text,
    # and the stop strings halt generation at chat-turn markers.
    output = llm(
        prompt,
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=0.9,
        repeat_penalty=1.1,
        stop=["</s>", "User:", "Human:"],
        echo=False,
    )
    return output['choices'][0]['text'].strip()
def create_interface():
    """Assemble the Gradio Blocks UI for the script generator and return it
    (unlaunched)."""
    with gr.Blocks(theme=gr.themes.Soft(), title="PPT Script Generator") as demo:
        # Page header.
        gr.Markdown(
            """
            # 📊 PowerPoint Script Generator
            ### Powered by Falcon3-1B-Instruct
            Generate professional presentation scripts for any topic using AI.
            Simply enter your topic and get a complete slide-by-slide script!
            """
        )

        with gr.Row():
            # Left column: all user-controlled inputs.
            with gr.Column(scale=1):
                topic_box = gr.Textbox(
                    label="Presentation Topic",
                    placeholder="e.g., 'Introduction to Artificial Intelligence', 'Climate Change Solutions', 'Digital Marketing Strategies'",
                    lines=2,
                )
                with gr.Row():
                    slides_slider = gr.Slider(
                        minimum=3,
                        maximum=10,
                        value=5,
                        step=1,
                        label="Number of Slides",
                    )
                    temp_slider = gr.Slider(
                        minimum=0.1,
                        maximum=1.0,
                        value=0.7,
                        step=0.1,
                        label="Creativity (Temperature)",
                    )
                    tokens_slider = gr.Slider(
                        minimum=500,
                        maximum=3000,
                        value=1500,
                        step=100,
                        label="Maximum Length (tokens)",
                    )
                run_btn = gr.Button("🎯 Generate Script", variant="primary", size="lg")
                gr.Markdown(
                    """
                    ### Tips:
                    - Be specific with your topic for better results
                    - Use 5-7 slides for standard presentations
                    - Higher temperature = more creative output
                    - Adjust max tokens if output is cut off
                    """
                )

            # Right column: the generated script.
            with gr.Column(scale=2):
                script_box = gr.Textbox(
                    label="Generated Presentation Script",
                    lines=25,
                    show_copy_button=True,
                )

        # Clickable example rows that pre-fill the four inputs.
        gr.Examples(
            examples=[
                ["Introduction to Machine Learning", 5, 0.7, 1500],
                ["The Future of Renewable Energy", 6, 0.8, 1800],
                ["Effective Team Management Strategies", 5, 0.7, 1500],
                ["Blockchain Technology Explained", 7, 0.7, 2000],
                ["Mental Health in the Workplace", 5, 0.6, 1500],
            ],
            inputs=[topic_box, slides_slider, temp_slider, tokens_slider],
            label="Example Topics",
        )

        # Wire the button to the generator function.
        run_btn.click(
            fn=generate_ppt_script,
            inputs=[topic_box, slides_slider, temp_slider, tokens_slider],
            outputs=script_box,
        )

        gr.Markdown(
            """
            ---
            **Note:** This app uses the Falcon3-1B-Instruct model in GGUF format for efficient CPU inference.
            Generation may take 30-60 seconds depending on the length requested.
            """
        )

    return demo
if __name__ == "__main__":
    # Bind on all interfaces so the app is reachable from outside the container
    # (the standard Hugging Face Spaces port is 7860).
    create_interface().launch(server_name="0.0.0.0", server_port=7860)