# (Hugging Face Spaces page header captured by the scrape — "Spaces: Sleeping".
#  Not part of the application source.)
from transformers import pipeline
import gradio as gr

# Load the KoAlpaca text-generation pipeline once at import time.
# (The original Korean comment here was mojibake'd by the scrape; it roughly
# described the model choice as a speed/quality balance — TODO confirm.)
generator = pipeline(
    "text-generation",
    model="beomi/KoAlpaca-Polyglot-1.1B",
    tokenizer="beomi/KoAlpaca-Polyglot-1.1B",
    device_map="auto",  # NOTE(review): device_map requires `accelerate` installed
)
def answer_question(prompt):
    """Generate an answer for a (Korean) admissions question.

    Prepends a fixed system prompt, samples one completion from the
    module-level ``generator`` pipeline, then strips the system prompt
    back out of the echoed text.

    Args:
        prompt: Free-form user question text.

    Returns:
        str: The pipeline's ``generated_text`` with every occurrence of the
        system prompt removed and surrounding whitespace stripped.  Note the
        user's own prompt remains at the start of the returned text, since
        ``generated_text`` echoes the full input.
    """
    # NOTE(review): this Korean string is mojibake'd in the scraped source and
    # is reproduced verbatim; restore the original UTF-8 text before shipping.
    system_prompt = (
        "λλ νκ΅ λν μ μ μ 보λ₯Ό μλ €μ£Όλ AIμΌ. "
        "μλ₯, νμλΆμ’ ν©, λ Όμ , μ μ λ± μ νμ μ΄ν΄νκΈ° μ½κ² μ€λͺ ν΄μ€.\n\n"
    )
    response = generator(
        system_prompt + prompt,
        max_new_tokens=250,
        temperature=0.7,  # sampling is enabled below, so these take effect
        top_p=0.9,
        do_sample=True,
    )
    # The pipeline returns a list of dicts; take the single completion and
    # remove the system-prompt prefix it echoes back.
    return response[0]["generated_text"].replace(system_prompt, "").strip()
# Gradio UI wiring: one textbox in, one textbox out, backed by answer_question.
# NOTE(review): every Korean label/placeholder below is mojibake'd in the
# scraped source and is reproduced verbatim — restore proper UTF-8 text.
app = gr.Interface(
    fn=answer_question,
    inputs=gr.Textbox(
        lines=2,
        label="μ μ μ§λ¬Έ μ λ ₯",
        placeholder="μ: κ°μ²λ λ Όμ μ ν / 2025 μλ₯ μΌμ / νκ΅κ³΅νλ μ νμμ½",
    ),
    outputs=gr.Textbox(label="AI λ΅λ³"),
    title="μ μ μ λ¬Έ AI (KoAlpaca 1.1B)",
    description="λΉ λ₯΄κ³ λλν μ μμ 보 μ±λ΄μ λλ€. μ€μ μ νμ 보μ κ·Όκ±°ν μ€λͺ μ μ 곡ν©λλ€.",
)
if __name__ == "__main__":
    # Start the Gradio server only when executed as a script (not on import).
    app.launch()