# NOTE(review): this file was scraped from a Hugging Face Space page; the
# page-status header ("Spaces: Sleeping") was non-code residue and was removed.
# -*- coding: utf-8 -*-
import gradio as gr
import os
import requests
import json
import logging

# Logging configuration: DEBUG level with timestamped messages.
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')

# API configuration.
# NOTE(review): when the FRIENDLI_TOKEN env var is unset, the placeholder
# string below is sent as the bearer token and every API call will fail with
# 401 — confirm the deployment always sets the env var.
FRIENDLI_TOKEN = os.environ.get("FRIENDLI_TOKEN") or "YOUR_FRIENDLI_TOKEN"
FRIENDLI_API_URL = "https://api.friendli.ai/dedicated/v1/chat/completions"
# Genre-specific system prompts (Korean). Each value pairs a role description
# for the model with a list of key story elements to draw from; the dict keys
# double as the dropdown choices in the UI.
GENRE_PROMPTS = {
    "ννμ§": """λΉμ μ ννμ§ μμ€ μκ°μ λλ€. λ§λ², λλκ³€, μν, λμ , λ§κ²μ¬ λ± ννμ§ μΈκ³κ΄μ μμλ€μ νμ©νμ¬ λͺ°μ κ° μλ μ΄μΌκΈ°λ₯Ό μμ±νμΈμ.
μ£Όμ μμ: λ§λ² μμ€ν , μ νμ μ‘΄μ¬, ννμ§ μ’ μ‘±, λ§λ² μμ΄ν , μμ μ μ¬μ , μμΈκ³Ό μ΄λͺ """,
    "λ‘맨μ€": """λΉμ μ λ‘λ§¨μ€ μμ€ μκ°μ λλ€. κ°μ μ , κ΄κ³μ λ°μ , κ°λ±κ³Ό νν΄λ₯Ό μ€μ¬μΌλ‘ λ μμ κ°μ±μ μκ·Ήνλ μ΄μΌκΈ°λ₯Ό μμ±νμΈμ.
μ£Όμ μμ: 첫 λ§λ¨, κ°μ μ λ³ν, μ€ν΄μ νν΄, μ΄λͺ μ μ¬λ, κ°λμ μΈ κ³ λ°±, ν΄νΌμλ© λλ μ νν μ΄λ³""",
    "무ν": """λΉμ μ 무ν μμ€ μκ°μ λλ€. 무곡, κ°νΈ, λ¬Έν, λΉκΈ λ± λμ 무νμ μμλ€μ νμ©νμ¬ λ°μ§κ° λμΉλ μ΄μΌκΈ°λ₯Ό μμ±νμΈμ.
μ£Όμ μμ: 무곡 μλ ¨, λ¬Έν κ° λκ²°, κ°νΈμ μμ, 무림맹주, λ§κ΅μ μ ν, μ μΈ κ³ μ, λΉκΈ μν""",
    "λ―Έμ€ν°λ¦¬": """λΉμ μ λ―Έμ€ν°λ¦¬ μμ€ μκ°μ λλ€. μμκ»λΌ, λ¨μ, μΆλ¦¬ κ³Όμ μ ν΅ν΄ λ μκ° ν¨κ» μ¬κ±΄μ ν΄κ²°ν΄λκ°λ κΈ΄μ₯κ° μλ μ΄μΌκΈ°λ₯Ό μμ±νμΈμ.
μ£Όμ μμ: μλ¬Έμ μ¬κ±΄, λ¨μ λ°κ²¬, μ©μμλ€, κ±°μ§ λ¨μ, λ°μ , μ§μ€μ νλ‘, νμ μ μΆλ¦¬""",
    "SF": """λΉμ μ SF μμ€ μκ°μ λλ€. λ―Έλ κΈ°μ , μ°μ£Ό, μΈκ³ μλͺ 체, μκ°μ¬ν λ± κ³Όνμ μμλ ₯μ λ°νμΌλ‘ ν μ΄μΌκΈ°λ₯Ό μμ±νμΈμ.
μ£Όμ μμ: λ―Έλ κΈ°μ , μ°μ£Ό νν, μΈκ³ λ¬Έλͺ , μκ° μ¬ν, AIμ λ‘λ΄, λμ€ν νΌμ/μ ν νΌμ, κ³Όνμ λ°κ²¬""",
    "μ€λ¦΄λ¬": """λΉμ μ μ€λ¦΄λ¬ μμ€ μκ°μ λλ€. κΈ΄μ₯κ°, μμ€νμ€, μμΈ‘λΆκ°λ₯ν μ κ°λ‘ λ μλ₯Ό μλνλ μ΄μΌκΈ°λ₯Ό μμ±νμΈμ.
μ£Όμ μμ: μμ‘΄μ μκΈ°, μΆκ²©μ , μ¬λ¦¬μ , μμμΉ λͺ»ν λ°μ , μκ°μ ν, κ·Ήμ μΈ λ립, κΈ΄λ°ν μν©""",
    "μμ¬": """λΉμ μ μμ¬ μμ€ μκ°μ λλ€. μ€μ  μμ¬μ λ°°κ²½μ λ°νμΌλ‘ λΉμμ λ¬Ένμ μΈλ¬Όλ€μ μμνκ² κ·Έλ €λ΄λ μ΄μΌκΈ°λ₯Ό μμ±νμΈμ.
μ£Όμ μμ: μλμ  λ°°κ²½, μμ¬μ  μ¬κ±΄, μ€μ‘΄ μΈλ¬Ό, λΉμμ λ¬Έν, μμ¬μ  κ°λ±, μλμ  μ μ½""",
    "νΈλ¬": """λΉμ μ νΈλ¬ μμ€ μκ°μ λλ€. 곡ν¬, μ΄μμ°μ  νμ, λ―Έμ§μ μ‘΄μ¬λ₯Ό ν΅ν΄ λ μμκ² μ€μΉν  μ μ¨μ μ μ¬νλ μ΄μΌκΈ°λ₯Ό μμ±νμΈμ.
μ£Όμ μμ: μ΄μμ°μ  νμ, μ μ£Ό, κ·μ , λ―Έμ§μ μ‘΄μ¬, 곡ν¬μ€λ¬μ΄ λΆμκΈ°, μ¬λ¦¬μ  곡ν¬, μμ‘΄μ μν"""
}
| def format_text(text: str, max_line_length: int = 80) -> str: | |
| """ν μ€νΈ ν¬λ§·ν ν¨μ""" | |
| lines = [] | |
| current_line = "" | |
| for paragraph in text.split('\n'): | |
| words = paragraph.split() | |
| for word in words: | |
| if len(current_line) + len(word) + 1 <= max_line_length: | |
| current_line += word + " " | |
| else: | |
| lines.append(current_line.strip()) | |
| current_line = word + " " | |
| if current_line: | |
| lines.append(current_line.strip()) | |
| current_line = "" | |
| lines.append("") # λ¬Έλ¨ κ΅¬λΆμ μν λΉ μ€ | |
| return "\n".join(lines) | |
def respond(
    message,
    history: list[tuple[str, str]],
    genre="ννμ§",
    system_message="",
    max_tokens=8000,
    temperature=0.8,
    top_p=0.9,
):
    """Stream a story continuation from the Friendli chat-completions API.

    Builds a genre-specific system prompt, replays the chat history, then
    POSTs a streaming request and yields the growing chat history (a list
    of ``(user, assistant)`` tuples) after every received token so the
    Gradio Chatbot updates incrementally. On any failure, a single history
    entry containing the error text is yielded instead.

    Args:
        message: Latest user prompt.
        history: Previous ``(user, assistant)`` message pairs.
        genre: Key into GENRE_PROMPTS; unknown keys fall back to the default.
        system_message: Extra text appended to the system prompt.
        max_tokens: Token budget forwarded to the API.
        temperature: Sampling temperature forwarded to the API.
        top_p: Nucleus-sampling parameter forwarded to the API.

    Yields:
        Updated history lists; the last yield carries the complete response.
    """
    # Genre-specific system prompt (defaults to the first genre on a miss).
    genre_prompt = GENRE_PROMPTS.get(genre, GENRE_PROMPTS["ννμ§"])
    system_prefix = f"""λΉμ μ νκ΅μ΄ {genre} μμ€ μ λ¬Έ μκ°μ λλ€.
{genre_prompt}
μμ± μ§μΉ¨:
1. μμν λ¬μ¬μ λͺ°μ κ° μλ μμ
2. μ 체μ μΈ μΊλ¦ν° μ€μ κ³Ό λν
3. κΈ΄μ₯κ° μλ νλ‘― μ κ°
4. μ₯λ₯΄ νΉμ±μ λ§λ 문체μ λΆμκΈ°
5. λ μλ₯Ό μ¬λ‘μ‘λ ν₯λ―Έμ§μ§ν μ κ°
κ° μ±ν°λ μκ²°μ±μ κ°μ§λ©΄μλ μ 체 μ΄μΌκΈ°μ μ°μμ±μ μ μ§ν΄μΌ ν©λλ€.
μ΅λ 8000 ν ν°κΉμ§ μ¬μ©νμ¬ νλΆνκ³  μμΈν μ΄μΌκΈ°λ₯Ό μμ±νμΈμ."""
    # OpenAI-style message list: system prompt, prior turns, then new prompt.
    messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})
    messages.append({"role": "user", "content": message})
    current_response = ""
    try:
        headers = {
            "Authorization": f"Bearer {FRIENDLI_TOKEN}",
            "Content-Type": "application/json"
        }
        payload = {
            "model": "dep86pjolcjjnv8",
            "messages": messages,
            "max_tokens": max_tokens,
            "temperature": temperature,
            "top_p": top_p,
            "stream": True,
            "stream_options": {
                "include_usage": True
            }
        }
        # The context manager closes the streaming connection on exit — the
        # original never closed it, leaking the socket when the generator
        # finished or the consumer stopped iterating.
        # NOTE(review): no timeout is set, so a stalled server hangs this
        # generator indefinitely — consider timeout=(connect, read).
        with requests.post(FRIENDLI_API_URL, json=payload, headers=headers,
                           stream=True) as response:
            response.raise_for_status()
            # Server-sent events: each "data: " line carries one JSON chunk.
            for line in response.iter_lines():
                if not line:
                    continue
                line = line.decode('utf-8')
                if not line.startswith('data: '):
                    continue
                line = line[len('data: '):]
                if line == '[DONE]':
                    break
                try:
                    chunk = json.loads(line)
                except json.JSONDecodeError:
                    # Skip malformed or partial SSE payloads.
                    continue
                if 'choices' in chunk and len(chunk['choices']) > 0:
                    delta = chunk['choices'][0].get('delta', {})
                    token = delta.get('content')
                    if token:
                        current_response += token
                        # Re-wrap the full accumulated text on every token so
                        # the UI always shows fully formatted output.
                        yield history + [(message, format_text(current_response))]
        # Final yield with the complete, formatted response.
        yield history + [(message, format_text(current_response))]
    except Exception as e:
        error_message = f"μ€λ₯ λ°μ: {str(e)}"
        logging.error(f"μλ΅ μμ± μ€ν¨: {error_message}")
        yield history + [(message, error_message)]
# Build the Gradio UI. The custom CSS widens chat messages and uses
# white-space: pre-wrap so the line breaks inserted by format_text() survive
# HTML rendering.
# NOTE(review): the block nesting below was reconstructed from a
# whitespace-mangled source — verify the layout matches the original app.
with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css="""
.message-wrap {
font-size: 16px !important;
line-height: 1.6em !important;
max-width: 90% !important;
margin: 0 auto !important;
}
.message {
padding: 1em !important;
margin-bottom: 0.5em !important;
white-space: pre-wrap !important;
word-wrap: break-word !important;
max-width: 100% !important;
}
.message p {
margin: 0 !important;
padding: 0 !important;
width: 100% !important;
}
.chatbot {
font-family: 'Noto Sans KR', sans-serif !important;
}
""") as interface:
    # App title and usage hint (Korean: "Korean novel generator").
    gr.Markdown("# νκ΅μ΄ μμ€ μμ±κΈ°")
    gr.Markdown("### μνλ μ₯λ₯΄λ₯Ό μ ννκ³ ν둬ννΈλ₯Ό μ λ ₯νμ¬ μμ€μ μμ±νμΈμ.")
    with gr.Row():
        with gr.Column():
            # Genre selector — choices are the GENRE_PROMPTS keys.
            genre_dropdown = gr.Dropdown(
                choices=list(GENRE_PROMPTS.keys()),
                value="ννμ§",
                label="μ₯λ₯΄ μ ν",
                info="μνλ μμ€ μ₯λ₯΄λ₯Ό μ ννμΈμ"
            )
            # Chat transcript; respond() streams updated histories into it.
            chatbot = gr.Chatbot(
                value=[],
                show_label=True,
                label="μμ€ μ§ν",
                height=600,
                elem_classes="chatbot"
            )
            with gr.Row():
                # Prompt input and generate button.
                msg = gr.Textbox(
                    label="ν둬ννΈ μ λ ₯",
                    placeholder="μ΄μΌκΈ°μ μμμ΄λ μ§ν λ°©ν₯μ μ λ ₯νμΈμ...",
                    lines=3
                )
                submit_btn = gr.Button("μμ±", variant="primary")
            # Advanced settings: extra system message and sampling sliders.
            with gr.Accordion("κ³ κΈ μ€μ ", open=False):
                system_msg = gr.Textbox(
                    label="μΆκ° μμ€ν  λ©μμ§",
                    value="λ μλ₯Ό μ¬λ‘μ‘λ ν₯λ―Έμ§μ§ν μ΄μΌκΈ°λ₯Ό μμ±νμΈμ.",
                    lines=2
                )
                with gr.Row():
                    max_tokens = gr.Slider(
                        minimum=100,
                        maximum=8000,
                        value=8000,
                        step=100,
                        label="μ΅λ ν ν° μ"
                    )
                    temperature = gr.Slider(
                        minimum=0,
                        maximum=1,
                        value=0.8,
                        step=0.1,
                        label="μ°½μμ± μμ€"
                    )
                    top_p = gr.Slider(
                        minimum=0,
                        maximum=1,
                        value=0.9,
                        step=0.1,
                        label="μλ΅ λ€μμ±"
                    )
            # Canned example prompts that fill the textbox when clicked.
            examples = gr.Examples(
                examples=[
                    ["μ΄μΌκΈ°λ₯Ό κ³μ μ§νν΄μ£ΌμΈμ"],
                    ["μλ‘μ΄ μΊλ¦ν°λ₯Ό λ±μ₯μμΌμ£ΌμΈμ"],
                    ["κΈ΄μ₯κ° μλ μ ν¬ μ₯λ©΄μ λ§λ€μ΄μ£ΌμΈμ"],
                    ["μ£ΌμΈκ³΅μ κ³Όκ±°λ₯Ό λ°νμ£ΌμΈμ"],
                    ["μμμΉ λͺ»ν λ°μ μ λ§λ€μ΄μ£ΌμΈμ"],
                    ["κ°λμ μΈ μ₯λ©΄μ μ°μΆν΄μ£ΌμΈμ"],
                    ["μ΄μΌκΈ°μ ν΄λΌμ΄λ§₯μ€λ₯Ό λ§λ€μ΄μ£ΌμΈμ"],
                    ["μλ‘μ΄ μ₯μλ‘ μ΄λνλ μ₯λ©΄μ λ§λ€μ΄μ£ΌμΈμ"],
                ],
                inputs=msg,
                label="μμ ν둬ννΈ"
            )
    # Wire both the button click and textbox Enter to the streaming handler.
    submit_btn.click(
        fn=respond,
        inputs=[msg, chatbot, genre_dropdown, system_msg, max_tokens, temperature, top_p],
        outputs=[chatbot]
    )
    msg.submit(
        fn=respond,
        inputs=[msg, chatbot, genre_dropdown, system_msg, max_tokens, temperature, top_p],
        outputs=[chatbot]
    )
if __name__ == "__main__":
    # Listen on all interfaces (container/Spaces deployment) on port 7860;
    # share=True additionally requests a public gradio.live tunnel URL.
    interface.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=True
    )