# NOTE(review): the following lines are Hugging Face file-viewer residue
# (author, commit, "raw / history / blame", file size) accidentally pasted
# into the source. Commented out so the module is importable again.
# dellabee7's picture
# Update app.py
# 82f7e90 verified
# raw
# history blame
# 2.21 kB
import gradio as gr
from huggingface_hub import InferenceClient
from PyPDF2 import PdfReader
# PDF ํ…์ŠคํŠธ ๋ฏธ๋ฆฌ ์ฝ์–ด์˜ค๊ธฐ
def extract_pdf_text(pdf_paths):
full_text = ""
for path in pdf_paths:
reader = PdfReader(path)
for page in reader.pages:
text = page.extract_text()
if text:
full_text += text + "\n"
return full_text.strip()
# ์‚ฌ์ „ ์ •์˜๋œ ๋ ˆํผ๋Ÿฐ์Šค ๋ฌธ์„œ๋“ค
pdf_context = extract_pdf_text([
"assets/Programming-Fundamentals-1570222270.pdf",
"assets/1๋ถ„ํŒŒ์ด์ฌ_๊ฐ•์˜์ž๋ฃŒ_์ „์ฒด.pdf"
])
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
def respond(message, history, system_message, max_tokens, temperature, top_p):
    """Stream a chat completion grounded in the preloaded PDF reference text.

    Args:
        message: The user's current question.
        history: Prior (user, assistant) message pairs from gr.ChatInterface.
        system_message: System prompt from the UI textbox.
        max_tokens / temperature / top_p: Sampling controls from the sliders.

    Yields:
        The partial response string, growing as streamed chunks arrive.
    """
    # Bug fix: the original appended the history AFTER the current user
    # message, so the model saw the newest question first and the prior
    # turns out of order. Correct order: system -> history -> new message.
    messages = [{"role": "system", "content": system_message}]
    for user_msg, bot_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if bot_msg:
            messages.append({"role": "assistant", "content": bot_msg})
    # Combine the reference document with the current question last.
    messages.append({
        "role": "user",
        "content": f"์•„๋ž˜๋Š” ํŒŒ์ด์ฌ ํ”„๋กœ๊ทธ๋ž˜๋ฐ API ๋ ˆํผ๋Ÿฐ์Šค์ž…๋‹ˆ๋‹ค:\n{pdf_context}\n\n์งˆ๋ฌธ: {message}",
    })

    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        delta = chunk.choices[0].delta.content
        if delta:
            response += delta
            # Yield only when new content arrived, so the UI never
            # re-renders an unchanged partial response.
            yield response
# Gradio chat UI: `respond` streams answers; the extra inputs surface the
# system prompt and sampling controls in the order respond() expects them.
demo = gr.ChatInterface(
    fn=respond,
    additional_inputs=[
        gr.Textbox(value="You are a friendly chatbot that answers questions based on the given document.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
    ],
    title="๐Ÿ“˜ ํŒŒ์ด์ฌ API ๋ ˆํผ๋Ÿฐ์Šค ์ฑ—๋ด‡",
    description="ํ•œ๊ตญ๊ณต๋Œ€ ์ˆ˜์—…์ž๋ฃŒ ๊ธฐ๋ฐ˜ ์ฑ—๋ด‡์ž…๋‹ˆ๋‹ค. ์งˆ๋ฌธ์„ ์ž…๋ ฅํ•ด ๋ณด์„ธ์š”!"
)

# Launch the app only when run as a script (not when imported).
if __name__ == "__main__":
    demo.launch()