# NOTE: the following lines are Hugging Face web-UI metadata accidentally
# captured with the file; kept as comments so the module remains valid Python.
# dellabee7's picture
# Update app.py
# 09c85b0 verified
# raw
# history blame
# 1.87 kB
import gradio as gr
from huggingface_hub import InferenceClient
from PyPDF2 import PdfReader
import os
# Read the reference PDF text ahead of time.
def extract_pdf_text(pdf_paths, reader_factory=None):
    """Extract and concatenate the text of every page of the given PDFs.

    Args:
        pdf_paths: Iterable of filesystem paths to PDF files.
        reader_factory: Optional callable mapping a path to a
            PdfReader-like object (must expose ``.pages`` whose items
            have ``extract_text()``). Defaults to ``PyPDF2.PdfReader``;
            injectable for testing.

    Returns:
        All extracted page texts joined by newlines, with surrounding
        whitespace stripped. Pages yielding no text (e.g. image-only
        pages, where ``extract_text()`` returns None/"") are skipped.
    """
    make_reader = PdfReader if reader_factory is None else reader_factory
    page_texts = []
    for path in pdf_paths:
        reader = make_reader(path)
        for page in reader.pages:
            text = page.extract_text()
            if text:
                page_texts.append(text)
    # join instead of repeated += — avoids quadratic string concatenation
    return "\n".join(page_texts).strip()
# ๋ ˆํผ๋Ÿฐ์Šค PDF ํ…์ŠคํŠธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ
pdf_context = extract_pdf_text([
"assets/Programming-Fundamentals-1570222270.pdf",
"assets/1๋ถ„ํŒŒ์ด์ฌ_๊ฐ•์˜์ž๋ฃŒ_์ „์ฒด.pdf"
])
# Hosted Inference API client.
# NOTE(review): the original comment claimed a free FLAN-T5 model, but the
# configured model is tiiuae/falcon-rw-1b — confirm which one is intended.
client = InferenceClient(
    model="tiiuae/falcon-rw-1b",
    token=os.getenv("HUGGINGFACEHUB_API_TOKEN")
)
def respond(message, history, system_message, max_tokens, temperature, top_p):
    """Answer one chat turn using the preloaded PDF text as context.

    Args:
        message: The user's current question.
        history: Prior chat turns. Unused: falcon-rw-1b is a plain
            text-completion model (not a chat model), so a flat prompt is
            built instead of a message list.
        system_message: Instruction text prepended to the prompt.
        max_tokens: Maximum number of new tokens to generate.
        temperature: Sampling temperature forwarded to the model.
        top_p: Nucleus-sampling probability mass forwarded to the model.

    Returns:
        The generated answer with surrounding whitespace stripped.
    """
    # Flat completion prompt: system instruction, document context, question.
    prompt = f"{system_message}\n\n문서 요약:\n{pdf_context}\n\n질문: {message}\n답변:"
    result = client.text_generation(
        prompt=prompt,
        max_new_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p
    )
    return result.strip()
# Gradio chat UI. The extra widgets below are handed to respond() after the
# implicit (message, history) arguments, in this exact order.
_extra_controls = [
    gr.Textbox(
        value="당신은 파이썬 API 문서에 기반해 답변하는 유용한 조교입니다.",
        label="System message",
    ),
    gr.Slider(minimum=1, maximum=1024, value=512, step=1, label="Max new tokens"),
    gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
    gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p"),
]

demo = gr.ChatInterface(
    respond,
    additional_inputs=_extra_controls,
    title="📘 파이썬 API 레퍼런스 챗봇 (FLAN-T5 기반)",
    description="한국공대 수업자료 기반으로 질문에 답하는 무료 챗봇입니다.",
)
# Launch the web app when the module is executed directly (as Spaces does).
if __name__ == "__main__":
    demo.launch()