Spaces:
Build error
Build error
Commit ·
4f913b7
1
Parent(s): e24629e
Update app.py
Browse files
app.py
CHANGED
|
@@ -18,6 +18,15 @@ from langchain.document_loaders import PyPDFLoader, TextLoader, JSONLoader, CSVL
|
|
| 18 |
import tempfile # 임시 파일을 생성하기 위한 라이브러리입니다.
|
| 19 |
import os
|
| 20 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 21 |
|
| 22 |
# PDF 문서로부터 텍스트를 추출하는 함수입니다.
|
| 23 |
def get_pdf_text(pdf_docs):
|
|
@@ -65,14 +74,6 @@ def get_vectorstore(text_chunks):
|
|
| 65 |
return vectorstore # 생성된 벡터 스토어를 반환합니다.
|
| 66 |
|
| 67 |
def get_conversation_chain(vectorstore):
|
| 68 |
-
# Replace 'microsoft/DialoGPT-large' with the desired model name
|
| 69 |
-
model_name = "Shaleen123/mistrallite_medical_qa"
|
| 70 |
-
|
| 71 |
-
config = PeftConfig.from_pretrained(model_name)
|
| 72 |
-
model = AutoModelForCausalLM.from_pretrained(model_name)
|
| 73 |
-
model = PeftModel.from_pretrained(model, model_name)
|
| 74 |
-
tokenizer = AutoTokenizer.from_pretrained(model_name)
|
| 75 |
-
|
| 76 |
|
| 77 |
# 대화 기록을 저장하기 위한 메모리를 생성합니다.
|
| 78 |
memory = ConversationBufferMemory(
|
|
|
|
| 18 |
import tempfile # 임시 파일을 생성하기 위한 라이브러리입니다.
|
| 19 |
import os
|
| 20 |
|
| 21 |
+
with st.spinner("Loading the model"):
|
| 22 |
+
model_name = "Shaleen123/mistrallite_medical_qa"
|
| 23 |
+
|
| 24 |
+
config = PeftConfig.from_pretrained(model_name)
|
| 25 |
+
model = AutoModelForCausalLM.from_pretrained(model_name)
|
| 26 |
+
model = PeftModel.from_pretrained(model, model_name)
|
| 27 |
+
tokenizer = AutoTokenizer.from_pretrained(model_name)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
|
| 31 |
# PDF 문서로부터 텍스트를 추출하는 함수입니다.
|
| 32 |
def get_pdf_text(pdf_docs):
|
|
|
|
| 74 |
return vectorstore # 생성된 벡터 스토어를 반환합니다.
|
| 75 |
|
| 76 |
def get_conversation_chain(vectorstore):
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 77 |
|
| 78 |
# 대화 기록을 저장하기 위한 메모리를 생성합니다.
|
| 79 |
memory = ConversationBufferMemory(
|