Spaces:
Sleeping
Sleeping
Upload app.py
Browse files
app.py
CHANGED
|
@@ -2,11 +2,11 @@ import gradio as gr
|
|
| 2 |
import os
|
| 3 |
from pathlib import Path
|
| 4 |
import fitz # PyMuPDF
|
| 5 |
-
from langchain.embeddings import HuggingFaceEmbeddings
|
| 6 |
-
from langchain.vectorstores import Chroma
|
| 7 |
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
| 8 |
from langchain.chains import RetrievalQA
|
| 9 |
-
from langchain.llms import HuggingFacePipeline
|
| 10 |
from langchain.prompts import PromptTemplate
|
| 11 |
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
|
| 12 |
import torch
|
|
@@ -30,7 +30,7 @@ class CurriculumAssistant:
|
|
| 30 |
model = AutoModelForCausalLM.from_pretrained(
|
| 31 |
model_name,
|
| 32 |
torch_dtype=torch.float16,
|
| 33 |
-
device_map="auto",
|
| 34 |
trust_remote_code=True
|
| 35 |
)
|
| 36 |
|
|
|
|
| 2 |
import os
|
| 3 |
from pathlib import Path
|
| 4 |
import fitz # PyMuPDF
|
| 5 |
+
from langchain_community.embeddings import HuggingFaceEmbeddings
|
| 6 |
+
from langchain_community.vectorstores import Chroma
|
| 7 |
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
| 8 |
from langchain.chains import RetrievalQA
|
| 9 |
+
from langchain_community.llms import HuggingFacePipeline
|
| 10 |
from langchain.prompts import PromptTemplate
|
| 11 |
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
|
| 12 |
import torch
|
|
|
|
| 30 |
model = AutoModelForCausalLM.from_pretrained(
|
| 31 |
model_name,
|
| 32 |
torch_dtype=torch.float16,
|
| 33 |
+
device_map=None, # Use CPU for Hugging Face Spaces
|
| 34 |
trust_remote_code=True
|
| 35 |
)
|
| 36 |
|