File size: 2,146 Bytes
5a005a2
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

from langchain.chains.summarize import load_summarize_chain
from langchain.document_loaders import Docx2txtLoader, PyPDFLoader, TextLoader
from langchain.llms import HuggingFacePipeline
from langchain.prompts import PromptTemplate
from langchain.text_splitter import RecursiveCharacterTextSplitter

import fitz  # PyMuPDF for PDF
from docx import Document

# Load Phi-2 model and tokenizer once at import time so requests reuse them.
# NOTE: float16 weights are not supported by many CPU kernels (e.g. addmm for
# Half), so on CPU we must load in float32; float16 is kept for GPU devices.
device = "cpu"
model_name = "microsoft/phi-2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.float32 if device == "cpu" else torch.float16,
)
model.to(device)

def load_document(file_path):
    """Load a document and extract its text with the loader matching the extension.

    Args:
        file_path: Path to a .pdf, .docx, or .txt file (case-insensitive).

    Returns:
        A list of LangChain documents on success, or the string
        "Unsupported file format." for any other extension (string sentinel
        kept for backward compatibility with existing callers).
    """
    # Lowercase the path so mixed-case extensions (".PDF", ".Docx") match too.
    lowered = file_path.lower()
    if lowered.endswith(".pdf"):
        loader = PyPDFLoader(file_path)
    elif lowered.endswith(".docx"):
        loader = Docx2txtLoader(file_path)
    elif lowered.endswith(".txt"):
        loader = TextLoader(file_path)
    else:
        return "Unsupported file format."

    return loader.load()

def summarize_document(file):
    """Summarize an uploaded document with Phi-2 using a MapReduce chain.

    Args:
        file: Gradio file object; only its .name (the temp-file path) is used.

    Returns:
        The generated summary string, or an error message when the file type
        is unsupported or the document contains no text.
    """
    docs = load_document(file.name)
    # load_document signals an unknown extension with a plain string; without
    # this check the truthy string slipped past `if not docs` and crashed the
    # splitter below.
    if isinstance(docs, str):
        return docs
    if not docs:
        return "No text found in document."

    # Split text into overlapping chunks small enough for the model context.
    text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
    split_docs = text_splitter.split_documents(docs)

    # HuggingFacePipeline requires a transformers pipeline object; the bare
    # AutoModelForCausalLM was previously passed here, which cannot generate.
    hf_pipeline = pipeline(
        "text-generation",
        model=model,
        tokenizer=tokenizer,
        max_new_tokens=256,
    )
    llm = HuggingFacePipeline(pipeline=hf_pipeline)

    # MapReduce: summarize each chunk, then summarize the partial summaries.
    summarize_chain = load_summarize_chain(llm, chain_type="map_reduce")
    summary = summarize_chain.run(split_docs)

    return summary

# Gradio Interface: single file upload in, summary text out.
demo = gr.Interface(
    fn=summarize_document,
    inputs=gr.File(label="Upload Document (PDF, DOCX, TXT)"),
    outputs=gr.Textbox(label="Summarized Text"),
    title="AI Document Summarizer with Phi-2",
    description="Upload a document, and the AI will generate a summary using MapReduce."
)

# Launch the web UI only when run as a script, not when imported as a module.
if __name__ == "__main__":
    demo.launch()