jefalod committed on
Commit
447da7b
Β·
verified Β·
1 Parent(s): cf1f1c5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +40 -21
app.py CHANGED
@@ -1,32 +1,51 @@
1
  import os
2
  import gradio as gr
3
- from llama_index.core import VectorStoreIndex
4
  from llama_index.readers.file import PDFReader
 
 
 
 
 
 
 
 
 
 
5
 
6
- # Set your OpenAI API key (use HF secrets in prod!)
7
- openai,api.key = os.environ.get("OPENAI_API_KEY")
8
- def process_resume(file, question):
9
- if file is None or question.strip() == "":
10
- return "Please upload a resume and enter a question."
11
 
12
- reader = PDFReader()
13
- documents = reader.load_data(file=file.name)
 
14
 
15
- # Use OpenAI-based vector index
16
- index = VectorStoreIndex.from_documents(documents)
17
- query_engine = index.as_query_engine() # Defaults to OpenAI LLM if key is set
18
- response = query_engine.query(question)
19
 
20
- return str(response)
 
 
 
 
 
 
 
 
 
21
 
22
- with gr.Blocks() as demo:
23
- gr.Markdown("## πŸ“„ Resume Analysis Chatbot (OpenAI Powered)")
24
  with gr.Row():
25
- file = gr.File(label="Upload Resume (PDF)", file_types=[".pdf"])
26
- question = gr.Textbox(label="Ask a question about your resume", placeholder="e.g. What are my strengths?")
27
- answer = gr.Textbox(label="Bot's Answer", lines=5)
28
- btn = gr.Button("Ask")
 
 
 
 
 
29
 
30
- btn.click(fn=process_resume, inputs=[file, question], outputs=answer)
31
 
32
- demo.launch()
 
 
 
1
  import os
2
  import gradio as gr
3
+ import openai
4
  from llama_index.readers.file import PDFReader
5
+ from llama_index.core import VectorStoreIndex
6
+ from llama_index.embeddings.openai import OpenAIEmbedding
7
+ from llama_index.llms.openai import OpenAI
8
+
9
+ openai.api_key = os.environ.get("OPENAI_API_KEY")  # key from env var (HF Space secret); None when unset — downstream OpenAI calls will then fail
10
+
11
def process_pdf(file, question):
    """Answer *question* about the uploaded PDF via a LlamaIndex query engine.

    Parameters
    ----------
    file : gradio file object (exposes a ``.name`` filesystem path) or None
        The uploaded PDF, or None when nothing was uploaded.
    question : str
        Natural-language question about the document.

    Returns
    -------
    str
        The model's answer, or a human-readable message for bad input /
        any processing error (never raises to the UI).
    """
    # Guard clauses: the previous revision validated input before touching the
    # file; without them a missing upload surfaces as a cryptic
    # "'NoneType' object has no attribute 'name'" via the broad except below.
    if file is None:
        return "Please upload a PDF file."
    if not question or not question.strip():
        return "Please enter a question."
    try:
        reader = PDFReader()
        documents = reader.load_data(file=file.name)
        if not documents:
            # e.g. a scanned/image-only PDF with no extractable text
            return "No text could be extracted from this PDF."

        # Explicit OpenAI embedding + LLM so the engine never silently falls
        # back to a default model we did not choose.
        embed_model = OpenAIEmbedding()
        llm = OpenAI()

        index = VectorStoreIndex.from_documents(documents, embed_model=embed_model)
        query_engine = index.as_query_engine(llm=llm)
        response = query_engine.query(question)
        return str(response)
    except Exception as e:  # boundary: report failure to the UI instead of crashing
        return f"❌ Error: {e}"
27
+
28
# --- Gradio Blocks UI -------------------------------------------------------
# (Emoji below were mojibake — UTF-8 bytes decoded as Latin-1 — now repaired.)
with gr.Blocks(title="Resume Analyzer by Advaith") as demo:
    gr.Markdown("""
# 📄 Resume Analyzer
Upload a resume and ask any question about the candidate!
Powered by **LlamaIndex** + **OpenAI**
""")

    with gr.Row():
        pdf_file = gr.File(label="📁 Upload your resume (PDF)", file_types=[".pdf"])
        question = gr.Textbox(
            lines=2,
            label="💬 Ask something",
            placeholder="e.g., What are the candidate's technical strengths?",
        )

    analyze_button = gr.Button("🔍 Analyze")

    result = gr.Textbox(label="🧠 Answer", lines=10)

    # Wire the button straight to process_pdf: the old run_analysis wrapper
    # only forwarded its arguments unchanged, adding indirection for no gain.
    analyze_button.click(process_pdf, inputs=[pdf_file, question], outputs=result)

# Launch the app only when run as a script (not when imported).
if __name__ == "__main__":
    demo.launch()