Spaces:
Sleeping
Sleeping
File size: 3,684 Bytes
c541f80 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 |
# program.py
# AI HR Chatbot Backend for Caramel AI
import os
import requests
import google.generativeai as genai
from dotenv import load_dotenv
import PyPDF2 # <-- Add this import
# --- SETUP ---
# (This existing code is unchanged)

# Load secrets from a local .env file and fail fast if the Gemini key is
# missing — nothing below can work without it.
load_dotenv()
api_key = os.getenv("GEMINI_API_KEY")
if not api_key:
    raise ValueError("GEMINI_API_KEY not found. Please create a .env file with your API key.")
genai.configure(api_key=api_key)

# Fetch the workplace-policy knowledge base from a public GitHub raw URL at
# import time.  On any network/HTTP failure we fall back to a placeholder
# string so the module still imports and the bot can say it has no context.
try:
    print("Fetching knowledge base from URL...")
    txt_url = "https://raw.githubusercontent.com/hereandnowai/vac/refs/heads/master/prospectus-context.txt"
    response = requests.get(txt_url)
    response.raise_for_status()
    # Normalise the fetched text: trim each line and drop blank lines.
    text_lines = response.text.splitlines()
    text_context = "\n".join([line.strip() for line in text_lines if line.strip()])
    print("✅ Knowledge base loaded successfully.")
except requests.RequestException as e:
    print(f"[ERROR loading context from URL] {e}")
    text_context = "No text context available."

# System prompt: fixes the assistant's persona and restricts answers to the
# policy context interpolated below (whatever text_context ended up holding).
system_prompt = f"""
You are Caramel AI, a fair and approachable Human Resources Manager.
Your mission is to explain workplace policies, provide general advice on employee relations,
and answer questions about recruitment and professional development.
Always promote a positive and inclusive work environment.
⚡ Important:
- Your first response in any new conversation must be to introduce yourself as:
“Caramel AI – AI Human Resource Manager, built at HERE AND NOW AI – Artificial Intelligence Research Institute.”
- You must use ONLY the provided Workplace Policy Context to answer questions. If the answer
is not in the context, say "I'm sorry, but that information is not available in my knowledge base."
--- Workplace Policy Context ---
{text_context}
"""

# Single module-level model instance, shared by get_response() below.
model = genai.GenerativeModel(
    model_name="gemini-1.5-flash",
    system_instruction=system_prompt
)
# --- CORE FUNCTION ---
# (This existing function is unchanged)
def get_response(message: str, history: list) -> str:
    """Return the model's reply to *message* given the prior conversation.

    Args:
        message: The latest user message.
        history: Prior turns as a list of {"role", "parts"} dicts, in the
            format expected by the Gemini SDK.

    Returns:
        The model's reply text (stripped), or a user-facing error string
        if the API call fails for any reason.
    """
    # Append the new user turn without mutating the caller's history list.
    turns = list(history)
    turns.append({"role": "user", "parts": [message]})
    try:
        reply = model.generate_content(turns)
        return reply.text.strip()
    except Exception as exc:
        # Boundary handler: surface the failure as a friendly message
        # instead of propagating an exception into the UI layer.
        print(f"Error during API call: {exc}")
        return f"⚠️ I'm sorry, I encountered an error. Please try again. Error: {exc}"
# --- NEW FEATURE: FILE PROCESSING FUNCTION ---
def extract_text_from_file(file_obj):
    """
    Extracts text from an uploaded file object (supports .txt and .pdf).

    Args:
        file_obj: A file object from Gradio's gr.File component; must expose
            a ``.name`` attribute holding the on-disk path.

    Returns:
        A string containing the extracted text, or an "Error: ..." message
        (missing file, unsupported type, or a processing failure).
    """
    if file_obj is None:
        return "Error: No file object received."

    file_path = file_obj.name
    print(f"Processing file: {file_path}")
    lower_path = file_path.lower()
    try:
        if lower_path.endswith('.txt'):
            with open(file_path, 'r', encoding='utf-8') as f:
                return f.read()
        elif lower_path.endswith('.pdf'):
            reader = PyPDF2.PdfReader(file_path)
            # Extract each page exactly once.  The original called
            # page.extract_text() twice per page (once as the comprehension
            # filter, once for the value), re-running the extraction work
            # for every page.  Pages that yield no text are skipped.
            text_chunks = []
            for page in reader.pages:
                page_text = page.extract_text()
                if page_text:
                    text_chunks.append(page_text)
            return "\n".join(text_chunks)
        else:
            return "Error: Unsupported file type. Please upload a .txt or .pdf file."
    except Exception as e:
        # Best-effort boundary: report the failure to the user rather than
        # crashing the chat UI on a corrupt or unreadable upload.
        print(f"Error processing file {file_path}: {e}")
        return f"Error: Could not process the file. It may be corrupted. Details: {e}"