# app.py – Omantel Insurance Q&A (RAG) with local logo at top-center
import os
import logging
import gradio as gr
from pinecone import Pinecone, ServerlessSpec
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, StorageContext, Settings
from llama_index.vector_stores.pinecone import PineconeVectorStore
from llama_index.embeddings.openai import OpenAIEmbedding
from llama_index.llms.openai import OpenAI
# ===== CONFIG =====
PINECONE_API_KEY = os.getenv("PINECONE_API_KEY")
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
PINECONE_INDEX_NAME = os.getenv("PINECONE_INDEX_NAME", "dds-insurance-index")
PINECONE_REGION = os.getenv("PINECONE_REGION", "us-east-1")
PINECONE_CLOUD = os.getenv("PINECONE_CLOUD", "aws")
EMBED_MODEL = os.getenv("EMBED_MODEL", "text-embedding-3-small") # 1536-dim
LLM_MODEL = os.getenv("LLM_MODEL", "gpt-4o-mini")
DATA_DIR = "data"
DEFAULT_TOP_K = 4 # internal similarity_top_k
# ---- Local logo (commit this image to your Space repo) ----
LOGO_PATH = os.path.join(DATA_DIR, "Omantel_Logo_new.png")
if not PINECONE_API_KEY:
    raise RuntimeError("Missing PINECONE_API_KEY (Space → Settings → Variables).")
if not OPENAI_API_KEY:
    raise RuntimeError("Missing OPENAI_API_KEY (Space → Settings → Variables).")
if not os.path.exists(LOGO_PATH):
    raise RuntimeError("Logo not found: data/Omantel_Logo_new.png (commit it to your Space repo).")
logging.basicConfig(level=logging.INFO)
log = logging.getLogger("dds-space")
# ===== LlamaIndex / Pinecone =====
Settings.embed_model = OpenAIEmbedding(model=EMBED_MODEL, api_key=OPENAI_API_KEY)
Settings.llm = OpenAI(model=LLM_MODEL, api_key=OPENAI_API_KEY)
pc = Pinecone(api_key=PINECONE_API_KEY)
def ensure_index(name: str, dim: int = 1536):
    """Return a handle to the Pinecone index, creating it first if it does not exist."""
    names = [i["name"] for i in pc.list_indexes()]
    if name not in names:
        log.info(f"Creating Pinecone index '{name}' (dim={dim})...")
        pc.create_index(
            name=name,
            dimension=dim,
            metric="cosine",
            spec=ServerlessSpec(cloud=PINECONE_CLOUD, region=PINECONE_REGION),
        )
    return pc.Index(name)
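
# NOTE (assumption, not wired into ensure_index above): pc.create_index() can return before a
# newly created serverless index is ready to accept writes. A small wait such as this helper,
# built on the SDK's describe_index() status field, could be called right after creation.
def wait_until_ready(name: str, timeout_s: int = 60):
    import time
    deadline = time.time() + timeout_s
    while time.time() < deadline:
        if pc.describe_index(name).status["ready"]:
            return
        time.sleep(1)
    raise TimeoutError(f"Pinecone index '{name}' was not ready after {timeout_s}s")
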
pinecone_index = ensure_index(PINECONE_INDEX_NAME, dim=1536)
vector_store = PineconeVectorStore(pinecone_index=pinecone_index)
def bootstrap_index():
    """Load documents from data/ and upsert their embeddings into the Pinecone index."""
    if not os.path.isdir(DATA_DIR):
        raise RuntimeError("No 'data/' directory found. Commit your documents to data/ in the Space repo.")
    docs = SimpleDirectoryReader(DATA_DIR).load_data()
    if not docs:
        raise RuntimeError("No documents found in data/. Add e.g., data/insurance.pdf")
    storage_ctx = StorageContext.from_defaults(vector_store=vector_store)
    VectorStoreIndex.from_documents(docs, storage_context=storage_ctx, show_progress=True)

bootstrap_index()
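# NOTE (assumption): calling bootstrap_index() unconditionally re-embeds and re-upserts every
# document on each restart, which can accumulate duplicate vectors in Pinecone. A guard along
# these lines, using the SDK's describe_index_stats(), could replace the call above:
#
#     if pinecone_index.describe_index_stats().total_vector_count == 0:
#         bootstrap_index()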
def answer(query: str) -> str:
    """Query the Pinecone-backed index and return the LLM-synthesized answer."""
    if not query or not query.strip():
        return "Please enter a question (or select one from the FAQ list)."
    index = VectorStoreIndex.from_vector_store(vector_store)
    engine = index.as_query_engine(similarity_top_k=DEFAULT_TOP_K)
    resp = engine.query(query)
    return str(resp)
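
# NOTE (assumption, not used by the UI): the LlamaIndex Response also exposes the retrieved
# chunks via source_nodes, so a variant like this could append simple file-name citations.
def answer_with_sources(query: str) -> str:
    index = VectorStoreIndex.from_vector_store(vector_store)
    resp = index.as_query_engine(similarity_top_k=DEFAULT_TOP_K).query(query)
    files = {n.node.metadata.get("file_name", "unknown") for n in resp.source_nodes}
    return f"{resp}\n\nSources: {', '.join(sorted(files))}"
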
FAQS = [
    "",
    "What benefits are covered under the policy?",
    "How do I file a claim and what documents are required?",
    "What are the exclusions and limitations?",
    "Is pre-authorization needed for hospitalization?",
    "What is the reimbursement timeline?",
    "How are outpatient vs inpatient services handled?",
    "How can I check my network hospitals/clinics?",
    "What is the co-pay or deductible policy?",
]
def use_faq(selected_faq: str, free_text: str):
    """Prefer the selected FAQ over free text; return the prompt actually used and its answer."""
    prompt = (selected_faq or "").strip() or (free_text or "").strip()
    if not prompt:
        return "", "Please select a FAQ or type your question."
    return prompt, answer(prompt)
# ===== UI =====
CSS = """
.header { display:flex; flex-direction:column; align-items:center; gap:6px; }
.logo img { width:300px; height:300px; object-fit:contain; } /* fixed 300x300 */
.title { text-align:center; font-weight:700; font-size:1.4rem; margin:6px 0 0 0; }
.subnote { text-align:center; margin-top:-2px; opacity:0.8; }
"""
with gr.Blocks(css=CSS, theme=gr.themes.Soft()) as demo:
    with gr.Row():
        # Raw <div> tags emitted from separate gr.Markdown() calls do not wrap sibling
        # components, so the header styling is attached to the Column via elem_classes.
        with gr.Column(elem_classes=["header"]):
            gr.Image(value=LOGO_PATH, show_label=False, elem_classes=["logo"])
            gr.Markdown(
                "<h1 class='title'>Omantel Insurance Q&A – AI Assistant</h1>"
                "<p class='subnote'>Ask about coverage, claims, exclusions, and more – powered by LlamaIndex + Pinecone</p>"
            )
    with gr.Row():
        with gr.Column(scale=1):
            gr.Markdown("### Ask from Frequently Asked Questions")
            faq = gr.Dropdown(choices=FAQS, value=FAQS[0], label="Select a common question")
            gr.Markdown("### Or type your question")
            user_q = gr.Textbox(
                label="Your question",
                placeholder="e.g., What is covered under outpatient benefits?",
                lines=2,
            )
            ask_btn = gr.Button("Ask", variant="primary")
        with gr.Column(scale=1):
            chosen_prompt = gr.Textbox(label="Query sent", interactive=False)
            answer_box = gr.Markdown()

    ask_btn.click(use_faq, inputs=[faq, user_q], outputs=[chosen_prompt, answer_box])
if __name__ == "__main__":
    demo.launch()