| """ |
| Chat-with-my-experience β Gradio UI |
| author: Arash Nicoomanesh |
| """ |
| import gradio as gr |
| import traceback |
| import logging |
| from datetime import datetime |
| import os, urllib.request, pathlib |
| from processing import load_or_create_index |
| from qa import get_qa_chain, ask_chain |
|
|
| |
| |
| |
# --- Model bootstrap ----------------------------------------------------
# Quantized Gemma-2 2B GGUF checkpoint fetched on first start and cached
# next to this file under models/.
MODEL_URL = "https://huggingface.co/unsloth/gemma-2-it-GGUF/resolve/main/gemma-2-2b-it.q3_k_m.gguf"
MODEL_PATH = pathlib.Path(__file__).with_name("models") / "gemma-2-2b-it.q3_k_m.gguf"

# parents=True makes this robust if intermediate directories are missing
# (exist_ok alone raises FileNotFoundError in that case).
MODEL_PATH.parent.mkdir(parents=True, exist_ok=True)

if not MODEL_PATH.exists():
    print("Downloading model (1.1 GB) ...")
    try:
        urllib.request.urlretrieve(MODEL_URL, MODEL_PATH)
    except Exception:
        # Drop a partially downloaded file so the existence check above
        # does not mistake a truncated model for a complete one next run.
        MODEL_PATH.unlink(missing_ok=True)
        raise
    print("Model ready.")

# Point the project config at the ensured model file BEFORE the QA chain
# below is built, since get_qa_chain reads Config.MODEL_PATH.
import config
config.Config.MODEL_PATH = str(MODEL_PATH)
|
|
| |
| |
| |
# --- Logging ------------------------------------------------------------
# Mirror every record to stderr and append it to a persistent log file.
_LOG_FORMAT = "%(asctime)s %(levelname)s %(message)s"
_LOG_HANDLERS = [
    logging.StreamHandler(),
    logging.FileHandler("app_error.log", mode="a"),
]
logging.basicConfig(level=logging.INFO, format=_LOG_FORMAT, handlers=_LOG_HANDLERS)

# Module-scoped logger used throughout this script.
logger = logging.getLogger(__name__)
|
|
| |
| |
| |
| logger.info("Loading FAISS index β¦") |
| vector_store = load_or_create_index() |
| logger.info("Building QA chain β¦") |
| qa_chain = get_qa_chain(vector_store) |
|
|
| |
| |
| |
def chat(message: str, history: list) -> str:
    """Answer one chat turn through the retrieval-QA chain.

    Args:
        message: Raw question text from the Gradio textbox.
        history: Prior turns supplied by gr.ChatInterface; unused because
            each question is answered statelessly.

    Returns:
        The chain's answer, or a user-facing error/help string.
    """
    # Reject empty or whitespace-only submissions early.
    if not message or not message.strip():
        return "Please type a question."

    try:
        answer = ask_chain(qa_chain, message.strip(), return_sources=False)
        # Truncate the answer in the log to keep log lines readable.
        logger.info("Q: %s | A: %s", message, answer[:100])
        return answer
    except Exception as e:
        # logger.exception records the active traceback automatically,
        # replacing the manual traceback.format_exc() round-trip.
        logger.exception("QA error for question: %s", message)
        return f"β οΈ Error running QA chain: {e} (details in server log)"
|
|
| |
| |
| |
# Example prompts displayed beneath the chat box.
_EXAMPLE_QUESTIONS = [
    "What is your biggest AI project?",
    "Explain your clinical triage agent",
    "What LLM optimisation techniques have you used?",
    "How would you describe your overall AI/ML expertise?",
]

# Gradio chat UI wired to chat(); example answers are not pre-computed.
demo = gr.ChatInterface(
    fn=chat,
    title="Chat with my experience β Arash Nicoomanesh",
    description="Ask anything about my career, projects, or skills.",
    examples=_EXAMPLE_QUESTIONS,
    cache_examples=False,
)
|
|
if __name__ == "__main__":
    # Serve on all interfaces with a public share link; hide the API page.
    launch_options = dict(
        server_name="0.0.0.0",
        server_port=7860,
        share=True,
        show_api=False,
    )
    demo.launch(**launch_options)