# Veda-Chat / app.py
# Author: vibhansh (Hugging Face Space)
import gradio as gr
from huggingface_hub import hf_hub_download
from llama_cpp import Llama
# 1. Download optimal model for Free CPU
# Model coordinates hoisted into constants so they are easy to swap out.
_REPO_ID = "vibhansh/Veda-8B-v1-Cognitive"
_MODEL_FILE = "Veda-8B-v1-Q4_K_M.gguf"  # Q4_K_M quant fits free-tier CPU RAM

print("⏳ Veda model download ho raha hai...")
model_path = hf_hub_download(repo_id=_REPO_ID, filename=_MODEL_FILE)

# 2. Setup Veda — 2048-token context window
llm = Llama(model_path=model_path, n_ctx=2048)
def chat_with_veda(message, history):
    """Generate a Veda-8B reply to *message*, conditioned on the chat history.

    Args:
        message: The latest user utterance (str).
        history: Prior turns supplied by gr.ChatInterface — either a list of
            (user, assistant) pairs or, in newer Gradio, a list of
            {"role": ..., "content": ...} dicts. Both shapes are handled.

    Returns:
        The model's reply text, stripped of surrounding whitespace.
    """
    # Enforcing Veda's Identity
    parts = [
        "System: You are Veda-8B, a cognitive AI specialized in architecture and logic."
    ]
    # Replay prior turns so the model actually sees the conversation.
    # (Previously `history` was ignored, making every turn stateless.)
    for turn in history or []:
        if isinstance(turn, dict):  # Gradio "messages" format
            role = "User" if turn.get("role") == "user" else "AI"
            parts.append(f"{role}: {turn.get('content', '')}")
        else:  # tuple/list format: (user_message, assistant_message)
            user_msg, ai_msg = turn[0], turn[1]
            parts.append(f"User: {user_msg}")
            if ai_msg:
                parts.append(f"AI: {ai_msg}")
    parts.append(f"User: {message}")
    parts.append("AI:")
    prompt = "\n".join(parts)

    # Stop only at the next user turn — stopping on bare "\n" truncated
    # every answer to a single line.
    output = llm(prompt, max_tokens=256, stop=["User:"], echo=False)
    return output["choices"][0]["text"].strip()
# 3. Professional Chat UI — wire the generator into Gradio's chat widget.
demo = gr.ChatInterface(
    fn=chat_with_veda,
    title="Veda-8B-v1-Cognitive 🪐",
    description="The Sovereign Intelligence. Optimized for architectural logic.",
    theme="soft",
)

# Launch only when executed as a script (Spaces runs this module directly).
if __name__ == "__main__":
    demo.launch()