# RiShre-AI / brain.py
# Uploaded by rexprimematrix ("Update brain.py", commit 6273acf, verified)
import os
from flask import Flask, request, Response
from flask_cors import CORS
from llama_cpp import Llama
from huggingface_hub import hf_hub_download
# Flask application instance; CORS is enabled so a separately hosted
# frontend can call the /api/chat endpoint from another origin.
app = Flask(__name__)
CORS(app)
print("🛡️ RiShre Flash: Streaming Mode Init...")
# Download the quantized Mistral-7B-Instruct GGUF weights from the Hugging
# Face Hub (cached locally after the first run).
model_path = hf_hub_download(
    repo_id="bartowski/Mistral-7B-Instruct-v0.3-GGUF",
    filename="Mistral-7B-Instruct-v0.3-Q4_K_M.gguf"
)
# Load the model for CPU inference. Small n_ctx/n_batch/n_threads values
# keep memory and CPU usage low — presumably tuned for a constrained host
# (e.g. a free Hugging Face Space); confirm before raising them.
model = Llama(
    model_path=model_path,
    n_threads=3,    # CPU threads used for generation
    n_ctx=1024,     # context window (prompt + completion tokens)
    n_batch=8       # prompt-processing batch size
)
# System prompt prepended to every user message: fixes the assistant's
# identity, behavioral rules, and answer style.
system_prompt = """
You are RiShre AI, created by Badge94.
IDENTITY RULES:
- You are NOT Mistral, NOT Meta, NOT OpenAI.
- You are ONLY RiShre AI.
- You were created on 17 March 2026.
BEHAVIOR RULES:
- Always give clear, direct answers.
- Never guess unknown facts.
- If you don't know something, say:
"I don't have that information yet."
ABOUT RiShre:
- RiShre is a social media and AI platform.
- Built by Badge94.
- Focus: AI, community, innovation, privacy.
STYLE:
- Confident
- Smart
- Slightly futuristic
"""
# Flask routes (brain.py or app.py)
@app.route("/api/chat", methods=["POST"])
def stream():
    """Stream a model completion for the POSTed chat message via SSE.

    Expects a JSON body of the form {"message": "<user text>"} and responds
    with a text/event-stream in which each generated token is sent as a
    "data:" event, terminated by a final "data: [DONE]" event.
    """
    # silent=True makes get_json() return None (instead of raising a 400)
    # on a missing or malformed JSON body; fall back to an empty payload.
    data = request.get_json(silent=True) or {}
    user_msg = data.get("message", "")
    # Mistral-instruct template: system rules and the user turn share one
    # [INST] ... [/INST] block.
    prompt = f"<s>[INST] {system_prompt}\n\n{user_msg} [/INST]"

    def generate():
        # Streaming completion: each chunk carries one token of text.
        for chunk in model(
            prompt,
            max_tokens=256,    # allow reasonably long answers
            temperature=0.7,
            stream=True,
        ):
            token = chunk["choices"][0]["text"]
            if not token:
                continue
            # SSE events are newline-delimited, so a token that itself
            # contains "\n" must be split into one "data:" line per text
            # line; otherwise the extra lines fall outside any data field
            # and SSE clients silently drop them.
            for line in token.split("\n"):
                yield f"data: {line}\n"
            yield "\n"  # blank line terminates this event
        yield "data: [DONE]\n\n"

    return Response(generate(), mimetype="text/event-stream")
@app.route("/")
def home():
    """Health-check endpoint confirming the server is running."""
    status_message = "RiShre Streaming AI Running ⚡"
    return status_message
if __name__ == "__main__":
    # Bind on all interfaces; port 7860 is the Hugging Face Spaces default.
    # threaded=True lets Flask serve multiple streaming responses at once.
    app.run(host="0.0.0.0", port=7860, threaded=True)