# GGUF
# conversational
# RiShreAI / brain.py
# rexprimematrix's picture
# Create brain.py
# 9933fc6 verified
from flask import Flask, request, jsonify
from flask_cors import CORS
from gpt4all import GPT4All
import os
app = Flask(__name__)

# Allow cross-origin requests so any frontend (browser client) can reach the API.
CORS(app)

# --- CONFIGURATION ---
# The GGUF model file is loaded from the current directory; gpt4all will
# download it when it is not already present (allow_download=True).
MODEL_NAME = "Phi-3-mini-4k-instruct-q4.gguf"
REPO_ID = "rexprimematrix/RiShreAI"  # Hugging Face model repository

print(f"🔄 RiShre AI is waking up... Loading {MODEL_NAME}")

# Bind a sentinel first so request handlers can detect a failed load
# (otherwise a load failure leaves `model` undefined and later requests
# crash with a NameError instead of a clear error message).
model = None
try:
    model = GPT4All(MODEL_NAME, model_path=".", allow_download=True)
    print("✅ RiShre AI Core is now ONLINE and Ready!")
except Exception as e:
    # Keep the server up even if the model failed to load; /api/chat
    # will report the problem per-request.
    print(f"❌ Critical Error: {e}")
@app.route('/', methods=['GET'])
def health_check():
    """Liveness probe: return a plain-text confirmation that the server is up."""
    status_message = "RiShre AI Server is Running!"
    return status_message
@app.route('/api/chat', methods=['POST'])
def chat():
    """Generate an AI reply for a POSTed JSON message.

    Expects a JSON body like ``{"message": "..."}``.
    Returns ``{"text": <reply>}`` on success, 400 when the message is
    missing or the body is not valid JSON, 503 when the model failed to
    load at startup, and 500 with the error text on generation failure.
    """
    try:
        # silent=True yields None (instead of raising) for a missing or
        # invalid JSON body, so malformed requests get a clean 400
        # rather than an opaque 500.
        data = request.get_json(silent=True) or {}
        user_msg = data.get("message", "")
        if not user_msg:
            return jsonify({"error": "No message provided"}), 400
        # Guard against a model that failed to load at startup.
        if model is None:
            return jsonify({"error": "Model not loaded"}), 503
        # A fresh chat session per request: no state shared between requests.
        with model.chat_session():
            response = model.generate(prompt=user_msg, max_tokens=300)
        return jsonify({"text": response})
    except Exception as e:
        return jsonify({"error": str(e)}), 500
if __name__ == "__main__":
    # Hugging Face Spaces only exposes port 7860, so bind to it on all
    # interfaces.
    server_port = 7860
    app.run(host="0.0.0.0", port=server_port)