"""RiShre AI backend: a small Flask service that proxies chat requests
to a locally loaded Mistral-7B GGUF model via ctransformers."""

import os  # kept: may be used by deployment tooling elsewhere — TODO confirm

from flask import Flask, request, jsonify
from flask_cors import CORS
from ctransformers import AutoModelForCausalLM

app = Flask(__name__)
CORS(app)  # allow cross-origin calls from the web frontend

print("🛡️ RiShre Security: Initializing Stable Core...")

# Loaded once at startup; None signals "core offline" to the /api/chat route.
model = None
try:
    print("📥 Loading Mistral-7B (No-Build Mode)...")
    # Mistral v0.3 is 100% compatible with ctransformers
    model = AutoModelForCausalLM.from_pretrained(
        "bartowski/Mistral-7B-Instruct-v0.3-GGUF",
        model_file="Mistral-7B-Instruct-v0.3-Q4_K_M.gguf",
        model_type="mistral",
        context_length=8192,  # large context for handling multiple files
        threads=4,
    )
    print("🚀 RiShre AI: Core Online & Ready.")
except Exception as e:
    # Best-effort startup: the server still comes up so /api/chat can
    # report "Core Offline" instead of the process dying outright.
    print(f"❌ Ignition Failed: {e}")


@app.route("/api/chat", methods=["POST"])
def chat():
    """Generate a model response for a chat message plus optional file context.

    Expects JSON: {"message": str, "files": str (optional)}.
    Returns {"text": str} on success, {"error": str} with 400/500 otherwise.
    """
    if model is None:
        return jsonify({"error": "Core Offline"}), 500
    try:
        # silent=True: non-JSON bodies yield None instead of raising,
        # so fall back to {} rather than crashing on .get().
        data = request.get_json(silent=True) or {}
        user_msg = data.get("message", "")
        context_files = data.get("files", "")
        if not user_msg:
            return jsonify({"error": "Empty message"}), 400

        system_prompt = (
            "You are RiShre AI, created by Badge94 on 17 March 2026. "
            "Identify the file origin like .ts, .tsx, .html, .css for all code blocks. "
            "Help the user build their applications."
        )
        # Mistral-Instruct prompt format: everything inside [INST] ... [/INST].
        prompt = (
            f"[INST] {system_prompt}\n\n"
            f"Context Files:\n{context_files}\n\n"
            f"Task: {user_msg} [/INST]"
        )

        # Generating response
        response = model(prompt, max_new_tokens=1024, temperature=0.7)
        return jsonify({"text": response})
    except Exception as e:
        # Top-level route boundary: report the failure to the client.
        return jsonify({"error": str(e)}), 500


@app.route("/")
def health():
    """Liveness probe endpoint."""
    return "RiShre AI: 100% Protected"


if __name__ == "__main__":
    # 0.0.0.0 so the service is reachable inside a container; port 7860
    # matches the Hugging Face Spaces convention.
    app.run(host="0.0.0.0", port=7860)