CyberCoder225 committed on
Commit
7a43222
·
verified ·
1 Parent(s): b10ce05

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +34 -10
app.py CHANGED
@@ -1,19 +1,33 @@
1
  from flask import Flask, request, jsonify
2
- from flask_cors import CORS # <--- Add this!
3
  from brain import MairaBrain
4
 
5
  app = Flask(__name__)
6
- CORS(app) # <--- This allows your HTML to talk to the API!
7
 
8
- # Config
9
- REPO_ID = "CyberCoder225/maira-model"
10
- FILENAME = "SmolLM2-360M-Instruct.Q4_K_M.gguf"
 
11
 
12
- maira = MairaBrain(REPO_ID, FILENAME)
 
 
 
 
 
 
 
 
 
13
 
14
  @app.route('/', methods=['GET'])
15
  def home():
16
- return jsonify({"status": "online", "version": "2.1 (Modular)"})
 
 
 
 
17
 
18
  @app.route('/chat', methods=['POST'])
19
  def chat():
@@ -21,11 +35,21 @@ def chat():
21
  data = request.json
22
  user_id = data.get("user_id", "guest")
23
  user_input = data.get("message", "")
 
 
24
 
25
- answer = maira.get_response(user_id, user_input)
 
 
 
 
 
 
26
 
27
- # We match the key 'maira' to your brain's output
28
- return jsonify({"maira": answer})
 
 
29
  except Exception as e:
30
  return jsonify({"error": str(e)}), 500
31
 
 
1
from flask import Flask, request, jsonify
from flask_cors import CORS  # allows the browser front-end (other origin) to call this API
from brain import MairaBrain

app = Flask(__name__)
CORS(app)

# --- Configuration for both models ---
# Model 1: Small/Fast
REPO_SMALL = "CyberCoder225/maira-model"
FILE_SMALL = "SmolLM2-360M-Instruct.Q4_K_M.gguf"

# Model 2: Medium/Smart (Llama-3.2-1B)
REPO_MEDIUM = "bartowski/Llama-3.2-1B-Instruct-GGUF"
FILE_MEDIUM = "Llama-3.2-1B-Instruct-Q4_K_M.gguf"

# Initialize both brains at import time: startup is slow, but every request
# can then dispatch to an already-loaded model.
# NOTE(review): assumes MairaBrain(repo_id, filename) fetches/loads the GGUF
# from the given HF repo — confirm against brain.py.
print("🌌 Initializing Maira Small...")
maira_small = MairaBrain(REPO_SMALL, FILE_SMALL)

print("🌟 Initializing Maira Medium...")
maira_medium = MairaBrain(REPO_MEDIUM, FILE_MEDIUM)
23
 
24
@app.route('/', methods=['GET'])
def home():
    """Health-check endpoint: report API status, version, and model tiers."""
    payload = {
        "status": "online",
        "version": "5.0 (Multi-Neural)",
        "models": ["small", "medium"],
    }
    return jsonify(payload)
31
 
32
@app.route('/chat', methods=['POST'])
def chat():
    """Chat endpoint.

    Expects JSON: {"user_id": str, "message": str, "model_type": "small"|"medium"}.
    Returns {"maira": <answer>, "metadata": {"model": <model name used>}}.
    """
    try:
        # silent=True: a missing or malformed JSON body yields None instead of
        # a werkzeug 4xx exception that the broad except below would otherwise
        # misreport as a 500 server error.
        data = request.get_json(silent=True)
        if data is None:
            return jsonify({"error": "request body must be JSON"}), 400

        user_id = data.get("user_id", "guest")
        user_input = data.get("message", "")
        # Get the model choice from the request, default to 'small'
        model_type = data.get("model_type", "small")

        # Choose the brain based on user preference; anything other than
        # "medium" (including typos) falls back to the small model.
        if model_type == "medium":
            answer = maira_medium.get_response(user_id, user_input)
            model_used = "maira-medium"
        else:
            answer = maira_small.get_response(user_id, user_input)
            model_used = "maira-small"

        return jsonify({
            "maira": answer,
            "metadata": {"model": model_used}
        })
    except Exception as e:
        # Top-level boundary catch: surface unexpected failures as a JSON 500.
        return jsonify({"error": str(e)}), 500
55