Dreamy0 commited on
Commit
509be6b
·
verified ·
1 Parent(s): 722315c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +111 -120
app.py CHANGED
@@ -1,120 +1,111 @@
1
- from flask import Flask, request, jsonify, send_from_directory
2
- from flask_cors import CORS
3
- import os
4
- import requests
5
- import random
6
-
7
- # Serve UI from / (but static files live in 'ui/')
8
- app = Flask(__name__, static_folder='ui', static_url_path='/')
9
- CORS(app, resources={r"/*": {"origins": "*"}})
10
-
11
- # ---------- UI ----------
12
- @app.route('/', methods=['GET'])
13
- def index():
14
- return app.send_static_file('index.html')
15
-
16
- # Serve assets from /ui/... and fallback to SPA (GET only)
17
- @app.route('/<path:path>', methods=['GET'])
18
- def spa_or_asset(path):
19
- file_path = os.path.join(app.static_folder, path)
20
- if os.path.isfile(file_path):
21
- return send_from_directory(app.static_folder, path)
22
- return send_from_directory(app.static_folder, 'index.html')
23
-
24
- # Quiet the favicon noise
25
- @app.route('/favicon.ico', methods=['GET'])
26
- def favicon():
27
- return ('', 204)
28
-
29
-
30
-
31
- id2label = {
32
- 0: "Level 0: mild roast",
33
- 1: "Level 1: emotional shade",
34
- 2: "Level 2: mild damage",
35
- 3: "Level 3: utter destruction",
36
- 4: "Level 4: character defamation",
37
- 5: "Level 5: soul punch",
38
- 6: "Level 6: historic devastation",
39
- 7: "Level 7: irreparable trauma"
40
- }
41
-
42
- from transformers import AutoModelForCausalLM, AutoTokenizer
43
- import torch
44
-
45
- print("Loading Phi-3-mini model...")
46
-
47
- MODEL_NAME = "microsoft/phi-3-mini-4k-instruct"
48
-
49
- tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
50
- model = AutoModelForCausalLM.from_pretrained(
51
- MODEL_NAME,
52
- torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
53
- device_map="auto"
54
- )
55
-
56
- def ask_phi(prompt):
57
- print("[ask_phi] -> Local Phi-3-mini")
58
- inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
59
-
60
- output_ids = model.generate(
61
- **inputs,
62
- max_new_tokens=250,
63
- temperature=0.8,
64
- do_sample=True
65
- )
66
-
67
- return tokenizer.decode(output_ids[0], skip_special_tokens=True)
68
-
69
-
70
- def _predict_impl():
71
- data = request.get_json(silent=True) or {}
72
- text = data.get("text", "").strip()
73
- if not text:
74
- return jsonify({"error": "Empty input"}), 400
75
-
76
- level_prompt = (
77
- "Classify the emotional damage of the following sentence on a scale from 0 "
78
- "(mild roast) to 7 (irreparable trauma). Respond with a single digit number only.\n\n"
79
- f"Sentence: \"{text}\"\nDamage Level:"
80
- )
81
- level_raw = ask_phi(level_prompt)
82
- print("[Level Output]", level_raw)
83
-
84
- digits = [ch for ch in level_raw if ch.isdigit()]
85
- level = int(digits[0]) if digits else 0
86
- level = max(0, min(level, 7))
87
-
88
- roast_prompt = (
89
- "You are EmoNet’s snarky alter ego — a sasscore machine trained on sarcasm, late-night roasts, and internet chaos. But today, you’re in your flirty villain era. For any input text, reply with a short, teasingly devastating comeback that mixes charm with chaos. It should feel like emotional damage... but make it hot. Keep it clever, spicy, and seductive — no cruelty, no cringe. Think: if Gen Z flirted like a roast battle and a chaotic therapist whispered pickup lines mid-burn.\n\n"
90
- f"Sentence: \"{text}\"\nDamage Level: {level}\nRoast:"
91
- )
92
- roast = ask_phi(roast_prompt).strip()
93
- print("[Roast Output]", roast)
94
-
95
- return jsonify({
96
- "label": id2label[level],
97
- "confidence": round(random.uniform(0.45, 0.9), 3),
98
- "roast": roast
99
- })
100
-
101
- # Single handler serves both routes; explicit OPTIONS so preflight never gets hijacked
102
- @app.route("/predict", methods=["POST", "OPTIONS"])
103
- @app.route("/api/predict", methods=["POST", "OPTIONS"])
104
- def predict():
105
- if request.method == "OPTIONS":
106
- return ('', 204)
107
- return _predict_impl()
108
-
109
-
110
- # Debug helpers: see what routes are live
111
- @app.route("/__routes", methods=["GET"])
112
- def list_routes():
113
- return {"routes": [str(r) for r in app.url_map.iter_rules()]}
114
-
115
- if __name__ == "__main__":
116
- print("=== ROUTE MAP ===")
117
- for r in app.url_map.iter_rules():
118
- print(r, list(r.methods))
119
- print("=================")
120
- app.run(host="0.0.0.0", port=7860, debug=True)
 
1
+ from flask import Flask, request, jsonify, send_from_directory
2
+ from flask_cors import CORS
3
+ import os
4
+ import requests
5
+ import random
6
+
7
# ---------- App setup ----------
# Serve UI from / (but static files live in 'ui/')
app = Flask(__name__, static_folder='ui', static_url_path='/')
# Allow cross-origin requests from any origin on every route so a separately
# hosted front-end can call the /predict endpoints below.
CORS(app, resources={r"/*": {"origins": "*"}})
10
+
11
+ # ---------- UI ----------
12
@app.route('/', methods=['GET'])
def index():
    """Serve the single-page app's entry point at the site root."""
    return send_from_directory(app.static_folder, 'index.html')
15
+
16
# Serve assets from /ui/... and fallback to SPA (GET only)
@app.route('/<path:path>', methods=['GET'])
def spa_or_asset(path):
    """Serve a real static asset when it exists, else fall back to the SPA page."""
    candidate = os.path.join(app.static_folder, path)
    # NOTE(review): send_from_directory guards against path traversal itself,
    # but the isfile probe sees the raw joined path — confirm that's acceptable.
    target = path if os.path.isfile(candidate) else 'index.html'
    return send_from_directory(app.static_folder, target)
23
+
24
# Quiet the favicon noise
@app.route('/favicon.ico', methods=['GET'])
def favicon():
    """Answer favicon requests with an empty 204 so they don't 404 in the logs."""
    body, status = '', 204
    return body, status
28
+
29
+
30
+
31
# Human-readable label for each damage-level index (0..7).
_LEVEL_NAMES = (
    "mild roast",
    "emotional shade",
    "mild damage",
    "utter destruction",
    "character defamation",
    "soul punch",
    "historic devastation",
    "irreparable trauma",
)
id2label = {i: f"Level {i}: {name}" for i, name in enumerate(_LEVEL_NAMES)}
41
+
42
# ---------- OLLAMA BACKEND ----------
# Endpoint of the local Ollama generation API. Overridable via the OLLAMA_URL
# environment variable so the backend host isn't hard-coded to localhost;
# the default preserves the original behavior.
OLLAMA_URL = os.environ.get("OLLAMA_URL", "http://localhost:11434/api/generate")
# Model tag passed to Ollama; overridable via OLLAMA_MODEL the same way.
MODEL_NAME = os.environ.get("OLLAMA_MODEL", "phi3:mini")
45
+
46
def ask_phi(prompt):
    """Send *prompt* to the local Ollama server and return its completion text.

    Posts a non-streaming generate request to OLLAMA_URL using MODEL_NAME and
    returns the model's "response" field with surrounding whitespace stripped.
    On any network/HTTP/JSON failure the error is printed and the sentinel
    string "0" is returned, which downstream parsing treats as damage level 0.

    The previous bare ``except Exception`` also hid programming errors; this
    catches only the failures the call can actually produce.
    """
    print("[ask_phi] -> Ollama")
    try:
        resp = requests.post(
            OLLAMA_URL,
            json={"model": MODEL_NAME, "prompt": prompt, "stream": False},
            timeout=60,  # generation can be slow on CPU; don't hang forever
        )
        resp.raise_for_status()
        # /api/generate with stream=False returns a single JSON object whose
        # "response" field holds the full completion.
        return resp.json().get("response", "").strip()
    except (requests.RequestException, ValueError) as e:
        # RequestException covers connection/timeout/HTTP-status errors;
        # ValueError covers a non-JSON body from resp.json().
        print("Ollama error:", e)
        return "0"
59
+
60
+
61
def _predict_impl():
    """Core handler behind /predict and /api/predict.

    Reads ``{"text": ...}`` from the JSON body, asks the model for a damage
    level (0-7) and then for a roast line, and returns label / confidence /
    roast as JSON. Empty input yields a 400 error response.
    """
    payload = request.get_json(silent=True) or {}
    sentence = payload.get("text", "").strip()
    if not sentence:
        return jsonify({"error": "Empty input"}), 400

    level_prompt = (
        "Classify the emotional damage of the following sentence on a scale from 0 "
        "(mild roast) to 7 (irreparable trauma). Respond with a single digit number only.\n\n"
        f"Sentence: \"{sentence}\"\nDamage Level:"
    )
    level_raw = ask_phi(level_prompt)
    print("[Level Output]", level_raw)

    # Take the first digit the model emitted; default to 0, clamp into 0..7.
    first_digit = next((ch for ch in level_raw if ch.isdigit()), None)
    level = int(first_digit) if first_digit is not None else 0
    if level > 7:
        level = 7
    elif level < 0:
        level = 0

    roast_prompt = (
        "You are EmoNet’s snarky alter ego — a sasscore machine trained on sarcasm, late-night roasts, and internet chaos. But today, you’re in your flirty villain era. For any input text, reply with a short, teasingly devastating comeback that mixes charm with chaos. It should feel like emotional damage... but make it hot. Keep it clever, spicy, and seductive — no cruelty, no cringe. Think: if Gen Z flirted like a roast battle and a chaotic therapist whispered pickup lines mid-burn.\n\n"
        f"Sentence: \"{sentence}\"\nDamage Level: {level}\nRoast:"
    )
    roast = ask_phi(roast_prompt).strip()
    print("[Roast Output]", roast)

    # Confidence is decorative only: a random value in [0.45, 0.9).
    result = {
        "label": id2label[level],
        "confidence": round(random.uniform(0.45, 0.9), 3),
        "roast": roast,
    }
    return jsonify(result)
91
+
92
# Single handler serves both routes; explicit OPTIONS so preflight never gets hijacked
@app.route("/predict", methods=["POST", "OPTIONS"])
@app.route("/api/predict", methods=["POST", "OPTIONS"])
def predict():
    """Answer CORS preflight with an empty 204; delegate POSTs to _predict_impl."""
    if request.method != "OPTIONS":
        return _predict_impl()
    return ('', 204)
99
+
100
+
101
# Debug helper: see what routes are live
@app.route("/__routes", methods=["GET"])
def list_routes():
    """Return every registered URL rule as a string, for debugging."""
    rules = app.url_map.iter_rules()
    return {"routes": list(map(str, rules))}
105
+
106
if __name__ == "__main__":
    # Startup sanity check: print the live route map before serving.
    print("=== ROUTE MAP ===")
    for rule in app.url_map.iter_rules():
        print(rule, list(rule.methods))
    print("=================")
    # NOTE(review): debug=True enables the werkzeug debugger — presumably
    # intended for local development only; confirm before deploying.
    app.run(host="0.0.0.0", port=7860, debug=True)