tcmmichaelb139 committed on
Commit
36bc1d5
·
1 Parent(s): 0c09303

added model inference max tokens and temperature

Browse files
frontend/src/App.jsx CHANGED
@@ -4,6 +4,7 @@ import Options from "./components/Options";
4
  import Recipe from "./components/Recipe";
5
  import { setModelLayers } from "./utils/modelCookies";
6
  import { useAPI } from "./hooks/useAPI";
 
7
 
8
  function App() {
9
  const [models, setModels] = useState([]);
@@ -34,12 +35,12 @@ function App() {
34
  }
35
  },
36
  (error) => {
37
- console.error("Failed to load models:", error);
38
  }
39
  );
40
  }
41
  } catch (error) {
42
- console.error("Error fetching models:", error);
43
  }
44
  };
45
 
 
4
  import Recipe from "./components/Recipe";
5
  import { setModelLayers } from "./utils/modelCookies";
6
  import { useAPI } from "./hooks/useAPI";
7
+ import { devError } from "./utils/devLogger";
8
 
9
  function App() {
10
  const [models, setModels] = useState([]);
 
35
  }
36
  },
37
  (error) => {
38
+ devError("Failed to load models:", error);
39
  }
40
  );
41
  }
42
  } catch (error) {
43
+ devError("Error fetching models:", error);
44
  }
45
  };
46
 
frontend/src/components/InferencePopup.jsx CHANGED
@@ -1,6 +1,8 @@
1
  import { useState } from "react";
2
  import Dropdown from "./Dropdown";
 
3
  import { useAPI } from "../hooks/useAPI";
 
4
 
5
  const InferencePopup = ({ isOpen, onClose, models }) => {
6
  const [selectedModel, setSelectedModel] = useState("");
@@ -8,6 +10,8 @@ const InferencePopup = ({ isOpen, onClose, models }) => {
8
  const [response, setResponse] = useState("");
9
  const [isLoading, setIsLoading] = useState(false);
10
  const [error, setError] = useState("");
 
 
11
 
12
  const { inference, checkTaskStatus } = useAPI();
13
 
@@ -25,22 +29,21 @@ const InferencePopup = ({ isOpen, onClose, models }) => {
25
  const inferenceData = {
26
  model_name: selectedModel,
27
  prompt: prompt,
28
- max_new_tokens: 100, // You can make this configurable if needed
29
- temperature: 0.7, // Add temperature field
30
  };
31
 
32
- console.log("Starting inference with data:", inferenceData);
33
  const result = await inference(inferenceData);
34
- console.log("Got inference result:", result);
35
 
36
  if (result && result.task_id) {
37
- // Check task status for inference result
38
  checkTaskStatus(
39
  result.task_id,
40
  (taskResult) => {
41
- console.log("Inference task result:", taskResult);
42
- if (taskResult && taskResult.generated_text) {
43
- setResponse(taskResult.generated_text);
44
  } else if (taskResult && taskResult.error) {
45
  setError(`Inference failed: ${taskResult.error}`);
46
  } else {
@@ -50,7 +53,7 @@ const InferencePopup = ({ isOpen, onClose, models }) => {
50
  },
51
  (errorMessage) => {
52
  // Error callback for task status check
53
- console.error("Inference task failed:", errorMessage);
54
  setError(`Task failed: ${errorMessage}`);
55
  setIsLoading(false);
56
  }
@@ -66,7 +69,7 @@ const InferencePopup = ({ isOpen, onClose, models }) => {
66
  setIsLoading(false);
67
  }
68
  } catch (err) {
69
- console.error("Inference error:", err);
70
  setError(`Error: ${err.message}`);
71
  setIsLoading(false);
72
  }
@@ -137,9 +140,7 @@ const InferencePopup = ({ isOpen, onClose, models }) => {
137
  </div>
138
  </div>
139
 
140
- {/* Content */}
141
  <div className="p-6 space-y-6 max-h-[calc(90vh-140px)] overflow-y-auto">
142
- {/* Model Selection */}
143
  <Dropdown
144
  label="Select Model"
145
  selectedValue={selectedModel}
@@ -150,7 +151,6 @@ const InferencePopup = ({ isOpen, onClose, models }) => {
150
  searchPlaceholder="Search models..."
151
  />
152
 
153
- {/* Prompt Input */}
154
  <div>
155
  <label className="block text-sm font-medium text-secondary-700 mb-2">
156
  Prompt
@@ -164,6 +164,40 @@ const InferencePopup = ({ isOpen, onClose, models }) => {
164
  />
165
  </div>
166
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
167
  {/* Generate Button */}
168
  <button
169
  onClick={handleInference}
 
1
  import { useState } from "react";
2
  import Dropdown from "./Dropdown";
3
+ import NumberInput from "./NumberInput";
4
  import { useAPI } from "../hooks/useAPI";
5
+ import { devLog, devError } from "../utils/devLogger";
6
 
7
  const InferencePopup = ({ isOpen, onClose, models }) => {
8
  const [selectedModel, setSelectedModel] = useState("");
 
10
  const [response, setResponse] = useState("");
11
  const [isLoading, setIsLoading] = useState(false);
12
  const [error, setError] = useState("");
13
+ const [maxNewTokens, setMaxNewTokens] = useState(512);
14
+ const [temperature, setTemperature] = useState(0.7);
15
 
16
  const { inference, checkTaskStatus } = useAPI();
17
 
 
29
  const inferenceData = {
30
  model_name: selectedModel,
31
  prompt: prompt,
32
+ max_new_tokens: maxNewTokens,
33
+ temperature: temperature,
34
  };
35
 
36
+ devLog("Starting inference with data:", inferenceData);
37
  const result = await inference(inferenceData);
38
+ devLog("Got inference result:", result);
39
 
40
  if (result && result.task_id) {
 
41
  checkTaskStatus(
42
  result.task_id,
43
  (taskResult) => {
44
+ devLog("Inference task result:", taskResult);
45
+ if (taskResult && taskResult.response) {
46
+ setResponse(taskResult.response);
47
  } else if (taskResult && taskResult.error) {
48
  setError(`Inference failed: ${taskResult.error}`);
49
  } else {
 
53
  },
54
  (errorMessage) => {
55
  // Error callback for task status check
56
+ devError("Inference task failed:", errorMessage);
57
  setError(`Task failed: ${errorMessage}`);
58
  setIsLoading(false);
59
  }
 
69
  setIsLoading(false);
70
  }
71
  } catch (err) {
72
+ devError("Inference error:", err);
73
  setError(`Error: ${err.message}`);
74
  setIsLoading(false);
75
  }
 
140
  </div>
141
  </div>
142
 
 
143
  <div className="p-6 space-y-6 max-h-[calc(90vh-140px)] overflow-y-auto">
 
144
  <Dropdown
145
  label="Select Model"
146
  selectedValue={selectedModel}
 
151
  searchPlaceholder="Search models..."
152
  />
153
 
 
154
  <div>
155
  <label className="block text-sm font-medium text-secondary-700 mb-2">
156
  Prompt
 
164
  />
165
  </div>
166
 
167
+ {/* Configuration Section */}
168
+ <div className="grid grid-cols-1 md:grid-cols-2 gap-4">
169
+ <div>
170
+ <label className="block text-sm font-medium text-secondary-700 mb-2">
171
+ Max New Tokens
172
+ </label>
173
+ <NumberInput
174
+ value={maxNewTokens}
175
+ onChange={setMaxNewTokens}
176
+ min={1}
177
+ max={1024}
178
+ step={1}
179
+ className=""
180
+ disabled={isLoading}
181
+ />
182
+ </div>
183
+
184
+ <div>
185
+ <label className="block text-sm font-medium text-secondary-700 mb-2">
186
+ Temperature
187
+ </label>
188
+ <NumberInput
189
+ value={temperature}
190
+ onChange={setTemperature}
191
+ min={0.1}
192
+ max={2.0}
193
+ step={0.1}
194
+ allowDecimals={true}
195
+ className=""
196
+ disabled={isLoading}
197
+ />
198
+ </div>
199
+ </div>
200
+
201
  {/* Generate Button */}
202
  <button
203
  onClick={handleInference}
frontend/src/components/NumberInput.jsx CHANGED
@@ -1,4 +1,4 @@
1
- import { useState, useEffect } from "react";
2
 
3
  const NumberInput = ({
4
  label,
@@ -6,33 +6,83 @@ const NumberInput = ({
6
  onChange,
7
  min = 1,
8
  max = 48,
 
 
9
  className = "",
10
  compact = false,
 
11
  }) => {
12
  const [inputValue, setInputValue] = useState(value?.toString() || "");
 
13
 
14
  useEffect(() => {
15
  setInputValue(value?.toString() || "");
16
  }, [value]);
17
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
18
  const handleChange = (e) => {
19
  const inputVal = e.target.value;
20
- setInputValue(inputVal);
21
- if (inputVal === "") return;
22
- const numValue = parseInt(inputVal);
23
- if (!isNaN(numValue)) {
24
- const clampedValue = Math.max(min, Math.min(max, numValue));
25
- onChange(clampedValue);
26
- if (clampedValue !== numValue) {
27
- setInputValue(clampedValue.toString());
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
28
  }
29
  }
30
  };
31
 
32
  const handleBlur = () => {
33
- if (inputValue === "" || isNaN(parseInt(inputValue))) {
 
 
 
34
  onChange(min);
35
  setInputValue(min.toString());
 
 
 
 
 
36
  }
37
  };
38
 
@@ -53,7 +103,9 @@ const NumberInput = ({
53
  value={inputValue}
54
  onChange={handleChange}
55
  onBlur={handleBlur}
56
- className={`w-full bg-white border-2 border-secondary-300 hover:bg-primary-50 hover:shadow-lg focus:outline-none focus:ring-2 focus:ring-primary-500 transition-all duration-200 text-secondary-800 font-medium ${
 
 
57
  compact ? "p-2 rounded-lg text-sm" : "p-4 rounded-xl"
58
  }`}
59
  min={min}
 
1
+ import { useState, useEffect, useRef } from "react";
2
 
3
  const NumberInput = ({
4
  label,
 
6
  onChange,
7
  min = 1,
8
  max = 48,
9
+ step = 1,
10
+ allowDecimals = false,
11
  className = "",
12
  compact = false,
13
+ disabled = false,
14
  }) => {
15
  const [inputValue, setInputValue] = useState(value?.toString() || "");
16
+ const prevDisabledRef = useRef(disabled);
17
 
18
  useEffect(() => {
19
  setInputValue(value?.toString() || "");
20
  }, [value]);
21
 
22
+ useEffect(() => {
23
+ if (disabled && !prevDisabledRef.current && allowDecimals) {
24
+ const numValue = parseFloat(inputValue);
25
+ if (
26
+ inputValue === "" ||
27
+ inputValue === "." ||
28
+ isNaN(numValue) ||
29
+ numValue < min ||
30
+ numValue > max
31
+ ) {
32
+ const clampedValue = isNaN(numValue)
33
+ ? min
34
+ : Math.max(min, Math.min(max, numValue));
35
+ setInputValue(clampedValue.toString());
36
+ onChange(clampedValue);
37
+ }
38
+ }
39
+ prevDisabledRef.current = disabled;
40
+ }, [disabled, allowDecimals, inputValue, min, max, onChange]);
41
+
42
  const handleChange = (e) => {
43
  const inputVal = e.target.value;
44
+
45
+ if (allowDecimals) {
46
+ if (inputVal === ".") return;
47
+
48
+ setInputValue(inputVal);
49
+
50
+ if (inputVal !== "" && !inputVal.endsWith(".")) {
51
+ const numValue = parseFloat(inputVal);
52
+ if (!isNaN(numValue)) {
53
+ onChange(numValue);
54
+ }
55
+ }
56
+ } else {
57
+ const cleanedInput = inputVal.includes(".")
58
+ ? inputVal.split(".")[0]
59
+ : inputVal;
60
+ setInputValue(cleanedInput);
61
+
62
+ if (cleanedInput === "") return;
63
+ const numValue = parseInt(cleanedInput);
64
+ if (!isNaN(numValue)) {
65
+ const clampedValue = Math.max(min, Math.min(max, numValue));
66
+ onChange(clampedValue);
67
+ if (clampedValue !== numValue) {
68
+ setInputValue(clampedValue.toString());
69
+ }
70
  }
71
  }
72
  };
73
 
74
  const handleBlur = () => {
75
+ if (
76
+ inputValue === "" ||
77
+ isNaN(allowDecimals ? parseFloat(inputValue) : parseInt(inputValue))
78
+ ) {
79
  onChange(min);
80
  setInputValue(min.toString());
81
+ } else if (allowDecimals) {
82
+ const numValue = parseFloat(inputValue);
83
+ const clampedValue = Math.max(min, Math.min(max, numValue));
84
+ onChange(clampedValue);
85
+ setInputValue(clampedValue.toString());
86
  }
87
  };
88
 
 
103
  value={inputValue}
104
  onChange={handleChange}
105
  onBlur={handleBlur}
106
+ step={step}
107
+ disabled={disabled}
108
+ className={`w-full bg-white border-2 border-secondary-300 hover:bg-primary-50 hover:shadow-lg focus:outline-none focus:ring-2 focus:ring-primary-500 transition-all duration-200 text-secondary-800 font-medium disabled:opacity-50 disabled:cursor-not-allowed ${
109
  compact ? "p-2 rounded-lg text-sm" : "p-4 rounded-xl"
110
  }`}
111
  min={min}
frontend/src/components/Options.jsx CHANGED
@@ -4,6 +4,7 @@ import NumberInput from "./NumberInput";
4
  import InferencePopup from "./InferencePopup";
5
  import { setModelLayers } from "../utils/modelCookies";
6
  import { useAPI } from "../hooks/useAPI";
 
7
 
8
  const Options = ({
9
  models,
@@ -51,15 +52,15 @@ const Options = ({
51
  merged_name: mergedName,
52
  };
53
 
54
- console.log("Starting merge with data:", mergeData);
55
  const taskId = await mergeModels(mergeData);
56
- console.log("Got merge task ID:", taskId);
57
 
58
  if (taskId) {
59
  checkTaskStatus(
60
  taskId,
61
  (taskResult) => {
62
- console.log("Merge result:", taskResult);
63
  if (taskResult.response) {
64
  setMergeStatus("Merge successful!");
65
  const newModelName = taskResult.response || mergedName;
@@ -73,14 +74,14 @@ const Options = ({
73
  setIsLoading(false);
74
  },
75
  (error) => {
76
- console.error("Merge task failed:", error);
77
  setMergeStatus(`Merge failed: ${error}`);
78
  setIsLoading(false);
79
  }
80
  );
81
  }
82
  } catch (error) {
83
- console.error("Merge error:", error);
84
  setMergeStatus(`Error: ${error.message}`);
85
  setIsLoading(false);
86
  }
 
4
  import InferencePopup from "./InferencePopup";
5
  import { setModelLayers } from "../utils/modelCookies";
6
  import { useAPI } from "../hooks/useAPI";
7
+ import { devLog, devError } from "../utils/devLogger";
8
 
9
  const Options = ({
10
  models,
 
52
  merged_name: mergedName,
53
  };
54
 
55
+ devLog("Starting merge with data:", mergeData);
56
  const taskId = await mergeModels(mergeData);
57
+ devLog("Got merge task ID:", taskId);
58
 
59
  if (taskId) {
60
  checkTaskStatus(
61
  taskId,
62
  (taskResult) => {
63
+ devLog("Merge result:", taskResult);
64
  if (taskResult.response) {
65
  setMergeStatus("Merge successful!");
66
  const newModelName = taskResult.response || mergedName;
 
74
  setIsLoading(false);
75
  },
76
  (error) => {
77
+ devError("Merge task failed:", error);
78
  setMergeStatus(`Merge failed: ${error}`);
79
  setIsLoading(false);
80
  }
81
  );
82
  }
83
  } catch (error) {
84
+ devError("Merge error:", error);
85
  setMergeStatus(`Error: ${error.message}`);
86
  setIsLoading(false);
87
  }
frontend/src/hooks/useAPI.js CHANGED
@@ -1,4 +1,5 @@
1
  import { useCallback } from "react";
 
2
 
3
  const API_BASE = "https://tcmmichaelb139-evolutiontransformer.hf.space";
4
 
@@ -12,13 +13,13 @@ export const useAPI = () => {
12
 
13
  if (!response.ok) {
14
  const error = `HTTP ${response.status}: ${response.statusText}`;
15
- console.error("Task check failed:", error);
16
  if (errorCallback) errorCallback(error);
17
  return;
18
  }
19
 
20
  const data = await response.json();
21
- console.log("Task status:", data.status);
22
 
23
  if (data.status === "SUCCESS") {
24
  successCallback(data.result);
@@ -29,11 +30,11 @@ export const useAPI = () => {
29
  );
30
  } else if (data.status === "FAILURE") {
31
  const error = data.result || "Task failed";
32
- console.error("Task failed:", error);
33
  if (errorCallback) errorCallback(error);
34
  }
35
  } catch (error) {
36
- console.error("Task check error:", error);
37
  if (errorCallback) errorCallback(error.message);
38
  }
39
  },
@@ -42,25 +43,31 @@ export const useAPI = () => {
42
 
43
  const fetchModels = useCallback(async () => {
44
  try {
45
- console.log("Fetching models...");
46
  const response = await fetch(`${API_BASE}/list_models`, {
47
  method: "POST",
48
  headers: { "Content-Type": "application/json" },
49
  credentials: "include",
50
  });
51
 
 
 
 
 
 
 
52
  const data = await response.json();
53
- console.log("Fetch models response:", data);
54
  return data.task_id;
55
  } catch (error) {
56
- console.error("Fetch models error:", error);
57
  throw error;
58
  }
59
  }, []);
60
 
61
  const mergeModels = useCallback(async (mergeData) => {
62
  try {
63
- console.log("Merging models with data:", mergeData);
64
  const response = await fetch(`${API_BASE}/merge`, {
65
  method: "POST",
66
  headers: { "Content-Type": "application/json" },
@@ -68,18 +75,24 @@ export const useAPI = () => {
68
  credentials: "include",
69
  });
70
 
 
 
 
 
 
 
71
  const data = await response.json();
72
- console.log("Merge response:", data);
73
  return data.task_id;
74
  } catch (error) {
75
- console.error("Merge error:", error);
76
  throw error;
77
  }
78
  }, []);
79
 
80
  const inference = useCallback(async (inferenceData) => {
81
  try {
82
- console.log("Running inference with data:", inferenceData);
83
  const response = await fetch(`${API_BASE}/generate`, {
84
  method: "POST",
85
  headers: { "Content-Type": "application/json" },
@@ -89,15 +102,15 @@ export const useAPI = () => {
89
 
90
  if (!response.ok) {
91
  const error = `HTTP ${response.status}: ${response.statusText}`;
92
- console.error("Inference failed:", error);
93
  throw new Error(error);
94
  }
95
 
96
  const data = await response.json();
97
- console.log("Inference response:", data);
98
  return data;
99
  } catch (error) {
100
- console.error("Inference error:", error);
101
  throw error;
102
  }
103
  }, []);
 
1
  import { useCallback } from "react";
2
+ import { devLog, devError } from "../utils/devLogger";
3
 
4
  const API_BASE = "https://tcmmichaelb139-evolutiontransformer.hf.space";
5
 
 
13
 
14
  if (!response.ok) {
15
  const error = `HTTP ${response.status}: ${response.statusText}`;
16
+ devError("Task check failed:", error);
17
  if (errorCallback) errorCallback(error);
18
  return;
19
  }
20
 
21
  const data = await response.json();
22
+ devLog("Task status:", data.status);
23
 
24
  if (data.status === "SUCCESS") {
25
  successCallback(data.result);
 
30
  );
31
  } else if (data.status === "FAILURE") {
32
  const error = data.result || "Task failed";
33
+ devError("Task failed:", error);
34
  if (errorCallback) errorCallback(error);
35
  }
36
  } catch (error) {
37
+ devError("Task check error:", error);
38
  if (errorCallback) errorCallback(error.message);
39
  }
40
  },
 
43
 
44
  const fetchModels = useCallback(async () => {
45
  try {
46
+ devLog("Fetching models...");
47
  const response = await fetch(`${API_BASE}/list_models`, {
48
  method: "POST",
49
  headers: { "Content-Type": "application/json" },
50
  credentials: "include",
51
  });
52
 
53
+ if (!response.ok) {
54
+ const error = `HTTP ${response.status}: ${response.statusText}`;
55
+ devError("Fetch models failed:", error);
56
+ throw new Error(error);
57
+ }
58
+
59
  const data = await response.json();
60
+ devLog("Fetch models response:", data);
61
  return data.task_id;
62
  } catch (error) {
63
+ devError("Fetch models error:", error);
64
  throw error;
65
  }
66
  }, []);
67
 
68
  const mergeModels = useCallback(async (mergeData) => {
69
  try {
70
+ devLog("Merging models with data:", mergeData);
71
  const response = await fetch(`${API_BASE}/merge`, {
72
  method: "POST",
73
  headers: { "Content-Type": "application/json" },
 
75
  credentials: "include",
76
  });
77
 
78
+ if (!response.ok) {
79
+ const error = `HTTP ${response.status}: ${response.statusText}`;
80
+ devError("Merge failed:", error);
81
+ throw new Error(error);
82
+ }
83
+
84
  const data = await response.json();
85
+ devLog("Merge response:", data);
86
  return data.task_id;
87
  } catch (error) {
88
+ devError("Merge error:", error);
89
  throw error;
90
  }
91
  }, []);
92
 
93
  const inference = useCallback(async (inferenceData) => {
94
  try {
95
+ devLog("Running inference with data:", inferenceData);
96
  const response = await fetch(`${API_BASE}/generate`, {
97
  method: "POST",
98
  headers: { "Content-Type": "application/json" },
 
102
 
103
  if (!response.ok) {
104
  const error = `HTTP ${response.status}: ${response.statusText}`;
105
+ devError("Inference failed:", error);
106
  throw new Error(error);
107
  }
108
 
109
  const data = await response.json();
110
+ devLog("Inference response:", data);
111
  return data;
112
  } catch (error) {
113
+ devError("Inference error:", error);
114
  throw error;
115
  }
116
  }, []);
frontend/src/utils/devLogger.js ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ // Development-only logging utility
2
+ // These logs will only appear in development builds, not production
3
+
4
// Log to the console only in development builds.
// Uses optional chaining (`import.meta.env?.DEV`) so the call is a silent
// no-op — instead of a TypeError — when `import.meta.env` is undefined
// (e.g. when the module is loaded outside Vite: plain Node, test runners
// without the Vite transform, etc.). Under Vite, behavior is unchanged.
export const devLog = (...args) => {
  if (import.meta.env?.DEV) {
    console.log(...args);
  }
};
9
+
10
// Report an error to the console only in development builds.
// Uses optional chaining (`import.meta.env?.DEV`) so the call is a silent
// no-op — instead of a TypeError — when `import.meta.env` is undefined
// (e.g. outside Vite). Under Vite, behavior is unchanged.
export const devError = (...args) => {
  if (import.meta.env?.DEV) {
    console.error(...args);
  }
};
15
+
16
// Emit a console warning only in development builds.
// Uses optional chaining (`import.meta.env?.DEV`) so the call is a silent
// no-op — instead of a TypeError — when `import.meta.env` is undefined
// (e.g. outside Vite). Under Vite, behavior is unchanged.
export const devWarn = (...args) => {
  if (import.meta.env?.DEV) {
    console.warn(...args);
  }
};