File size: 4,308 Bytes
6dad776
eef69b7
6dad776
35ce27f
 
6dad776
 
 
35ce27f
6dad776
 
 
 
35ce27f
6dad776
 
 
 
 
 
35ce27f
 
6dad776
 
35ce27f
6dad776
 
 
 
 
 
 
 
 
 
35ce27f
 
 
 
6dad776
 
35ce27f
6dad776
 
35ce27f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6dad776
35ce27f
6dad776
 
35ce27f
 
 
 
 
6dad776
35ce27f
 
 
 
 
 
6dad776
35ce27f
 
 
6dad776
 
 
 
35ce27f
6dad776
 
35ce27f
6dad776
35ce27f
 
 
6dad776
35ce27f
6dad776
a0c0211
 
35ce27f
6dad776
 
 
 
 
 
 
35ce27f
 
 
 
 
 
 
 
6dad776
 
35ce27f
6dad776
 
35ce27f
 
6dad776
 
 
35ce27f
 
 
 
 
6dad776
35ce27f
6dad776
35ce27f
 
6dad776
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
import gradio as gr
import numpy as np
import onnxruntime as ort
from pathlib import Path
import traceback

# Expected location of the ONNX model, relative to the Space repo root.
MODEL_PATH = Path("model.onnx")

# Lazy-load so the Space doesn't crash on startup if model is missing
_SESSION = None  # onnxruntime.InferenceSession; populated once by load_session()
_INPUT_NAME = None  # name of the model's first input tensor, cached with the session


def load_session():
    """Return the cached ``(session, input_name)`` pair, creating it on first use.

    The session is built lazily so a missing model file surfaces as a readable
    error at inference time rather than crashing the Space on startup.

    Raises:
        FileNotFoundError: if ``model.onnx`` is absent from the Space root.
    """
    global _SESSION, _INPUT_NAME

    if _SESSION is None:
        if not MODEL_PATH.exists():
            raise FileNotFoundError(
                "model.onnx not found in the Space root.\n"
                "Upload your ONNX model to the Space and name it exactly: model.onnx"
            )

        # CPU execution is the most broadly compatible provider on Spaces.
        session = ort.InferenceSession(
            str(MODEL_PATH), providers=["CPUExecutionProvider"]
        )
        _SESSION = session
        _INPUT_NAME = session.get_inputs()[0].name

    return _SESSION, _INPUT_NAME


def predict_5(ret_1, ret_5, sma_ratio, rsi, vol):
    """Run the 5-feature stock-signal model on raw feature values.

    The wrapped ONNX model expects RAW features in exactly this order:
    ``[ret_1, ret_5, sma_ratio, rsi, vol]``. The first output element is
    interpreted as ``p_up`` (probability-like, if the export applied sigmoid).

    Returns:
        ``(p_up, signal, details, status)`` on success, or
        ``(None, None, None, traceback_text)`` on any failure.
    """
    try:
        session, input_name = load_session()
        features = np.array(
            [[ret_1, ret_5, sma_ratio, rsi, vol]], dtype=np.float32
        )
        outputs = session.run(None, {input_name: features})
        p_up = float(np.array(outputs[0]).reshape(-1)[0])

        # Optional display-only thresholds around 0.5 with a HOLD dead zone.
        signal = (
            "BUY (signal)"
            if p_up >= 0.55
            else "SELL (signal)"
            if p_up <= 0.45
            else "HOLD (signal)"
        )

        return (
            p_up,
            signal,
            {
                "input_order": ["ret_1", "ret_5", "sma_ratio", "rsi", "vol"],
                "p_up": p_up,
                "signal": signal,
            },
            "OK",
        )
    except Exception:
        return None, None, None, traceback.format_exc()


def predict_vector(vector_text: str):
    """Run the model on a free-form pasted vector of numbers.

    Accepts comma- and/or whitespace-separated values, e.g.::

        "0.001, 0.01, 0.02, 55, 0.012"

    Returns:
        ``(first_value, full_output_list, status)`` on success, or
        ``(None, None, traceback_text)`` on any failure (bad numbers,
        empty input, missing model, shape mismatch, ...).
    """
    try:
        sess, inp_name = load_session()

        parts = [p for p in vector_text.replace(",", " ").split() if p.strip()]
        if not parts:
            # Fail early with a readable message instead of letting an empty
            # (1, 0) array reach the runtime and produce an opaque shape error.
            raise ValueError(
                "No numeric values found in the input. "
                "Enter comma- or space-separated numbers, "
                "e.g. 0.001, 0.01, 0.02, 55, 0.012"
            )
        vals = [float(p) for p in parts]
        x = np.array([vals], dtype=np.float32)

        y = sess.run(None, {inp_name: x})[0]
        out = np.array(y).reshape(-1)

        return float(out[0]), out.tolist(), "OK"
    except Exception:
        return None, None, traceback.format_exc()


# --- UI definition ----------------------------------------------------------
# Components render in creation order inside each context manager, so the
# layout below mirrors the order of the constructor calls. Do not reorder.
with gr.Blocks(title="ONNX Inference (model.onnx)") as demo:
    gr.Markdown(
        """
# ONNX Inference (Hugging Face Space)

**How to use:**
1. Upload your ONNX model file into the Space repo root and name it **`model.onnx`**
2. Use the **5-Feature Input** tab (recommended) or the **Vector Input** tab.

This Space runs ONNX Runtime on CPU for maximum compatibility.
"""
    )

    # Tab 1: fixed 5-feature form matching predict_5's expected input order.
    with gr.Tab("5-Feature Input (recommended)"):
        with gr.Row():
            ret_1 = gr.Number(label="ret_1", value=0.001)
            ret_5 = gr.Number(label="ret_5", value=0.01)
            sma_ratio = gr.Number(label="sma_ratio", value=0.02)
            rsi = gr.Number(label="rsi", value=55.0)
            vol = gr.Number(label="vol", value=0.012)

        run_btn = gr.Button("Run inference", variant="primary")

        # Outputs map 1:1 onto predict_5's 4-tuple return value.
        p_out = gr.Number(label="p_up (model output)")
        signal_out = gr.Textbox(label="Signal (optional thresholds)", lines=1)
        details_out = gr.JSON(label="Details")
        status_out = gr.Textbox(label="Status / Error", lines=10)

        run_btn.click(
            fn=predict_5,
            inputs=[ret_1, ret_5, sma_ratio, rsi, vol],
            outputs=[p_out, signal_out, details_out, status_out],
        )

    # Tab 2: free-form vector entry for models with other input widths.
    with gr.Tab("Vector Input (any shape)"):
        vector_text = gr.Textbox(
            label="Input vector (comma or space separated)",
            value="0.001, 0.01, 0.02, 55.0, 0.012",
        )
        run_btn2 = gr.Button("Run inference (vector)", variant="primary")

        # Outputs map 1:1 onto predict_vector's 3-tuple return value.
        first_out = gr.Number(label="First output value")
        full_out = gr.JSON(label="Full output array")
        status_out2 = gr.Textbox(label="Status / Error", lines=10)

        run_btn2.click(
            fn=predict_vector,
            inputs=[vector_text],
            outputs=[first_out, full_out, status_out2],
        )

if __name__ == "__main__":
    # Start the web server only when executed as a script.
    demo.launch()