Upload LSTM model artifacts (2025-10-04)
Browse files- README.md +29 -3
- config.json +20 -0
- preprocessing.json +80 -0
- pytorch_model.bin +3 -0
- training_stats.json +86 -0
README.md
CHANGED
|
@@ -1,3 +1,29 @@
|
|
| 1 |
-
---
|
| 2 |
-
|
| 3 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
language: en
|
| 3 |
+
library_name: pytorch
|
| 4 |
+
model_name: DataSynthis_ML_JobTask
|
| 5 |
+
tags:
|
| 6 |
+
- time-series
|
| 7 |
+
- forecasting
|
| 8 |
+
- lstm
|
| 9 |
+
- tesla
|
| 10 |
+
task: time-series-forecasting
|
| 11 |
+
license: mit
|
| 12 |
+
---
|
| 13 |
+
|
| 14 |
+
# DataSynthis_ML_JobTask — LSTM (Multivariate)
|
| 15 |
+
|
| 16 |
+
This repo hosts a PyTorch LSTM with attention for multivariate stock forecasting (OHLCV + technicals).
|
| 17 |
+
|
| 18 |
+
Included artifacts:
|
| 19 |
+
- pytorch_model.bin
|
| 20 |
+
- config.json
|
| 21 |
+
- training_stats.json
|
| 22 |
+
- preprocessing.json
|
| 23 |
+
|
| 24 |
+
Quick usage:
|
| 25 |
+
1) Recreate the LSTM architecture from the notebook and load `pytorch_model.bin` into it
|
| 26 |
+
2) Use `config.json` for feature list and `seq_length`
|
| 27 |
+
3) Apply scaling with the parameters in `preprocessing.json`:
|
| 28 |
+
- Transform: X_scaled = X * scale_ + min_
|
| 29 |
+
- Inverse: X = (X_scaled - min_) / scale_
|
config.json
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"model_name": "DataSynthis_ML_JobTask",
|
| 3 |
+
"model_type": "lstm_forecaster",
|
| 4 |
+
"framework": "pytorch",
|
| 5 |
+
"input_size": 10,
|
| 6 |
+
"hidden_size": 128,
|
| 7 |
+
"num_layers": 2,
|
| 8 |
+
"dropout": 0.2,
|
| 9 |
+
"seq_length": 60,
|
| 10 |
+
"feature_columns": [
|
| 11 |
+
"Open",
|
| 12 |
+
"High",
|
| 13 |
+
"Low",
|
| 14 |
+
"Close",
|
| 15 |
+
"Volume",
"Returns",
"MA_5",
"MA_20",
"Volatility",
"Price_Range"
|
| 16 |
+
],
|
| 17 |
+
"target_column": "Close",
|
| 18 |
+
"test_size": 60,
|
| 19 |
+
"trained_device": "cuda:0"
|
| 20 |
+
}
|
preprocessing.json
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"type": "MinMaxScaler",
|
| 3 |
+
"feature_range": [
|
| 4 |
+
0,
|
| 5 |
+
1
|
| 6 |
+
],
|
| 7 |
+
"n_features_in_": 10,
|
| 8 |
+
"scale_": [
|
| 9 |
+
0.002487673569350158,
|
| 10 |
+
0.0024742349231429736,
|
| 11 |
+
0.002523574451582122,
|
| 12 |
+
0.0024975523914631745,
|
| 13 |
+
1.1068534149748413e-09,
|
| 14 |
+
2.4415443202382026,
|
| 15 |
+
0.0025431634113237526,
|
| 16 |
+
0.0027610656809249445,
|
| 17 |
+
11.024082474601364,
|
| 18 |
+
4.755031972326864
|
| 19 |
+
],
|
| 20 |
+
"min_": [
|
| 21 |
+
-0.023603046617220423,
|
| 22 |
+
-0.025562145308087716,
|
| 23 |
+
-0.023730010187354034,
|
| 24 |
+
-0.023921556976927598,
|
| 25 |
+
-0.011754783267032814,
|
| 26 |
+
0.5142581908762522,
|
| 27 |
+
-0.02514069596286517,
|
| 28 |
+
-0.030808891003557718,
|
| 29 |
+
-0.105367594961265,
|
| 30 |
+
-0.040093783553332325
|
| 31 |
+
],
|
| 32 |
+
"data_min_": [
|
| 33 |
+
9.48799991607666,
|
| 34 |
+
10.33133316040039,
|
| 35 |
+
9.403332710266112,
|
| 36 |
+
9.57800006866455,
|
| 37 |
+
10620000.0,
|
| 38 |
+
-0.21062824320390794,
|
| 39 |
+
9.885599899291993,
|
| 40 |
+
11.158333253860473,
|
| 41 |
+
0.009557946904336374,
|
| 42 |
+
0.008431864136070681
|
| 43 |
+
],
|
| 44 |
+
"data_max_": [
|
| 45 |
+
411.4700012207031,
|
| 46 |
+
414.4966735839844,
|
| 47 |
+
405.6666564941406,
|
| 48 |
+
409.9700012207031,
|
| 49 |
+
914082000.0,
|
| 50 |
+
0.19894859376394924,
|
| 51 |
+
403.09666748046874,
|
| 52 |
+
373.3373306274414,
|
| 53 |
+
0.10026844388255864,
|
| 54 |
+
0.21873539223425345
|
| 55 |
+
],
|
| 56 |
+
"data_range_": [
|
| 57 |
+
401.98200130462646,
|
| 58 |
+
404.165340423584,
|
| 59 |
+
396.2633237838745,
|
| 60 |
+
400.3920011520386,
|
| 61 |
+
903462000.0,
|
| 62 |
+
0.4095768369678572,
|
| 63 |
+
393.21106758117674,
|
| 64 |
+
362.17899737358096,
|
| 65 |
+
0.09071049697822225,
|
| 66 |
+
0.21030352809818276
|
| 67 |
+
],
|
| 68 |
+
"feature_names_in_": [
|
| 69 |
+
"Open",
|
| 70 |
+
"High",
|
| 71 |
+
"Low",
|
| 72 |
+
"Close",
|
| 73 |
+
"Volume",
|
| 74 |
+
"Returns",
|
| 75 |
+
"MA_5",
|
| 76 |
+
"MA_20",
|
| 77 |
+
"Volatility",
|
| 78 |
+
"Price_Range"
|
| 79 |
+
]
|
| 80 |
+
}
|
pytorch_model.bin
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:11758aa85bc9f15771a1ec3cc5137e3147a69a40cd5bf1ac8269a9e29651a790
|
| 3 |
+
size 2690907
|
training_stats.json
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"train_losses": [
|
| 3 |
+
0.09857238518695037,
|
| 4 |
+
0.03774987465391556,
|
| 5 |
+
0.02757480355600516,
|
| 6 |
+
0.022716590855270623,
|
| 7 |
+
0.017580508440732955,
|
| 8 |
+
0.012939690332859754,
|
| 9 |
+
0.009969860082492232,
|
| 10 |
+
0.010116041187817853,
|
| 11 |
+
0.007874186274905999,
|
| 12 |
+
0.007968392308490972,
|
| 13 |
+
0.005510548284898202,
|
| 14 |
+
0.006263008837898572,
|
| 15 |
+
0.005881796474568546,
|
| 16 |
+
0.004392406235759457,
|
| 17 |
+
0.005273079398709039,
|
| 18 |
+
0.0043719599954783915,
|
| 19 |
+
0.003990105966416498,
|
| 20 |
+
0.0037319724758466083,
|
| 21 |
+
0.0035608692715565363,
|
| 22 |
+
0.0034520207981889447,
|
| 23 |
+
0.0034128626730913917,
|
| 24 |
+
0.0034679228672757747,
|
| 25 |
+
0.0026424937532283366,
|
| 26 |
+
0.0034531054436229168,
|
| 27 |
+
0.002834749217921247,
|
| 28 |
+
0.003169918009856095,
|
| 29 |
+
0.0029859959652336937,
|
| 30 |
+
0.0024479756112365672,
|
| 31 |
+
0.002451325758981208,
|
| 32 |
+
0.002955105664053311,
|
| 33 |
+
0.0021894385456107555,
|
| 34 |
+
0.002697460786051427,
|
| 35 |
+
0.0025778446812182664,
|
| 36 |
+
0.002681724181942021,
|
| 37 |
+
0.0021836976093860963,
|
| 38 |
+
0.0024333174786685657,
|
| 39 |
+
0.0017869161946388581,
|
| 40 |
+
0.00232065737945959,
|
| 41 |
+
0.0022055609753200162,
|
| 42 |
+
0.0027215502535303435
|
| 43 |
+
],
|
| 44 |
+
"val_losses": [
|
| 45 |
+
0.09563604276627302,
|
| 46 |
+
0.010005674014488855,
|
| 47 |
+
0.011327980008597175,
|
| 48 |
+
0.009179237803133825,
|
| 49 |
+
0.010561205369109908,
|
| 50 |
+
0.009789475317423543,
|
| 51 |
+
0.012465575011447072,
|
| 52 |
+
0.010488318589826425,
|
| 53 |
+
0.032734413631260395,
|
| 54 |
+
0.0059351618789757294,
|
| 55 |
+
0.003052401317593952,
|
| 56 |
+
0.005954862145396571,
|
| 57 |
+
0.003063126699998975,
|
| 58 |
+
0.006719038724744071,
|
| 59 |
+
0.010634368130316338,
|
| 60 |
+
0.0019871961315705753,
|
| 61 |
+
0.0031255605863407254,
|
| 62 |
+
0.002428303414490074,
|
| 63 |
+
0.0022362192588237426,
|
| 64 |
+
0.0011847723993317534,
|
| 65 |
+
0.0034753913253856203,
|
| 66 |
+
0.002814347312475244,
|
| 67 |
+
0.0028507958243911466,
|
| 68 |
+
0.0017110408031536888,
|
| 69 |
+
0.0015859827399253845,
|
| 70 |
+
0.0015160128435430427,
|
| 71 |
+
0.0013432229170575738,
|
| 72 |
+
0.002925180442010363,
|
| 73 |
+
0.002052574942354113,
|
| 74 |
+
0.0013953568510866414,
|
| 75 |
+
0.0017695200610129784,
|
| 76 |
+
0.001256332907360047,
|
| 77 |
+
0.0013786422884246956,
|
| 78 |
+
0.0011955285299336538,
|
| 79 |
+
0.0012340102014907945,
|
| 80 |
+
0.001634793191139276,
|
| 81 |
+
0.0013133611452455323,
|
| 82 |
+
0.0014318732913428296,
|
| 83 |
+
0.0019119653152301908,
|
| 84 |
+
0.0025325697109413645
|
| 85 |
+
]
|
| 86 |
+
}
|