Upload MvMmfnist/test_20241112_174055.log with huggingface_hub
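The commit message above is the default that `huggingface_hub` generates for a single-file upload. A minimal sketch of producing this commit, assuming a hypothetical local path and target repo id (only `HfApi.upload_file` and its keyword arguments are the library's real API):

```python
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` or HF_TOKEN

api.upload_file(
    path_or_fileobj="work_dirs/.../test_20241112_174055.log",  # hypothetical local path
    path_in_repo="MvMmfnist/test_20241112_174055.log",
    repo_id="user/repo",  # hypothetical repo id
    repo_type="model",    # assumption; the log could equally live in a dataset repo
)
```

Leaving `commit_message` unset yields exactly the "Upload ... with huggingface_hub" title above.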
MvMmfnist/test_20241112_174055.log +1014 -0
MvMmfnist/test_20241112_174055.log
ADDED
@@ -0,0 +1,1014 @@
| 1 |
+
2024-11-12 17:40:56,140 - Environment info:
|
| 2 |
+
------------------------------------------------------------
|
| 3 |
+
sys.platform: linux
|
| 4 |
+
Python: 3.10.8 (main, Nov 24 2022, 14:13:03) [GCC 11.2.0]
|
| 5 |
+
CUDA available: True
|
| 6 |
+
CUDA_HOME: /usr/local/cuda-10.1
|
| 7 |
+
NVCC: Cuda compilation tools, release 10.1, V10.1.243
|
| 8 |
+
GPU 0: NVIDIA A800 80GB PCIe
|
| 9 |
+
GCC: gcc (Ubuntu 7.4.0-1ubuntu1~18.04.1) 7.4.0
|
| 10 |
+
PyTorch: 2.3.0
|
| 11 |
+
PyTorch compiling details: PyTorch built with:
|
| 12 |
+
- GCC 9.3
|
| 13 |
+
- C++ Version: 201703
|
| 14 |
+
- Intel(R) oneAPI Math Kernel Library Version 2023.1-Product Build 20230303 for Intel(R) 64 architecture applications
|
| 15 |
+
- Intel(R) MKL-DNN v3.3.6 (Git Hash 86e6af5974177e513fd3fee58425e1063e7f1361)
|
| 16 |
+
- OpenMP 201511 (a.k.a. OpenMP 4.5)
|
| 17 |
+
- LAPACK is enabled (usually provided by MKL)
|
| 18 |
+
- NNPACK is enabled
|
| 19 |
+
- CPU capability usage: AVX2
|
| 20 |
+
- CUDA Runtime 12.1
|
| 21 |
+
- NVCC architecture flags: -gencode;arch=compute_50,code=sm_50;-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_61,code=sm_61;-gencode;arch=compute_70,code=sm_70;-gencode;arch=compute_75,code=sm_75;-gencode;arch=compute_80,code=sm_80;-gencode;arch=compute_86,code=sm_86;-gencode;arch=compute_90,code=sm_90
|
| 22 |
+
- CuDNN 8.9.2
|
| 23 |
+
- Magma 2.6.1
|
| 24 |
+
- Build settings: BLAS_INFO=mkl, BUILD_TYPE=Release, CUDA_VERSION=12.1, CUDNN_VERSION=8.9.2, CXX_COMPILER=/opt/rh/devtoolset-9/root/usr/bin/c++, CXX_FLAGS= -D_GLIBCXX_USE_CXX11_ABI=0 -fabi-version=11 -fvisibility-inlines-hidden -DUSE_PTHREADPOOL -DNDEBUG -DUSE_KINETO -DLIBKINETO_NOROCTRACER -DUSE_FBGEMM -DUSE_QNNPACK -DUSE_PYTORCH_QNNPACK -DUSE_XNNPACK -DSYMBOLICATE_MOBILE_DEBUG_HANDLE -O2 -fPIC -Wall -Wextra -Werror=return-type -Werror=non-virtual-dtor -Werror=bool-operation -Wnarrowing -Wno-missing-field-initializers -Wno-type-limits -Wno-array-bounds -Wno-unknown-pragmas -Wno-unused-parameter -Wno-unused-function -Wno-unused-result -Wno-strict-overflow -Wno-strict-aliasing -Wno-stringop-overflow -Wsuggest-override -Wno-psabi -Wno-error=pedantic -Wno-error=old-style-cast -Wno-missing-braces -fdiagnostics-color=always -faligned-new -Wno-unused-but-set-variable -Wno-maybe-uninitialized -fno-math-errno -fno-trapping-math -Werror=format -Wno-stringop-overflow, LAPACK_INFO=mkl, PERF_WITH_AVX=1, PERF_WITH_AVX2=1, PERF_WITH_AVX512=1, TORCH_VERSION=2.3.0, USE_CUDA=ON, USE_CUDNN=ON, USE_CUSPARSELT=1, USE_EXCEPTION_PTR=1, USE_GFLAGS=OFF, USE_GLOG=OFF, USE_GLOO=ON, USE_MKL=ON, USE_MKLDNN=ON, USE_MPI=OFF, USE_NCCL=ON, USE_NNPACK=ON, USE_OPENMP=ON, USE_ROCM=OFF, USE_ROCM_KERNEL_ASSERT=OFF,
|
| 25 |
+
|
| 26 |
+
TorchVision: 0.18.0
|
| 27 |
+
OpenCV: 4.10.0
|
| 28 |
+
openstl: 1.0.0
|
| 29 |
+
------------------------------------------------------------
|
| 30 |
+
|
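The "PyTorch built with:" block in the banner above is the string returned by `torch.__config__.show()`; a rough sketch of how such a banner can be assembled (illustrative, not the repository's actual collect-env code):

```python
import sys
import torch

print("sys.platform:", sys.platform)
print("Python:", sys.version.replace("\n", " "))
print("CUDA available:", torch.cuda.is_available())
if torch.cuda.is_available():
    print("GPU 0:", torch.cuda.get_device_name(0))
print("PyTorch:", torch.__version__)
print("PyTorch compiling details:", torch.__config__.show())
```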
2024-11-12 17:40:56,332 -
device: cuda
dist: False
res_dir: work_dirs
ex_name: mv_mmnist/med3_ciatt_lr1e-3_m0_newdata
fp16: False
torchscript: False
seed: 42
fps: False
test: True
deterministic: False
batch_size: 16
val_batch_size: 16
num_workers: 4
data_root: /root/data/lsh/openstl_weather/openstl/data
dataname: mv_mmnist
pre_seq_length: 10
aft_seq_length: 10
total_length: 20
use_augment: False
use_prefetcher: False
drop_last: False
method: its
config_file: configs/mv_mmnist/its.py
model_type: TAU
drop: 0.0
drop_path: 0.0
overwrite: False
epoch: 200
log_step: 1
opt: adam
opt_eps: None
opt_betas: None
momentum: 0.9
weight_decay: 0.0
clip_grad: None
clip_mode: norm
no_display_method_info: False
sched: cosine
lr: 0.001
lr_k_decay: 1.0
warmup_lr: 1e-05
min_lr: 1e-06
final_div_factor: 10000.0
warmup_epoch: 0
decay_epoch: 100
decay_rate: 0.1
filter_bias_and_bn: False
gpus: [0]
metric_for_bestckpt: val_loss
ckpt_path: None
spatio_kernel_enc: 3
spatio_kernel_dec: 3
hid_S: 64
hid_T: 512
N_T: 8
N_S: 4
momentum_ema: 0
in_shape: [10, 3, 64, 64]
data_name: mv_mmnist
metrics: ['mse', 'mae', 'ssim', 'psnr']
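The block above is a flat `key: value` dump of the run configuration. A small hypothetical helper (not part of the original tooling) that turns such a dump back into a Python dict, using `ast.literal_eval` so numbers, booleans, `None`, and lists round-trip while plain strings like `cosine` stay strings:

```python
import ast

def parse_config_dump(text: str) -> dict:
    """Parse 'key: value' lines like the dump above into a dict."""
    cfg = {}
    for line in text.splitlines():
        if ": " not in line:
            continue
        key, _, raw = line.partition(": ")
        try:
            cfg[key.strip()] = ast.literal_eval(raw.strip())
        except (ValueError, SyntaxError):
            cfg[key.strip()] = raw.strip()  # e.g. 'adam', 'cosine', file paths
    return cfg
```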
2024-11-12 17:40:56,333 - Model info:
SimVP_Model(
  (enc_u10_q): Encoder(
    (enc): Sequential(
      (0): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(1, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (1): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (2): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (3): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
    )
  )
  (enc_u10_k): Encoder(
    (enc): Sequential(
      (0): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(1, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (1): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (2): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (3): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
    )
  )
  (enc_v10_q): Encoder(
    (enc): Sequential(
      (0): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(1, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (1): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (2): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (3): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
    )
  )
  (enc_v10_k): Encoder(
    (enc): Sequential(
      (0): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(1, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (1): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (2): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (3): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
    )
  )
  (enc_t2m_q): Encoder(
    (enc): Sequential(
      (0): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(1, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (1): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (2): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (3): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
    )
  )
  (enc_t2m_k): Encoder(
    (enc): Sequential(
      (0): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(1, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (1): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (2): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (3): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
    )
  )
  (dec_u10_q): Decoder(
    (dec): Sequential(
      (0): ConvSC(
        (conv): BasicConv2d(
          (conv): Sequential(
            (0): Conv2d(64, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
            (1): PixelShuffle(upscale_factor=2)
          )
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (1): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (2): ConvSC(
        (conv): BasicConv2d(
          (conv): Sequential(
            (0): Conv2d(64, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
            (1): PixelShuffle(upscale_factor=2)
          )
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (3): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
    )
    (readout): Conv2d(64, 1, kernel_size=(1, 1), stride=(1, 1))
  )
  (dec_u10_k): Decoder(
    (dec): Sequential(
      (0): ConvSC(
        (conv): BasicConv2d(
          (conv): Sequential(
            (0): Conv2d(64, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
            (1): PixelShuffle(upscale_factor=2)
          )
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (1): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (2): ConvSC(
        (conv): BasicConv2d(
          (conv): Sequential(
            (0): Conv2d(64, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
            (1): PixelShuffle(upscale_factor=2)
          )
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (3): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
    )
    (readout): Conv2d(64, 1, kernel_size=(1, 1), stride=(1, 1))
  )
  (dec_v10_q): Decoder(
    (dec): Sequential(
      (0): ConvSC(
        (conv): BasicConv2d(
          (conv): Sequential(
            (0): Conv2d(64, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
            (1): PixelShuffle(upscale_factor=2)
          )
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (1): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (2): ConvSC(
        (conv): BasicConv2d(
          (conv): Sequential(
            (0): Conv2d(64, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
            (1): PixelShuffle(upscale_factor=2)
          )
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (3): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
    )
    (readout): Conv2d(64, 1, kernel_size=(1, 1), stride=(1, 1))
  )
  (dec_v10_k): Decoder(
    (dec): Sequential(
      (0): ConvSC(
        (conv): BasicConv2d(
          (conv): Sequential(
            (0): Conv2d(64, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
            (1): PixelShuffle(upscale_factor=2)
          )
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (1): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (2): ConvSC(
        (conv): BasicConv2d(
          (conv): Sequential(
            (0): Conv2d(64, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
            (1): PixelShuffle(upscale_factor=2)
          )
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (3): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
    )
    (readout): Conv2d(64, 1, kernel_size=(1, 1), stride=(1, 1))
  )
  (dec_t2m_q): Decoder(
    (dec): Sequential(
      (0): ConvSC(
        (conv): BasicConv2d(
          (conv): Sequential(
            (0): Conv2d(64, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
            (1): PixelShuffle(upscale_factor=2)
          )
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (1): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (2): ConvSC(
        (conv): BasicConv2d(
          (conv): Sequential(
            (0): Conv2d(64, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
            (1): PixelShuffle(upscale_factor=2)
          )
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (3): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
    )
    (readout): Conv2d(64, 1, kernel_size=(1, 1), stride=(1, 1))
  )
  (dec_t2m_k): Decoder(
    (dec): Sequential(
      (0): ConvSC(
        (conv): BasicConv2d(
          (conv): Sequential(
            (0): Conv2d(64, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
            (1): PixelShuffle(upscale_factor=2)
          )
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (1): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (2): ConvSC(
        (conv): BasicConv2d(
          (conv): Sequential(
            (0): Conv2d(64, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
            (1): PixelShuffle(upscale_factor=2)
          )
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
      (3): ConvSC(
        (conv): BasicConv2d(
          (conv): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (norm): GroupNorm(2, 64, eps=1e-05, affine=True)
          (act): SiLU()
        )
      )
    )
    (readout): Conv2d(64, 1, kernel_size=(1, 1), stride=(1, 1))
  )
  (hid_q): CIMidNet(
    (conv1): Conv2d(640, 512, kernel_size=(1, 1), stride=(1, 1))
    (layers): ModuleList(
      (0-7): 8 x CIAttBlock(
        (norm_1): GroupNorm(1, 512, eps=1e-05, affine=True)
        (norm_2): GroupNorm(1, 512, eps=1e-05, affine=True)
        (attn_1): MultiHeadAttention_S(
          (q_Conv): Sequential(
            (0): Conv2d(512, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
            (1): GroupNorm(1, 512, eps=1e-05, affine=True)
          )
          (v_Conv): Sequential(
            (0): Conv2d(512, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
            (1): GroupNorm(1, 512, eps=1e-05, affine=True)
          )
          (k_Conv): Sequential(
            (0): Conv2d(512, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
            (1): GroupNorm(1, 512, eps=1e-05, affine=True)
          )
          (v_post_f): Sequential(
            (0): Conv2d(512, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
            (1): GroupNorm(1, 512, eps=1e-05, affine=True)
            (2): SiLU()
          )
        )
        (ff): layerNormFeedForward(
          (ff1): TAUSubBlock(
            (norm1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
            (attn): TemporalAttention(
              (proj_1): Conv2d(512, 512, kernel_size=(1, 1), stride=(1, 1))
              (activation): GELU(approximate='none')
              (spatial_gating_unit): TemporalAttentionModule(
                (conv0): Conv2d(512, 512, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2), groups=512)
                (conv_spatial): Conv2d(512, 512, kernel_size=(7, 7), stride=(1, 1), padding=(9, 9), dilation=(3, 3), groups=512)
                (conv1): Conv2d(512, 512, kernel_size=(1, 1), stride=(1, 1))
                (avg_pool): AdaptiveAvgPool2d(output_size=1)
                (fc): Sequential(
                  (0): Linear(in_features=512, out_features=16, bias=False)
                  (1): ReLU(inplace=True)
                  (2): Linear(in_features=16, out_features=512, bias=False)
                  (3): Sigmoid()
                )
              )
              (proj_2): Conv2d(512, 512, kernel_size=(1, 1), stride=(1, 1))
            )
            (drop_path): DropPath(drop_prob=0.100)
            (norm2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
            (mlp): MixMlp(
              (fc1): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1))
              (dwconv): DWConv(
                (dwconv): Conv2d(2048, 2048, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=2048)
              )
              (act): GELU(approximate='none')
              (fc2): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1))
              (drop): Dropout(p=0.0, inplace=False)
            )
          )
        )
      )
    )
    (conv2): Conv2d(512, 640, kernel_size=(1, 1), stride=(1, 1))
  )
  (hid_k): CIMidNet(
    (conv1): Conv2d(640, 512, kernel_size=(1, 1), stride=(1, 1))
    (layers): ModuleList(
      (0-7): 8 x CIAttBlock(
        (norm_1): GroupNorm(1, 512, eps=1e-05, affine=True)
        (norm_2): GroupNorm(1, 512, eps=1e-05, affine=True)
        (attn_1): MultiHeadAttention_S(
          (q_Conv): Sequential(
            (0): Conv2d(512, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
            (1): GroupNorm(1, 512, eps=1e-05, affine=True)
          )
          (v_Conv): Sequential(
            (0): Conv2d(512, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
            (1): GroupNorm(1, 512, eps=1e-05, affine=True)
          )
          (k_Conv): Sequential(
            (0): Conv2d(512, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
            (1): GroupNorm(1, 512, eps=1e-05, affine=True)
          )
          (v_post_f): Sequential(
            (0): Conv2d(512, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
            (1): GroupNorm(1, 512, eps=1e-05, affine=True)
            (2): SiLU()
          )
        )
        (ff): layerNormFeedForward(
          (ff1): TAUSubBlock(
            (norm1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
            (attn): TemporalAttention(
              (proj_1): Conv2d(512, 512, kernel_size=(1, 1), stride=(1, 1))
              (activation): GELU(approximate='none')
              (spatial_gating_unit): TemporalAttentionModule(
                (conv0): Conv2d(512, 512, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2), groups=512)
                (conv_spatial): Conv2d(512, 512, kernel_size=(7, 7), stride=(1, 1), padding=(9, 9), dilation=(3, 3), groups=512)
                (conv1): Conv2d(512, 512, kernel_size=(1, 1), stride=(1, 1))
                (avg_pool): AdaptiveAvgPool2d(output_size=1)
                (fc): Sequential(
                  (0): Linear(in_features=512, out_features=16, bias=False)
                  (1): ReLU(inplace=True)
                  (2): Linear(in_features=16, out_features=512, bias=False)
                  (3): Sigmoid()
                )
              )
              (proj_2): Conv2d(512, 512, kernel_size=(1, 1), stride=(1, 1))
            )
            (drop_path): DropPath(drop_prob=0.100)
            (norm2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
            (mlp): MixMlp(
              (fc1): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1))
              (dwconv): DWConv(
                (dwconv): Conv2d(2048, 2048, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=2048)
              )
              (act): GELU(approximate='none')
              (fc2): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1))
              (drop): Dropout(p=0.0, inplace=False)
            )
          )
        )
      )
    )
    (conv2): Conv2d(512, 640, kernel_size=(1, 1), stride=(1, 1))
  )
)
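The parameter/FLOP table below is in the format produced by fvcore's `flop_count_table`. A sketch of generating such a table, assuming fvcore is installed and using a stand-in module (the real run would pass the `SimVP_Model` above with an input matching `in_shape: [10, 3, 64, 64]`):

```python
import torch
from fvcore.nn import FlopCountAnalysis, flop_count_table

model = torch.nn.Conv2d(3, 64, kernel_size=3, padding=1)  # stand-in module
dummy = torch.randn(1, 3, 64, 64)                         # stand-in input
print(flop_count_table(FlopCountAnalysis(model, dummy)))
```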
| module | #parameters or shape | #flops |
|:-----------------------------|:-----------------------|:-----------|
| model | 68.544M | 81.216G |
| enc_u10_q.enc | 0.112M | 0.893G |
| enc_u10_q.enc.0.conv | 0.768K | 36.7M |
| enc_u10_q.enc.0.conv.conv | 0.64K | 23.593M |
| enc_u10_q.enc.0.conv.norm | 0.128K | 13.107M |
| enc_u10_q.enc.1.conv | 37.056K | 0.381G |
| enc_u10_q.enc.1.conv.conv | 36.928K | 0.377G |
| enc_u10_q.enc.1.conv.norm | 0.128K | 3.277M |
| enc_u10_q.enc.2.conv | 37.056K | 0.381G |
| enc_u10_q.enc.2.conv.conv | 36.928K | 0.377G |
| enc_u10_q.enc.2.conv.norm | 0.128K | 3.277M |
| enc_u10_q.enc.3.conv | 37.056K | 95.191M |
| enc_u10_q.enc.3.conv.conv | 36.928K | 94.372M |
| enc_u10_q.enc.3.conv.norm | 0.128K | 0.819M |
| enc_u10_k.enc | 0.112M | 1.787G |
| enc_u10_k.enc.0.conv | 0.768K | 73.4M |
| enc_u10_k.enc.0.conv.conv | 0.64K | 47.186M |
| enc_u10_k.enc.0.conv.norm | 0.128K | 26.214M |
| enc_u10_k.enc.1.conv | 37.056K | 0.762G |
| enc_u10_k.enc.1.conv.conv | 36.928K | 0.755G |
| enc_u10_k.enc.1.conv.norm | 0.128K | 6.554M |
| enc_u10_k.enc.2.conv | 37.056K | 0.762G |
| enc_u10_k.enc.2.conv.conv | 36.928K | 0.755G |
| enc_u10_k.enc.2.conv.norm | 0.128K | 6.554M |
| enc_u10_k.enc.3.conv | 37.056K | 0.19G |
| enc_u10_k.enc.3.conv.conv | 36.928K | 0.189G |
| enc_u10_k.enc.3.conv.norm | 0.128K | 1.638M |
| enc_v10_q.enc | 0.112M | 0.893G |
| enc_v10_q.enc.0.conv | 0.768K | 36.7M |
| enc_v10_q.enc.0.conv.conv | 0.64K | 23.593M |
| enc_v10_q.enc.0.conv.norm | 0.128K | 13.107M |
| enc_v10_q.enc.1.conv | 37.056K | 0.381G |
| enc_v10_q.enc.1.conv.conv | 36.928K | 0.377G |
| enc_v10_q.enc.1.conv.norm | 0.128K | 3.277M |
| enc_v10_q.enc.2.conv | 37.056K | 0.381G |
| enc_v10_q.enc.2.conv.conv | 36.928K | 0.377G |
| enc_v10_q.enc.2.conv.norm | 0.128K | 3.277M |
| enc_v10_q.enc.3.conv | 37.056K | 95.191M |
| enc_v10_q.enc.3.conv.conv | 36.928K | 94.372M |
| enc_v10_q.enc.3.conv.norm | 0.128K | 0.819M |
| enc_v10_k.enc | 0.112M | 1.787G |
| enc_v10_k.enc.0.conv | 0.768K | 73.4M |
| enc_v10_k.enc.0.conv.conv | 0.64K | 47.186M |
| enc_v10_k.enc.0.conv.norm | 0.128K | 26.214M |
| enc_v10_k.enc.1.conv | 37.056K | 0.762G |
| enc_v10_k.enc.1.conv.conv | 36.928K | 0.755G |
| enc_v10_k.enc.1.conv.norm | 0.128K | 6.554M |
| enc_v10_k.enc.2.conv | 37.056K | 0.762G |
| enc_v10_k.enc.2.conv.conv | 36.928K | 0.755G |
| enc_v10_k.enc.2.conv.norm | 0.128K | 6.554M |
| enc_v10_k.enc.3.conv | 37.056K | 0.19G |
| enc_v10_k.enc.3.conv.conv | 36.928K | 0.189G |
| enc_v10_k.enc.3.conv.norm | 0.128K | 1.638M |
| enc_t2m_q.enc | 0.112M | 0.893G |
| enc_t2m_q.enc.0.conv | 0.768K | 36.7M |
| enc_t2m_q.enc.0.conv.conv | 0.64K | 23.593M |
| enc_t2m_q.enc.0.conv.norm | 0.128K | 13.107M |
| enc_t2m_q.enc.1.conv | 37.056K | 0.381G |
| enc_t2m_q.enc.1.conv.conv | 36.928K | 0.377G |
| enc_t2m_q.enc.1.conv.norm | 0.128K | 3.277M |
| enc_t2m_q.enc.2.conv | 37.056K | 0.381G |
| enc_t2m_q.enc.2.conv.conv | 36.928K | 0.377G |
| enc_t2m_q.enc.2.conv.norm | 0.128K | 3.277M |
| enc_t2m_q.enc.3.conv | 37.056K | 95.191M |
| enc_t2m_q.enc.3.conv.conv | 36.928K | 94.372M |
| enc_t2m_q.enc.3.conv.norm | 0.128K | 0.819M |
| enc_t2m_k.enc | 0.112M | 1.787G |
| enc_t2m_k.enc.0.conv | 0.768K | 73.4M |
| enc_t2m_k.enc.0.conv.conv | 0.64K | 47.186M |
| enc_t2m_k.enc.0.conv.norm | 0.128K | 26.214M |
| enc_t2m_k.enc.1.conv | 37.056K | 0.762G |
| enc_t2m_k.enc.1.conv.conv | 36.928K | 0.755G |
| enc_t2m_k.enc.1.conv.norm | 0.128K | 6.554M |
| enc_t2m_k.enc.2.conv | 37.056K | 0.762G |
| enc_t2m_k.enc.2.conv.conv | 36.928K | 0.755G |
| enc_t2m_k.enc.2.conv.norm | 0.128K | 6.554M |
| enc_t2m_k.enc.3.conv | 37.056K | 0.19G |
| enc_t2m_k.enc.3.conv.conv | 36.928K | 0.189G |
| enc_t2m_k.enc.3.conv.norm | 0.128K | 1.638M |
| dec_u10_q | 0.37M | 3.81G |
| dec_u10_q.dec | 0.37M | 3.808G |
| dec_u10_q.dec.0.conv | 0.148M | 0.381G |
| dec_u10_q.dec.1.conv | 37.056K | 0.381G |
| dec_u10_q.dec.2.conv | 0.148M | 1.523G |
| dec_u10_q.dec.3.conv | 37.056K | 1.523G |
| dec_u10_q.readout | 65 | 2.621M |
| dec_u10_q.readout.weight | (1, 64, 1, 1) | |
| dec_u10_q.readout.bias | (1,) | |
| dec_u10_k | 0.37M | 3.81G |
| dec_u10_k.dec | 0.37M | 3.808G |
| dec_u10_k.dec.0.conv | 0.148M | 0.381G |
| dec_u10_k.dec.1.conv | 37.056K | 0.381G |
| dec_u10_k.dec.2.conv | 0.148M | 1.523G |
| dec_u10_k.dec.3.conv | 37.056K | 1.523G |
| dec_u10_k.readout | 65 | 2.621M |
| dec_u10_k.readout.weight | (1, 64, 1, 1) | |
| dec_u10_k.readout.bias | (1,) | |
| dec_v10_q | 0.37M | 3.81G |
| dec_v10_q.dec | 0.37M | 3.808G |
| dec_v10_q.dec.0.conv | 0.148M | 0.381G |
| dec_v10_q.dec.1.conv | 37.056K | 0.381G |
| dec_v10_q.dec.2.conv | 0.148M | 1.523G |
| dec_v10_q.dec.3.conv | 37.056K | 1.523G |
| dec_v10_q.readout | 65 | 2.621M |
| dec_v10_q.readout.weight | (1, 64, 1, 1) | |
| dec_v10_q.readout.bias | (1,) | |
| dec_v10_k | 0.37M | 3.81G |
| dec_v10_k.dec | 0.37M | 3.808G |
| dec_v10_k.dec.0.conv | 0.148M | 0.381G |
| dec_v10_k.dec.1.conv | 37.056K | 0.381G |
| dec_v10_k.dec.2.conv | 0.148M | 1.523G |
| dec_v10_k.dec.3.conv | 37.056K | 1.523G |
| dec_v10_k.readout | 65 | 2.621M |
| dec_v10_k.readout.weight | (1, 64, 1, 1) | |
| dec_v10_k.readout.bias | (1,) | |
| dec_t2m_q | 0.37M | 3.81G |
| dec_t2m_q.dec | 0.37M | 3.808G |
| dec_t2m_q.dec.0.conv | 0.148M | 0.381G |
| dec_t2m_q.dec.1.conv | 37.056K | 0.381G |
| dec_t2m_q.dec.2.conv | 0.148M | 1.523G |
| dec_t2m_q.dec.3.conv | 37.056K | 1.523G |
| dec_t2m_q.readout | 65 | 2.621M |
| dec_t2m_q.readout.weight | (1, 64, 1, 1) | |
| dec_t2m_q.readout.bias | (1,) | |
| dec_t2m_k | 0.37M | 3.81G |
| dec_t2m_k.dec | 0.37M | 3.808G |
| dec_t2m_k.dec.0.conv | 0.148M | 0.381G |
| dec_t2m_k.dec.1.conv | 37.056K | 0.381G |
| dec_t2m_k.dec.2.conv | 0.148M | 1.523G |
| dec_t2m_k.dec.3.conv | 37.056K | 1.523G |
| dec_t2m_k.readout | 65 | 2.621M |
| dec_t2m_k.readout.weight | (1, 64, 1, 1) | |
| dec_t2m_k.readout.bias | (1,) | |
| hid_q | 32.826M | 25.157G |
| hid_q.conv1 | 0.328M | 0.252G |
| hid_q.conv1.weight | (512, 640, 1, 1) | |
| hid_q.conv1.bias | (512,) | |
| hid_q.layers | 32.17M | 24.653G |
| hid_q.layers.0 | 4.021M | 3.082G |
| hid_q.layers.1 | 4.021M | 3.082G |
| hid_q.layers.2 | 4.021M | 3.082G |
| hid_q.layers.3 | 4.021M | 3.082G |
| hid_q.layers.4 | 4.021M | 3.082G |
| hid_q.layers.5 | 4.021M | 3.082G |
| hid_q.layers.6 | 4.021M | 3.082G |
| hid_q.layers.7 | 4.021M | 3.082G |
| hid_q.conv2 | 0.328M | 0.252G |
| hid_q.conv2.weight | (640, 512, 1, 1) | |
| hid_q.conv2.bias | (640,) | |
| hid_k | 32.826M | 25.157G |
| hid_k.conv1 | 0.328M | 0.252G |
| hid_k.conv1.weight | (512, 640, 1, 1) | |
| hid_k.conv1.bias | (512,) | |
| hid_k.layers | 32.17M | 24.653G |
| hid_k.layers.0 | 4.021M | 3.082G |
| hid_k.layers.1 | 4.021M | 3.082G |
| hid_k.layers.2 | 4.021M | 3.082G |
| hid_k.layers.3 | 4.021M | 3.082G |
| hid_k.layers.4 | 4.021M | 3.082G |
| hid_k.layers.5 | 4.021M | 3.082G |
| hid_k.layers.6 | 4.021M | 3.082G |
| hid_k.layers.7 | 4.021M | 3.082G |
| hid_k.conv2 | 0.328M | 0.252G |
| hid_k.conv2.weight | (640, 512, 1, 1) | |
| hid_k.conv2.bias | (640,) | |
--------------------------------------------------------------------------------

2024-11-12 17:46:07,210 - w1 : 1.9824299466392266 | w2 : 0.0597038581172535 | w3 : 0.019894629905273076
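The per-epoch learning rates below follow the configured cosine schedule (`sched: cosine`, `lr: 0.001`, `min_lr: 1e-06`, `epoch: 200`). A quick check of the standard cosine-annealing formula against the logged values:

```python
import math

def cosine_lr(epoch, base_lr=1e-3, min_lr=1e-6, total_epochs=200):
    # lr(t) = min_lr + (base_lr - min_lr) * (1 + cos(pi * t / T)) / 2
    return min_lr + 0.5 * (base_lr - min_lr) * (1 + math.cos(math.pi * epoch / total_epochs))

print(f"{cosine_lr(1):.7f}")    # 0.0009999, as logged for Epoch 1
print(f"{cosine_lr(100):.7f}")  # 0.0005005, as logged for Epoch 100
```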
2024-11-12 17:51:23,077 - Epoch 1: Lr: 0.0009999 | Train Loss: 0.2179366 | Vali Loss: 0.1064958 | Rec Loss: 0.0320693 | Latent Loss: 0.0744265 | Pre Loss: 0.0315628
2024-11-12 17:56:44,748 - Epoch 2: Lr: 0.0009998 | Train Loss: 0.1413911 | Vali Loss: 0.0917718 | Rec Loss: 0.0292384 | Latent Loss: 0.0625334 | Pre Loss: 0.0286205
2024-11-12 18:02:09,913 - Epoch 3: Lr: 0.0009994 | Train Loss: 0.1062470 | Vali Loss: 0.0842272 | Rec Loss: 0.0276109 | Latent Loss: 0.0566162 | Pre Loss: 0.0264605
2024-11-12 18:07:37,650 - Epoch 4: Lr: 0.0009990 | Train Loss: 0.0896681 | Vali Loss: 0.0787217 | Rec Loss: 0.0255277 | Latent Loss: 0.0531941 | Pre Loss: 0.0256292
2024-11-12 18:13:08,627 - Epoch 5: Lr: 0.0009985 | Train Loss: 0.0795521 | Vali Loss: 0.0679551 | Rec Loss: 0.0245178 | Latent Loss: 0.0434373 | Pre Loss: 0.0244314
2024-11-12 18:18:43,025 - Epoch 6: Lr: 0.0009978 | Train Loss: 0.0748503 | Vali Loss: 0.0698862 | Rec Loss: 0.0245942 | Latent Loss: 0.0452921 | Pre Loss: 0.0237178
2024-11-12 18:24:19,472 - Epoch 7: Lr: 0.0009970 | Train Loss: 0.0757530 | Vali Loss: 0.0639117 | Rec Loss: 0.0243705 | Latent Loss: 0.0395411 | Pre Loss: 0.0227817
2024-11-12 18:29:59,312 - Epoch 8: Lr: 0.0009961 | Train Loss: 0.0726289 | Vali Loss: 0.0664225 | Rec Loss: 0.0229928 | Latent Loss: 0.0434297 | Pre Loss: 0.0227751
2024-11-12 18:35:40,513 - Epoch 9: Lr: 0.0009950 | Train Loss: 0.0698427 | Vali Loss: 0.0611381 | Rec Loss: 0.0229339 | Latent Loss: 0.0382042 | Pre Loss: 0.0221360
2024-11-12 18:41:26,524 - Epoch 10: Lr: 0.0009939 | Train Loss: 0.0708630 | Vali Loss: 0.0616351 | Rec Loss: 0.0226371 | Latent Loss: 0.0389980 | Pre Loss: 0.0215934
2024-11-12 18:47:15,391 - Epoch 11: Lr: 0.0009926 | Train Loss: 0.0651742 | Vali Loss: 0.0561315 | Rec Loss: 0.0221701 | Latent Loss: 0.0339614 | Pre Loss: 0.0208683
2024-11-12 18:53:07,905 - Epoch 12: Lr: 0.0009912 | Train Loss: 0.0612987 | Vali Loss: 0.0529133 | Rec Loss: 0.0215742 | Latent Loss: 0.0313392 | Pre Loss: 0.0202986
2024-11-12 18:59:04,048 - Epoch 13: Lr: 0.0009896 | Train Loss: 0.0582426 | Vali Loss: 0.0523291 | Rec Loss: 0.0207016 | Latent Loss: 0.0316275 | Pre Loss: 0.0201663
2024-11-12 19:05:03,539 - Epoch 14: Lr: 0.0009880 | Train Loss: 0.0584667 | Vali Loss: 0.0519960 | Rec Loss: 0.0207176 | Latent Loss: 0.0312784 | Pre Loss: 0.0201933
2024-11-12 19:11:06,157 - Epoch 15: Lr: 0.0009862 | Train Loss: 0.0578432 | Vali Loss: 0.0532566 | Rec Loss: 0.0206760 | Latent Loss: 0.0325806 | Pre Loss: 0.0198487
2024-11-12 19:17:10,456 - Epoch 16: Lr: 0.0009843 | Train Loss: 0.0574470 | Vali Loss: 0.0530245 | Rec Loss: 0.0201388 | Latent Loss: 0.0328857 | Pre Loss: 0.0196058
2024-11-12 19:23:17,694 - Epoch 17: Lr: 0.0009823 | Train Loss: 0.0563114 | Vali Loss: 0.0509097 | Rec Loss: 0.0199237 | Latent Loss: 0.0309860 | Pre Loss: 0.0197462
2024-11-12 19:29:30,767 - Epoch 18: Lr: 0.0009802 | Train Loss: 0.0560896 | Vali Loss: 0.0511374 | Rec Loss: 0.0198565 | Latent Loss: 0.0312810 | Pre Loss: 0.0197640
2024-11-12 19:35:45,334 - Epoch 19: Lr: 0.0009779 | Train Loss: 0.0554596 | Vali Loss: 0.0482234 | Rec Loss: 0.0189860 | Latent Loss: 0.0292374 | Pre Loss: 0.0185189
2024-11-12 19:42:05,597 - Epoch 20: Lr: 0.0009756 | Train Loss: 0.0531250 | Vali Loss: 0.0507526 | Rec Loss: 0.0190520 | Latent Loss: 0.0317007 | Pre Loss: 0.0183090
2024-11-12 19:48:27,586 - Epoch 21: Lr: 0.0009731 | Train Loss: 0.0526246 | Vali Loss: 0.0456903 | Rec Loss: 0.0185719 | Latent Loss: 0.0271184 | Pre Loss: 0.0180610
2024-11-12 19:54:53,716 - Epoch 22: Lr: 0.0009705 | Train Loss: 0.0532047 | Vali Loss: 0.0478828 | Rec Loss: 0.0184654 | Latent Loss: 0.0294174 | Pre Loss: 0.0180788
2024-11-12 20:01:21,752 - Epoch 23: Lr: 0.0009678 | Train Loss: 0.0516321 | Vali Loss: 0.0466723 | Rec Loss: 0.0183587 | Latent Loss: 0.0283136 | Pre Loss: 0.0179209
2024-11-12 20:07:53,136 - Epoch 24: Lr: 0.0009649 | Train Loss: 0.0506253 | Vali Loss: 0.0483531 | Rec Loss: 0.0180644 | Latent Loss: 0.0302887 | Pre Loss: 0.0172811
2024-11-12 20:14:29,805 - Epoch 25: Lr: 0.0009620 | Train Loss: 0.0514809 | Vali Loss: 0.0474071 | Rec Loss: 0.0179728 | Latent Loss: 0.0294343 | Pre Loss: 0.0175392
2024-11-12 20:21:08,070 - Epoch 26: Lr: 0.0009589 | Train Loss: 0.0501679 | Vali Loss: 0.0479947 | Rec Loss: 0.0175719 | Latent Loss: 0.0304228 | Pre Loss: 0.0170769
2024-11-12 20:27:51,494 - Epoch 27: Lr: 0.0009557 | Train Loss: 0.0515359 | Vali Loss: 0.0474806 | Rec Loss: 0.0176195 | Latent Loss: 0.0298611 | Pre Loss: 0.0173007
2024-11-12 20:34:38,141 - Epoch 28: Lr: 0.0009525 | Train Loss: 0.0507013 | Vali Loss: 0.0457178 | Rec Loss: 0.0172503 | Latent Loss: 0.0284675 | Pre Loss: 0.0168624
2024-11-12 20:41:27,514 - Epoch 29: Lr: 0.0009491 | Train Loss: 0.0486515 | Vali Loss: 0.0479886 | Rec Loss: 0.0175811 | Latent Loss: 0.0304075 | Pre Loss: 0.0175120
2024-11-12 20:48:19,193 - Epoch 30: Lr: 0.0009456 | Train Loss: 0.0492980 | Vali Loss: 0.0478889 | Rec Loss: 0.0173638 | Latent Loss: 0.0305251 | Pre Loss: 0.0170756
2024-11-12 20:55:16,375 - Epoch 31: Lr: 0.0009419 | Train Loss: 0.0491991 | Vali Loss: 0.0501551 | Rec Loss: 0.0173867 | Latent Loss: 0.0327684 | Pre Loss: 0.0172594
2024-11-12 21:02:16,833 - Epoch 32: Lr: 0.0009382 | Train Loss: 0.0493678 | Vali Loss: 0.0454871 | Rec Loss: 0.0168090 | Latent Loss: 0.0286781 | Pre Loss: 0.0164255
2024-11-12 21:09:21,703 - Epoch 33: Lr: 0.0009344 | Train Loss: 0.0499863 | Vali Loss: 0.0492082 | Rec Loss: 0.0169317 | Latent Loss: 0.0322765 | Pre Loss: 0.0167634
2024-11-12 21:16:29,268 - Epoch 34: Lr: 0.0009304 | Train Loss: 0.0493426 | Vali Loss: 0.0481798 | Rec Loss: 0.0166075 | Latent Loss: 0.0315724 | Pre Loss: 0.0161976
2024-11-12 21:23:41,579 - Epoch 35: Lr: 0.0009264 | Train Loss: 0.0485375 | Vali Loss: 0.0469319 | Rec Loss: 0.0163999 | Latent Loss: 0.0305320 | Pre Loss: 0.0159891
2024-11-12 21:30:56,947 - Epoch 36: Lr: 0.0009222 | Train Loss: 0.0481543 | Vali Loss: 0.0462840 | Rec Loss: 0.0165936 | Latent Loss: 0.0296904 | Pre Loss: 0.0160774
2024-11-12 21:38:15,355 - Epoch 37: Lr: 0.0009180 | Train Loss: 0.0476893 | Vali Loss: 0.0459986 | Rec Loss: 0.0166430 | Latent Loss: 0.0293556 | Pre Loss: 0.0163926
2024-11-12 21:45:37,217 - Epoch 38: Lr: 0.0009136 | Train Loss: 0.0466059 | Vali Loss: 0.0437210 | Rec Loss: 0.0158623 | Latent Loss: 0.0278587 | Pre Loss: 0.0154103
2024-11-12 21:53:04,370 - Epoch 39: Lr: 0.0009092 | Train Loss: 0.0464931 | Vali Loss: 0.0439378 | Rec Loss: 0.0160861 | Latent Loss: 0.0278518 | Pre Loss: 0.0157589
2024-11-12 22:00:32,935 - Epoch 40: Lr: 0.0009046 | Train Loss: 0.0447973 | Vali Loss: 0.0464864 | Rec Loss: 0.0165211 | Latent Loss: 0.0299653 | Pre Loss: 0.0164638
2024-11-12 22:08:05,966 - Epoch 41: Lr: 0.0008999 | Train Loss: 0.0448013 | Vali Loss: 0.0440178 | Rec Loss: 0.0159846 | Latent Loss: 0.0280332 | Pre Loss: 0.0157454
2024-11-12 22:15:41,024 - Epoch 42: Lr: 0.0008952 | Train Loss: 0.0446265 | Vali Loss: 0.0434645 | Rec Loss: 0.0158746 | Latent Loss: 0.0275899 | Pre Loss: 0.0156275
2024-11-12 22:23:23,500 - Epoch 43: Lr: 0.0008903 | Train Loss: 0.0436073 | Vali Loss: 0.0423922 | Rec Loss: 0.0156740 | Latent Loss: 0.0267182 | Pre Loss: 0.0152447
2024-11-12 22:31:06,596 - Epoch 44: Lr: 0.0008854 | Train Loss: 0.0427057 | Vali Loss: 0.0420294 | Rec Loss: 0.0157603 | Latent Loss: 0.0262691 | Pre Loss: 0.0154664
2024-11-12 22:38:53,720 - Epoch 45: Lr: 0.0008803 | Train Loss: 0.0420013 | Vali Loss: 0.0409309 | Rec Loss: 0.0156275 | Latent Loss: 0.0253034 | Pre Loss: 0.0153511
2024-11-12 22:46:45,158 - Epoch 46: Lr: 0.0008752 | Train Loss: 0.0418653 | Vali Loss: 0.0408055 | Rec Loss: 0.0159289 | Latent Loss: 0.0248766 | Pre Loss: 0.0157681
2024-11-12 22:54:39,705 - Epoch 47: Lr: 0.0008699 | Train Loss: 0.0411758 | Vali Loss: 0.0401105 | Rec Loss: 0.0156248 | Latent Loss: 0.0244857 | Pre Loss: 0.0152882
2024-11-12 23:02:38,939 - Epoch 48: Lr: 0.0008646 | Train Loss: 0.0402309 | Vali Loss: 0.0400719 | Rec Loss: 0.0155923 | Latent Loss: 0.0244796 | Pre Loss: 0.0152030
2024-11-12 23:10:39,694 - Epoch 49: Lr: 0.0008592 | Train Loss: 0.0404111 | Vali Loss: 0.0405820 | Rec Loss: 0.0159969 | Latent Loss: 0.0245851 | Pre Loss: 0.0156107
2024-11-12 23:18:41,263 - Epoch 50: Lr: 0.0008537 | Train Loss: 0.0393008 | Vali Loss: 0.0394572 | Rec Loss: 0.0156420 | Latent Loss: 0.0238152 | Pre Loss: 0.0154284
2024-11-12 23:26:48,308 - Epoch 51: Lr: 0.0008481 | Train Loss: 0.0389817 | Vali Loss: 0.0407820 | Rec Loss: 0.0152802 | Latent Loss: 0.0255019 | Pre Loss: 0.0147308
2024-11-12 23:34:57,401 - Epoch 52: Lr: 0.0008424 | Train Loss: 0.0405326 | Vali Loss: 0.0401196 | Rec Loss: 0.0153136 | Latent Loss: 0.0248060 | Pre Loss: 0.0152191
2024-11-12 23:43:08,016 - Epoch 53: Lr: 0.0008367 | Train Loss: 0.0399418 | Vali Loss: 0.0390719 | Rec Loss: 0.0153355 | Latent Loss: 0.0237365 | Pre Loss: 0.0150080
2024-11-12 23:51:24,829 - Epoch 54: Lr: 0.0008308 | Train Loss: 0.0385613 | Vali Loss: 0.0386345 | Rec Loss: 0.0152658 | Latent Loss: 0.0233687 | Pre Loss: 0.0149651
2024-11-12 23:59:47,385 - Epoch 55: Lr: 0.0008249 | Train Loss: 0.0389355 | Vali Loss: 0.0380017 | Rec Loss: 0.0151223 | Latent Loss: 0.0228793 | Pre Loss: 0.0147387
2024-11-13 00:08:11,503 - Epoch 56: Lr: 0.0008189 | Train Loss: 0.0388482 | Vali Loss: 0.0375697 | Rec Loss: 0.0150351 | Latent Loss: 0.0225346 | Pre Loss: 0.0144703
2024-11-13 00:16:38,028 - Epoch 57: Lr: 0.0008128 | Train Loss: 0.0383744 | Vali Loss: 0.0375771 | Rec Loss: 0.0147132 | Latent Loss: 0.0228640 | Pre Loss: 0.0142515
2024-11-13 00:25:07,060 - Epoch 58: Lr: 0.0008066 | Train Loss: 0.0376353 | Vali Loss: 0.0380641 | Rec Loss: 0.0148954 | Latent Loss: 0.0231687 | Pre Loss: 0.0145569
2024-11-13 00:33:40,224 - Epoch 59: Lr: 0.0008004 | Train Loss: 0.0376926 | Vali Loss: 0.0375467 | Rec Loss: 0.0146381 | Latent Loss: 0.0229086 | Pre Loss: 0.0143016
2024-11-13 00:42:18,134 - Epoch 60: Lr: 0.0007941 | Train Loss: 0.0376747 | Vali Loss: 0.0372038 | Rec Loss: 0.0147100 | Latent Loss: 0.0224938 | Pre Loss: 0.0142889
2024-11-13 00:50:57,276 - Epoch 61: Lr: 0.0007877 | Train Loss: 0.0374373 | Vali Loss: 0.0390231 | Rec Loss: 0.0147411 | Latent Loss: 0.0242820 | Pre Loss: 0.0144308
2024-11-13 00:59:39,044 - Epoch 62: Lr: 0.0007813 | Train Loss: 0.0376981 | Vali Loss: 0.0401980 | Rec Loss: 0.0150912 | Latent Loss: 0.0251068 | Pre Loss: 0.0148102
2024-11-13 01:08:25,900 - Epoch 63: Lr: 0.0007747 | Train Loss: 0.0387563 | Vali Loss: 0.0381311 | Rec Loss: 0.0146184 | Latent Loss: 0.0235128 | Pre Loss: 0.0141077
2024-11-13 01:17:15,251 - Epoch 64: Lr: 0.0007681 | Train Loss: 0.0385607 | Vali Loss: 0.0382080 | Rec Loss: 0.0143749 | Latent Loss: 0.0238330 | Pre Loss: 0.0139498
2024-11-13 01:26:08,122 - Epoch 65: Lr: 0.0007615 | Train Loss: 0.0385769 | Vali Loss: 0.0381712 | Rec Loss: 0.0144931 | Latent Loss: 0.0236781 | Pre Loss: 0.0140193
2024-11-13 01:35:01,356 - Epoch 66: Lr: 0.0007548 | Train Loss: 0.0386126 | Vali Loss: 0.0377264 | Rec Loss: 0.0143659 | Latent Loss: 0.0233604 | Pre Loss: 0.0140580
2024-11-13 01:44:01,803 - Epoch 67: Lr: 0.0007480 | Train Loss: 0.0375147 | Vali Loss: 0.0371596 | Rec Loss: 0.0143094 | Latent Loss: 0.0228502 | Pre Loss: 0.0137981
2024-11-13 01:53:05,516 - Epoch 68: Lr: 0.0007411 | Train Loss: 0.0376426 | Vali Loss: 0.0371402 | Rec Loss: 0.0144182 | Latent Loss: 0.0227220 | Pre Loss: 0.0140126
2024-11-13 02:02:12,135 - Epoch 69: Lr: 0.0007342 | Train Loss: 0.0372720 | Vali Loss: 0.0400003 | Rec Loss: 0.0145452 | Latent Loss: 0.0254551 | Pre Loss: 0.0143594
2024-11-13 02:11:20,666 - Epoch 70: Lr: 0.0007273 | Train Loss: 0.0370756 | Vali Loss: 0.0371792 | Rec Loss: 0.0141913 | Latent Loss: 0.0229880 | Pre Loss: 0.0137052
2024-11-13 02:20:33,453 - Epoch 71: Lr: 0.0007202 | Train Loss: 0.0373651 | Vali Loss: 0.0375881 | Rec Loss: 0.0142808 | Latent Loss: 0.0233073 | Pre Loss: 0.0139151
2024-11-13 02:29:50,543 - Epoch 72: Lr: 0.0007132 | Train Loss: 0.0373200 | Vali Loss: 0.0367737 | Rec Loss: 0.0140907 | Latent Loss: 0.0226830 | Pre Loss: 0.0136385
2024-11-13 02:39:11,176 - Epoch 73: Lr: 0.0007061 | Train Loss: 0.0373588 | Vali Loss: 0.0369920 | Rec Loss: 0.0142136 | Latent Loss: 0.0227784 | Pre Loss: 0.0139104
2024-11-13 02:48:32,492 - Epoch 74: Lr: 0.0006989 | Train Loss: 0.0367414 | Vali Loss: 0.0358157 | Rec Loss: 0.0140157 | Latent Loss: 0.0218001 | Pre Loss: 0.0136418
2024-11-13 02:58:00,121 - Epoch 75: Lr: 0.0006917 | Train Loss: 0.0354856 | Vali Loss: 0.0362462 | Rec Loss: 0.0140268 | Latent Loss: 0.0222194 | Pre Loss: 0.0135061
2024-11-13 03:07:30,364 - Epoch 76: Lr: 0.0006844 | Train Loss: 0.0355047 | Vali Loss: 0.0360007 | Rec Loss: 0.0138197 | Latent Loss: 0.0221810 | Pre Loss: 0.0134047
2024-11-13 03:17:00,728 - Epoch 77: Lr: 0.0006771 | Train Loss: 0.0354140 | Vali Loss: 0.0361308 | Rec Loss: 0.0138303 | Latent Loss: 0.0223005 | Pre Loss: 0.0134271
2024-11-13 03:26:35,263 - Epoch 78: Lr: 0.0006697 | Train Loss: 0.0362230 | Vali Loss: 0.0369964 | Rec Loss: 0.0138454 | Latent Loss: 0.0231510 | Pre Loss: 0.0134351
2024-11-13 03:36:13,076 - Epoch 79: Lr: 0.0006623 | Train Loss: 0.0364093 | Vali Loss: 0.0373455 | Rec Loss: 0.0139383 | Latent Loss: 0.0234071 | Pre Loss: 0.0135482
2024-11-13 03:45:55,275 - Epoch 80: Lr: 0.0006549 | Train Loss: 0.0368220 | Vali Loss: 0.0373895 | Rec Loss: 0.0139846 | Latent Loss: 0.0234049 | Pre Loss: 0.0136771
2024-11-13 03:55:38,620 - Epoch 81: Lr: 0.0006474 | Train Loss: 0.0369614 | Vali Loss: 0.0379454 | Rec Loss: 0.0139437 | Latent Loss: 0.0240017 | Pre Loss: 0.0134442
2024-11-13 04:05:26,040 - Epoch 82: Lr: 0.0006399 | Train Loss: 0.0366337 | Vali Loss: 0.0366650 | Rec Loss: 0.0138344 | Latent Loss: 0.0228306 | Pre Loss: 0.0133879
2024-11-13 04:15:19,545 - Epoch 83: Lr: 0.0006323 | Train Loss: 0.0364320 | Vali Loss: 0.0359162 | Rec Loss: 0.0137965 | Latent Loss: 0.0221197 | Pre Loss: 0.0132712
2024-11-13 04:25:13,807 - Epoch 84: Lr: 0.0006247 | Train Loss: 0.0358261 | Vali Loss: 0.0388882 | Rec Loss: 0.0138466 | Latent Loss: 0.0250416 | Pre Loss: 0.0133447
2024-11-13 04:35:10,043 - Epoch 85: Lr: 0.0006171 | Train Loss: 0.0369626 | Vali Loss: 0.0379119 | Rec Loss: 0.0137643 | Latent Loss: 0.0241476 | Pre Loss: 0.0133828
2024-11-13 04:45:09,696 - Epoch 86: Lr: 0.0006095 | Train Loss: 0.0373805 | Vali Loss: 0.0375386 | Rec Loss: 0.0136933 | Latent Loss: 0.0238454 | Pre Loss: 0.0131809
2024-11-13 04:55:13,719 - Epoch 87: Lr: 0.0006018 | Train Loss: 0.0376037 | Vali Loss: 0.0367576 | Rec Loss: 0.0134632 | Latent Loss: 0.0232945 | Pre Loss: 0.0129441
2024-11-13 05:05:23,902 - Epoch 88: Lr: 0.0005941 | Train Loss: 0.0370181 | Vali Loss: 0.0380912 | Rec Loss: 0.0137515 | Latent Loss: 0.0243397 | Pre Loss: 0.0134377
2024-11-13 05:15:35,320 - Epoch 89: Lr: 0.0005864 | Train Loss: 0.0366393 | Vali Loss: 0.0372555 | Rec Loss: 0.0136550 | Latent Loss: 0.0236005 | Pre Loss: 0.0131919
2024-11-13 05:25:47,911 - Epoch 90: Lr: 0.0005786 | Train Loss: 0.0369846 | Vali Loss: 0.0362763 | Rec Loss: 0.0134288 | Latent Loss: 0.0228476 | Pre Loss: 0.0129606
2024-11-13 05:36:07,152 - Epoch 91: Lr: 0.0005709 | Train Loss: 0.0360251 | Vali Loss: 0.0359376 | Rec Loss: 0.0131509 | Latent Loss: 0.0227867 | Pre Loss: 0.0125168
2024-11-13 05:46:28,076 - Epoch 92: Lr: 0.0005631 | Train Loss: 0.0353120 | Vali Loss: 0.0362496 | Rec Loss: 0.0133410 | Latent Loss: 0.0229086 | Pre Loss: 0.0128468
2024-11-13 05:56:51,841 - Epoch 93: Lr: 0.0005553 | Train Loss: 0.0360267 | Vali Loss: 0.0360484 | Rec Loss: 0.0132952 | Latent Loss: 0.0227533 | Pre Loss: 0.0127923
2024-11-13 06:07:16,382 - Epoch 94: Lr: 0.0005475 | Train Loss: 0.0361931 | Vali Loss: 0.0367969 | Rec Loss: 0.0130779 | Latent Loss: 0.0237190 | Pre Loss: 0.0124806
2024-11-13 06:17:47,304 - Epoch 95: Lr: 0.0005397 | Train Loss: 0.0362583 | Vali Loss: 0.0355606 | Rec Loss: 0.0130180 | Latent Loss: 0.0225426 | Pre Loss: 0.0123812
2024-11-13 06:28:22,735 - Epoch 96: Lr: 0.0005319 | Train Loss: 0.0360632 | Vali Loss: 0.0379306 | Rec Loss: 0.0133811 | Latent Loss: 0.0245495 | Pre Loss: 0.0129437
2024-11-13 06:38:59,763 - Epoch 97: Lr: 0.0005240 | Train Loss: 0.0366763 | Vali Loss: 0.0373419 | Rec Loss: 0.0131426 | Latent Loss: 0.0241994 | Pre Loss: 0.0127685
2024-11-13 06:49:40,052 - Epoch 98: Lr: 0.0005162 | Train Loss: 0.0370155 | Vali Loss: 0.0373686 | Rec Loss: 0.0130701 | Latent Loss: 0.0242986 | Pre Loss: 0.0125748
2024-11-13 07:00:24,606 - Epoch 99: Lr: 0.0005083 | Train Loss: 0.0366915 | Vali Loss: 0.0378722 | Rec Loss: 0.0132745 | Latent Loss: 0.0245977 | Pre Loss: 0.0127006
2024-11-13 07:11:12,106 - Epoch 100: Lr: 0.0005005 | Train Loss: 0.0372017 | Vali Loss: 0.0364578 | Rec Loss: 0.0131736 | Latent Loss: 0.0232842 | Pre Loss: 0.0126272
2024-11-13 07:22:02,866 - Epoch 101: Lr: 0.0004927 | Train Loss: 0.0368155 | Vali Loss: 0.0367904 | Rec Loss: 0.0130749 | Latent Loss: 0.0237155 | Pre Loss: 0.0124589
2024-11-13 07:32:54,116 - Epoch 102: Lr: 0.0004848 | Train Loss: 0.0365907 | Vali Loss: 0.0384306 | Rec Loss: 0.0134194 | Latent Loss: 0.0250112 | Pre Loss: 0.0128222
2024-11-13 07:43:56,416 - Epoch 103: Lr: 0.0004770 | Train Loss: 0.0367178 | Vali Loss: 0.0363197 | Rec Loss: 0.0128479 | Latent Loss: 0.0234719 | Pre Loss: 0.0122695
2024-11-13 07:54:55,476 - Epoch 104: Lr: 0.0004691 | Train Loss: 0.0363133 | Vali Loss: 0.0377730 | Rec Loss: 0.0128380 | Latent Loss: 0.0249350 | Pre Loss: 0.0122910
2024-11-13 08:05:54,321 - Epoch 105: Lr: 0.0004613 | Train Loss: 0.0369966 | Vali Loss: 0.0371554 | Rec Loss: 0.0128336 | Latent Loss: 0.0243219 | Pre Loss: 0.0122996
2024-11-13 08:16:56,946 - Epoch 106: Lr: 0.0004535 | Train Loss: 0.0375038 | Vali Loss: 0.0374710 | Rec Loss: 0.0130063 | Latent Loss: 0.0244647 | Pre Loss: 0.0123708
2024-11-13 08:28:03,010 - Epoch 107: Lr: 0.0004457 | Train Loss: 0.0368209 | Vali Loss: 0.0368753 | Rec Loss: 0.0127151 | Latent Loss: 0.0241603 | Pre Loss: 0.0122661
2024-11-13 08:39:15,905 - Epoch 108: Lr: 0.0004379 | Train Loss: 0.0365304 | Vali Loss: 0.0383509 | Rec Loss: 0.0128394 | Latent Loss: 0.0255115 | Pre Loss: 0.0123024
|
| 923 |
+
2024-11-13 08:50:27,431 - Epoch 109: Lr: 0.0004301 | Train Loss: 0.0375829 | Vali Loss: 0.0365726 | Rec Loss: 0.0127646 | Latent Loss: 0.0238080 | Pre Loss: 0.0121941
|
| 924 |
+
2024-11-13 09:01:42,656 - Epoch 110: Lr: 0.0004224 | Train Loss: 0.0371245 | Vali Loss: 0.0374195 | Rec Loss: 0.0127459 | Latent Loss: 0.0246737 | Pre Loss: 0.0121895
|
| 925 |
+
2024-11-13 09:13:03,864 - Epoch 111: Lr: 0.0004146 | Train Loss: 0.0363739 | Vali Loss: 0.0370379 | Rec Loss: 0.0127691 | Latent Loss: 0.0242689 | Pre Loss: 0.0122089
|
| 926 |
+
2024-11-13 09:24:30,996 - Epoch 112: Lr: 0.0004069 | Train Loss: 0.0363683 | Vali Loss: 0.0371355 | Rec Loss: 0.0128249 | Latent Loss: 0.0243107 | Pre Loss: 0.0123073
|
| 927 |
+
2024-11-13 09:35:58,870 - Epoch 113: Lr: 0.0003992 | Train Loss: 0.0368683 | Vali Loss: 0.0373291 | Rec Loss: 0.0128375 | Latent Loss: 0.0244916 | Pre Loss: 0.0122977
|
| 928 |
+
2024-11-13 09:47:30,119 - Epoch 114: Lr: 0.0003915 | Train Loss: 0.0362472 | Vali Loss: 0.0365146 | Rec Loss: 0.0126342 | Latent Loss: 0.0238804 | Pre Loss: 0.0120941
|
| 929 |
+
2024-11-13 09:59:06,889 - Epoch 115: Lr: 0.0003839 | Train Loss: 0.0367525 | Vali Loss: 0.0364834 | Rec Loss: 0.0127618 | Latent Loss: 0.0237215 | Pre Loss: 0.0121414
|
| 930 |
+
2024-11-13 10:10:44,409 - Epoch 116: Lr: 0.0003763 | Train Loss: 0.0363697 | Vali Loss: 0.0360516 | Rec Loss: 0.0125708 | Latent Loss: 0.0234808 | Pre Loss: 0.0119722
|
| 931 |
+
2024-11-13 10:22:26,416 - Epoch 117: Lr: 0.0003687 | Train Loss: 0.0355757 | Vali Loss: 0.0368120 | Rec Loss: 0.0126076 | Latent Loss: 0.0242044 | Pre Loss: 0.0119823
|
| 932 |
+
2024-11-13 10:34:12,516 - Epoch 118: Lr: 0.0003611 | Train Loss: 0.0359404 | Vali Loss: 0.0368372 | Rec Loss: 0.0127527 | Latent Loss: 0.0240844 | Pre Loss: 0.0121151
|
| 933 |
+
2024-11-13 10:46:02,735 - Epoch 119: Lr: 0.0003536 | Train Loss: 0.0359532 | Vali Loss: 0.0363683 | Rec Loss: 0.0125431 | Latent Loss: 0.0238252 | Pre Loss: 0.0119990
|
| 934 |
+
2024-11-13 10:57:57,334 - Epoch 120: Lr: 0.0003461 | Train Loss: 0.0363805 | Vali Loss: 0.0371629 | Rec Loss: 0.0127177 | Latent Loss: 0.0244452 | Pre Loss: 0.0120852
|
| 935 |
+
2024-11-13 11:09:54,432 - Epoch 121: Lr: 0.0003387 | Train Loss: 0.0364508 | Vali Loss: 0.0361884 | Rec Loss: 0.0126301 | Latent Loss: 0.0235583 | Pre Loss: 0.0121435
|
| 936 |
+
2024-11-13 11:21:54,853 - Epoch 122: Lr: 0.0003313 | Train Loss: 0.0362169 | Vali Loss: 0.0362845 | Rec Loss: 0.0124173 | Latent Loss: 0.0238672 | Pre Loss: 0.0117462
|
| 937 |
+
2024-11-13 11:34:00,113 - Epoch 123: Lr: 0.0003239 | Train Loss: 0.0359912 | Vali Loss: 0.0360238 | Rec Loss: 0.0125234 | Latent Loss: 0.0235004 | Pre Loss: 0.0118866
|
| 938 |
+
2024-11-13 11:46:10,337 - Epoch 124: Lr: 0.0003166 | Train Loss: 0.0357540 | Vali Loss: 0.0362449 | Rec Loss: 0.0124803 | Latent Loss: 0.0237646 | Pre Loss: 0.0119524
|
| 939 |
+
2024-11-13 11:58:21,811 - Epoch 125: Lr: 0.0003093 | Train Loss: 0.0355637 | Vali Loss: 0.0361819 | Rec Loss: 0.0123601 | Latent Loss: 0.0238218 | Pre Loss: 0.0117450
|
| 940 |
+
2024-11-13 12:10:36,625 - Epoch 126: Lr: 0.0003021 | Train Loss: 0.0355101 | Vali Loss: 0.0359399 | Rec Loss: 0.0123531 | Latent Loss: 0.0235868 | Pre Loss: 0.0117669
|
| 941 |
+
2024-11-13 12:22:56,094 - Epoch 127: Lr: 0.0002949 | Train Loss: 0.0353797 | Vali Loss: 0.0358468 | Rec Loss: 0.0124377 | Latent Loss: 0.0234091 | Pre Loss: 0.0118169
|
| 942 |
+
2024-11-13 12:35:16,267 - Epoch 128: Lr: 0.0002878 | Train Loss: 0.0352728 | Vali Loss: 0.0362220 | Rec Loss: 0.0123078 | Latent Loss: 0.0239141 | Pre Loss: 0.0116543
|
| 943 |
+
2024-11-13 12:47:41,111 - Epoch 129: Lr: 0.0002808 | Train Loss: 0.0355488 | Vali Loss: 0.0359658 | Rec Loss: 0.0123394 | Latent Loss: 0.0236263 | Pre Loss: 0.0117556
|
| 944 |
+
2024-11-13 13:00:06,638 - Epoch 130: Lr: 0.0002737 | Train Loss: 0.0352268 | Vali Loss: 0.0355592 | Rec Loss: 0.0121590 | Latent Loss: 0.0234002 | Pre Loss: 0.0115224
|
| 945 |
+
2024-11-13 13:12:41,053 - Epoch 131: Lr: 0.0002668 | Train Loss: 0.0350704 | Vali Loss: 0.0357644 | Rec Loss: 0.0124306 | Latent Loss: 0.0233338 | Pre Loss: 0.0117848
|
| 946 |
+
2024-11-13 13:25:19,591 - Epoch 132: Lr: 0.0002599 | Train Loss: 0.0348435 | Vali Loss: 0.0366512 | Rec Loss: 0.0124309 | Latent Loss: 0.0242203 | Pre Loss: 0.0119191
|
| 947 |
+
2024-11-13 13:37:57,115 - Epoch 133: Lr: 0.0002530 | Train Loss: 0.0351295 | Vali Loss: 0.0361797 | Rec Loss: 0.0123686 | Latent Loss: 0.0238112 | Pre Loss: 0.0117093
|
| 948 |
+
2024-11-13 13:50:38,543 - Epoch 134: Lr: 0.0002462 | Train Loss: 0.0349120 | Vali Loss: 0.0356546 | Rec Loss: 0.0123070 | Latent Loss: 0.0233476 | Pre Loss: 0.0116800
|
| 949 |
+
2024-11-13 14:03:23,988 - Epoch 135: Lr: 0.0002395 | Train Loss: 0.0345933 | Vali Loss: 0.0360186 | Rec Loss: 0.0122441 | Latent Loss: 0.0237745 | Pre Loss: 0.0115156
|
| 950 |
+
2024-11-13 14:16:11,517 - Epoch 136: Lr: 0.0002329 | Train Loss: 0.0346003 | Vali Loss: 0.0357039 | Rec Loss: 0.0123629 | Latent Loss: 0.0233410 | Pre Loss: 0.0116873
|
| 951 |
+
2024-11-13 14:29:08,269 - Epoch 137: Lr: 0.0002263 | Train Loss: 0.0345396 | Vali Loss: 0.0349320 | Rec Loss: 0.0122604 | Latent Loss: 0.0226716 | Pre Loss: 0.0115924
|
| 952 |
+
2024-11-13 14:42:08,277 - Epoch 138: Lr: 0.0002197 | Train Loss: 0.0339010 | Vali Loss: 0.0350674 | Rec Loss: 0.0121405 | Latent Loss: 0.0229269 | Pre Loss: 0.0114262
|
| 953 |
+
2024-11-13 14:55:14,380 - Epoch 139: Lr: 0.0002133 | Train Loss: 0.0342487 | Vali Loss: 0.0345518 | Rec Loss: 0.0122033 | Latent Loss: 0.0223485 | Pre Loss: 0.0116180
|
| 954 |
+
2024-11-13 15:08:22,014 - Epoch 140: Lr: 0.0002069 | Train Loss: 0.0342416 | Vali Loss: 0.0347040 | Rec Loss: 0.0120912 | Latent Loss: 0.0226127 | Pre Loss: 0.0113986
|
| 955 |
+
2024-11-13 15:21:32,365 - Epoch 141: Lr: 0.0002006 | Train Loss: 0.0337782 | Vali Loss: 0.0351397 | Rec Loss: 0.0120899 | Latent Loss: 0.0230498 | Pre Loss: 0.0114778
|
| 956 |
+
2024-11-13 15:34:43,167 - Epoch 142: Lr: 0.0001944 | Train Loss: 0.0337270 | Vali Loss: 0.0343302 | Rec Loss: 0.0119587 | Latent Loss: 0.0223716 | Pre Loss: 0.0113135
|
| 957 |
+
2024-11-13 15:48:02,707 - Epoch 143: Lr: 0.0001882 | Train Loss: 0.0337138 | Vali Loss: 0.0339653 | Rec Loss: 0.0119845 | Latent Loss: 0.0219808 | Pre Loss: 0.0113890
|
| 958 |
+
2024-11-13 16:01:38,038 - Epoch 144: Lr: 0.0001821 | Train Loss: 0.0335713 | Vali Loss: 0.0338977 | Rec Loss: 0.0120272 | Latent Loss: 0.0218705 | Pre Loss: 0.0114010
|
| 959 |
+
2024-11-13 16:15:07,879 - Epoch 145: Lr: 0.0001761 | Train Loss: 0.0334548 | Vali Loss: 0.0341943 | Rec Loss: 0.0120572 | Latent Loss: 0.0221370 | Pre Loss: 0.0114533
|
| 960 |
+
2024-11-13 16:28:34,438 - Epoch 146: Lr: 0.0001702 | Train Loss: 0.0329147 | Vali Loss: 0.0338438 | Rec Loss: 0.0120235 | Latent Loss: 0.0218203 | Pre Loss: 0.0113722
|
| 961 |
+
2024-11-13 16:42:08,793 - Epoch 147: Lr: 0.0001643 | Train Loss: 0.0330750 | Vali Loss: 0.0341052 | Rec Loss: 0.0119992 | Latent Loss: 0.0221059 | Pre Loss: 0.0113597
|
| 962 |
+
2024-11-13 16:55:45,404 - Epoch 148: Lr: 0.0001586 | Train Loss: 0.0327575 | Vali Loss: 0.0336969 | Rec Loss: 0.0119525 | Latent Loss: 0.0217444 | Pre Loss: 0.0113182
|
| 963 |
+
2024-11-13 17:10:37,209 - Epoch 149: Lr: 0.0001529 | Train Loss: 0.0329790 | Vali Loss: 0.0337609 | Rec Loss: 0.0118945 | Latent Loss: 0.0218664 | Pre Loss: 0.0112231
|
| 964 |
+
2024-11-13 17:30:58,802 - Epoch 150: Lr: 0.0001473 | Train Loss: 0.0332622 | Vali Loss: 0.0334425 | Rec Loss: 0.0119639 | Latent Loss: 0.0214787 | Pre Loss: 0.0112908
|
| 965 |
+
2024-11-13 17:47:36,868 - Epoch 151: Lr: 0.0001418 | Train Loss: 0.0326990 | Vali Loss: 0.0331087 | Rec Loss: 0.0119279 | Latent Loss: 0.0211808 | Pre Loss: 0.0112699
|
| 966 |
+
2024-11-13 18:01:42,464 - Epoch 152: Lr: 0.0001364 | Train Loss: 0.0322676 | Vali Loss: 0.0337667 | Rec Loss: 0.0118625 | Latent Loss: 0.0219042 | Pre Loss: 0.0112244
|
| 967 |
+
2024-11-13 18:15:32,336 - Epoch 153: Lr: 0.0001311 | Train Loss: 0.0322455 | Vali Loss: 0.0332881 | Rec Loss: 0.0119097 | Latent Loss: 0.0213784 | Pre Loss: 0.0112521
|
| 968 |
+
2024-11-13 18:29:21,786 - Epoch 154: Lr: 0.0001258 | Train Loss: 0.0323438 | Vali Loss: 0.0329069 | Rec Loss: 0.0118985 | Latent Loss: 0.0210084 | Pre Loss: 0.0112434
|
| 969 |
+
2024-11-13 18:43:21,656 - Epoch 155: Lr: 0.0001207 | Train Loss: 0.0318821 | Vali Loss: 0.0326818 | Rec Loss: 0.0117788 | Latent Loss: 0.0209030 | Pre Loss: 0.0110836
|
| 970 |
+
2024-11-13 18:57:26,284 - Epoch 156: Lr: 0.0001156 | Train Loss: 0.0315599 | Vali Loss: 0.0326506 | Rec Loss: 0.0118655 | Latent Loss: 0.0207851 | Pre Loss: 0.0111793
|
| 971 |
+
2024-11-13 19:11:28,568 - Epoch 157: Lr: 0.0001107 | Train Loss: 0.0316597 | Vali Loss: 0.0321176 | Rec Loss: 0.0119158 | Latent Loss: 0.0202018 | Pre Loss: 0.0112825
|
| 972 |
+
2024-11-13 19:25:36,463 - Epoch 158: Lr: 0.0001058 | Train Loss: 0.0313754 | Vali Loss: 0.0323011 | Rec Loss: 0.0117968 | Latent Loss: 0.0205043 | Pre Loss: 0.0111712
|
| 973 |
+
2024-11-13 19:39:48,035 - Epoch 159: Lr: 0.0001011 | Train Loss: 0.0313783 | Vali Loss: 0.0319316 | Rec Loss: 0.0117812 | Latent Loss: 0.0201504 | Pre Loss: 0.0110983
|
| 974 |
+
2024-11-13 19:54:00,124 - Epoch 160: Lr: 0.0000964 | Train Loss: 0.0310056 | Vali Loss: 0.0319436 | Rec Loss: 0.0117452 | Latent Loss: 0.0201984 | Pre Loss: 0.0111113
|
| 975 |
+
2024-11-13 20:08:11,104 - Epoch 161: Lr: 0.0000918 | Train Loss: 0.0310110 | Vali Loss: 0.0317556 | Rec Loss: 0.0116753 | Latent Loss: 0.0200803 | Pre Loss: 0.0110158
|
| 976 |
+
2024-11-13 20:22:24,568 - Epoch 162: Lr: 0.0000874 | Train Loss: 0.0312359 | Vali Loss: 0.0319783 | Rec Loss: 0.0117965 | Latent Loss: 0.0201817 | Pre Loss: 0.0111016
|
| 977 |
+
2024-11-13 20:36:40,088 - Epoch 163: Lr: 0.0000830 | Train Loss: 0.0307263 | Vali Loss: 0.0315194 | Rec Loss: 0.0117468 | Latent Loss: 0.0197727 | Pre Loss: 0.0110939
|
| 978 |
+
2024-11-13 20:50:57,804 - Epoch 164: Lr: 0.0000788 | Train Loss: 0.0305238 | Vali Loss: 0.0309965 | Rec Loss: 0.0116697 | Latent Loss: 0.0193268 | Pre Loss: 0.0110214
|
| 979 |
+
2024-11-13 21:05:19,724 - Epoch 165: Lr: 0.0000746 | Train Loss: 0.0304910 | Vali Loss: 0.0308138 | Rec Loss: 0.0117249 | Latent Loss: 0.0190888 | Pre Loss: 0.0110854
|
| 980 |
+
2024-11-13 21:19:44,136 - Epoch 166: Lr: 0.0000706 | Train Loss: 0.0300001 | Vali Loss: 0.0308606 | Rec Loss: 0.0117004 | Latent Loss: 0.0191602 | Pre Loss: 0.0110311
|
| 981 |
+
2024-11-13 21:34:12,496 - Epoch 167: Lr: 0.0000666 | Train Loss: 0.0299453 | Vali Loss: 0.0313340 | Rec Loss: 0.0117347 | Latent Loss: 0.0195993 | Pre Loss: 0.0111045
|
| 982 |
+
2024-11-13 21:48:42,395 - Epoch 168: Lr: 0.0000628 | Train Loss: 0.0300625 | Vali Loss: 0.0307237 | Rec Loss: 0.0115786 | Latent Loss: 0.0191451 | Pre Loss: 0.0109635
|
| 983 |
+
2024-11-13 22:03:17,637 - Epoch 169: Lr: 0.0000591 | Train Loss: 0.0301079 | Vali Loss: 0.0306925 | Rec Loss: 0.0115730 | Latent Loss: 0.0191195 | Pre Loss: 0.0109126
|
| 984 |
+
2024-11-13 22:17:55,832 - Epoch 170: Lr: 0.0000554 | Train Loss: 0.0299060 | Vali Loss: 0.0307122 | Rec Loss: 0.0115437 | Latent Loss: 0.0191684 | Pre Loss: 0.0108732
|
| 985 |
+
2024-11-13 22:32:38,714 - Epoch 171: Lr: 0.0000519 | Train Loss: 0.0295044 | Vali Loss: 0.0308895 | Rec Loss: 0.0116537 | Latent Loss: 0.0192358 | Pre Loss: 0.0110093
|
| 986 |
+
2024-11-13 22:47:22,586 - Epoch 172: Lr: 0.0000485 | Train Loss: 0.0298790 | Vali Loss: 0.0304769 | Rec Loss: 0.0116211 | Latent Loss: 0.0188558 | Pre Loss: 0.0109739
|
| 987 |
+
2024-11-13 23:02:11,496 - Epoch 173: Lr: 0.0000453 | Train Loss: 0.0294234 | Vali Loss: 0.0303300 | Rec Loss: 0.0116224 | Latent Loss: 0.0187076 | Pre Loss: 0.0109595
|
| 988 |
+
2024-11-13 23:17:02,964 - Epoch 174: Lr: 0.0000421 | Train Loss: 0.0292904 | Vali Loss: 0.0303092 | Rec Loss: 0.0116127 | Latent Loss: 0.0186965 | Pre Loss: 0.0109421
|
| 989 |
+
2024-11-13 23:32:04,207 - Epoch 175: Lr: 0.0000390 | Train Loss: 0.0292626 | Vali Loss: 0.0301656 | Rec Loss: 0.0116291 | Latent Loss: 0.0185365 | Pre Loss: 0.0109323
|
| 990 |
+
2024-11-13 23:47:10,160 - Epoch 176: Lr: 0.0000361 | Train Loss: 0.0291600 | Vali Loss: 0.0300045 | Rec Loss: 0.0115661 | Latent Loss: 0.0184384 | Pre Loss: 0.0108645
|
| 991 |
+
2024-11-14 00:02:21,368 - Epoch 177: Lr: 0.0000332 | Train Loss: 0.0289977 | Vali Loss: 0.0300591 | Rec Loss: 0.0115594 | Latent Loss: 0.0184996 | Pre Loss: 0.0109035
|
| 992 |
+
2024-11-14 00:17:36,543 - Epoch 178: Lr: 0.0000305 | Train Loss: 0.0290346 | Vali Loss: 0.0300183 | Rec Loss: 0.0115460 | Latent Loss: 0.0184723 | Pre Loss: 0.0108723
|
| 993 |
+
2024-11-14 00:32:54,871 - Epoch 179: Lr: 0.0000279 | Train Loss: 0.0289352 | Vali Loss: 0.0298609 | Rec Loss: 0.0115650 | Latent Loss: 0.0182959 | Pre Loss: 0.0109038
|
| 994 |
+
2024-11-14 00:48:15,843 - Epoch 180: Lr: 0.0000254 | Train Loss: 0.0289109 | Vali Loss: 0.0297873 | Rec Loss: 0.0115294 | Latent Loss: 0.0182579 | Pre Loss: 0.0108640
|
| 995 |
+
2024-11-14 01:03:39,682 - Epoch 181: Lr: 0.0000231 | Train Loss: 0.0287436 | Vali Loss: 0.0297885 | Rec Loss: 0.0115994 | Latent Loss: 0.0181890 | Pre Loss: 0.0109177
|
| 996 |
+
2024-11-14 01:19:02,422 - Epoch 182: Lr: 0.0000208 | Train Loss: 0.0286875 | Vali Loss: 0.0295863 | Rec Loss: 0.0115673 | Latent Loss: 0.0180190 | Pre Loss: 0.0108633
|
| 997 |
+
2024-11-14 01:34:36,382 - Epoch 183: Lr: 0.0000187 | Train Loss: 0.0285684 | Vali Loss: 0.0293864 | Rec Loss: 0.0115279 | Latent Loss: 0.0178585 | Pre Loss: 0.0108706
|
| 998 |
+
2024-11-14 01:50:10,944 - Epoch 184: Lr: 0.0000167 | Train Loss: 0.0284263 | Vali Loss: 0.0295520 | Rec Loss: 0.0114974 | Latent Loss: 0.0180546 | Pre Loss: 0.0108481
|
| 999 |
+
2024-11-14 02:05:49,208 - Epoch 185: Lr: 0.0000148 | Train Loss: 0.0285181 | Vali Loss: 0.0295676 | Rec Loss: 0.0115136 | Latent Loss: 0.0180540 | Pre Loss: 0.0108423
|
| 1000 |
+
2024-11-14 02:21:26,571 - Epoch 186: Lr: 0.0000130 | Train Loss: 0.0285046 | Vali Loss: 0.0295303 | Rec Loss: 0.0115545 | Latent Loss: 0.0179758 | Pre Loss: 0.0108727
|
| 1001 |
+
2024-11-14 02:37:12,396 - Epoch 187: Lr: 0.0000114 | Train Loss: 0.0285103 | Vali Loss: 0.0293962 | Rec Loss: 0.0114810 | Latent Loss: 0.0179152 | Pre Loss: 0.0108145
|
| 1002 |
+
2024-11-14 02:53:02,494 - Epoch 188: Lr: 0.0000098 | Train Loss: 0.0284704 | Vali Loss: 0.0294403 | Rec Loss: 0.0115041 | Latent Loss: 0.0179362 | Pre Loss: 0.0108330
|
| 1003 |
+
2024-11-14 03:08:50,235 - Epoch 189: Lr: 0.0000084 | Train Loss: 0.0284290 | Vali Loss: 0.0293318 | Rec Loss: 0.0114991 | Latent Loss: 0.0178327 | Pre Loss: 0.0108087
|
| 1004 |
+
2024-11-14 03:24:42,482 - Epoch 190: Lr: 0.0000071 | Train Loss: 0.0284050 | Vali Loss: 0.0293675 | Rec Loss: 0.0115041 | Latent Loss: 0.0178634 | Pre Loss: 0.0108258
|
| 1005 |
+
2024-11-14 03:40:39,414 - Epoch 191: Lr: 0.0000060 | Train Loss: 0.0283160 | Vali Loss: 0.0293251 | Rec Loss: 0.0114767 | Latent Loss: 0.0178485 | Pre Loss: 0.0108392
|
| 1006 |
+
2024-11-14 03:56:40,525 - Epoch 192: Lr: 0.0000049 | Train Loss: 0.0283510 | Vali Loss: 0.0293785 | Rec Loss: 0.0114884 | Latent Loss: 0.0178901 | Pre Loss: 0.0108106
|
| 1007 |
+
2024-11-14 04:12:42,720 - Epoch 193: Lr: 0.0000040 | Train Loss: 0.0283576 | Vali Loss: 0.0293288 | Rec Loss: 0.0114865 | Latent Loss: 0.0178423 | Pre Loss: 0.0108077
|
| 1008 |
+
2024-11-14 04:28:46,784 - Epoch 194: Lr: 0.0000032 | Train Loss: 0.0283764 | Vali Loss: 0.0292748 | Rec Loss: 0.0114811 | Latent Loss: 0.0177937 | Pre Loss: 0.0108026
|
| 1009 |
+
2024-11-14 04:44:57,674 - Epoch 195: Lr: 0.0000025 | Train Loss: 0.0284286 | Vali Loss: 0.0292821 | Rec Loss: 0.0115072 | Latent Loss: 0.0177749 | Pre Loss: 0.0108136
|
| 1010 |
+
2024-11-14 05:01:10,809 - Epoch 196: Lr: 0.0000020 | Train Loss: 0.0282681 | Vali Loss: 0.0292596 | Rec Loss: 0.0114819 | Latent Loss: 0.0177776 | Pre Loss: 0.0108069
|
| 1011 |
+
2024-11-14 05:17:26,613 - Epoch 197: Lr: 0.0000016 | Train Loss: 0.0281687 | Vali Loss: 0.0292532 | Rec Loss: 0.0114764 | Latent Loss: 0.0177768 | Pre Loss: 0.0108154
|
| 1012 |
+
2024-11-14 05:33:47,302 - Epoch 198: Lr: 0.0000012 | Train Loss: 0.0283151 | Vali Loss: 0.0292646 | Rec Loss: 0.0114921 | Latent Loss: 0.0177724 | Pre Loss: 0.0108038
|
| 1013 |
+
2024-11-14 05:50:11,307 - Epoch 199: Lr: 0.0000011 | Train Loss: 0.0282656 | Vali Loss: 0.0292677 | Rec Loss: 0.0114744 | Latent Loss: 0.0177933 | Pre Loss: 0.0108276
|
| 1014 |
+
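The per-epoch lines above follow a fixed format, and the learning-rate column decays smoothly from 0.0007342 at epoch 69 to 0.0000011 at epoch 199, which is numerically consistent with cosine annealing from a base rate of roughly 1e-3 toward a floor near 1e-6 over 200 epochs, although the log itself does not name the scheduler. Below is a minimal Python sketch, assuming the lines match exactly the format shown (the log path is a placeholder, not the real file location), for turning them into structured records, e.g. to plot the loss curves:

import re

# Matches the per-epoch lines above; assumes exactly this field order.
EPOCH_RE = re.compile(
    r"Epoch (?P<epoch>\d+): Lr: (?P<lr>[\d.]+)"
    r" \| Train Loss: (?P<train_loss>[\d.]+)"
    r" \| Vali Loss: (?P<vali_loss>[\d.]+)"
    r" \| Rec Loss: (?P<rec_loss>[\d.]+)"
    r" \| Latent Loss: (?P<latent_loss>[\d.]+)"
    r" \| Pre Loss: (?P<pre_loss>[\d.]+)"
)

def parse_epochs(path):
    """Yield one dict per matching epoch line; non-matching lines are skipped."""
    with open(path) as f:
        for line in f:
            m = EPOCH_RE.search(line)
            if m:
                rec = m.groupdict()
                yield {"epoch": int(rec.pop("epoch")),
                       **{k: float(v) for k, v in rec.items()}}

# Example use: find the epoch with the lowest validation loss.
records = list(parse_epochs("test.log"))  # placeholder path
best = min(records, key=lambda r: r["vali_loss"])
print(f"best vali loss {best['vali_loss']:.7f} at epoch {best['epoch']}")

Within the epochs shown here, the validation loss bottoms out near the end of the cosine schedule (around epoch 197), which the snippet above will report directly.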
2024-11-14 05:53:06,170 - mse:132.8868865966797, mae:328.6748962402344, ssim:0.9086267643737793, psnr:19.906916804109503
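The final line reports aggregate test metrics (MSE, MAE, SSIM, PSNR). For reference, the standard image-domain definition is PSNR = 10 * log10(MAX^2 / MSE); note that the logged pair (mse ≈ 132.89, psnr ≈ 19.91) does not correspond to MAX = 255 under that formula, so this run presumably aggregates MSE with a different normalization. A small sketch of the standard definition only, not a reproduction of how this codebase computes it:

import math

def psnr(mse: float, max_val: float = 255.0) -> float:
    """Standard peak signal-to-noise ratio in dB for values in [0, max_val]."""
    return 10.0 * math.log10(max_val ** 2 / mse)

# With the logged MSE and max_val=255 this gives ~26.9 dB rather than the
# logged 19.91 dB, which is why the normalization caveat above matters.
print(psnr(132.8868865966797))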